text stringlengths 0 1.05M | meta dict |
|---|---|
from functools import partial
from pprint import pformat
from threading import local
import time
import requests
import requests.defaults
from django.utils.translation import ugettext_lazy as _, ngettext
from django.template.defaultfilters import truncatechars
from debug_toolbar.panels import DebugPanel
# Retain, because it won't be retrievable after monkey-patching.
from debug_toolbar_requests.models import ResponseTimer
# Capture the pristine `requests` Request class now; RequestsDebugPanel.__init__
# monkey-patches `requests.models.Request`, after which it is not retrievable.
original_thread_class = requests.models.Request
class RequestsDebugPanel(DebugPanel):
    """
    A panel to display HTTP requests made by the `requests` library.
    """
    # Name shown in the toolbar's panel list.
    name = 'Requests'
    # Template used to render this panel's content area.
    template = 'debug_toolbar/panels/requests.html'
    # This panel has a content area (not just a nav entry).
    has_content = True
def receive_response(self, index, response):
self.thread_locals.response_timers[index].end_time = time.time()
self.thread_locals.response_timers[index].response = response
def receive_request(self, index, request):
self.thread_locals.response_timers[index].start_time = time.time()
def __init__(self, *args, **kwargs):
super(RequestsDebugPanel, self).__init__(*args, **kwargs)
self.thread_locals = local()
self.thread_locals.response_timers = []
debug_panel = self
class TrackedRequest(original_thread_class):
def __init__(self, *args, **kwargs):
super(TrackedRequest, self).__init__(*args, **kwargs)
response_timer = ResponseTimer()
next_index = len(debug_panel.thread_locals.response_timers)
debug_panel.thread_locals.response_timers.append(response_timer)
self.register_hook('pre_request',
hook=partial(debug_panel.receive_request, next_index))
self.register_hook('response',
hook=partial(debug_panel.receive_response, next_index))
# TODO: in the interest of forward-compatibility, can this be done
# more safely dynamically; e.g. by looking for use of the `Request`
# object in all package modules?
requests.models.Request = TrackedRequest
requests.Request = TrackedRequest
requests.sessions.Request = TrackedRequest
def nav_title(self):
return _('HTTP Requests')
def title(self):
return _('HTTP Requests')
def nav_subtitle(self):
request_count = len(self.thread_locals.response_timers)
return ngettext("%d request", "%d requests", request_count) % request_count
def url(self):
return ''
    def process_response(self, _request, _response): # unused params
        """
        Django-toolbar hook: decorate every recorded request/response
        pair with template-ready data, decide whether each request can
        be replayed from a browser form, and record the stats.

        `_request`/`_response` are the *Django* request/response and are
        unused; this panel only reports on `requests`-library traffic.
        """
        response_timers = self.thread_locals.response_timers
        for response_timer in response_timers:
            # Tack template-specific information on to the response timer
            # objects to save some boilerplate in the template.
            # NOTE(review): assumes every timer received a response; if a
            # request failed before the 'response' hook fired,
            # `response_timer.response` would not be set -- confirm.
            response = response_timer.response
            response_timer.response.template_items = (
                (_("URL"), response.url),
                (_("Status"), u"{code} {reason}".format(
                    code=response.status_code, reason=response.reason)),
                (_("Headers"), pformat(response.headers)),
                (_("Body"), truncatechars(response.text, 1024)),
            )
            request = response_timer.request
            response_timer.request.template_items = (
                (_("URL"), request.url),
                (_("Method"), request.method),
                (_("Headers"), pformat(request.headers)),
                (_("Parameters"), request.params),
                # TODO: it would be nice to get the actual raw body
                (_("Data"), request.data),
                (_("Files"), request.files),
            )

            # TODO: this desperately needs tests
            # TODO: the browser replay functionality calls for extraction
            # into its own module.
            def check_browser_compatible_headers(request):
                # We only have access to the resulting headers. To verify
                # that the standard `requests` headers are being sent (which
                # themselves are browser-compatible), we check that the
                # headers sent are exactly equivalent to the default headers
                # sent by `requests`.
                # As an exception, we can also support a request if it only
                # adds a `Content-Type` header to the defaults sent by
                # `requests`. However, we only support that header if it
                # contains one of the two encodings supported by HTML4.
                browser_supported_enctypes = (
                    # automatically sent by browser for every POST form
                    'application/x-www-form-urlencoded',
                    # sent by POST forms with `enctype` set to this
                    'multipart/form-data'
                )
                headers = request.headers.copy() # don't corrupt the original
                header_name = 'Content-Type'
                content_type_header = headers.get(header_name, '')
                for enctype in browser_supported_enctypes:
                    # `startswith` is used because we might have a trailing
                    # semicolon: multipart/form-data; boundary=foobar
                    if content_type_header.startswith(enctype):
                        # TODO: need much safer parsing for this, find header lib
                        # TODO: also matches 'multipart/form-data-foo`
                        # TODO: messy
                        del headers[header_name]
                # With the (supported) Content-Type stripped, anything left
                # beyond the defaults makes the request non-replayable.
                return headers == requests.defaults.defaults['base_headers']

            # The template displays a button in-browser allowing the user to
            # repeat the call. Because this is done through a form, we cannot
            # allow this for some more complex requests. Multiple conditions
            # are required to determine this, and they are kept in a dict
            # instead of a serial condition for traceability (for debugging,
            # or to show why request can't be displayed in the template).
            response_timer.request.browser_repeatability_conditions = dict(
                is_get_or_post = request.method in ('GET', 'POST'),
                # The browser can't send its own headers. We must ensure
                # that the headers sent only use headers that won't make
                # the meaning of the request semantically different, or
                # headers that we can support using forms (e.g. 'enctype'
                # can emulate some values of the 'Content-Type' header.)
                has_browser_compatible_headers = check_browser_compatible_headers(request),
                # Can't repeat GET requests with anything in the body. The
                # browser will just tack it on to the URL instead of using
                # a GET body. (Not that GET bodies have semantic meaning in
                # HTTP, but people still do strange things.)
                is_not_get_with_body = any((
                    (request.method == 'POST'),
                    ((not request.data) and (not request.files)),
                )),
                # In POST requests, you can send multipart and non-multipart
                # data separately. Once browser forms have an encoding of
                # `multipart/form-data`, however, every parameter will be
                # sent as multipart data.
                is_not_data_and_files = not (request.data and request.files),
                # For POST bodies, the browser only do key-value bodies and
                # not other payloads, such as strings.
                is_key_value_body = isinstance(request.data, dict),
            )
            # Replayable only if every condition above holds.
            response_timer.request.is_browser_repeatable = all(
                response_timer.request.browser_repeatability_conditions.values()
            )
        self.record_stats({
            'response_timers': response_timers,
        })
| {
"repo_name": "enginoid/django-debug-toolbar-requests",
"path": "debug_toolbar_requests/panel.py",
"copies": "1",
"size": "8012",
"license": "bsd-2-clause",
"hash": 1047601562721030400,
"line_mean": 43.5111111111,
"line_max": 91,
"alpha_frac": 0.5929855217,
"autogenerated": false,
"ratio": 4.77757901013715,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.007017310858744118,
"num_lines": 180
} |
from functools import partial
from pubsub import pub
from threading import Lock
from threading import Thread
import wx
from spacq.gui.tool.box import Dialog, OK_BACKGROUND_COLOR, MessageDialog
from spacq.interface.units import Quantity, IncompatibleDimensions
from spacq.interface.resources import AcquisitionThread
"""
Magnet control front panel.
Dev notes: -Temporary design. Magnet control is planned to be integrated with
the resource automation functionality of this program.
-spacq.devices.iqc.gui.voltage_source.py was used as a guide
in creating this.
-the live aspects of this gui are derived from gui.display.plot.live.scalar.py
"""
class Model4GChannelPanel(wx.Panel):
    """
    Front-panel controls and live readouts for one Model4G magnet channel:
    sweep controls, persistent switch heater, limits and rates, plus an
    acquisition thread that periodically refreshes the readouts.
    """
    def __init__(self, parent, global_store, subdevice, *args, **kwargs):
        wx.Panel.__init__(self, parent, *args, **kwargs)

        self.global_store = global_store
        # Refresh period for the GUI-update acquisition thread.
        self.delay = Quantity(1.0, 's')
        # This lock blocks the acquisition thread from acquiring.
        self.running_lock = Lock()
        self.channel_subdevice = subdevice

        # resource name -> list of displays showing that resource (a name
        # may appear in both the control-state header and the readouts).
        self.displays = {}
        self.control_state_displays = {}
        self.readout_displays = {}
        self.control_state_list = ['persistent_switch_heater','virt_sync_currents']
        self.readout_list = [ 'magnet_current','power_supply_current',
            'persistent_switch_heater', 'high_limit','low_limit','sweep',
            'rate_0','rate_1','rate_2','rate_3','rate_4', 'virt_sync_currents']

        # (name, resource) pairs polled by Update().
        self.measurement_resources = []
        for name in self.readout_list:
            self.displays[name] = []
            self.measurement_resources.append((name, self.channel_subdevice.resources[name]))
        # A list to save acquired data to before outputting to GUI.
        self.measurements = [None] * len(self.measurement_resources)

        # Main Box.
        main_box = wx.BoxSizer(wx.VERTICAL)

        # Channel Header Box.
        channel_header_box = wx.BoxSizer(wx.HORIZONTAL)
        main_box.Add(channel_header_box, flag=wx.EXPAND)
        self.channel_button = wx.ToggleButton(self, label='Channel {0} Toggle'.format(self.channel_subdevice.channel))
        self.Bind(wx.EVT_TOGGLEBUTTON, self.OnChannelToggle, self.channel_button)
        self.channel_button.SetValue(False)
        channel_header_box.Add(self.channel_button)

        ## Control states.
        control_state_grid = wx.FlexGridSizer(rows=2, cols=2, hgap=1)
        channel_header_box.Add((0, 0), 1, wx.EXPAND)
        channel_header_box.Add(control_state_grid, flag=wx.ALIGN_RIGHT, border = 20)
        for control_state_name in self.control_state_list:
            control_state_display = wx.TextCtrl(self, size=(100, -1), style=wx.TE_READONLY)
            control_state_display.BackgroundColour = wx.LIGHT_GREY
            control_state_grid.Add(control_state_display, flag=wx.ALIGN_RIGHT)
            self.displays[control_state_name].append(control_state_display)
            self.control_state_displays[control_state_name] = control_state_display
        # reverse our dictionary for key retrieval by item.
        self.inv_control_state_displays = dict((v,k) for k, v in self.control_state_displays.iteritems())

        # Readouts.
        readout_static_box = wx.StaticBox(self, label = 'Readouts')
        readout_box = wx.StaticBoxSizer(readout_static_box, wx.VERTICAL)
        main_box.Add(readout_box, flag=wx.EXPAND, proportion=1)
        # readout_grid = wx.FlexGridSizer(rows=len(self.readout_list), cols=2, hgap=1)
        readout_grid = wx.FlexGridSizer(rows=len(self.readout_list), cols=3, hgap=1) #TODO: for debugging model4g GUI...replace when no longer needed.
        readout_box.Add(readout_grid, flag=wx.ALIGN_RIGHT)
        self.checkboxes = {}
        ## Setup individual labels + displays
        for resource_name in self.readout_list:
            ### Checkbox. #TODO: for debugging model4g GUI...remove when no longer needed.
            checkbox = wx.CheckBox(self)
            readout_grid.Add(checkbox, flag = wx.ALIGN_LEFT)
            self.checkboxes[resource_name] = checkbox
            ### Label.
            label = resource_name.replace('_',' ').title()
            readout_grid.Add(wx.StaticText(self, label=label + ':'),
                    flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
            ### Display.
            display = wx.TextCtrl(self, size=(100, -1), style=wx.TE_READONLY)
            display.BackgroundColour = wx.LIGHT_GREY
            self.displays[resource_name].append(display)
            self.readout_displays[resource_name] = display
            ### Connect display to GUI.
            readout_grid.Add(self.displays[resource_name][-1], flag=wx.ALIGN_RIGHT)
        # reverse our dictionary for key retrieval by item.
        self.inv_readout_displays = dict((v,k) for k, v in self.readout_displays.iteritems())

        # Controls.
        self.control_static_box = wx.StaticBox(self, label='Controls')
        self.control_box=wx.StaticBoxSizer(self.control_static_box, wx.VERTICAL)
        main_box.Add(self.control_box, flag=wx.EXPAND)
        ## Persistent Heater Switch.
        heater_box = wx.BoxSizer(wx.HORIZONTAL)
        self.control_box.Add(heater_box, flag=wx.ALIGN_RIGHT)
        heater_box.Add(wx.StaticText(self, label='Persistent Switch Heater:'),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.heater_toggle = wx.ToggleButton(self, label='on/off', size=(100,-1))
        initial_state = self.channel_subdevice.persistent_switch_heater
        # NOTE(review): mixes bool/int (True vs 0); wx accepts both.
        self.heater_toggle.SetValue(True if initial_state == 1 else 0)
        self.Bind(wx.EVT_TOGGLEBUTTON, self.OnHeaterToggle, self.heater_toggle)
        heater_box.Add(self.heater_toggle,flag=wx.ALIGN_RIGHT)

        ## Sweeper Control Box.
        sweeper_static_box = wx.StaticBox(self, label = 'Sweep')
        sweeper_box = wx.StaticBoxSizer(sweeper_static_box, wx.VERTICAL)
        self.control_box.Add(sweeper_box, flag=wx.EXPAND)
        sweep_buttons_box = wx.BoxSizer(wx.HORIZONTAL)
        sweeper_box.Add(sweep_buttons_box, flag = wx.CENTER|wx.ALL)
        ### Sweep buttons.
        sweep_buttons_grid = wx.FlexGridSizer(rows=2, cols=2, hgap=1)
        sweep_buttons_box.Add(sweep_buttons_grid, flag=wx.CENTER|wx.ALL)
        sweepup_button = wx.Button(self, label='up')
        sweepzero_button = wx.Button(self, label='zero')
        sweepdown_button = wx.Button(self, label='down')
        sweeppause_button = wx.Button(self, label='pause')
        self.Bind(wx.EVT_BUTTON, self.OnSweepUp, sweepup_button)
        self.Bind(wx.EVT_BUTTON, self.OnSweepZero, sweepzero_button)
        self.Bind(wx.EVT_BUTTON, self.OnSweepDown, sweepdown_button)
        self.Bind(wx.EVT_BUTTON, self.OnSweepPause, sweeppause_button)
        sweep_buttons_grid.Add(sweepup_button)
        sweep_buttons_grid.Add(sweepzero_button)
        sweep_buttons_grid.Add(sweepdown_button)
        sweep_buttons_grid.Add(sweeppause_button)
        ### Current syncing.
        ####some space
        sync_button = wx.Button(self, label='sync currents')
        self.Bind(wx.EVT_BUTTON, self.OnSyncCurrents, sync_button)
        sweep_buttons_box.Add(sync_button, flag=wx.LEFT|wx.CENTER, border = 20)

        ## Limits.
        limit_static_box = wx.StaticBox(self, label = 'Limits')
        limit_box = wx.StaticBoxSizer(limit_static_box, wx.VERTICAL)
        self.control_box.Add(limit_box,flag=wx.EXPAND)
        limits_grid = wx.FlexGridSizer(rows=2, cols=3, hgap=1)
        limits_grid.AddGrowableCol(1,1)
        limit_box.Add(limits_grid, flag=wx.ALIGN_RIGHT)
        ### High Limit
        limits_grid.Add(wx.StaticText(self, label='High Limit:'),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        set_hilim_button = wx.Button(self, label='Set', style=wx.BU_EXACTFIT)
        self.Bind(wx.EVT_BUTTON, self.OnSetHighLimit, set_hilim_button)
        limits_grid.Add(set_hilim_button,flag=wx.ALIGN_RIGHT)
        self.hilim_input = wx.TextCtrl(self, size=(100, -1))
        limits_grid.Add(self.hilim_input, flag=wx.EXPAND)
        ### Low Limit
        limits_grid.Add(wx.StaticText(self, label='Low Limit:'),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        set_lolim_button = wx.Button(self, label='Set', style=wx.BU_EXACTFIT)
        self.Bind(wx.EVT_BUTTON, self.OnSetLowLimit, set_lolim_button)
        limits_grid.Add(set_lolim_button,flag=wx.ALIGN_RIGHT)
        self.lolim_input = wx.TextCtrl(self, size=(100, -1))
        limits_grid.Add(self.lolim_input)

        ## Rates.
        rates_static_box = wx.StaticBox(self, label = 'Rates')
        rates_box = wx.StaticBoxSizer(rates_static_box, wx.VERTICAL)
        self.control_box.Add(rates_box,flag=wx.EXPAND)
        ## used to have content of rates box all right aligned.
        rates_inner_box = wx.BoxSizer(wx.HORIZONTAL)
        rates_box.Add(rates_inner_box, flag=wx.ALIGN_RIGHT)
        menu_items = []
        for resource in self.readout_list:
            if resource.startswith('rate_'):
                menu_items.append(resource)
        self.rates_menu = wx.ComboBox(self,choices = menu_items, style=wx.CB_READONLY)
        self.rates_menu.SetStringSelection(menu_items[0])
        rates_inner_box.Add(self.rates_menu, flag=wx.ALIGN_RIGHT)
        set_rate_button = wx.Button(self, label='Set', style=wx.BU_EXACTFIT)
        self.Bind(wx.EVT_BUTTON, self.OnSetRate, set_rate_button)
        rates_inner_box.Add(set_rate_button,flag=wx.ALIGN_RIGHT)
        self.rate_input = wx.TextCtrl(self, size=(100, -1))
        rates_inner_box.Add(self.rate_input, flag=wx.ALIGN_RIGHT)

        # Finish GUI building.
        self.SetSizerAndFit(main_box)

        # Default behaviour.
        ## start with...
        ### ...the threads locked out of acquisition.
        self.running_lock.acquire()
        ### ...controls disabled.
        self.RecursiveEnableSizer(self.control_box,False)

        # Threading.
        self.acqthreads = []
        #TODO: implement with a normal thread instead, to avoid the use of a dummy call to a resource.
        guicallback = partial(wx.CallAfter, self.Update)
        self.guiupdatethread = AcquisitionThread(self.delay, guicallback, resource=self.channel_subdevice.resources['persistent_switch_heater'], running_lock=self.running_lock)
        self.acqthreads.append(self.guiupdatethread)
        self.guiupdatethread.daemon = True
        self.guiupdatethread.start()
def __del__(self):
try:
# if self.channel_button.GetValue() == False:
# self.running_lock.release()
for thread in self.acqthreads:
thread.resource = None
thread.done = True
thread.join()
del thread
# self.close()
except Exception:
pass
    def UpdateReadouts(self, resource_name, value):
        """
        Update appropriate readouts with a new resource value.
        Also update button permissions.
        """
        if resource_name in self.readout_displays.keys():
            #TODO: for debugging model4g GUI...remove when no longer needed.
            if self.checkboxes[resource_name].Value == False:
                return
        for display in self.displays[resource_name]:
            #perform alterations to output based on where the resource is being readout in GUI.
            inv_cont_dict = self.inv_control_state_displays
            if display in inv_cont_dict.keys():
                if inv_cont_dict[display] == 'persistent_switch_heater':
                    # Highlight the control-state display while the heater is on.
                    if value == 'on':
                        display.BackgroundColour = OK_BACKGROUND_COLOR
                    elif value == 'off':
                        display.BackgroundColour = wx.LIGHT_GREY
                    value_readout = 'heater {0}'.format(value)
                elif inv_cont_dict[display] == 'virt_sync_currents':
                    # Highlight the control-state display while currents are synced.
                    if value == 'synced':
                        display.BackgroundColour = OK_BACKGROUND_COLOR
                    elif value == 'not synced':
                        display.BackgroundColour = wx.LIGHT_GREY
                    #display as-is
                    value_readout = value
            elif display in self.inv_readout_displays.keys():
                #display as-is
                # NOTE(review): value_readout would be unbound if a display
                # belonged to neither mapping; currently every display in
                # self.displays comes from one of the two -- confirm.
                value_readout = value
            display.SetValue(str(value_readout))

            # User Permissions
            # if currents don't match, heater toggle should be disabled.
            if display in inv_cont_dict.keys():
                if inv_cont_dict[display] == 'virt_sync_currents':
                    if value == 'synced':
                        self.heater_toggle.Enable()
                    else:
                        self.heater_toggle.Disable()
    def Update(self, dummyval):
        """
        Acquire data and then update the GUI with this data.
        Note: code taken from sweep controller.

        `dummyval` is the throwaway value produced by the dummy
        acquisition resource that drives this callback.
        """
        # NOTE(review): this handler runs via wx.CallAfter (GUI thread).
        # Each worker thread below immediately re-schedules read_resource
        # back onto the GUI thread via wx.CallAfter, which cannot run
        # until this handler returns -- so the readouts below appear to
        # be populated from the *previous* cycle's measurements.  Confirm
        # this one-cycle lag is intended.
        #this loop sets up separate threads for each resource.
        thrs = []
        for i, (name, resource) in enumerate(self.measurement_resources):
            if resource is not None:
                # Bind i as a default argument so each callback writes to
                # its own slot (avoids the late-binding closure pitfall).
                def save_callback(value, i=i):
                    self.measurements[i] = value
                callback = partial(wx.CallAfter, partial(self.read_resource, name, resource, save_callback))
                thr = Thread(target=callback)
                thrs.append(thr)
                thr.daemon = True
                thr.start()
        for thr in thrs:
            thr.join()
        #this code saves the values to the GUI readouts.
        for i, (name,_) in enumerate(self.measurement_resources):
            self.UpdateReadouts(name, self.measurements[i])
def read_resource(self, name, resource, save_callback):
"""
Read a value from a resource and handle exceptions.
Note: code taken from sweep controller.
"""
if name in self.readout_displays.keys():
if self.checkboxes[name].Value == False: #TODO: for debugging model4g GUI...remove when no longer needed.
return
try:
value = resource.value
except Exception as e:
if self.resource_exception_handler is not None:
self.resource_exception_handler(name, e, write=False)
return
save_callback(value)
def OnChannelToggle(self, evt=None):
toggle = self.channel_button.GetValue()
if toggle == True:
self.running_lock.release()
elif toggle == False:
self.running_lock.acquire()
self.RecursiveEnableSizer(self.control_box, toggle)
#permission defaults.
self.heater_toggle.Disable()
def OnSweepDown(self, evt=None):
self.channel_subdevice.resources['sweep'].value = 'down'
def OnSweepUp(self, evt=None):
self.channel_subdevice.resources['sweep'].value = 'up'
def OnSweepZero(self, evt=None):
self.channel_subdevice.resources['sweep'].value = 'zero'
def OnSweepPause(self, evt=None):
self.channel_subdevice.resources['sweep'].value = 'pause'
def OnSetRate(self, evt=None):
try:
Quantity(self.rate_input.GetValue())
except ValueError as e:
MessageDialog(self, str(e), 'Invalid value').Show()
return False
range_id = self.rates_menu.GetCurrentSelection()
resource = self.channel_subdevice.resources['rate_{0}'.format(range_id)]
new_value = self.rate_input.GetValue()
try:
resource.value = resource.convert(new_value)
except IncompatibleDimensions:
MessageDialog(self, ValueError('Expected dimensions to match "{0}"'.format(resource.units))).Show()
def OnHeaterToggle(self, evt=None):
if self.heater_toggle.GetValue() == True:
new_value = 'on'
if self.heater_toggle.GetValue() == False:
new_value = 'off'
self.channel_subdevice.resources['persistent_switch_heater'].value = new_value
def OnSetHighLimit(self, evt=None):
try:
Quantity(self.hilim_input.GetValue())
except ValueError as e:
MessageDialog(self, str(e), 'Invalid value').Show()
return False
new_value = self.hilim_input.GetValue()
resource = self.channel_subdevice.resources['high_limit']
try:
resource.value = resource.convert(new_value)
except IncompatibleDimensions:
MessageDialog(self, str(ValueError('Expected dimensions to match "{0}"'.format(resource.units)))).Show()
def OnSetLowLimit(self, evt=None):
try:
Quantity(self.lolim_input.GetValue())
except ValueError as e:
MessageDialog(self, str(e), 'Invalid value').Show()
return False
new_value = self.lolim_input.GetValue()
resource = self.channel_subdevice.resources['low_limit']
try:
resource.value = resource.convert(new_value)
except IncompatibleDimensions:
MessageDialog(self, ValueError('Expected dimensions to match "{0}"'.format(resource.units))).Show()
def OnSyncCurrents(self, evt=None):
self.channel_subdevice.resources['virt_sync_currents'].value = 'start'
def close(self):
"""
Perform cleanup.
"""
# Ensure the threads exits.
if self.channel_button.GetValue() == False:
self.running_lock.release()
for thread in self.acqthreads:
thread.resource = None
thread.done = True
thread.join()
del thread
def RecursiveEnableSizer(self,wx_sizer, toggle):
'''
Helper function that accesses all subwindows of a wxPython
sizer, and enables or disables them based on toggle.
'''
children = wx_sizer.GetChildren()
for item in children:
window = item.GetWindow()
sizer = item.GetSizer()
if sizer:
#recurse
self.RecursiveEnableSizer(sizer,toggle)
elif window:
window.Enable(toggle)
class Model4GFrontPanel(wx.Panel):
    """
    GUI for controlling the magnet: one boxed channel sub-panel per
    populated Model4G channel.
    """
    def __init__(self, parent, global_store, model4g, *args, **kwargs):
        wx.Panel.__init__(self, parent, *args, **kwargs)

        self.global_store = global_store
        self.model4g = model4g
        self.running = False
        self.channel_panels = []

        # Main panel: channel boxes laid out side by side.
        panel_box = wx.BoxSizer(wx.VERTICAL)
        channels_box = wx.BoxSizer(wx.HORIZONTAL)
        panel_box.Add(channels_box)
        for subdevice in self.model4g.channels:
            if subdevice is None:
                continue
            static_box = wx.StaticBox(self)
            box_sizer = wx.StaticBoxSizer(static_box, wx.VERTICAL)
            # Channel inputs/outputs.
            panel = Model4GChannelPanel(self, global_store, subdevice)
            box_sizer.Add(panel)
            channels_box.Add(box_sizer)
            self.channel_panels.append(panel)

        self.SetSizerAndFit(panel_box)

    def close(self):
        """Propagate shutdown to every channel panel."""
        #TODO: wxPython would probably have a nicer way of sending a close down through children.
        for panel in self.channel_panels:
            panel.close()
class Model4GFrontPanelDialog(Dialog):
    """
    A wrapper for Model4GFrontPanel.
    """
    def __init__(self, parent, global_store, model4g_name, *args, **kwargs):
        # If the device doesn't exist, give up.
        # NOTE(review): self.Destroy() is called before Dialog.__init__,
        # i.e. before the underlying wx window exists -- confirm wx
        # tolerates this (the same pattern appears in
        # ch6VoltageSourceSettingsDialog).
        try:
            model4g = global_store.devices[model4g_name].device
        except (KeyError, AttributeError):
            self.Destroy()
            return

        Dialog.__init__(self, parent, title='Model4G Front Panel', *args, **kwargs)
        self.model4g_name = model4g_name

        # Dialog.
        dialog_box = wx.BoxSizer(wx.VERTICAL)
        ## Settings panel.
        self.panel = Model4GFrontPanel(self, global_store, model4g)
        dialog_box.Add(self.panel)
        self.SetSizerAndFit(dialog_box)
        self.Bind(wx.EVT_CLOSE, self.OnClose)

        # Subscriptions.
        pub.subscribe(self.msg_device, 'device.added')
        pub.subscribe(self.msg_device, 'device.removed')

    def msg_device(self, name, value=None):
        # Tear down if our device was added/removed out from under us.
        if name == self.model4g_name:
            # Device has changed, so we can't trust it anymore.
            self.Destroy()
            return

    def OnClose(self, evt):
        # Shut down acquisition threads before the window goes away.
        self.panel.close()
        evt.Skip()
"repo_name": "ghwatson/SpanishAcquisitionIQC",
"path": "spacq/devices/cryomagnetics/gui/model4g.py",
"copies": "1",
"size": "18135",
"license": "bsd-2-clause",
"hash": -1488419721314145800,
"line_mean": 31.3857142857,
"line_max": 170,
"alpha_frac": 0.7039426523,
"autogenerated": false,
"ratio": 3.077902240325866,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8506191997064944,
"avg_score": 0.15513057911218436,
"num_lines": 560
} |
from functools import partial
from pubsub import pub
from threading import Thread
from time import sleep
import wx
from wx.lib.agw.floatspin import FloatSpin
from spacq.gui.tool.box import load_csv, save_csv, Dialog, MessageDialog
from spacq.interface.units import Quantity
"""
Configuration for a ch6VoltageSource.
"""
class ch6VoltageSourceTunerDialog(Dialog):
    """
    A dialog for tuning a voltage source port.

    `ok_callback` is invoked with this dialog when OK is pressed; `port`
    is the voltage source port being tuned.
    """
    def __init__(self, parent, global_store, ok_callback, port, *args, **kwargs):
        # NOTE(review): *args/**kwargs are accepted but not forwarded to
        # Dialog.__init__ -- confirm that is intentional.
        Dialog.__init__(self, parent, title='Port {0} tuning'.format(port.num))

        self.global_store = global_store
        self.ok_callback = ok_callback
        self.port = port

        # Dialog.
        dialog_box = wx.BoxSizer(wx.VERTICAL)
        ## Self-calibration.
        calibration_static_box = wx.StaticBox(self, label='DAC self-calibration')
        calibration_box = wx.StaticBoxSizer(calibration_static_box, wx.VERTICAL)
        dialog_box.Add(calibration_box, flag=wx.EXPAND|wx.ALL, border=5)
        self.calibrate_button = wx.Button(self, label='Self-calibrate')
        self.Bind(wx.EVT_BUTTON, self.OnCalibrate, self.calibrate_button)
        calibration_box.Add(self.calibrate_button, flag=wx.EXPAND)
        ## Tuning.
        tuning_static_box = wx.StaticBox(self, label='Tuning')
        tuning_box = wx.StaticBoxSizer(tuning_static_box, wx.VERTICAL)
        dialog_box.Add(tuning_box, flag=wx.EXPAND)
        ### Autotune.
        autotuning_static_box = wx.StaticBox(self, label='Autotuning')
        autotuning_box = wx.StaticBoxSizer(autotuning_static_box, wx.VERTICAL)
        tuning_box.Add(autotuning_box, flag=wx.EXPAND|wx.ALL, border=5)
        autotuning_sizer = wx.FlexGridSizer(rows=3, cols=2, hgap=5)
        autotuning_box.Add(autotuning_sizer, flag=wx.CENTER)
        autotuning_sizer.Add(wx.StaticText(self, label='Resource name:'),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.resource_name_input = wx.TextCtrl(self, size=(300,-1))
        autotuning_sizer.Add(self.resource_name_input)
        autotuning_sizer.Add(wx.StaticText(self, label='Max:'),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.automax_input = FloatSpin(self, value=1, min_val=-10, max_val=10, increment=1,
                digits=5)
        autotuning_sizer.Add(self.automax_input)
        autotuning_sizer.Add(wx.StaticText(self, label='Min:'),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.automin_input = FloatSpin(self, value=-1, min_val=-10, max_val=10, increment=1,
                digits=5)
        autotuning_sizer.Add(self.automin_input)
        self.autotune_button = wx.Button(self, label='Autotune')
        self.Bind(wx.EVT_BUTTON, self.OnAutotune, self.autotune_button)
        autotuning_box.Add(self.autotune_button, flag=wx.EXPAND)
        ### Manual tune.
        tuning_sizer = wx.FlexGridSizer(rows=2, cols=2, hgap=5)
        tuning_box.Add(tuning_sizer, flag=wx.CENTER)
        tuning_sizer.Add(wx.StaticText(self, label='Gain:'),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.gain_input = FloatSpin(self, value=0, min_val=-1e6, max_val=1e6, increment=1,
                digits=5)
        tuning_sizer.Add(self.gain_input)
        tuning_sizer.Add(wx.StaticText(self, label='Offset:'),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.offset_input = FloatSpin(self, value=0, min_val=-1e6, max_val=1e6, increment=1,
                digits=5)
        tuning_sizer.Add(self.offset_input)
        ## End buttons.
        button_box = wx.BoxSizer(wx.HORIZONTAL)
        dialog_box.Add(button_box, flag=wx.CENTER|wx.ALL, border=5)
        ok_button = wx.Button(self, wx.ID_OK)
        self.Bind(wx.EVT_BUTTON, self.OnOk, ok_button)
        button_box.Add(ok_button)
        cancel_button = wx.Button(self, wx.ID_CANCEL)
        button_box.Add(cancel_button)

        self.SetSizerAndFit(dialog_box)
def autotune(self, resource):
gain, offset = self.port.autotune(resource, set_result=False,
min_value=self.automin_input.GetValue(),
max_value=self.automax_input.GetValue())
wx.CallAfter(self.gain_input.SetValue, gain)
wx.CallAfter(self.offset_input.SetValue, offset)
wx.CallAfter(self.autotune_button.Enable)
    def self_calbrate(self):
        """
        Trigger the port's DAC self-calibration (worker thread), wait out
        the calibration delay, then re-enable the button on the GUI thread.
        """
        # NOTE(review): method name is misspelled ("calbrate"); kept as-is
        # because OnCalibrate targets it by this name.
        self.port.apply_settings(calibrate=True)
        sleep(self.port.calibration_delay)
        wx.CallAfter(self.calibrate_button.Enable)
def SetValue(self, gain, offset):
self.gain_input.SetValue(gain)
self.offset_input.SetValue(offset)
def GetValue(self):
return (self.gain_input.GetValue(), self.offset_input.GetValue())
def OnAutotune(self, evt=None):
name = self.resource_name_input.Value
if not name:
MessageDialog(self, 'No resource provided').Show()
return
try:
resource = self.global_store.resources[name]
except KeyError:
MessageDialog(self, name, 'Missing resource').Show()
return
if not resource.readable:
MessageDialog(self, name, 'Unreadable resource').Show()
return
self.autotune_button.Disable()
thr = Thread(target=self.autotune, args=(resource,))
thr.daemon = True
thr.start()
def OnCalibrate(self, evt=None):
self.calibrate_button.Disable()
thr = Thread(target=self.self_calbrate)
thr.daemon = True
thr.start()
def OnOk(self, evt=None):
self.ok_callback(self)
self.Destroy()
class ch6VoltageSourceSettingsPanel(wx.Panel):
    """
    All the settings for a voltage source.
    """
    def __init__(self, parent, global_store, vsrc, *args, **kwargs):
        wx.Panel.__init__(self, parent, *args, **kwargs)

        self.global_store = global_store
        self.vsrc = vsrc

        # Per-port voltage spinners and their (set, tune) button pairs.
        self.port_value_inputs = []
        self.port_buttons = []

        # Panel.
        panel_box = wx.BoxSizer(wx.VERTICAL)
        ## Ports.
        ports_box = wx.FlexGridSizer(rows=3, cols=2)
        panel_box.Add(ports_box)
        for port in xrange(6):
            port_static_box = wx.StaticBox(self, label='Port {0} '.format(port))
            port_box = wx.StaticBoxSizer(port_static_box, wx.HORIZONTAL)
            ports_box.Add(port_box, flag=wx.ALL, border=5)
            spin = FloatSpin(self, value=0, min_val=-5, max_val=5, increment=1, digits=6)
            self.port_value_inputs.append(spin)
            port_box.Add(spin)
            port_box.Add(wx.StaticText(self, label='V'))
            # The port number is bound via partial; wx appends the event.
            set_button = wx.Button(self, label='Set', style=wx.BU_EXACTFIT)
            set_button.Bind(wx.EVT_BUTTON, partial(self.OnSetVoltage, port))
            port_box.Add(set_button)
            tune_button = wx.Button(self, label='Tune...', style=wx.BU_EXACTFIT)
            tune_button.Bind(wx.EVT_BUTTON, partial(self.OnTune, port))
            port_box.Add(tune_button)
            self.port_buttons.append((set_button, tune_button))
        ## All ports.
        button_static_box = wx.StaticBox(self, label='All ports')
        button_box = wx.StaticBoxSizer(button_static_box, wx.HORIZONTAL)
        panel_box.Add(button_box, flag=wx.CENTER)
        ### Zero.
        zero_all_button = wx.Button(self, label='Zero')
        self.Bind(wx.EVT_BUTTON, self.OnZeroAll, zero_all_button)
        button_box.Add(zero_all_button, flag=wx.CENTER)
        ### Self-calibrate.
        self.calibrate_all_button = wx.Button(self, label='Self-calibrate')
        self.Bind(wx.EVT_BUTTON, self.OnCalibrateAll, self.calibrate_all_button)
        button_box.Add(self.calibrate_all_button, flag=wx.CENTER)
        ### Load tuning.
        tuning_data_static_box = wx.StaticBox(self, label='Tuning data')
        tuning_data_box = wx.StaticBoxSizer(tuning_data_static_box, wx.HORIZONTAL)
        button_box.Add(tuning_data_box)
        #### Save.
        tuning_data_save_button = wx.Button(self, label='Save...')
        self.Bind(wx.EVT_BUTTON, self.OnSave, tuning_data_save_button)
        tuning_data_box.Add(tuning_data_save_button)
        #### Load.
        tuning_data_load_button = wx.Button(self, label='Load...')
        self.Bind(wx.EVT_BUTTON, self.OnLoad, tuning_data_load_button)
        tuning_data_box.Add(tuning_data_load_button)

        self.SetSizer(panel_box)
def self_calbrate_all(self):
delay = 0 # s
for port in self.vsrc.ports:
# Use the largest delay.
if port.calibration_delay > delay:
delay = port.calibration_delay
port.apply_settings(calibrate=True)
sleep(delay)
wx.CallAfter(self.calibrate_all_button.Enable)
def zero_all(self):
for port in self.vsrc.ports:
port.voltage = Quantity(0.0, 'V')
def OnSetVoltage(self, port_num, evt=None):
try:
self.vsrc.ports[port_num].voltage = Quantity(self.port_value_inputs[port_num].GetValue(), 'V')
except ValueError as e:
MessageDialog(self, str(e), 'Invalid value').Show()
def OnTune(self, port_num, evt=None):
port = self.vsrc.ports[port_num]
def ok_callback(dlg):
port.gain, port.offset = dlg.GetValue()
dlg = ch6VoltageSourceTunerDialog(self, self.global_store, ok_callback, port)
dlg.SetValue(port.gain, port.offset)
dlg.Show()
def OnCalibrateAll(self, evt=None):
self.calibrate_all_button.Disable()
thr = Thread(target=self.self_calbrate_all)
thr.daemon = True
thr.start()
def OnZeroAll(self, evt=None):
thr = Thread(target=self.zero_all)
thr.daemon = True
thr.start()
def OnSave(self, evt=None):
    """Export each port's (gain, offset) pair via the CSV save dialog.

    I/O failures are reported in an error dialog rather than raised.
    """
    values = [[port.gain, port.offset] for port in self.vsrc.ports]
    try:
        save_csv(self, values)
    except IOError as e:
        MessageDialog(self, str(e), 'Save error').Show()
        return
def OnLoad(self, evt=None):
    """Import (gain, offset) pairs for all ports from a CSV file.

    Validates the row count and that every value parses as a float
    *before* touching any port, so a bad file leaves settings intact.
    Errors are shown in a dialog rather than raised.
    """
    try:
        result = load_csv(self)
        if result is None:
            return
        has_header, values, _ = result
        if has_header:
            port_values = values[1:]
        else:
            port_values = values
        if len(port_values) != len(self.vsrc.ports):
            raise ValueError('Invalid number of ports.')
        for i, port_value in enumerate(port_values):
            if len(port_value) != 2:
                raise ValueError('Invalid number of settings for port {0}.'.format(i))
            try:
                float(port_value[0])
                float(port_value[1])
            # Fix: float() raises ValueError for unparseable strings and
            # TypeError for non-string/non-number objects; catching only
            # TypeError let raw conversion errors leak to the user.
            except (TypeError, ValueError):
                raise ValueError('Not a number for port {0}.'.format(i))
    except (IOError, ValueError) as e:
        MessageDialog(self, str(e), 'Load error').Show()
        return
    # All rows validated; apply them (renamed from `values`, which
    # shadowed the CSV payload above).
    for port, pair in zip(self.vsrc.ports, port_values):
        port.gain = float(pair[0])
        port.offset = float(pair[1])
class ch6VoltageSourceSettingsDialog(Dialog):
    """
    A wrapper for ch6VoltageSourceSettingsPanel.
    """
    def __init__(self, parent, global_store, vsrc_name, *args, **kwargs):
        # If the device doesn't exist, give up.
        # NOTE(review): Destroy() is called before Dialog.__init__ runs;
        # presumably wx tolerates this in this codebase -- confirm.
        try:
            vsrc = global_store.devices[vsrc_name].device
        except (KeyError, AttributeError):
            self.Destroy()
            return
        Dialog.__init__(self, parent, title='Six channel voltage source settings', *args, **kwargs)
        self.vsrc_name = vsrc_name
        # Dialog.
        dialog_box = wx.BoxSizer(wx.VERTICAL)
        ## Settings panel.
        self.panel = ch6VoltageSourceSettingsPanel(self, global_store, vsrc)
        dialog_box.Add(self.panel)
        self.SetSizerAndFit(dialog_box)
        # Subscriptions: close this dialog if the device changes.
        pub.subscribe(self.msg_device, 'device.added')
        pub.subscribe(self.msg_device, 'device.removed')

    def msg_device(self, name, value=None):
        # pubsub callback for device.added / device.removed.
        if name == self.vsrc_name:
            # Device has changed, so we can't trust it anymore.
            self.Destroy()
            return
| {
"repo_name": "ghwatson/SpanishAcquisitionIQC",
"path": "spacq/devices/iqc/gui/ch6_voltage_source.py",
"copies": "1",
"size": "10565",
"license": "bsd-2-clause",
"hash": -7618734881786568000,
"line_mean": 28.3472222222,
"line_max": 97,
"alpha_frac": 0.7063890204,
"autogenerated": false,
"ratio": 2.7979343220338984,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8843366363606464,
"avg_score": 0.032191395765486866,
"num_lines": 360
} |
from functools import partial
from PyQt5 import QtWidgets
from ui_main_view import Ui_MainWindow
from list_view import ListView
from nqueens.nqueens_view import NQueensView
from divide_and_conquer.dc_view import DCView
from greedy.greedy_view import GreedyView
from spelling_checker.sp_view import SpCheckerView
from probabilistic.probabilistic_view import ProbabilisticView
class MainView(QtWidgets.QMainWindow):
    """Main window: a stacked widget switching between algorithm views."""

    # Menu key -> view class shown for each algorithm family.
    WIDGETS = {
        'QU': NQueensView,
        'DC': DCView,
        'GR': GreedyView,
        'SP': SpCheckerView,
        'PR': ProbabilisticView,
    }

    def __init__(self, controller, *args, **kwargs):
        super(MainView, self).__init__(*args, **kwargs)
        self.controller = controller
        # Window size remembered across switches; restored on next switch.
        self._old_size = None
        self.setup_ui()

    def switch_to(self, widget_key):
        """Bring the view registered under ``widget_key`` to the front."""
        instance = self.instances[widget_key]
        if instance != self.ui.stackedWidget.currentWidget():
            if self._old_size:
                self.resize(self._old_size)
                self._old_size = None
            # Let the target view's controller prepare before it is shown.
            instance.controller.pre_switch()
            self.ui.stackedWidget.setCurrentWidget(instance)

    def setup_ui(self):
        """Instantiate every registered view and wire up the menu actions."""
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.instances = {key: widget.as_view(self)
                          for key, widget in self.WIDGETS.items()}
        for instance in self.instances.values():
            self.ui.stackedWidget.addWidget(instance)
        # One switch_to callback per widget key, bound via partial.
        func = {widget: partial(self.switch_to, widget)
                for widget in self.WIDGETS}
        self.ui.action_queens.triggered.connect(func['QU'])
        self.ui.action_dc.triggered.connect(func['DC'])
        self.ui.action_greedy.triggered.connect(func['GR'])
        self.ui.action_sp.triggered.connect(func['SP'])
        self.ui.action_probabilistic.triggered.connect(func['PR'])
        # Let's add the list view! It is shown first.
        lview = ListView.as_view(self)
        self.ui.stackedWidget.addWidget(lview)
        self.ui.stackedWidget.setCurrentWidget(lview)
| {
"repo_name": "PereBal/advanced-algorithms",
"path": "main_view.py",
"copies": "1",
"size": "2035",
"license": "mit",
"hash": 8755938036915716000,
"line_mean": 31.8225806452,
"line_max": 66,
"alpha_frac": 0.6412776413,
"autogenerated": false,
"ratio": 3.6799276672694394,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.981913748392097,
"avg_score": 0.00041356492969396195,
"num_lines": 62
} |
from functools import partial
from pyramid.exceptions import ConfigurationError
from pyramid.path import DottedNameResolver
from rutter.urlmap import URLMap
from rutter.urlmap import _default_not_found_app
from rutter.urlmap import _normalize_url
from rutter.urlmap import _parse_path_expression
import logging
import threading
logger = logging.getLogger(__name__)
class App(object):
    """
    A wrapper for reload an application based on a monitor
    """
    # Dotted-name resolver shared by all instances.
    resolve = staticmethod(DottedNameResolver(None).maybe_resolve)
    # Guards concurrent reloads of current_app.
    reload_lock = threading.Lock()

    def __init__(self, loader):
        self.loader = loader
        self.checks = {}          # monitor name -> monitor instance
        self.current_app = None   # the wrapped WSGI app
        self.app_loader = None    # zero-arg callable that (re)loads the app

    def __call__(self, environ, start_response):
        # WSGI entry point: delegate to the currently loaded app.
        if self.current_app is None:
            raise RuntimeError("No registered app")
        return self.current_app(environ, start_response)

    def init_app(self, conf_list, global_conf):
        """Find the 'app' spec in ``conf_list`` and load it as the inner app."""
        try:
            _, spec = next((name, spec) for name, spec in conf_list if name == 'app')
        except StopIteration:
            raise ConfigurationError("App name and specification required"
                                     " for prismconf.reloadable.App")
        # Keep a re-invocable loader so monitors can reload the app later.
        self.app_loader = partial(self.loader.get_app, spec, global_conf=global_conf)
        app = self.current_app = self.app_loader()
        return app

    def load_monitors(self, conf_list, global_conf):
        """Yield (name, monitor) for every non-'app' entry in ``conf_list``."""
        mon_specs = ((name, spec) for name, spec in conf_list
                     if name != 'app')
        for name, spec in mon_specs:
            # Each spec resolves to a monitor factory called with (app, conf).
            mon = self.resolve(spec)(self, global_conf)
            yield name, mon

    @classmethod
    def factory(cls, loader, global_conf, **local_conf):
        """PasteDeploy-style factory: build wrapper, inner app and monitors."""
        app = cls(loader)
        conf_list = local_conf.items()
        inner_app = app.init_app(conf_list, global_conf)
        checks = {name: mon for name, mon
                  in app.load_monitors(conf_list, global_conf)}
        app.checks.update(checks)
        logger.debug("%s %s", inner_app, checks)
        return app

# Module-level alias used in paste configuration files.
factory = App.factory
class URLMap(URLMap):
    """
    Reloadable composite app
    """
    # NOTE: deliberately shadows rutter's URLMap, extending it.
    _default_not_found_app = _default_not_found_app

    def __init__(self, not_found_app=None):
        self.applications = []
        self.not_found_application = not_found_app or self._default_not_found_app
        # path -> (loader, global_conf, lock); filled by urlmap_factory.
        self.loaders = {}

    def __call__(self, environ, start_response):
        """WSGI dispatch: match host[:port] + path prefix, else the 404 app."""
        host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
        if ':' in host:
            host, port = host.split(':', 1)
        else:
            # No explicit port in the Host header: infer from the scheme.
            if environ['wsgi.url_scheme'] == 'http':
                port = '80'
            else:
                port = '443'
        hostport = host + ':' + port
        path_info = environ.get('PATH_INFO')
        path_info = _normalize_url(path_info, False)[1]
        for dom_url, app in self.applications:
            domain, app_url = dom_url
            if domain and domain != host and domain != hostport:
                continue
            if (path_info == app_url
                    or path_info.startswith(app_url + '/')):
                # Shift the matched prefix from PATH_INFO to SCRIPT_NAME.
                environ['SCRIPT_NAME'] += app_url
                environ['PATH_INFO'] = path_info[len(app_url):]
                return app(environ, start_response)
        environ['paste.urlmap_object'] = self
        return self.not_found_application(environ, start_response)
def urlmap_factory(loader, global_conf, **local_conf):
    """Paste composite factory building a reloadable :class:`URLMap`.

    ``not_found_app`` may come from local or global configuration; every
    other local entry is a ``path = app_name`` mount point.
    """
    nf_spec = local_conf.pop('not_found_app', None)
    if nf_spec is None:
        nf_spec = global_conf.get('not_found_app')
    if nf_spec:
        nf_spec = loader.get_app(nf_spec, global_conf=global_conf)
    urlmap = URLMap(not_found_app=nf_spec) if nf_spec is not None else URLMap()
    for raw_path, app_name in local_conf.items():
        mount = _parse_path_expression(raw_path)
        urlmap[mount] = loader.get_app(app_name, global_conf=global_conf)
        # Remember how to rebuild this app later (used for reloading).
        urlmap.loaders[mount] = (loader, global_conf, threading.Lock())
    return urlmap
| {
"repo_name": "whitmo/prism",
"path": "prism/reloadable.py",
"copies": "1",
"size": "4139",
"license": "bsd-3-clause",
"hash": -3857991809290986500,
"line_mean": 33.4916666667,
"line_max": 85,
"alpha_frac": 0.5989369413,
"autogenerated": false,
"ratio": 3.79029304029304,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.488922998159304,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pyramid_swagger_router.driver import Driver as _Driver
from pyramid_swagger_router.codegen import Codegen
from pyramid_swagger_router.codegen import Context
from swagger_marshmallow_codegen.langhelpers import clsname_from_path
class UnRepr(object):
    """Wrap a string so that ``repr()`` emits it verbatim (unquoted).

    Used to inject source-code expressions into generated settings
    dicts that are later rendered with repr().
    """

    def __init__(self, value):
        # The literal text __repr__ will emit.
        self.value = value

    def __repr__(self):
        return self.value
class MyContext(Context):
    """Codegen context that wires generated views to marshmallow schemas."""

    # Module name the generated code imports schemas from.
    SCHEMA_MODULE_NAME = "schema"

    def build_view_setting(self, pattern, route, method, here, renderer="vjson"):
        """Extend the default view setting with a ``withswagger`` decorator.

        The decorator references ``schema.<Cls>Input`` / ``schema.<Cls>Output``
        classes derived from the route pattern.
        """
        here.from_(".", self.SCHEMA_MODULE_NAME)
        here.from_("toybox.swagger", "withswagger")
        d = super().build_view_setting(pattern, route, method, renderer=renderer)
        cls_prefix = clsname_from_path(pattern)
        input = "{}.{}Input".format(self.SCHEMA_MODULE_NAME, cls_prefix)
        output = "{}.{}Output".format(self.SCHEMA_MODULE_NAME, cls_prefix)
        # UnRepr makes the decorator render as code, not a quoted string.
        d["decorator"] = UnRepr("withswagger({input}, {output})".format(input=input, output=output))
        return d
class Driver(_Driver):
    """Router driver that generates code using :class:`MyContext`."""
    codegen_factory = partial(Codegen, context_factory=MyContext)
| {
"repo_name": "podhmo/toybox",
"path": "examples/swagger2/driver.py",
"copies": "1",
"size": "1128",
"license": "mit",
"hash": 8588219853086148000,
"line_mean": 35.3870967742,
"line_max": 100,
"alpha_frac": 0.6906028369,
"autogenerated": false,
"ratio": 3.710526315789474,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9895598308495692,
"avg_score": 0.001106168838756417,
"num_lines": 31
} |
from functools import partial
from PySide2.QtCore import *
from PySide2.QtWidgets import *
import maya.cmds as cmds
import maya.mel as mel
import nebula_libs.nconstants as const
import nebula_ui.nCoreui.toolButton as button
from nebula_libs.nicon import Icon as Icon
class RigUtilsToolBar(QToolBar):
    """Maya rigging-utilities toolbar.

    Exposes quick-action buttons (delete history, select hierarchy,
    center pivot, freeze transform) plus Modify / Utils / Lock / unLock
    drop-down menus wired to the module-level helper functions below.
    """

    def __init__(self, parent=None):
        QToolBar.__init__(self, parent=parent)
        self.setIconSize(QSize(16, 16))
        ########################
        ### Main buttons on toolbar
        self.deleteHistory = self.addAction(Icon('iconmonstr-x-mark-5-240'), 'DelHist')
        self._setActionInfo(self.deleteHistory, 'Delete History')
        self.deleteHistory.triggered.connect(deleteHistory)
        self.selHrc = self.addAction(Icon('iconmonstr-sort-26-240'), 'SelHrc')
        self._setActionInfo(self.selHrc, 'Select hierarchy')
        self.selHrc.triggered.connect(selHrc)
        self.centerPiv = self.addAction(Icon('iconmonstr-crosshair-4-240'), 'CenterPiv')
        self._setActionInfo(self.centerPiv, 'Center Pivots')
        self.centerPiv.triggered.connect(centerPiv)
        self.freezeXfrm = self.addAction(Icon('iconmonstr-crosshair-1-240'), 'FrzXform')
        self._setActionInfo(self.freezeXfrm, 'Freeze Transform')
        self.freezeXfrm.triggered.connect(freezeXfrm)
        ########################
        ## Modify Menu
        self.modifyMenu = button.ToolButton(self, 'Modify')
        self.addWidget(self.modifyMenu)
        self.modifyAddAttr = self.modifyMenu.menu.addAction('Add Attrib')
        self.modifyAddAttr.setIcon(Icon('iconmonstr-plus-4-240'))
        # Menu entries delegating to native Maya MEL commands.
        self.modifyAddAttr.triggered.connect(partial(melEval, "AddAttribute;"))
        self.modifySearchReplace = self.modifyMenu.menu.addAction('Search Replace')
        self.modifySearchReplace.setIcon(Icon('iconmonstr-help-4-240'))
        self.modifySearchReplace.triggered.connect(partial(melEval, "performSearchReplaceNames 1;"))
        ########################
        ## Utils Menu
        self.utilsMenu = button.ToolButton(self, 'Utils')
        self.addWidget(self.utilsMenu)
        ## Utils - Snap
        # QWidgetAction wrapping a QLabel acts as a section header.
        self.snapsLabel = QWidgetAction(self)
        self.snapsLabel.setDefaultWidget(QLabel('Snap:'))
        self.addSnapsLabel = self.utilsMenu.menu.addAction(self.snapsLabel)
        self.snapToEdgeLoopCenter = self.utilsMenu.menu.addAction('Snp Sel to edgelp center')
        self.snapToEdgeLoopCenter.setIcon(Icon('iconmonstr-log-out-4-240'))
        self._setActionInfo(self.snapToEdgeLoopCenter, 'Select item, then edge to snap to edgeloop center.')
        self.snapToEdgeLoopCenter.triggered.connect(snapToEdgeLoopCenter)
        self.snapToVertCenter = self.utilsMenu.menu.addAction('Snp Sel to vertPair center')
        self.snapToVertCenter.setIcon(Icon('iconmonstr-log-out-4-240'))
        self._setActionInfo(self.snapToVertCenter, 'Select item, then 2 verts to snap to the item to the center of.')
        self.snapToVertCenter.triggered.connect(dosnapToVertCenter)
        self.sep03 = self.utilsMenu.menu.addSeparator()
        ## Utils - General
        self.generalLabel = QWidgetAction(self)
        self.generalLabel.setDefaultWidget(QLabel('General:'))
        self.addgeneralLabel = self.utilsMenu.menu.addAction(self.generalLabel)
        ## Naming Menu
        self.namingLabel = QWidgetAction(self)
        self.namingLabel.setDefaultWidget(QLabel('Naming:'))
        self.addnamingLabel = self.utilsMenu.menu.addAction(self.namingLabel)
        self.renamer = self.utilsMenu.menu.addAction('Quick Rename')
        self.renamer.setIcon(Icon('iconmonstr-text-21-240'))
        self.renamer.triggered.connect(dorename)
        self.geoSuffix = self.utilsMenu.menu.addAction('Add _geo suffix')
        self.geoSuffix.triggered.connect(partial(addGeoSuffix))
        self.geoSuffix.setIcon(Icon('iconmonstr-text-21-240'))
        self.sep05 = self.utilsMenu.menu.addSeparator()
        self.charLabel = QWidgetAction(self)
        self.charLabel.setDefaultWidget(QLabel('Char:'))
        self.addcharLabel = self.utilsMenu.menu.addAction(self.charLabel)
        ########################
        ## Lock Menu: lock + make non-keyable per channel group.
        self.lockMenu = button.ToolButton(self, 'Lock/Hide')
        self.addWidget(self.lockMenu)
        self.lockMenuAll = self.lockMenu.menu.addAction(Icon("iconmonstr-lock-30-240"), 'ALL')
        self.lockMenuAll.triggered.connect(partial(lockchans, channels=const.ALLCHANS, lock=True, keyable=False))
        self.sep01 = self.lockMenu.menu.addSeparator()
        self.lockMenuTranslate = self.lockMenu.menu.addAction(Icon("iconmonstr-lock-30-240"), 'Translate')
        self.lockMenuTranslate.triggered.connect(partial(lockchans, channels=const.TRANS, lock=True, keyable=False))
        self.lockMenuRotate = self.lockMenu.menu.addAction(Icon("iconmonstr-lock-30-240"), 'Rotate')
        self.lockMenuRotate.triggered.connect(partial(lockchans, channels=const.ROT, lock=True, keyable=False))
        self.lockMenuScale = self.lockMenu.menu.addAction(Icon("iconmonstr-lock-30-240"), 'Scale')
        self.lockMenuScale.triggered.connect(partial(lockchans, channels=const.SCALE, lock=True, keyable=False))
        self.lockMenuVis = self.lockMenu.menu.addAction(Icon("iconmonstr-lock-30-240"), 'Vis')
        self.lockMenuVis.triggered.connect(partial(lockchans, channels=const.VIS, lock=True, keyable=False))
        ########################
        ## Build the UnLock Menu: mirror of the Lock menu with lock=False.
        self.unlockMenu = button.ToolButton(self, 'unLock/unHide')
        self.addWidget(self.unlockMenu)
        self.unlockMenuAll = self.unlockMenu.menu.addAction(Icon("iconmonstr-lock-28-240"), 'ALL')
        self.unlockMenuAll.triggered.connect(partial(lockchans, channels=const.ALLCHANS, lock=False, keyable=True))
        self.sep02 = self.unlockMenu.menu.addSeparator()
        self.unlockMenuTranslate = self.unlockMenu.menu.addAction(Icon("iconmonstr-lock-28-240"), 'Translate')
        self.unlockMenuTranslate.triggered.connect(partial(lockchans, channels=const.TRANS, lock=False, keyable=True))
        self.unlockMenuRotate = self.unlockMenu.menu.addAction(Icon("iconmonstr-lock-28-240"), 'Rotate')
        self.unlockMenuRotate.triggered.connect(partial(lockchans, channels=const.ROT, lock=False, keyable=True))
        self.unlockMenuScale = self.unlockMenu.menu.addAction(Icon("iconmonstr-lock-28-240"), 'Scale')
        self.unlockMenuScale.triggered.connect(partial(lockchans, channels=const.SCALE, lock=False, keyable=True))
        self.unlockMenuVis = self.unlockMenu.menu.addAction(Icon("iconmonstr-lock-28-240"), 'Vis')
        self.unlockMenuVis.triggered.connect(partial(lockchans, channels=const.VIS, lock=False, keyable=True))

    def _setActionInfo(self, action, info):
        """Attach tooltip, status tip and hover status-bar message to ``action``."""
        action.setToolTip(info)
        action.setStatusTip(info)
        action.hovered.connect(partial(self._hoverStatusTip, info))

    def _hoverStatusTip(self, info):
        # Show the hint in the parent window's status bar, if it has one.
        if hasattr(self.parent(), 'statusBar'):
            self.parent().statusBar().showMessage(info)
## TEMP OLD SMELLY CMDS STUFF
def deleteHistory():
"""
Deletes construction history for selected items
"""
sel = cmds.ls(sl=True)
for obj in sel:
cmds.delete(obj, constructionHistory=True)
def selHrc():
    """
    Select the hrc for currently selected items
    (extends the selection to the full hierarchy below each item).
    """
    cmds.select(hi=True)
def lockchans(nodeName=None, channels=None, **kwargs):
    """Apply ``cmds.setAttr`` kwargs (e.g. lock/keyable) to channels.

    :param nodeName: a single node name; defaults to the current selection.
    :param channels: iterable of attribute names (e.g. const.TRANS).
    Extra kwargs are forwarded to ``cmds.setAttr``. Attributes that
    refuse the change (connected/locked) are skipped silently.

    Refactor: the original duplicated the inner loop for the single-node
    and selection cases; collapsed into one loop over a node list.
    """
    nodes = [nodeName] if nodeName else cmds.ls(sl=True)
    for node in nodes:
        for eachChannel in channels:
            try:
                cmds.setAttr('{}.{}'.format(node, eachChannel), **kwargs)
            except RuntimeError:
                # Attribute may be connected or already locked; ignore.
                pass
def centerPiv():
    """
    Center the pivot of every selected object.
    """
    for node in cmds.ls(sl=True):
        cmds.xform(node, centerPivots=True)
def melEval(melString=None):
    """Evaluate a MEL snippet (thin wrapper used by the menu partials)."""
    mel.eval(melString)
def snapToEdgeLoopCenter():
    """
    Select the item to snap, then an edge of the loop to center the item to
    This will snap the first selected item into the center of the loop that the edge is a part of.
    """
    selList = []
    #Select locator, then the edge (that can be looped properly) to snap to middle of loop.
    sel = cmds.ls(sl=True)
    for obj in sel:
        selList.append(obj)
    # Parse the edge index out of a component name like 'pCube1.e[12]'.
    cmds.polySelect(edgeLoop=int(selList[1].split('.e')[-1].split('[')[1].split(']')[0]))
    edges = cmds.ls(sl=True)
    verts = cmds.polyListComponentConversion(edges, fe=1, tv=1);
    cmds.select(clear=True)
    for vert in verts:
        cmds.select(vert, add=True)
    # Cluster the loop's verts to find their center, constrain the item
    # to the cluster handle, then delete the handle (keeps the position).
    cmds.cluster(n='tmpCluster')
    cmds.pointConstraint('tmpClusterHandle', str(selList[0]), n='tempPoint', w=1, mo=False)
    cmds.delete('tmpClusterHandle')
def dosnapToVertCenter():
    """
    Select the item to snap, then two verts
    This will snap the first selected item into the center of the two verts.
    """
    #or select the locator, then two Vertex to snap to middle of verts
    selList = []
    sel = cmds.ls(sl=True)
    for obj in sel:
        selList.append(obj)
    cmds.select(clear=True)
    cmds.select(selList[1], selList[2])
    # Cluster the two verts to get their midpoint, constrain the item to
    # the handle, then delete the handle (the snapped position remains).
    cmds.cluster(n='tmpCluster')
    cmds.pointConstraint('tmpClusterHandle', str(selList[0]), n='tempPoint', w=1, mo=False)
    cmds.delete('tmpClusterHandle')
def dorename():
    """
    Quick and dirty hash renamer
    Shows a stay-on-top input dialog; on accept, renameNode() applies
    the entered pattern to the current selection.
    """
    newName = QInputDialog()
    newName.setWindowFlags(Qt.WindowStaysOnTopHint)
    newName.setWindowTitle('Rename Selection:')
    newName.setTextValue('Format: name_###_geo')
    newName.setLabelText('Enter new name:')
    newName.show()
    newName.accepted.connect(partial(renameNode, newName))
def renameNode(newName):
    """
    Quick and dirty hash renamer
    Renames the current selection using a pattern like 'name_###_geo':
    the run of '#' becomes a zero-padded index.
    """
    name = str(newName.textValue())
    if name.count('#') != 0:
        # Zero-pad to the number of '#' characters, e.g. ### -> %03d.
        padding = "%0" + str(name.count('#')) + "d"
        curSel = cmds.ls(sl=True, long=True)
        countSel = len(curSel)
        # NOTE(review): indices are assigned in reverse selection order
        # (last selected gets the highest number) -- confirm intended.
        for x in range(1, len(curSel) + 1):
            orig = curSel[countSel - 1]
            finalName = name.split('#')[0] + str(padding % countSel) + name.split('#')[-1].replace(' ', '_')
            if cmds.objExists(finalName):
                raise Exception('Name Exists!', 'Name already exists in scene.')
            cmds.rename(orig, finalName)
            countSel = countSel - 1
    else:
        raise Exception('Missing ##', 'You need some ## in to specify numbering.')
def addGeoSuffix(suffix='geo'):
    """
    Will add an _geo to the end of any selected geo
    (skips nodes that already carry the suffix).
    """
    for node in cmds.ls(sl=True):
        if node.split('_')[-1] != suffix:
            cmds.rename(node, '{}_{}'.format(node, suffix))
def freezeXfrm():
    """Freeze translate/rotate/scale on the current selection."""
    cmds.makeIdentity(apply=True, t=True, r=True, s=True, n=False, pn=False)
| {
"repo_name": "jamesbdunlop/Nebula",
"path": "nebula_ui/nMayaui_rig/tb_rigUtils.py",
"copies": "2",
"size": "10904",
"license": "apache-2.0",
"hash": -8650098886530366000,
"line_mean": 40.4600760456,
"line_max": 118,
"alpha_frac": 0.663701394,
"autogenerated": false,
"ratio": 3.580952380952381,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.004285993250218907,
"num_lines": 263
} |
from functools import partial
from random import randint
import discord
from discord.ext import commands
from utils import str_split, search, r34, urban_def, channel_check
def make_lyrics_embed(response, lyrics):
    """Build a Discord embed for song lyrics.

    Returns an indexable (embed, chunks) pair: ``chunks`` is a list of
    follow-up messages when the lyrics are too long for one embed, or
    False when they fit inside the embed field.

    Fix: the original had two inconsistent return paths (a tuple inside
    the long-lyrics branch plus an unreachable trailing list return);
    unified into one shape per branch.
    """
    # NOTE(review): Color(value=...) is passed a str here; discord.py's
    # Colour expects an int -- confirm against the installed version.
    data = discord.Embed(
        color=discord.Color(value='16727871'),
        description='Lyrics are from genius.com'
    )
    data.set_author(name='Lyrics for ' + response['title'])
    if len(lyrics) < 1200:
        # Short enough to fit into a single embed field.
        data.add_field(name='\0', value=lyrics)
        return data, False
    # Long lyrics: split into message-sized chunks, stripping backticks
    # so the chunks cannot break Discord formatting.
    chunks = [part.replace('`', '') for part in str_split(lyrics)]
    return data, chunks
class Fun:
    """Cog with entertainment commands: lyrics, rule34, roll and define."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True)
    async def lyrics(self, ctx, *, song):
        """Search genius.com for ``song`` and post its lyrics."""
        # Run the blocking search helper in the bot's thread pool.
        response = await self.bot.loop.run_in_executor(self.bot.thread_pool,
                                                       partial(search, self.bot.tokens["genius_token"],
                                                               search_term=song))
        if 'lyrics' in response:
            lyrics = response['lyrics']
            data = make_lyrics_embed(response, lyrics)
            try:
                await self.bot.say(embed=data[0])
                if data[1]:
                    # Overflow chunks for lyrics too long for one embed.
                    for i in data[1]:
                        await self.bot.say(i)
            except discord.HTTPException:
                await self.bot.say('I need to be able to send embedded links')
        else:
            # Presumably `response` carries an error message from the
            # search helper when no lyrics were found -- verify.
            await self.bot.say(response)

    @commands.command(pass_context=True)
    @channel_check('nsfw')
    async def rule34(self, ctx, *, term):
        """Post a danbooru result for ``term`` (NSFW channels only)."""
        future = await self.bot.loop.run_in_executor(self.bot.thread_pool, partial(r34, query=term))
        if not future:
            await self.bot.say('Result not found for the specified search term')
        else:
            try:
                data = discord.Embed(description='Result taken from danbooru.donmai.us',
                                     color=discord.Colour(value='16727871'))
                data.set_author(name='Search result for ' + term,
                                url='http://danbooru.donmai.us/posts/' + str(future['id']))
                data.set_image(url='http://danbooru.donmai.us/' + future['large_file_url'])
                await self.bot.say(embed=data)
            # NOTE(review): bare except swallows all errors (including
            # missing embed permissions); falls back to the plain URL.
            except:
                await self.bot.say('http://danbooru.donmai.us/posts/' + str(future['id']))

    @commands.command(pass_context=True)
    async def roll(self, ctx, end: int = 100):
        """Roll a random number between 1 and ``end`` (default 100)."""
        num = randint(1, end)
        await self.bot.say("The rolled number is: %s" % num)

    @commands.command(pass_context=True)
    async def define(self, ctx, *, word):
        """Post the Urban Dictionary definition and example for ``word``."""
        try:
            definition, example = await self.bot.loop.run_in_executor(self.bot.thread_pool, partial(urban_def, word=word))
        # Presumably urban_def raises IndexError when there are no
        # results -- verify against the helper.
        except IndexError:
            await self.bot.say('no definition found for this word')
            return
        data = discord.Embed(colour=discord.Colour(value='16727871'))
        data.add_field(name='Definition', value=str(definition), inline=False)
        data.add_field(name='Example', value=str(example))
        data.set_footer(text='Definition from Urban Dictionary',
                        icon_url='https://encrypted-tbn3.gstatic.com/images?q='
                                 'tbn:ANd9GcRo8KLHlAQXYao2X7D1G5rFS03GUG59KMNOP22RYHPqvmmBHREKctHRog')
        try:
            await self.bot.say(embed=data)
        except discord.HTTPException:
            await self.bot.say('I need to be able to send embedded links')
def setup(bot):
    """discord.py extension entry point: register the Fun cog."""
    bot.add_cog(Fun(bot))
| {
"repo_name": "initzx/ignore-pls",
"path": "cogs/public/fun.py",
"copies": "1",
"size": "3708",
"license": "mit",
"hash": 8889376075433226000,
"line_mean": 38.4468085106,
"line_max": 122,
"alpha_frac": 0.5755124056,
"autogenerated": false,
"ratio": 3.7117117117117115,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47872241173117114,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from drivers.drivers_base import BaseSeleniumBrowser, DriverChoices, BaseRequests
class SeleniumPhantom(BaseSeleniumBrowser):
    '''
    PhantomJS driver with support for custom request headers and for
    running scripts inside the phantom process itself.
    '''
    def __init__(self):
        '''
        '''
        super().__init__('PhantomJS')
        self._headers = {}
        self._header_name = ''

    def phantom_command(self):
        '''
        Register phantom's non-standard `executePhantomScript` command
        endpoint and install a hook recording the last received response.
        '''
        script_for_status = """
        this.onResourceReceived = function(request) {
            this.request_response=request
        }.bind(this);
        """
        #add phantomjs execute endpoint
        phantom_exc_uri = '/session/$sessionId/phantom/execute'
        cmds = self.browser.command_executor._commands
        cmds['executePhantomScript'] = ('POST', phantom_exc_uri)
        self.browser.execute('executePhantomScript',
                             {'script': script_for_status, 'args': []})

    def driver_script(self, script, args=[]):
        '''
        run scripts with phantom internal
        (args=[] is never mutated here, so the shared default is benign)
        '''
        return self.phantom_call({'script': script, 'args': args})

    def set_header(self, confObject):
        '''
        Install configured request headers via an in-driver script.
        Accept-Encoding is excluded to avoid a phantom bug.
        '''
        headersObj = [h for h in confObject.driver.headers.all()]
        if not len(headersObj): return
        self._headers = {h.field_name: h.field_value
                         for h in headersObj
                         #Accept-Encoding - avoid phantom bug
                         if h.field_name not in ['Accept-Encoding']}
        self._header_name = headersObj[0].header_name
        header_scrit = """
        this.customHeaders = {headers};
        """.format(headers=str(self._headers))
        self.driver_script(header_scrit)

    def load_confs(self, confObject):
        '''
        Wire the phantom endpoint, apply headers and window size, and
        record the service process pid.
        '''
        #prepare phantomjs driver call
        self.phantom_command()
        self.phantom_call = partial(self.browser.execute, 'executePhantomScript')
        #load headers
        self.set_header(confObject)
        #specific confs
        self.browser.set_window_size(1124, 850)
        self.pid = self.browser.service.process.pid

    def get_headers(self):
        '''
        ** Cookie from response + Request headers **
        Merges phantom's current cookies into the configured headers
        (unless a Cookie header was configured explicitly).
        '''
        cookie_script = """
        return this.cookies;
        """
        if 'Cookie' in self._headers: return self._headers
        cookies = self.driver_script(cookie_script)['value']
        cookie_string = ' ;'.join(['{}={}'.format(c['name'], c['value'])
                                   for c in cookies])
        self._headers.update({'Cookie': cookie_string})
        return self._headers

    def xpathToaction(self, xpathSelector):
        """
        Return all elements matching the XPath selector.
        """
        return self.browser.find_elements_by_xpath(xpathSelector)
class SeleniumRC(BaseSeleniumBrowser):
    '''
    Driver talking to a remote Selenium (RC / Grid) server.
    '''
    def __init__(self):
        '''
        Default to a local hub (127.0.0.1:4444) driving Firefox.
        '''
        super().__init__('Remote')
        self._port = 4444
        self._host = '127.0.0.1'
        self._command_executor = None
        self._exec_str = 'http://{host}:{port}/wd/hub'
        self._remote_type = DesiredCapabilities.FIREFOX

    def load_confs(self, confObject):
        '''
        Read host/port/browser-type overrides from the configuration and
        rebuild the command-executor URL.
        '''
        if confObject.driver.port:
            self._port = confObject.driver.port
        if confObject.driver.host:
            self._host = confObject.driver.host
        if confObject.driver.remote_browser_type:
            rbt = confObject.driver.remote_browser_type.upper()
            self._remote_type = getattr(DesiredCapabilities, rbt)
        self._command_executor = self._exec_str.format(host=self._host,
                                                       port=self._port)

    def build_driver(self, proxy_port=None):
        '''
        Create the Remote webdriver. Proxying is not supported yet.
        '''
        if proxy_port:
            # Fix: the original raised NotImplemented, which is a constant
            # (not an exception) and itself fails with a TypeError when
            # raised; NotImplementedError is the proper exception type.
            raise NotImplementedError('[-] Proxy not working '
                                      'with remote server yet')
        if not self._command_executor:
            self._command_executor = self._exec_str.format(host=self._host,
                                                           port=self._port)
        self.browser = getattr(self._driver, self._driver_name)(
            command_executor=self._command_executor,
            desired_capabilities=self._remote_type)
class LeanRequests(BaseRequests):
    '''
    Plain requests-based driver (no browser); only carries headers.
    '''
    def __init__(self):
        '''
        '''
        super().__init__()
        self._headers = {}
        self._header_name = ''

    def load_confs(self, confObject):
        '''
        Read the configured headers into this driver; no-op when the
        configuration declares none.
        '''
        headersObj = [h for h in confObject.driver.headers.all()]
        if not len(headersObj): return
        headers = {h.field_name: h.field_value
                   for h in headersObj}
        self.set_header(**headers)
        self._header_name = headersObj[0].header_name

    def set_header(self, **kwargs):
        '''
        Replace the header dict wholesale with the given fields.
        '''
        self._headers = kwargs
class SeleniumChrome(BaseSeleniumBrowser):
    '''
    Headless Chrome driver.
    '''
    def __init__(self):
        '''
        Start Chrome in headless mode.
        '''
        options = webdriver.ChromeOptions()
        options.add_argument("headless")
        super().__init__('Chrome', chrome_options=options)
        self._headers = {}
        self._header_name = ''

    def set_header(self, confObject):
        '''
        Collect configured headers (minus Accept-Encoding) and try to
        install them via an in-driver script.
        NOTE(review): copied from the PhantomJS driver, but this class
        defines no driver_script(); unless the base class provides one,
        the final call raises AttributeError -- confirm.
        '''
        headersObj = [h for h in confObject.driver.headers.all()]
        if not len(headersObj): return
        self._headers = {h.field_name: h.field_value
                         for h in headersObj
                         #Accept-Encoding - avoid phantom bug
                         if h.field_name not in ['Accept-Encoding']}
        self._header_name = headersObj[0].header_name
        header_scrit = """
        this.customHeaders = {headers};
        """.format(headers=str(self._headers))
        self.driver_script(header_scrit)

    def load_confs(self, confObject):
        '''
        Apply headers and window size; record the browser process pid.
        '''
        #load headers
        self.set_header(confObject)
        #specific confs
        self.browser.set_window_size(1124, 850)
        self.pid = self.browser.service.process.pid

    def xpathToaction(self, xpathSelector):
        """
        Return all elements matching the XPath selector.
        """
        return self.browser.find_elements_by_xpath(xpathSelector)
# Register every driver implementation with the global driver registry.
DriverChoices.register(SeleniumPhantom)
DriverChoices.register(LeanRequests)
DriverChoices.register(SeleniumRC)
DriverChoices.register(SeleniumChrome)
| {
"repo_name": "VulcanoAhab/delphi",
"path": "drivers/browsers.py",
"copies": "1",
"size": "6195",
"license": "mit",
"hash": 659652685528700500,
"line_mean": 29.6683168317,
"line_max": 81,
"alpha_frac": 0.5699757869,
"autogenerated": false,
"ratio": 4.135514018691588,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5205489805591588,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from six import reraise
from characteristic import attributes
from effect import (
sync_performer, Effect,
ComposedDispatcher, TypeDispatcher, base_dispatcher)
# This is from https://github.com/radix/effect/pull/46
@attributes(['results', 'exc_info'], apply_immutable=True)
class SequenceFailed(Exception, object):
    """
    Raised if an effect in a :class:``Sequence`` fails.
    :ivar list results: The list of successful results.
    :ivar error: The error result of the last run effect.
    """
    def __str__(self):
        # Exception has a custom __str__ that looks at arguments passed to
        # its init. Since we don't pass any, it is useless. The following
        # will duplicate the class name in the traceback, but is better
        # than otherwise.
        return repr(self)
@attributes(["effects"], apply_with_init=False, apply_immutable=True)
class Sequence(object):
    """
    Intent: runs a sequence of effects serially.
    :returns list: The list of results of the effects.
    :raises SequenceFailed: If one of the effects fails.
    """
    def __init__(self, effects):
        """
        :param effects: The list of effects to execute in sequence.
        """
        self.effects = effects
def sequence(effects):
    """
    Given multiple Effects, return one Effect that represents the sequence of
    all of their effects. The result of the aggregate Effect will be a list of
    their results, in the same order as the input to this function. If any
    child effect fails, the first such failure will be propagated as a
    :class:`SequenceFailed` exception.
    :param effects: Effects which should be performed in sequence.
    :return: An Effect that results in a list of results, or which fails with
        a :class:`SequenceFailed`.
    """
    # Materialize the iterable once so it can be performed in order.
    return Effect(Sequence(list(effects)))
@sync_performer
def perform_sequence(dispatcher, intent):
    """Performer for :class:`Sequence`.

    Chains the intent's effects so each runs only after the previous one
    succeeds; any failure aborts the chain via SequenceFailed.
    """
    # Fix: `reduce` is a builtin only on Python 2; this file targets 2/3
    # (it imports six), so import it explicitly from functools, where it
    # exists on both.
    from functools import reduce

    effects = list(intent.effects)
    if not effects:
        return []
    results = []

    def succeed(next_effect, result):
        # Record this effect's result, then continue with the next link.
        results.append(result)
        return next_effect

    def fail(result):
        # Re-raise as SequenceFailed, preserving the original traceback.
        reraise(SequenceFailed,
                SequenceFailed(results=results, exc_info=result),
                result[2])

    def reducer(next_effect, effect):
        return effect.on(success=partial(succeed, next_effect),
                         error=fail)

    # Fold from the right so each effect's success callback hands over
    # the following effect; the final success value is the results list.
    return reduce(reducer, reversed(effects), results)
# Dispatcher that can perform Sequence intents, falling back to the
# standard effect dispatcher for everything else.
dispatcher = ComposedDispatcher([
    TypeDispatcher({
        Sequence: perform_sequence,
    }),
    base_dispatcher,
])
| {
"repo_name": "jml/flocker",
"path": "flocker/provision/_effect.py",
"copies": "15",
"size": "2630",
"license": "apache-2.0",
"hash": 601124805304247200,
"line_mean": 28.8863636364,
"line_max": 79,
"alpha_frac": 0.6634980989,
"autogenerated": false,
"ratio": 4.276422764227642,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from six.moves import UserDict
from topik.singleton_registry import _base_register_decorator
class InputRegistry(UserDict, object):
    """Uses Borg design pattern. Core idea is that there is a global registry for each step's
    possible methods
    """
    # All instances share this one state dict (Borg pattern), so every
    # InputRegistry() sees the same registered input readers.
    __shared_state = {}

    def __init__(self):
        self.__dict__ = self.__shared_state
        super(InputRegistry, self).__init__()
class OutputRegistry(UserDict, object):
    """Uses Borg design pattern. Core idea is that there is a global registry for each step's
    possible methods
    """
    # All instances share this one state dict (Borg pattern), so every
    # OutputRegistry() sees the same registered output writers.
    __shared_state = {}

    def __init__(self):
        self.__dict__ = self.__shared_state
        super(OutputRegistry, self).__init__()
# a nicer, more pythonic handle to our singleton instance
registered_inputs = InputRegistry()
registered_outputs = OutputRegistry()

# fill in the registration function: partial binds the target registry,
# yielding ready-to-use decorators for inputs and outputs.
register_input = partial(_base_register_decorator, registered_inputs)
register_output = partial(_base_register_decorator, registered_outputs)
| {
"repo_name": "ContinuumIO/topik",
"path": "topik/fileio/_registry.py",
"copies": "1",
"size": "1046",
"license": "bsd-3-clause",
"hash": 1416954289072006000,
"line_mean": 29.7647058824,
"line_max": 94,
"alpha_frac": 0.6998087954,
"autogenerated": false,
"ratio": 4.2693877551020405,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.003070175438596491,
"num_lines": 34
} |
from functools import partial
from sklearn_pmml import pmml
from pyxb.utils.domutils import BindingDOMSupport as bds
import numpy as np
# Registry mapping estimator class -> converter; populated elsewhere.
estimator_to_converter = {}


def find_converter(estimator):
    """Look up the registered converter for this estimator's class, or None."""
    # TODO: do the search here
    converter = estimator_to_converter.get(estimator.__class__)
    return converter
def pmml_row(**columns):
    """
    Build a pmml.row element containing one child element per keyword.

    :param columns: key-value pairs to be inserted into the row
    :return: pmml.row element
    """
    row = pmml.row()
    for column_name, column_value in columns.items():
        child = bds().createChildElement(column_name)
        bds().appendTextChild(column_value, child)
        row.append(child)
    return row
class DerivedFeatureTransformations(object):
    """
    A helper for building Derived Feature transformations. Creates both transformation and the DerivedFeature content.
    Typical usage of the methods:

    DerivedFeature(
        RealNumericFeature('my_derived_feature'),
        **DerivedFeatureTransformations.field_in_list('input_feature', ['A', 'B', 'C'])
    )

    Each builder returns a dict with two entries:
      TRANSFORMATION: the PMML transformation element
      FUNCTION: an equivalent function on a pandas DataFrame, for local evaluation
    """
    TRANSFORMATION = 'transformation'
    FUNCTION = 'function'

    @staticmethod
    def field_in_list(field, values):
        """Emit 1 when df[field] is one of `values`, else 0 (PMML MapValues)."""
        # `reduce` moved to functools in Python 3; import locally so the lambda
        # below (which closes over this name) works on both 2 and 3.
        from functools import reduce
        mv = pmml.MapValues(outputColumn='output', defaultValue=0)
        mv.append(pmml.FieldColumnPair(field=field, column='input'))
        it = pmml.InlineTable()
        for v in values:
            it.append(pmml_row(input=v, output=1))
        mv.append(it)
        return {
            DerivedFeatureTransformations.TRANSFORMATION: mv,
            DerivedFeatureTransformations.FUNCTION:
                lambda df: reduce(np.logical_or, [df[field] == _ for _ in values])
        }

    @staticmethod
    def field_not_in_list(field, values):
        """Emit 1 when df[field] is NOT one of `values`, else 0 (PMML MapValues)."""
        from functools import reduce  # see field_in_list
        mv = pmml.MapValues(outputColumn='output', defaultValue=1)
        mv.append(pmml.FieldColumnPair(field=field, column='input'))
        it = pmml.InlineTable()
        for v in values:
            it.append(pmml_row(input=v, output=0))
        mv.append(it)
        return {
            DerivedFeatureTransformations.TRANSFORMATION: mv,
            DerivedFeatureTransformations.FUNCTION:
                lambda df: reduce(np.logical_and, [df[field] != _ for _ in values])
        }

    @staticmethod
    def map_values(field, value_map, default_value):
        """Map df[field] through `value_map`, falling back to `default_value`."""
        # FIX: the PMML attribute is camelCase `defaultValue` (as used by the
        # other builders in this class); the original passed `default_value`,
        # which is not a MapValues attribute.
        mv = pmml.MapValues(outputColumn='output', defaultValue=default_value)
        mv.append(pmml.FieldColumnPair(field=field, column='input'))
        it = pmml.InlineTable()
        for k, v in value_map.items():
            it.append(pmml_row(input=k, output=v))
        mv.append(it)
        return {
            DerivedFeatureTransformations.TRANSFORMATION: mv,
            DerivedFeatureTransformations.FUNCTION:
                lambda df: np.vectorize(partial(value_map.get, default_value))(df[field])
        }

    @staticmethod
    def arithmetics(tree):
        """
        Takes an arithmetic operations tree (Lisp-styled) as an input
        """
        def basic_function(func_name, args):
            # Plain n-ary PMML Apply element.
            expr = pmml.Apply(function=func_name)
            for a in args:
                expr.append(a)
            return expr

        def mod_function(args):
            # PMML has no modulo builtin; emit a - b * floor(a / b).
            expr = pmml.Apply(function='-')
            expr.append(args[0])
            mul = pmml.Apply(function='*')
            mul.append(args[1])
            floor = pmml.Apply(function='floor')
            mul.append(floor)
            div = pmml.Apply(function='/')
            floor.append(div)
            div.append(args[0])
            div.append(args[1])
            # FIX: the original never attached the multiplication to the outer
            # subtraction, so the emitted expression was just `a`, not a % b.
            expr.append(mul)
            return expr

        # TODO: test me
        def greedy_evaluation(node):
            if isinstance(node, str):
                # field reference
                return (lambda df: df[node]), pmml.FieldRef(field=node)
            elif isinstance(node, (tuple, list)):
                # Evaluate arguments first. Materialize as a list: under
                # Python 3 `map` returns a one-shot iterator that the lambdas
                # below would exhaust after the expr-building pass.
                args = list(map(greedy_evaluation, node[1:]))
                functions = {
                    '*': lambda df: np.multiply(*[_[0](df) for _ in args]),
                    '-': lambda df: np.subtract(*[_[0](df) for _ in args]),
                    '+': lambda df: np.add(*[_[0](df) for _ in args]),
                    '/': lambda df: np.divide(*[_[0](df) for _ in args]),
                    '%': lambda df: np.mod(*[_[0](df) for _ in args]),
                }
                assert isinstance(node[0], str), 'First element should be a code of operation'
                assert node[0] in functions, 'Unknown function code {}. Supported codes: {}'.format(node[0], functions.keys())
                expr = {
                    '*': partial(basic_function, '*'),
                    '-': partial(basic_function, '-'),
                    '+': partial(basic_function, '+'),
                    '/': partial(basic_function, '/'),
                    '%': mod_function
                }.get(node[0])([a[1] for a in args])
                func = functions[node[0]]
                return func, expr
            else:
                # numeric terminal
                return lambda df: node, pmml.Constant(node, dataType='double')

        function, transformation = greedy_evaluation(tree)
        return {
            DerivedFeatureTransformations.TRANSFORMATION: transformation,
            DerivedFeatureTransformations.FUNCTION: function
        }

    @staticmethod
    def replace_value(field, original, replacement):
        """Replace `original` in df[field] (or missing values when original is None)."""
        if original is not None:
            # NOTE(review): PMML 4.x names this builtin "equal"; confirm
            # "equals" is accepted by the target scoring engine.
            transformation = pmml.Apply(function='if')
            cond = pmml.Apply(function='equals')
            cond.append(pmml.FieldRef(field=field))
            cond.append(pmml.Constant(original))
            transformation.append(pmml.Constant(replacement))
            transformation.append(pmml.FieldRef(field=field))
            return {
                DerivedFeatureTransformations.TRANSFORMATION: transformation,
                DerivedFeatureTransformations.FUNCTION:
                    lambda df: np.where(df[field] == original, replacement, df[field])
            }
        else:
            # Add 0 with mapMissingTo: a no-op that substitutes `replacement`
            # for missing input values.
            transformation = pmml.Apply(function='+', mapMissingTo=replacement)
            transformation.append(pmml.Constant(0))
            transformation.append(pmml.FieldRef(field=field))
            return {
                DerivedFeatureTransformations.TRANSFORMATION: transformation,
                DerivedFeatureTransformations.FUNCTION:
                    lambda df: np.where(df[field].isnull(), replacement, df[field])
            }
| {
"repo_name": "Snazz2001/sklearn-pmml",
"path": "sklearn_pmml/convert/utils.py",
"copies": "3",
"size": "6372",
"license": "mit",
"hash": -152616838365985180,
"line_mean": 37.8536585366,
"line_max": 126,
"alpha_frac": 0.5786252354,
"autogenerated": false,
"ratio": 4.1511400651465795,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6229765300546579,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from sklearn_pmml import pmml
from sklearn_pmml.convert.features import Feature, FeatureType
from pyxb.utils.domutils import BindingDOMSupport as bds
import numpy as np
# Registry mapping estimator class -> converter; populated elsewhere.
estimator_to_converter = {}


def find_converter(estimator):
    """Look up the registered converter for this estimator's class, or None."""
    # TODO: do the search here
    converter = estimator_to_converter.get(estimator.__class__)
    return converter
def pmml_row(**columns):
    """
    Build a pmml.row element containing one child element per keyword.

    :param columns: key-value pairs to be inserted into the row
    :return: pmml.row element
    """
    row = pmml.row()
    for column_name, column_value in columns.items():
        child = bds().createChildElement(column_name)
        bds().appendTextChild(column_value, child)
        row.append(child)
    return row
class DerivedFeatureTransformations(object):
    """
    A helper for building Derived Feature transformations. Creates both transformation and the DerivedFeature content.
    Typical usage of the methods:

    DerivedFeature(
        RealNumericFeature('my_derived_feature'),
        **DerivedFeatureTransformations.field_in_list('input_feature', ['A', 'B', 'C'])
    )

    Each builder returns a dict with two entries:
      TRANSFORMATION: the PMML transformation element
      FUNCTION: an equivalent function on a pandas DataFrame, for local evaluation
    """
    TRANSFORMATION = 'transformation'
    FUNCTION = 'function'

    @staticmethod
    def field_in_list(field, values):
        """Emit 1 when df[field] is one of `values`, else 0 (PMML MapValues)."""
        # `reduce` moved to functools in Python 3; import locally so the lambda
        # below (which closes over this name) works on both 2 and 3.
        from functools import reduce
        mv = pmml.MapValues(outputColumn='output', defaultValue=0)
        mv.append(pmml.FieldColumnPair(field=field, column='input'))
        it = pmml.InlineTable()
        for v in values:
            it.append(pmml_row(input=v, output=1))
        mv.append(it)
        return {
            DerivedFeatureTransformations.TRANSFORMATION: mv,
            DerivedFeatureTransformations.FUNCTION:
                lambda df: reduce(np.logical_or, [df[field] == _ for _ in values])
        }

    @staticmethod
    def field_not_in_list(field, values):
        """Emit 1 when df[field] is NOT one of `values`, else 0 (PMML MapValues)."""
        from functools import reduce  # see field_in_list
        mv = pmml.MapValues(outputColumn='output', defaultValue=1)
        mv.append(pmml.FieldColumnPair(field=field, column='input'))
        it = pmml.InlineTable()
        for v in values:
            it.append(pmml_row(input=v, output=0))
        mv.append(it)
        return {
            DerivedFeatureTransformations.TRANSFORMATION: mv,
            DerivedFeatureTransformations.FUNCTION:
                lambda df: reduce(np.logical_and, [df[field] != _ for _ in values])
        }

    @staticmethod
    def map_values(field, value_map, default_value):
        """Map df[field] through `value_map`, falling back to `default_value`."""
        # FIX: the PMML attribute is camelCase `defaultValue` (as used by the
        # other builders in this class); the original passed `default_value`,
        # which is not a MapValues attribute.
        mv = pmml.MapValues(outputColumn='output', defaultValue=default_value)
        mv.append(pmml.FieldColumnPair(field=field, column='input'))
        it = pmml.InlineTable()
        for k, v in value_map.items():
            it.append(pmml_row(input=k, output=v))
        mv.append(it)
        return {
            DerivedFeatureTransformations.TRANSFORMATION: mv,
            DerivedFeatureTransformations.FUNCTION:
                lambda df: np.vectorize(partial(value_map.get, default_value))(df[field])
        }

    @staticmethod
    def arithmetics(tree):
        """
        Takes an arithmetic operations tree (Lisp-styled) as an input
        """
        def basic_function(func_name, args):
            # Plain n-ary PMML Apply element.
            expr = pmml.Apply(function=func_name)
            for a in args:
                expr.append(a)
            return expr

        def mod_function(args):
            # PMML has no modulo builtin; emit a - b * floor(a / b).
            expr = pmml.Apply(function='-')
            expr.append(args[0])
            mul = pmml.Apply(function='*')
            mul.append(args[1])
            floor = pmml.Apply(function='floor')
            mul.append(floor)
            div = pmml.Apply(function='/')
            floor.append(div)
            div.append(args[0])
            div.append(args[1])
            # FIX: the original never attached the multiplication to the outer
            # subtraction, so the emitted expression was just `a`, not a % b.
            expr.append(mul)
            return expr

        # TODO: test me
        def greedy_evaluation(node):
            if isinstance(node, str):
                # field reference
                return (lambda df: df[node]), pmml.FieldRef(field=node)
            elif isinstance(node, (tuple, list)):
                # Evaluate arguments first. Materialize as a list: under
                # Python 3 `map` returns a one-shot iterator that the lambdas
                # below would exhaust after the expr-building pass.
                args = list(map(greedy_evaluation, node[1:]))
                functions = {
                    '*': lambda df: np.multiply(*[_[0](df) for _ in args]),
                    '-': lambda df: np.subtract(*[_[0](df) for _ in args]),
                    '+': lambda df: np.add(*[_[0](df) for _ in args]),
                    '/': lambda df: np.divide(*[_[0](df) for _ in args]),
                    '%': lambda df: np.mod(*[_[0](df) for _ in args]),
                }
                assert isinstance(node[0], str), 'First element should be a code of operation'
                assert node[0] in functions, 'Unknown function code {}. Supported codes: {}'.format(node[0], functions.keys())
                expr = {
                    '*': partial(basic_function, '*'),
                    '-': partial(basic_function, '-'),
                    '+': partial(basic_function, '+'),
                    '/': partial(basic_function, '/'),
                    '%': mod_function
                }.get(node[0])([a[1] for a in args])
                func = functions[node[0]]
                return func, expr
            else:
                # numeric terminal
                return lambda df: node, pmml.Constant(node, dataType='double')

        function, transformation = greedy_evaluation(tree)
        return {
            DerivedFeatureTransformations.TRANSFORMATION: transformation,
            DerivedFeatureTransformations.FUNCTION: function
        }

    @staticmethod
    def replace_value(field, original, replacement):
        """Replace `original` in df[field] (or missing values when original is None)."""
        if original is not None:
            # NOTE(review): PMML 4.x names this builtin "equal"; confirm
            # "equals" is accepted by the target scoring engine.
            transformation = pmml.Apply(function='if')
            cond = pmml.Apply(function='equals')
            cond.append(pmml.FieldRef(field=field))
            cond.append(pmml.Constant(original))
            transformation.append(pmml.Constant(replacement))
            transformation.append(pmml.FieldRef(field=field))
            return {
                DerivedFeatureTransformations.TRANSFORMATION: transformation,
                DerivedFeatureTransformations.FUNCTION:
                    lambda df: np.where(df[field] == original, replacement, df[field])
            }
        else:
            # Add 0 with mapMissingTo: a no-op that substitutes `replacement`
            # for missing input values.
            transformation = pmml.Apply(function='+', mapMissingTo=replacement)
            transformation.append(pmml.Constant(0))
            transformation.append(pmml.FieldRef(field=field))
            return {
                DerivedFeatureTransformations.TRANSFORMATION: transformation,
                DerivedFeatureTransformations.FUNCTION:
                    lambda df: np.where(df[field].isnull(), replacement, df[field])
            }
def assert_equal(feature, expected, actual):
    """
    Compare expected and actual values for the feature and raise an exception if they are not equal
    :type feature: Feature
    :type expected: np.array
    :type actual: np.array
    """
    err_msg = 'Some passed values of "{}" don\'t match the evaluated results'.format(feature.full_name)
    if feature.data_type == FeatureType.STRING:
        # strings must match exactly
        assert all(actual == expected), err_msg
    else:
        # numerics get a tolerant comparison
        np.testing.assert_almost_equal(actual, expected, err_msg=err_msg)
"repo_name": "alex-pirozhenko/sklearn-pmml",
"path": "sklearn_pmml/convert/utils.py",
"copies": "2",
"size": "7187",
"license": "mit",
"hash": -4955962304250806000,
"line_mean": 38.2786885246,
"line_max": 126,
"alpha_frac": 0.5885626826,
"autogenerated": false,
"ratio": 4.178488372093023,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5767051054693023,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from slm_lab import ROOT_DIR
from slm_lab.lib import logger, util
import os
import pydash as ps
import torch
import torch.nn as nn
# lowercase name -> canonical torch.nn attribute name (e.g. 'relu' -> 'ReLU')
NN_LOWCASE_LOOKUP = {nn_name.lower(): nn_name for nn_name in nn.__dict__}
logger = logger.get_logger(__name__)  # rebinds the imported module to this module's logger
class NoOpLRScheduler:
    '''Do-nothing stand-in so callers can treat "no scheduler" like a real LR scheduler.'''

    def __init__(self, optim):
        self.optim = optim

    def step(self, epoch=None):
        '''No-op; a real scheduler would adjust the learning rate here.'''
        pass

    def get_lr(self):
        '''Return the optimizer's configured (constant) learning rate.'''
        return self.optim.defaults['lr']
def build_sequential(dims, activation):
    '''Build the Sequential model by interleaving nn.Linear and activation_fn'''
    assert len(dims) >= 2, 'dims need to at least contain input, output'
    layers = []
    for in_d, out_d in zip(dims[:-1], dims[1:]):
        layers.append(nn.Linear(in_d, out_d))
        layers.append(get_activation_fn(activation))
    return nn.Sequential(*layers)
def get_activation_fn(activation):
    '''Instantiate the torch.nn activation layer named by `activation`
    (case-insensitive); unknown names fall back to ReLU.'''
    nn_name = NN_LOWCASE_LOOKUP.get(activation) or NN_LOWCASE_LOOKUP['relu']
    return getattr(nn, nn_name)()
def get_loss_fn(cls, loss_spec):
    '''Construct the torch.nn loss module named in loss_spec; remaining keys are kwargs.'''
    LossClass = getattr(nn, loss_spec['name'])
    loss_kwargs = {k: v for k, v in loss_spec.items() if k != 'name'}
    return LossClass(**loss_kwargs)
def get_optim(cls, optim_spec):
    '''Construct the torch.optim optimizer named in optim_spec over cls's parameters.'''
    OptimClass = getattr(torch.optim, optim_spec['name'])
    optim_kwargs = {k: v for k, v in optim_spec.items() if k != 'name'}
    return OptimClass(cls.parameters(), **optim_kwargs)
def get_lr_scheduler(cls, lr_scheduler_spec):
    '''Construct a torch.optim.lr_scheduler from spec, or a NoOpLRScheduler when the spec is empty.'''
    if not lr_scheduler_spec:  # None or {} -> no scheduling
        return NoOpLRScheduler(cls.optim)
    LRSchedulerClass = getattr(torch.optim.lr_scheduler, lr_scheduler_spec['name'])
    sched_kwargs = {k: v for k, v in lr_scheduler_spec.items() if k != 'name'}
    return LRSchedulerClass(cls.optim, **sched_kwargs)
def get_policy_out_dim(body):
    '''Compute the policy network's out_dim for a body from is_discrete and action_type.'''
    action_dim = body.action_dim
    if body.is_discrete:
        if body.action_type == 'multi_discrete':
            assert ps.is_list(action_dim), action_dim
            return action_dim
        assert ps.is_integer(action_dim), action_dim
        return action_dim
    # continuous actions
    if body.action_type == 'multi_continuous':
        assert ps.is_list(action_dim), action_dim
        raise NotImplementedError('multi_continuous not supported yet')
    assert ps.is_integer(action_dim), action_dim
    if action_dim == 1:
        return 2  # singleton stays as int
    return action_dim * [2]
def get_out_dim(body, add_critic=False):
    '''Construct the NetClass out_dim for a body, optionally appending a critic unit.'''
    policy_out_dim = get_policy_out_dim(body)
    if not add_critic:
        return policy_out_dim
    if ps.is_list(policy_out_dim):
        return policy_out_dim + [1]
    return [policy_out_dim, 1]
def init_layers(net, init_fn):
    '''Resolve init_fn (an nn.init function name) and apply it to every submodule of net.'''
    if init_fn is None:
        return
    if init_fn == 'xavier_uniform_':
        # xavier gain depends on the activation; default to 1 for unknown ones
        try:
            gain = nn.init.calculate_gain(net.hid_layers_activation)
        except ValueError:
            gain = 1
        resolved_fn = partial(nn.init.xavier_uniform_, gain=gain)
    elif 'kaiming' in init_fn:
        assert net.hid_layers_activation in ['relu', 'leaky_relu'], f'Kaiming initialization not supported for {net.hid_layers_activation}'
        resolved_fn = partial(nn.init.__dict__[init_fn], nonlinearity=net.hid_layers_activation)
    else:
        resolved_fn = nn.init.__dict__[init_fn]
    net.apply(partial(init_parameters, init_fn=resolved_fn))
def init_parameters(module, init_fn):
    '''
    Initialize module's weights using init_fn (a function from nn.init) and its
    biases to a small constant (0.01), or 0.0 for GRU biases.
    BatchNorm and GRU modules are special-cased; other module types are left untouched.
    '''
    bias_init = 0.01
    classname = type(module).__name__
    if 'BatchNorm' in classname:
        init_fn(module.weight)
        nn.init.constant_(module.bias, bias_init)
    elif 'GRU' in classname:
        # GRU packs weights/biases into several named parameters
        for name, param in module.named_parameters():
            if 'weight' in name:
                init_fn(param)
            elif 'bias' in name:
                nn.init.constant_(param, 0.0)
    elif 'Linear' in classname or ('Conv' in classname and 'Net' not in classname):
        init_fn(module.weight)
        nn.init.constant_(module.bias, bias_init)
# params methods
def save(net, model_path):
    '''Serialize net's state_dict to model_path.'''
    resolved_path = util.smart_path(model_path)
    torch.save(net.state_dict(), resolved_path)
    logger.info(f'Saved model to {model_path}')
def save_algorithm(algorithm, ckpt=None):
    '''Save all the nets (and their optimizers) for an algorithm.'''
    agent = algorithm.agent
    net_names = algorithm.net_names
    prepath = util.get_prepath(agent.spec, agent.info_space, unit='session')
    if ckpt is not None:
        prepath = f'{prepath}_ckpt-{ckpt}'
    logger.info(f'Saving algorithm {util.get_class_name(algorithm)} nets {net_names}')
    for net_name in net_names:
        net = getattr(algorithm, net_name)
        save(net, f'{prepath}_{net_name}_model.pth')
        save(net.optim, f'{prepath}_{net_name}_optim.pth')
def load(net, model_path):
    '''Load saved weights from model_path into a net module.'''
    # map tensors onto CPU when no GPU is available
    map_loc = None if torch.cuda.is_available() else 'cpu'
    state = torch.load(util.smart_path(model_path), map_location=map_loc)
    net.load_state_dict(state)
    logger.info(f'Loaded model from {model_path}')
def load_algorithm(algorithm):
    '''Load all the nets (and their optimizers) for an algorithm.'''
    agent = algorithm.agent
    net_names = algorithm.net_names
    if util.in_eval_lab_modes():
        # load the specific checkpointed model in eval mode
        prepath = agent.info_space.eval_model_prepath
    else:
        prepath = util.get_prepath(agent.spec, agent.info_space, unit='session')
    logger.info(f'Loading algorithm {util.get_class_name(algorithm)} nets {net_names}')
    for net_name in net_names:
        net = getattr(algorithm, net_name)
        load(net, f'{prepath}_{net_name}_model.pth')
        load(net.optim, f'{prepath}_{net_name}_optim.pth')
def copy(src_net, tar_net):
    '''Overwrite tar_net's weights with a copy of src_net's.'''
    tar_net.load_state_dict(src_net.state_dict())
def polyak_update(src_net, tar_net, beta=0.5):
    '''Polyak (soft) update of the target network:
    tar_param <- beta * tar_param + (1 - beta) * src_param

    FIX: the original wrote the blended values into the *source* network's
    parameter tensors (src_dict_params[name].data.copy_(...)) before loading
    them into the target, clobbering src_net as a side effect. Only tar_net
    should be modified.
    '''
    src_params = dict(src_net.named_parameters())
    for name, tar_param in tar_net.named_parameters():
        if name in src_params:
            tar_param.data.copy_(
                beta * tar_param.data + (1 - beta) * src_params[name].data)
def to_assert_trained():
    '''True when weight-update assertions should run (test env, or dev lab mode).'''
    in_test_env = os.environ.get('PY_ENV') == 'test'
    return in_test_env or util.get_lab_mode() == 'dev'
def gen_assert_trained(pre_model):
    '''
    Generate an assert_trained closure used to check weight updates.
    Snapshots pre_model's parameters now; the returned function compares a
    later snapshot against it.
    @example

    assert_trained = gen_assert_trained(model)
    # ...
    loss.backward()
    optim.step()
    assert_trained(model, loss)
    '''
    pre_weights = [param.clone() for param in pre_model.parameters()]

    def assert_trained(post_model, loss):
        post_weights = [param.clone() for param in post_model.parameters()]
        if loss == 0:
            # TODO if without momentum, weights should not change too
            for p_name, param in post_model.named_parameters():
                assert param.grad.norm() == 0
        else:
            # at least one parameter must have changed
            assert not all(torch.equal(w1, w2) for w1, w2 in zip(pre_weights, post_weights)), f'Model parameter is not updated in training_step(), check if your tensor is detached from graph. loss: {loss}'
            min_norm = 0
            max_norm = 1e5
            for p_name, param in post_model.named_parameters():
                try:
                    assert min_norm < param.grad.norm() < max_norm, f'Gradient norm fails the extreme value check {min_norm} < {p_name}:{param.grad.norm()} < {max_norm}, which is bad. Loss: {loss}. Check your network and loss computation. Consider using the "clip_grad_val" net parameter.'
                except Exception as e:
                    # extreme gradients are logged rather than fatal
                    logger.warn(e)
        logger.debug('Passed network weight update assertation in dev lab_mode.')

    return assert_trained
def get_grad_norms(algorithm):
    '''Collect the grad norms of all the algorithm's nets, for debugging.'''
    norms = []
    for name in algorithm.net_names:
        net = getattr(algorithm, name)
        if net.grad_norms is not None:
            norms.extend(net.grad_norms)
    return norms
| {
"repo_name": "kengz/Unity-Lab",
"path": "slm_lab/agent/net/net_util.py",
"copies": "1",
"size": "9495",
"license": "mit",
"hash": 7949138095175364000,
"line_mean": 35.3793103448,
"line_max": 289,
"alpha_frac": 0.6392838336,
"autogenerated": false,
"ratio": 3.564189189189189,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4703473022789189,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from sqlalchemy import create_engine
from sqlalchemy.pool import StaticPool
def create_mem_db(metadata, db):
    """Swap Flask-SQLAlchemy's engine for an in-memory SQLite engine (for tests).

    Very useful for fast and non-destructive tests (avoids hitting the real DB).
    Works by monkey-patching the db object:

    1. get_engine() is replaced with a lambda returning the in-memory engine;
    2. a restore_engine() method is attached that restores the original
       get_engine and then deletes itself, leaving db pristine.

    NOTE: you must call db.restore_engine() in your test's tearDown().
    Returns a scoped session bound to the in-memory engine.
    """
    def _restore_engine(self, original_get_engine):
        self.get_engine = original_get_engine
        delattr(self, 'restore_engine')

    memory_engine = create_engine('sqlite:///:memory:',
                                  echo=False,
                                  connect_args={'check_same_thread': False},
                                  poolclass=StaticPool)
    metadata.create_all(memory_engine)

    original_get_engine = db.get_engine
    db.restore_engine = partial(_restore_engine, db, original_get_engine)
    db.get_engine = lambda x=None, y=None: memory_engine
    return db.create_scoped_session()
| {
"repo_name": "the-gigi/over-achiever",
"path": "tests/test_util.py",
"copies": "1",
"size": "1609",
"license": "mit",
"hash": -6109028499283462000,
"line_mean": 34.7555555556,
"line_max": 114,
"alpha_frac": 0.6917339963,
"autogenerated": false,
"ratio": 4.384196185286103,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.022830972747207294,
"num_lines": 45
} |
from functools import partial
from sqlalchemy import event
from backend.utils import slugify as _slugify, was_decorated_without_parenthesis
# EVENTS DOCS
# http://docs.sqlalchemy.org/en/rel_1_1/core/event.html
# ORM EVENTS DOCS
# http://docs.sqlalchemy.org/en/rel_1_1/orm/events.html
class _SQLAlchemyEvent(object):
"""Private helper class for the @attach_events and @on decorators"""
ATTR = '_sqlalchemy_event'
def __init__(self, field_name, event_name, listen_kwargs=None):
self.field_name = field_name
self.event_name = event_name
self.listen_kwargs = listen_kwargs or {}
def attach_events(*args):
    """Class decorator for SQLAlchemy models: registers a listener for every
    method that was marked with the :func:`.on` decorator.

    Usage::

        @attach_events
        class User(Model):
            email = Column(String(50))

            @on('email', 'set')
            def lowercase_email(self, new_value, old_value, initiating_event):
                self.email = new_value.lower()
    """
    def wrapper(cls):
        for name, fn in cls.__dict__.items():
            if name.startswith('__') or not hasattr(fn, _SQLAlchemyEvent.ATTR):
                continue
            marker = getattr(fn, _SQLAlchemyEvent.ATTR)
            # attribute events listen on the column; instance events on the class
            target = getattr(cls, marker.field_name) if marker.field_name else cls
            event.listen(target, marker.event_name, fn, **marker.listen_kwargs)
        return cls

    # support both @attach_events and @attach_events()
    if was_decorated_without_parenthesis(args):
        return wrapper(args[0])
    return wrapper
def on(*args, **listen_kwargs):
    """Method decorator for SQLAlchemy models; must be paired with the
    :func:`.attach_events` class decorator.

    One positional argument marks an instance event, two mark an attribute
    event (field name, then event name). Keyword arguments are forwarded to
    sqlalchemy.event.listen.

    Usage::

        @attach_events
        class Post(Model):
            uuid = Column(String(36))
            post_tags = relationship('PostTag', back_populates='post')  # m2m

            @on('init', once=True)
            def generate_uuid(self, args, kwargs):
                self.uuid = str(uuid.uuid4())

            @on('post_tags', 'append')
            def set_tag_order(self, post_tag, initiating_event):
                if not post_tag.order:
                    post_tag.order = len(self.post_tags) + 1
    """
    if len(args) == 1:
        field_name, event_name = None, args[0]
    elif len(args) == 2:
        field_name, event_name = args
    else:
        raise NotImplementedError('@on accepts only one or two positional arguments')

    def decorator(fn):
        setattr(fn, _SQLAlchemyEvent.ATTR,
                _SQLAlchemyEvent(field_name, event_name, listen_kwargs))
        return fn
    return decorator
def slugify(field_name, slug_field_name=None, mutable=False):
    """Class decorator that keeps a slug column in sync with another column.

    Slugs are immutable by default unless mutable=True is passed.

    Usage::

        @slugify('title')                 # writes to the `slug` column
        @slugify('title', 'title_slug')   # or to a custom slug column
        @slugify('title', mutable=True)   # re-slug whenever the title changes
    """
    slug_attr = slug_field_name or 'slug'

    def _set_slug(target, value, old_value, _, mutable=False):
        current = getattr(target, slug_attr)
        if current and not mutable:
            return  # immutable slug already set; leave it alone
        if value and (not current or value != old_value):
            setattr(target, slug_attr, _slugify(value))

    def wrapper(cls):
        event.listen(getattr(cls, field_name), 'set',
                     partial(_set_slug, mutable=mutable))
        return cls
    return wrapper
| {
"repo_name": "briancappello/flask-react-spa",
"path": "backend/database/events.py",
"copies": "1",
"size": "4285",
"license": "mit",
"hash": -3822482964007806500,
"line_mean": 33.0079365079,
"line_max": 85,
"alpha_frac": 0.5964994166,
"autogenerated": false,
"ratio": 4.034839924670433,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5131339341270433,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from strategies import chain, minimize
from . import branch
from .branch import yieldify
identity = lambda x: x
def treeapply(tree, join, leaf=identity):
""" Apply functions onto recursive containers (tree)
join - a dictionary mapping container types to functions
e.g. ``{list: minimize, tuple: chain}``
Keys are containers/iterables. Values are functions [a] -> a.
Examples
--------
>>> from strategies.tree import treeapply
>>> tree = [(3, 2), (4, 1)]
>>> treeapply(tree, {list: max, tuple: min})
2
>>> from toolz.curried import reduce
>>> import operator
>>> sum = reduce(operator.add)
>>> prod = reduce(operator.mul)
>>> tree = [(3, 2), (4, 1)]
>>> treeapply(tree, {list: prod, tuple: sum})
25
"""
for typ in join:
if isinstance(tree, typ):
return join[typ]([treeapply(child, join=join, leaf=leaf)
for child in tree])
return leaf(tree)
def greedy(tree, objective=identity, **kwargs):
    """ Compile a strategic tree into one strategy, resolving choices greedily

    Trees
    -----

    Nodes in a tree can be either

    function - a leaf
    list - a selection among operations
    tuple - a sequence of chained operations

    Textual examples
    ----------------

    Text: Run f, then run g, e.g. ``lambda x: g(f(x))``
    Code: ``(f, g)``

    Text: Run either f or g, whichever minimizes the objective
    Code: ``[f, g]``

    Text: Either expand then simplify or try factor then foosimp. Finally print
    Code: ``([(expand, simplify), (factor, foosimp)], print)``

    Objective
    ---------

    "Better" is determined by the objective keyword; choices are made to
    minimize it. Defaults to the identity.

    Example
    -------

    >>> inc = lambda x: x + 1
    >>> dec = lambda x: x - 1
    >>> double = lambda x: 2*x
    >>> tree = [inc, (dec, double)]  # either inc or dec-then-double
    >>> fn = greedy(tree)
    >>> fn(4)  # lowest value comes from the inc
    5
    >>> fn(1)  # lowest value comes from dec then double
    0
    >>> fn = greedy(tree, objective=lambda x: -x)  # maximize
    >>> fn(4)
    6
    >>> fn(1)
    2

    Greediness
    ----------

    This is a greedy algorithm: in ``([a, b], c)`` the choice between ``a``
    and ``b`` is made without foresight to ``c``.
    """
    choose_best = partial(minimize, objective=objective)
    return treeapply(tree, {list: choose_best, tuple: chain}, **kwargs)
def allresults(tree, leaf=yieldify):
    """ Compile a strategic tree into a strategy returning ALL possibilities

    The result is a lazy iterator over every possible outcome.

    Exhaustiveness
    --------------

    This is exhaustive: for ``([a, b], [c, d])`` the results of
    (a, c), (b, c), (a, d) and (b, d) are all returned, which can lead to
    combinatorial blowup.

    See strategies.tree.greedy for details on the tree format.
    """
    return treeapply(tree,
                     {list: branch.multiplex, tuple: branch.chain},
                     leaf=leaf)
def brute(tree, objective=identity, **kwargs):
    """Exhaustively evaluate the tree; return the strategy picking the single
    result that minimizes `objective`."""
    def best_strategy(expr):
        return min(tuple(allresults(tree, **kwargs)(expr)), key=objective)
    return best_strategy
| {
"repo_name": "logpy/strategies",
"path": "strategies/tree.py",
"copies": "2",
"size": "3661",
"license": "bsd-3-clause",
"hash": -6197235336044756000,
"line_mean": 26.5263157895,
"line_max": 79,
"alpha_frac": 0.5998361104,
"autogenerated": false,
"ratio": 3.8135416666666666,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5413377777066666,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from sys import maxint
import twitter
import sys
import time
from urllib2 import URLError
from httplib import BadStatusLine
import json
def oauth_login():
    # XXX: Go to http://twitter.com/apps/new to create an app and get values
    # for these credentials that you'll need to provide in place of these
    # empty string values that are defined as placeholders.
    # See https://dev.twitter.com/docs/auth/oauth for more information
    # on Twitter's OAuth implementation.
    # FIX: the placeholder assignments were commented out, so calling this
    # function raised NameError; they are defined as empty strings as the
    # comment above describes.
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    OAUTH_TOKEN = ''
    OAUTH_TOKEN_SECRET = ''
    auth = twitter.oauth.OAuth(OAUTH_TOKEN, OAUTH_TOKEN_SECRET,
                               CONSUMER_KEY, CONSUMER_SECRET)
    twitter_api = twitter.Twitter(auth=auth)
    return twitter_api
# Returns an instance of twitter.Twitter
twitter_api = oauth_login()
# Fetch the id of the @VlaamseTweeps/vlaamsetweeps list
list_result = twitter_api.lists.list(screen_name="VlaamseTweeps")
list_data = [user for user in list_result if user['name'] == 'vlaamsemedia']
# print json.dumps(list_data, indent=2)
list_id = list_data[0]['id']
# print list_id
# Fetch all member of the list with id list_id
cursor=-1
list_result = twitter_api.lists.members(list_id=list_id,cursor=cursor)
#print json.dumps(list_result,indent=0)
list_members = list_result['users']
cursor = list_result['next_cursor']data-status-id
while cursor is not 0:
print 'cursor = ', cursor
list_result = twitter_api.lists.members(list_id=list_id,cursor=cursor)
list_members += list_result['users']
cursor = list_result['next_cursor']
# Extract the member ids
# print json.dumps(list_members, indent=2)
ids = [member['id'] for member in list_members]
print json.dumps(ids, indent=2)
# write to file
with open('vlaamsemedia_ids.json', 'w') as fp:
json.dump(ids, fp, indent=2)
| {
"repo_name": "mixbe/kerstkaart2013",
"path": "scripts/dump_vlaamsemedia_list.py",
"copies": "1",
"size": "1878",
"license": "mit",
"hash": -6797757564985959000,
"line_mean": 30.8305084746,
"line_max": 77,
"alpha_frac": 0.7060702875,
"autogenerated": false,
"ratio": 3.294736842105263,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9479316941651217,
"avg_score": 0.004298037590809387,
"num_lines": 59
} |
from functools import partial
from sys import stdout, stderr
from tmc import conf
# User might want to disable all coloring
no_coloring = not conf.use_ansi_colors  # True -> all escape strings become ""
class AnsiColorCodes(object):
    """Numeric ANSI SGR codes for terminal foreground colors.

    RESET (39) restores the terminal's default foreground color.
    """
    BLACK = 30
    RED = 31
    GREEN = 32
    YELLOW = 33
    BLUE = 34
    MAGENTA = 35
    CYAN = 36
    WHITE = 37
    RESET = 39
# Creates an escaped ANSI color/style character
def to_escaped(code):
    """Wrap an ANSI color/style code in its terminal escape sequence.

    FIX: was a lambda assigned to a name (PEP 8 E731); same interface as a def.
    """
    return "\033[{0}m".format(code)
class Escaped(object):
    """Expose every public attribute of a code container (such as
    AnsiColorCodes) as its escaped ANSI sequence.

    When coloring is globally disabled each attribute becomes the empty
    string, so callers can concatenate unconditionally.
    """
    def __init__(self, codes):
        public_names = (name for name in dir(codes)
                        if not name.startswith("_"))
        for name in public_names:
            if no_coloring:
                escaped = ""
            else:
                escaped = to_escaped(getattr(codes, name))
            setattr(self, name, escaped)
# Use this to refer to escaped color characters
# Example: print(Colors.RED + "Hello" + Colors.RESET)
# Module-level singleton: translated once so every consumer shares it.
Colors = Escaped(AnsiColorCodes)
def formatter(color, s):
    """Return *s* wrapped in *color* and a trailing reset sequence.

    When coloring is disabled, *s* is returned untouched.
    """
    if no_coloring:
        return s
    return "{0}{1}{2}".format(color, s, Colors.RESET)
class Printer(object):
    """
    Context manager that keeps the terminal in a single color while
    multiple lines are written.

    Uses the escape sequences exposed by the module-level ``Colors``.
    Example:
        with Printer(Colors.RED, sys.stdout) as wr:
            wr("This will be in red")
            wr("So will this")
    """
    def __init__(self, color, output):
        """
        Args:
            color: One of the escaped codes from the ``Colors`` object.
            output: An object with a ``.write`` method (e.g. sys.stdout)
        """
        self.color = color
        self.output = output.write
    def __enter__(self):
        # Switch to the requested color, then hand the raw write
        # function to the caller for the duration of the block.
        write = self.output
        write(self.color)
        return write
    def __exit__(self, exc_ty, exc_val, tb):
        # Restore the default color and drop the writer reference.
        write, self.output = self.output, None
        write(Colors.RESET)
# Use one of these to print multiple lines in a specific
# color context.
# Each pre-binds Printer with its color and stream: errors and warnings
# go to stderr, successes and info to stdout.
ErrorPrinter = partial(Printer, Colors.RED, stderr)
WarningPrinter = partial(Printer, Colors.YELLOW, stderr)
SuccessPrinter = partial(Printer, Colors.GREEN, stdout)
InfoPrinter = partial(Printer, Colors.CYAN, stdout)
# These are useful is you have only a few lines you want
# to print with specific color context
# (each returns the colored string rather than writing it).
as_error = partial(formatter, Colors.RED)
as_warning = partial(formatter, Colors.YELLOW)
as_success = partial(formatter, Colors.GREEN)
as_info = partial(formatter, Colors.CYAN)
# Identical to print() but with support for output coloring
def _print(color, *args, sep=' ', end='\n', file=stdout):
print(*(map(color, args)), sep=sep, end=end, file=file)
# Convenience one-shot printers: color plus destination pre-bound.
errormsg = partial(_print, as_error, file=stderr)
warningmsg = partial(_print, as_warning, file=stderr)
successmsg = partial(_print, as_success, file=stdout)
infomsg = partial(_print, as_info, file=stdout)
| {
"repo_name": "JuhaniImberg/tmc.py",
"path": "tmc/coloring.py",
"copies": "1",
"size": "2808",
"license": "mit",
"hash": 1445473419650275300,
"line_mean": 26.801980198,
"line_max": 78,
"alpha_frac": 0.6563390313,
"autogenerated": false,
"ratio": 3.627906976744186,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9783145898033185,
"avg_score": 0.00022002200220022004,
"num_lines": 101
} |
from functools import partial
from tastypie import fields
from tastypie.resources import Resource
from tastypie.exceptions import ApiFieldError
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from .resources import GenericResource
class GenericForeignKeyField(fields.ToOneField):
    """
    Provides access to GenericForeignKey objects from the django content_types
    framework.
    """
    def __init__(self, to, attribute, **kwargs):
        """
        :param to: mapping of django model classes to the tastypie resource
            classes that serve them.
        :param attribute: name of the GenericForeignKey attribute.
        """
        if not isinstance(to, dict):
            raise ValueError('to field must be a dictionary in GenericForeignKeyField')
        # Idiomatic emptiness check (was: len(to) <= 0).
        if not to:
            raise ValueError('to field must have some values')
        for k, v in to.iteritems():
            if not issubclass(k, models.Model) or not issubclass(v, Resource):
                raise ValueError('to field must map django models to tastypie resources')
        super(GenericForeignKeyField, self).__init__(to, attribute, **kwargs)
    def get_related_resource(self, related_instance):
        # Negotiate the resource class registered for the instance's
        # concrete model type; remembered in _to_class for to_class below.
        self._to_class = self.to.get(type(related_instance), None)
        if self._to_class is None:
            raise TypeError('no resource for model %s' % type(related_instance))
        return super(GenericForeignKeyField, self).get_related_resource(related_instance)
    @property
    def to_class(self):
        if self._to_class and not issubclass(GenericResource, self._to_class):
            return self._to_class
        # No concrete resource negotiated yet: fall back to a generic
        # resource that can dispatch across all registered resources.
        return partial(GenericResource, resources=self.to.values())
    def resource_from_uri(self, fk_resource, uri, request=None, related_obj=None, related_name=None):
        try:
            obj = fk_resource.get_via_uri(uri, request=request)
            fk_resource = self.get_related_resource(obj)
            return super(GenericForeignKeyField, self).resource_from_uri(fk_resource, uri, request, related_obj, related_name)
        except ObjectDoesNotExist:
            raise ApiFieldError("Could not find the provided object via resource URI '%s'." % uri)
    def build_related_resource(self, *args, **kwargs):
        # Reset any previously negotiated resource class before delegating.
        self._to_class = None
        return super(GenericForeignKeyField, self).build_related_resource(*args, **kwargs)
| {
"repo_name": "mitar/django-tastypie",
"path": "tastypie/contrib/contenttypes/fields.py",
"copies": "16",
"size": "2202",
"license": "bsd-3-clause",
"hash": 8080678378476819000,
"line_mean": 39.7777777778,
"line_max": 126,
"alpha_frac": 0.6857402361,
"autogenerated": false,
"ratio": 4.284046692607004,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from tempfile import mkstemp
from twisted.python.filepath import FilePath
from documint.errors import RemoteExternalProcessError
from documint.extproc.common import getProcessOutput, sanitizePaths, which
# Locate the `clj-neon` signing binary lazily, at call time.
_neonBinary = partial(which, 'clj-neon')
def failingPDFSign(*a, **kw):
    """
    Fail to sign anything.

    Drop-in stand-in for `signPDF`, used when signing is not configured;
    always raises L{RemoteExternalProcessError}.
    """
    raise RemoteExternalProcessError('PDF signing is not correctly configured')
def signPDF(data, keystorePath, keystorePassword, reason, location,
            signaturePage=None, fields=None, privateKeyPassword=None,
            imagePath=None, rectangle=None):
    """
    Digitally sign a PDF.
    @param data: Unsigned PDF bytes.
    @type data: L{str}
    @param keystorePath: The path to the Java Keystore.
    @type keystorePath: L{twisted.python.filepath.FilePath}
    @param keystorePassword: The Java Keystore password.
    @type keystorePassword: L{str}
    @param reason: The reason for signing the PDF.
    @type reason: L{str}
    @param location: The location the PDF was signed.
    @type location: L{str}
    @param signaturePage: Path to signature page.
    @type signaturePage: L{FilePath} or L{None}
    @param fields: Mapping of signature page field names and values.
    @type fields: L{dict}
    @param privateKeyPassword: The password for the private key contained in
        the Java Keystore.
    @type privateKeyPassword: L{str} or L{None}
    @param imagePath: The path to an image to stamp on the PDF.
    @type imagePath: L{twisted.python.filepath.FilePath}
    @param rectangle: The size of the signature rectangle. eg:
        [LX1,LY1,UX2,UY2]
    @type rectangle: L{list} of L{str}
    @return: A deferred resulting in the signed PDF content as a byte string or
        a L{diamond.error.ExternalProcessError}.
    """
    # Write the unsigned bytes to a temp file so the external clj-neon
    # process can read them; removed again in _cleanup below.
    tempPath = FilePath(mkstemp()[1])
    tempPath.setContent(data)
    def _cleanup(result):
        # Runs on success AND failure (addBoth): never leak the temp file.
        tempPath.remove()
        return result
    keystorePath, inputPath = sanitizePaths([keystorePath, tempPath])
    # '-' asks clj-neon to write the signed PDF to stdout.
    args = [inputPath,
            '-',
            keystorePath,
            '--keystore-pass', keystorePassword,
            '--reason', reason,
            '--location', location]
    if privateKeyPassword:
        args.extend(['--password', privateKeyPassword])
    if imagePath:
        # NOTE(review): imagePath is passed as the FilePath object itself,
        # unlike signaturePage which uses .path -- confirm getProcessOutput
        # stringifies FilePath arguments correctly.
        args.extend(['--signature-image', imagePath])
    if rectangle:
        args.extend(['--signature-rect', ','.join(rectangle)])
    if signaturePage:
        args.extend(['--signature-page', signaturePage.path])
    if fields:
        for k, v in fields.iteritems():
            args.extend(['--field', '%s:%s' % (k, v)])
    d = getProcessOutput(_neonBinary(), args)
    d.addBoth(_cleanup)
    return d
| {
"repo_name": "fusionapp/documint",
"path": "documint/extproc/neon.py",
"copies": "1",
"size": "2744",
"license": "mit",
"hash": -8213579040124811000,
"line_mean": 28.8260869565,
"line_max": 79,
"alpha_frac": 0.6629008746,
"autogenerated": false,
"ratio": 4.005839416058394,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007872767346599296,
"num_lines": 92
} |
from functools import partial
from testfixtures.compat import ClassType
from testfixtures.resolve import resolve, not_there
from testfixtures.utils import wrap, extend_docstring
import warnings
def not_same_descriptor(x, y, descriptor):
    """Return True when *x* is wrapped in *descriptor* but *y* is not."""
    x_wrapped = isinstance(x, descriptor)
    y_wrapped = isinstance(y, descriptor)
    return x_wrapped and not y_wrapped
class Replacer:
    """
    These are used to manage the mocking out of objects so that units
    of code can be tested without having to rely on their normal
    dependencies.
    """
    def __init__(self):
        # Maps target path -> original object, so restore() can undo
        # every replacement in any order.
        self.originals = {}
    def _replace(self, container, name, method, value, strict=True):
        # Low-level set/delete. `method` is 'a' (attribute access) or
        # 'i' (item access), as reported by resolve(); a value of
        # `not_there` means "remove the name entirely".
        # NOTE(review): `strict` is accepted but never consulted here,
        # so deleting a missing attribute/key still raises -- confirm
        # whether that is intended.
        if value is not_there:
            if method == 'a':
                delattr(container, name)
            if method == 'i':
                del container[name]
        else:
            if method == 'a':
                setattr(container, name, value)
            if method == 'i':
                container[name] = value
    def __call__(self, target, replacement, strict=True):
        """
        Replace the specified target with the supplied replacement.
        """
        container, method, attribute, t_obj = resolve(target)
        if method is None:
            raise ValueError('target must contain at least one dot!')
        if t_obj is not_there and strict:
            raise AttributeError('Original %r not found' % attribute)
        # Nothing to replace and nothing to install: no-op.
        if t_obj is not_there and replacement is not_there:
            return not_there
        replacement_to_use = replacement
        # When replacing on a class, preserve the descriptor wrapper
        # (classmethod/staticmethod) of the original so the replacement
        # binds the same way the original did.
        if isinstance(container, (type, ClassType)):
            if not_same_descriptor(t_obj, replacement, classmethod):
                replacement_to_use = classmethod(replacement)
            elif not_same_descriptor(t_obj, replacement, staticmethod):
                replacement_to_use = staticmethod(replacement)
        self._replace(container, attribute, method, replacement_to_use, strict)
        # Only the first replacement of a target records the original,
        # so repeated replacements still restore the true original.
        if target not in self.originals:
            self.originals[target] = t_obj
        return replacement
    def replace(self, target, replacement, strict=True):
        """
        Replace the specified target with the supplied replacement.
        """
        self(target, replacement, strict)
    def restore(self):
        """
        Restore all the original objects that have been replaced by
        calls to the :meth:`replace` method of this :class:`Replacer`.
        """
        # Iterate over a snapshot since entries are deleted as we go.
        for target, original in tuple(self.originals.items()):
            container, method, attribute, found = resolve(target)
            self._replace(container, attribute, method, original, strict=False)
            del self.originals[target]
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        self.restore()
    def __del__(self):
        if self.originals:
            # no idea why coverage misses the following statement
            # it's covered by test_replace.TestReplace.test_replacer_del
            warnings.warn(  # pragma: no cover
                'Replacer deleted without being restored, '
                'originals left: %r' % self.originals
                )
def replace(target, replacement, strict=True):
    """
    A decorator to replace a target object for the duration of a test
    function.
    """
    replacer = Replacer()
    perform = partial(replacer.__call__, target, replacement, strict)
    # wrap() performs the replacement before the test and guarantees
    # restore() runs afterwards.
    return wrap(perform, replacer.restore)
class Replace(object):
    """
    A context manager that uses a :class:`Replacer` to replace a single
    target, undoing the replacement when the ``with`` block exits.
    """
    def __init__(self, target, replacement, strict=True):
        self._replacer = Replacer()
        self.strict = strict
        self.replacement = replacement
        self.target = target
    def __enter__(self):
        # Perform the replacement and hand the replacement object back.
        return self._replacer(self.target, self.replacement, self.strict)
    def __exit__(self, exc_type, exc_val, exc_tb):
        self._replacer.restore()
replace_params_doc = """
:param target: A string containing the dotted-path to the
object to be replaced. This path may specify a
module in a package, an attribute of a module,
or any attribute of something contained within
a module.
:param replacement: The object to use as a replacement.
:param strict: When `True`, an exception will be raised if an
attempt is made to replace an object that does
not exist.
"""
# add the param docs, so we only have one copy of them!
extend_docstring(replace_params_doc,
[Replacer.__call__, Replacer.replace, replace, Replace])
| {
"repo_name": "nebulans/testfixtures",
"path": "testfixtures/replace.py",
"copies": "1",
"size": "4551",
"license": "mit",
"hash": 7422946388296480000,
"line_mean": 32.2189781022,
"line_max": 79,
"alpha_frac": 0.6167875192,
"autogenerated": false,
"ratio": 4.435672514619883,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5552460033819883,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from testtools import TestCase
from testtools.matchers import (
Contains, Equals, ContainsDict, raises, MatchesStructure, MatchesSetwise)
from twisted.internet.defer import Deferred
from twisted.python.urlpath import URLPath
from twisted.web import http
from twisted.web.error import UnsupportedMethod
from twisted.web.resource import getChildForRequest, Resource
from twisted.web.template import Element, TagLoader, tags
from zope.interface import implementer
from txspinneret.interfaces import INegotiableResource, ISpinneretResource
from txspinneret.resource import (
ContentTypeNegotiator, SpinneretResource, _renderResource)
from txspinneret.util import identity
from txspinneret.test.util import InMemoryRequest, MatchesException
class RenderResourceTests(TestCase):
    """
    Tests for `txspinneret.resource._renderResource`.
    """
    def test_existingRenderer(self):
        """
        Call the renderer defined that matches the request method.
        """
        called = []
        resource = Resource()
        request = InMemoryRequest([])
        request.method = b'PUT'
        # The renderer records the request it receives.
        resource.render_PUT = called.append
        _renderResource(resource, request)
        self.assertThat(
            called,
            Equals([request]))
    def test_hasAllowedMethods(self):
        """
        Raise `UnsupportedErrors`, with the value of
        ``resource.allowedMethods``, if there are no matching renderers.
        """
        resource = Resource()
        request = InMemoryRequest([])
        request.method = b'PUT'
        resource.allowedMethods = [b'GET', b'HEAD']
        self.assertThat(
            partial(_renderResource, resource, request),
            MatchesException(
                UnsupportedMethod,
                MatchesStructure(
                    allowedMethods=MatchesSetwise(Equals('GET'),
                                                  Equals('HEAD')))))
    def test_computeAllowedMethods(self):
        """
        Raise `UnsupportedErrors`, computing the allowed methods, if there are
        no matching renderers.
        """
        # No explicit allowedMethods: they must be derived from the
        # render_* method names on the resource.
        class _Resource(object):
            render_GET = identity
            render_HEAD = identity
        resource = _Resource()
        request = InMemoryRequest([])
        request.method = b'PUT'
        self.assertThat(
            partial(_renderResource, resource, request),
            MatchesException(
                UnsupportedMethod,
                MatchesStructure(
                    allowedMethods=MatchesSetwise(Equals('GET'),
                                                  Equals('HEAD')))))
class SpinneretResourceTests(TestCase):
    """
    Tests for `txspinneret.resource.SpinneretResource`.

    Inner resource classes use ``zelf`` instead of ``self`` to avoid
    shadowing the test case's own ``self``.
    """
    def test_renderDeferred(self):
        """
        It is possible to return a `Deferred` from a render method.
        """
        @implementer(ISpinneretResource)
        class _RenderDeferred(object):
            def render_GET(zelf, request):
                return d
        d = Deferred()
        resource = SpinneretResource(_RenderDeferred())
        request = InMemoryRequest([])
        request.method = b'GET'
        request.render(resource)
        # Nothing written until the Deferred fires.
        self.assertThat(request.written, Equals([]))
        d.callback(b'hello')
        self.assertThat(request.written, Equals([b'hello']))
    def test_locateChildSetPostpath(self):
        """
        The second elements in ``locateChild`` return value is the new request
        postpath.
        """
        @implementer(ISpinneretResource)
        class _TestResource(object):
            def locateChild(zelf, request, segments):
                return None, [b'quux']
        resource = SpinneretResource(_TestResource())
        request = InMemoryRequest([b'foo', b'bar'])
        self.assertThat(
            request.postpath,
            Equals([b'foo', b'bar']))
        getChildForRequest(resource, request)
        self.assertThat(
            request.postpath,
            Equals([b'quux']))
    def test_locateChildDefault(self):
        """
        ``locateChild`` returns 404 Not Found by default.
        """
        resource = SpinneretResource(Resource())
        request = InMemoryRequest([''])
        result = getChildForRequest(resource, request)
        request.render(result)
        self.assertThat(
            b''.join(request.written),
            Contains(b'404 - No Such Resource'))
        self.assertThat(
            http.NOT_FOUND,
            Equals(request.responseCode))
        self.assertThat(
            request.postpath,
            Equals([]))
    def test_locateChildNotFound(self):
        """
        If ``locateChild`` returns ``None`` the result is a resource for 404 Not
        Found.
        """
        @implementer(ISpinneretResource)
        class _TestResource(object):
            def locateChild(zelf, request, segments):
                return None, segments
        resource = SpinneretResource(_TestResource())
        request = InMemoryRequest([''])
        result = getChildForRequest(resource, request)
        request.render(result)
        self.assertThat(
            b''.join(request.written),
            Contains(b'404 - No Such Resource'))
        self.assertThat(
            http.NOT_FOUND,
            Equals(request.responseCode))
        self.assertThat(
            request.postpath,
            Equals([]))
    def test_locateChildRenderable(self):
        """
        If ``locateChild`` returns something adaptable to `IRenderable` it is
        rendered.
        """
        class _TestElement(Element):
            loader = TagLoader(tags.span(u'Hello ', tags.em(u'World')))
        @implementer(ISpinneretResource)
        class _TestResource(object):
            def locateChild(zelf, request, segments):
                return _TestElement(), []
        resource = SpinneretResource(_TestResource())
        request = InMemoryRequest([''])
        result = getChildForRequest(resource, request)
        request.render(result)
        self.assertThat(
            b''.join(request.written),
            Equals(b'<!DOCTYPE html>\n<span>Hello <em>World</em></span>'))
        self.assertThat(
            http.OK,
            Equals(request.responseCode))
        self.assertThat(
            request.postpath,
            Equals([]))
    def test_locateChildResource(self):
        """
        If ``locateChild`` returns something adaptable to `IResource` it is
        returned.
        """
        class _ResultingResource(Resource):
            isLeaf = True
            def render(zelf, request):
                request.setResponseCode(http.OK)
                return b'hello world'
        @implementer(ISpinneretResource)
        class _TestResource(object):
            def locateChild(zelf, request, segments):
                return _ResultingResource(), []
        resource = SpinneretResource(_TestResource())
        request = InMemoryRequest([''])
        result = getChildForRequest(resource, request)
        request.render(result)
        self.assertThat(
            b''.join(request.written),
            Equals(b'hello world'))
        self.assertThat(
            http.OK,
            Equals(request.responseCode))
        self.assertThat(
            request.postpath,
            Equals([]))
    def test_locateChildSpinneretResource(self):
        """
        If ``locateChild`` returns something adaptable to `ISpinneretResource`
        it is adapted to an `IResource`.
        """
        @implementer(ISpinneretResource)
        class _ResultingResource(object):
            def render_GET(zelf, request):
                request.setResponseCode(http.OK)
                return b'hello world'
        @implementer(ISpinneretResource)
        class _TestResource(object):
            def locateChild(zelf, request, segments):
                return _ResultingResource(), []
        resource = SpinneretResource(_TestResource())
        request = InMemoryRequest([''])
        request.method = b'GET'
        result = getChildForRequest(resource, request)
        request.render(result)
        self.assertThat(
            b''.join(request.written),
            Equals(b'hello world'))
        self.assertThat(
            http.OK,
            Equals(request.responseCode))
        self.assertThat(
            request.postpath,
            Equals([]))
    def test_locateChildRedirect(self):
        """
        If ``locateChild`` returns a `URLPath` instance a redirect is made.
        """
        @implementer(ISpinneretResource)
        class _TestResource(object):
            def locateChild(zelf, request, segments):
                return URLPath.fromString(b'http://quux.com/bar'), []
        resource = SpinneretResource(_TestResource())
        request = InMemoryRequest([''])
        result = getChildForRequest(resource, request)
        request.render(result)
        self.assertThat(
            request.outgoingHeaders,
            ContainsDict(
                {b'location': Equals(b'http://quux.com/bar')}))
        self.assertThat(
            http.FOUND,
            Equals(request.responseCode))
        self.assertThat(
            request.postpath,
            Equals([]))
@implementer(INegotiableResource)
class _FooJSON(Resource):
    """
    Resource for handling ``application/json`` requests.

    Shared fixture for the content-negotiation tests below.
    """
    contentType = b'application/json'
    acceptTypes = [contentType]
    def render_GET(zelf, request):
        request.setResponseCode(http.OK)
        return b'hello world'
@implementer(INegotiableResource, ISpinneretResource)
class _FooSpinneretJSON(object):
    """
    Spinneret resource for handling ``application/json`` requests.

    Same behaviour as `_FooJSON` but via the `ISpinneretResource` path.
    """
    contentType = b'application/json'
    acceptTypes = [contentType]
    def render_GET(zelf, request):
        request.setResponseCode(http.OK)
        return b'hello world'
class ContentTypeNegotiatorTests(TestCase):
    """
    Tests for `txspinneret.resource.ContentTypeNegotiator`.
    """
    def test_duplicateHandlers(self):
        """
        Only one handler for an accept type may be specified.
        """
        @implementer(INegotiableResource)
        class _BarJSON(object):
            contentType = b'application/json'
            acceptTypes = [b'application/json']
        self.assertThat(
            partial(ContentTypeNegotiator, [_FooJSON(), _FooJSON()]),
            raises(ValueError))
        self.assertThat(
            partial(ContentTypeNegotiator, [_FooJSON(), _BarJSON()]),
            raises(ValueError))
    def test_unacceptable(self):
        """
        If no handler could be negotiated then return an empty resource with
        406 Not Acceptable.
        """
        resource = ContentTypeNegotiator([_FooJSON()])
        request = InMemoryRequest([])
        request.requestHeaders.setRawHeaders(b'accept', [b'text/plain'])
        request.render(resource)
        self.assertThat(
            b''.join(request.written),
            Equals(b''))
        self.assertThat(
            http.NOT_ACCEPTABLE,
            Equals(request.responseCode))
    def test_fallback(self):
        """
        If no handler could be negotiated but ``fallback`` was ``True`` then
        use the first specified handler.
        """
        @implementer(INegotiableResource)
        class _BarXML(object):
            contentType = b'application/xml'
            # BUG FIX: was the typo b'applicaton/xml'; the test outcome is
            # unchanged since the request only accepts text/plain.
            acceptTypes = [b'application/xml']
        resource = ContentTypeNegotiator(
            [_FooJSON(), _BarXML()], fallback=True)
        request = InMemoryRequest([])
        request.requestHeaders.setRawHeaders(b'accept', [b'text/plain'])
        request.render(resource)
        self.assertThat(
            b''.join(request.written),
            Equals(b'hello world'))
        self.assertThat(
            request.outgoingHeaders,
            ContainsDict(
                {b'content-type': Equals(b'application/json')}))
        self.assertThat(
            http.OK,
            Equals(request.responseCode))
    def test_negotiateResource(self):
        """
        Negotiate a handler resource based on the ``Accept`` header.
        """
        resource = ContentTypeNegotiator([_FooJSON()])
        request = InMemoryRequest([])
        request.requestHeaders.setRawHeaders(b'accept', [b'application/json'])
        request.render(resource)
        self.assertThat(
            b''.join(request.written),
            Equals(b'hello world'))
        self.assertThat(
            request.outgoingHeaders,
            ContainsDict(
                {b'content-type': Equals(b'application/json')}))
        self.assertThat(
            http.OK,
            Equals(request.responseCode))
    def test_negotiateSpinneretResource(self):
        """
        Negotiate a Spinneret handler resource based on the ``Accept`` header.
        """
        resource = ContentTypeNegotiator([_FooSpinneretJSON()])
        request = InMemoryRequest([])
        request.requestHeaders.setRawHeaders(b'accept', [b'application/json'])
        request.render(resource)
        self.assertThat(
            b''.join(request.written),
            Equals(b'hello world'))
        self.assertThat(
            request.outgoingHeaders,
            ContainsDict(
                {b'content-type': Equals(b'application/json')}))
        self.assertThat(
            http.OK,
            Equals(request.responseCode))
| {
"repo_name": "mithrandi/txspinneret",
"path": "txspinneret/test/test_resource.py",
"copies": "1",
"size": "13443",
"license": "mit",
"hash": -2716666827504374300,
"line_mean": 31.4710144928,
"line_max": 80,
"alpha_frac": 0.5910139106,
"autogenerated": false,
"ratio": 4.505026809651475,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0011866891346055772,
"num_lines": 414
} |
from functools import partial
from threading import Thread
from enum import IntEnum

# BUG FIX: PortState was imported twice in the original import list.
from aspyrobotmx.codes import (HolderType, PortState, RobotStatus,
                               DumbbellState, SampleState)
from dcss import Server as DHS

# Number of sample ports per cassette position.
SAMPLES_PER_POSITION = 96

# Map robot holder-type codes to the single-character codes DCSS expects.
HOLDER_TYPE_MAP = {
    HolderType.unknown: 'u',
    HolderType.normal: '1',
    HolderType.calibration: '2',
    HolderType.superpuck: '3',
    # HolderType.error: 'X',  # SPEL doesn't report error
}

# Map robot port states to the single-character codes DCSS expects.
PORT_STATE_MAP = {
    PortState.unknown: 'u',
    PortState.empty: '0',
    PortState.full: '1',
    PortState.error: 'b',
}
class Output(IntEnum):
    """Indexes of the digital outputs.
    This Enum maps the indexes as they appear on the BluIce Robot Advanced tab.
    They may or may not correspond to the physical outputs of the robot I/O box.
    """
    gripper = 1
    lid = 3
    heater = 14
    heater_air = 13
class RobotDHS(DHS):
    def __init__(self, dcss, robot):
        # Register as the robot's delegate so the on_* callbacks defined
        # below fire whenever robot attributes change.
        super(RobotDHS, self).__init__('robot', dcss)
        self.robot = robot
        self.robot.delegate = self
    def setup(self):
        """Start DHS.loop to process incoming DCSS messages
        """
        # Daemon thread: will not keep the process alive on shutdown.
        self.recv_loop_thread = Thread(target=self.loop, daemon=True)
        self.recv_loop_thread.start()
    def login(self):
        """Called by DCSS.connect() after DCSS connection established.

        Pushes a full snapshot of robot state (status, state, cassettes,
        forces for all three positions, calibration timestamps) to DCSS.
        """
        super(RobotDHS, self).login()
        self.robot.setup()
        self.send_set_status_string()
        self.send_set_state_string()
        self.send_set_robot_cassette_string()
        self.send_set_robot_force_string('left')
        self.send_set_robot_force_string('middle')
        self.send_set_robot_force_string('right')
        self.send_calibration_timestamps()
    def operation_callback(self, operation, handle, stage, message=None,
                           error=None):
        """Callback function to be supplied when starting operations.
        When the operation stage reaches 'end' will send operation_completed or
        operation_error depending on whether an error occurred.
        """
        self.log.info('operation_callback: %r %r %r %r %r',
                      operation, handle, stage, message, error)
        # Intermediate stages are only logged; completion is reported once.
        if stage == 'end':
            if error:
                operation.operation_error(error)
            else:
                operation.operation_completed(message or 'OK')
# ***************************************************************
# ******************** DHS attributes ***************************
# ***************************************************************
    @property
    def needs_clear(self):
        """Whether the robot status needs to be cleared.
        If `True` the status flags will need to be reset via the BluIce
        Inspected button.
        """
        # Status is a bitmask; test the need_clear flag.
        return bool(self.robot.status & RobotStatus.need_clear)
    @property
    def needs_reset(self):
        """Whether the robot needs a reset via robot_config clear_all in BluIce."""
        # Status is a bitmask; test the need_reset flag.
        return bool(self.robot.status & RobotStatus.need_reset)
    @property
    def needs_calibration(self):
        """Whether the robot needs calibration (any calibration flag set)."""
        # TODO: Check .NET behaviour
        return bool(self.robot.status & RobotStatus.need_cal_all)
@property
def warning(self):
return '' # Not current being used.
@property
def mounted(self):
"""The pin mounted in DCSS format: `'l 1 A'`."""
sample_on_goniometer = self.robot.sample_locations['goniometer']
if not sample_on_goniometer:
return ''
return self.port_tuple_to_str(sample_on_goniometer)
@property
def sample_state(self):
"""The sample location in DCSS format."""
location_to_state = {'cavity': 'on tong', 'picker': 'on picker',
'placer': 'on placer', 'goniometer': 'on gonio'}
location = next((location_to_state[loc]
for loc, sample in self.robot.sample_locations.items()
if sample), 'no')
return location
@property
def dumbbell_state(self):
"""The dumbbell location."""
try:
return DumbbellState(self.robot.dumbbell_state).name
except ValueError:
return 'bad'
    @property
    def manual_mode(self):
        """Whether the robot is in manual mode; hard-coded off for now."""
        return False  # TODO: Check whether needed.
    @property
    def needs_toolset_calibration(self):
        """Whether the robot needs a toolset calibration."""
        # Status is a bitmask; test the need_cal_magnet flag.
        return bool(self.robot.status & RobotStatus.need_cal_magnet)
    @property
    def needs_cassette_calibration(self):
        """Whether the robot needs a cassette calibration."""
        # Status is a bitmask; test the need_cal_cassette flag.
        return bool(self.robot.status & RobotStatus.need_cal_cassette)
@property
def ln2(self):
"""Whether LN2 is present in DCSS format."""
if self.robot.ln2_level == 0:
return 'no'
elif self.robot.ln2_level == 1:
return 'yes'
else:
return 'wrong'
@property
def state(self):
"""Current task being executed."""
state = self.robot.current_task or 'unknown'
return state.lower() # Lower-case to prevent "Idle" blocking BluIce
# ****************************************************************
# ******************** EPICS callbacks ***************************
# ****************************************************************
    def on_task_message(self, value):
        """
        When a task message is received broadcast the robot_status string to
        update the calibration message and also log to BluIce.
        """
        self.send_set_status_string()
        # Messages are expected as "LEVEL rest-of-message".
        try:
            level, message = value.split(' ', 1)
        except ValueError:
            self.log.error('Expected space in %r', value)
            level, message = 'INFO', value
        # Translate Python logging level names to BluIce log levels;
        # unknown levels are reported as errors.
        level = {
            'DEBUG': 'note',
            'INFO': 'note',
            'WARNING': 'warning',
            'ERROR': 'error',
        }.get(level, 'error')
        self.send_xos3('htos_log %s robot %s' % (level, message))
def on_system_error_message(self, value):
if value != 'OK':
self.send_xos3('htos_log error robot %s' % value)
    def on_port_distances(self, value):
        # TODO: Need to know which position
        # Until then, rebroadcast force strings for all three positions.
        self.send_set_robot_force_string('left')
        self.send_set_robot_force_string('middle')
        self.send_set_robot_force_string('right')
    # Simple change notifications: each robot attribute update triggers a
    # re-broadcast of the relevant DCSS string(s). The new value itself is
    # ignored; the send_* methods read current state from self.robot.
    def on_status(self, _): self.send_set_status_string()
    def on_current_task(self, _): self.send_set_status_string()
    def on_at_home(self, _): self.send_set_status_string()
    def on_lid_command(self, _): self.send_set_output_string()
    def on_gripper_command(self, _): self.send_set_output_string()
    def on_heater_command(self, _): self.send_set_output_string()
    def on_heater_air_command(self, _): self.send_set_output_string()
    def on_lid_open(self, _): self.send_set_input_string()
    def on_lid_closed(self, _): self.send_set_input_string()
    def on_gripper_open(self, _): self.send_set_input_string()
    def on_gripper_closed(self, _): self.send_set_input_string()
    def on_heater_hot(self, _): self.send_set_input_string()
    def on_pins_mounted(self, _): self.send_set_status_string()
    def on_pins_lost(self, _): self.send_set_status_string()
    def on_task_progress(self, _): self.send_set_status_string()
    def on_closest_point(self, _): self.send_set_state_string()
    def on_ln2_level(self, _): self.send_set_state_string()
    def on_dumbbell_state(self, _): self.send_set_state_string()
    def on_port_states(self, _): self.send_set_robot_cassette_string()
    def on_holder_types(self, _): self.send_set_robot_cassette_string()
    def on_sample_locations(self, _):
        # Sample locations affect state, status and cassette strings.
        self.send_set_state_string()
        self.send_set_status_string()
        self.send_set_robot_cassette_string()
    def on_last_toolset_calibration(self, _): self.send_calibration_timestamps()
    def on_last_left_calibration(self, _): self.send_calibration_timestamps()
    def on_last_middle_calibration(self, _): self.send_calibration_timestamps()
    def on_last_right_calibration(self, _): self.send_calibration_timestamps()
    def on_last_goniometer_calibration(self, _): self.send_calibration_timestamps()
    def on_mount_message(self, message):
        # Report mount progress/result to DCSS via the robot_sample string.
        self.send_xos3('htos_set_string_completed robot_sample normal %s' % message)
# ****************************************************************
# ******************** dhs -> dcss messages **********************
# ****************************************************************
    def send_set_output_string(self):
        """Send DCSS the state of digital outputs.

        Positions follow the BluIce output indexes (see the Output enum);
        unused outputs are reported as 0.
        """
        msg = (
            'htos_set_string_completed robot_output normal '
            '0 '  # out0
            '{robot.gripper_command} '
            '0 '  # out2
            '{robot.lid_command} '
            '0 0 0 0 0 0 0 0 0 '  # out4-14
            '{robot.heater_air_command} '
            '{robot.heater_command} '
            '0'  # out15
        ).format(robot=self.robot)
        self.send_xos3(msg)
    def send_set_input_string(self):
        """Send DCSS the state of digital inputs.

        Positions follow the BluIce input indexes; unused inputs are 0.
        """
        msg = (
            'htos_set_string_completed robot_input normal '
            '0 0 0 0 0 0 0 0 '  # in0-7
            '{robot.gripper_open} '
            '{robot.gripper_closed} '
            '0 '  # in10
            '{robot.lid_closed} '
            '{robot.lid_open} '
            '{robot.heater_hot} '
            '0 0'  # in14-15
        ).format(robot=self.robot)
        self.send_xos3(msg)
    def send_set_status_string(self):
        """Send the robot_status string to the DCSS.
        Components of the message are:
        * status: Status code. 0-400000000
        * need_reset: Robot needs reset. 0 or 1
        * need_cal: Robot needs calibration. 0 or 1
        * state: Current task. {idle}, {prepare_mount_crystal}
        * warning: Warning message. {empty port in mounting}
        * cal_msg: Calibration message. {touching seat}
        * cal_step: Calibration progress. {d of d} {+d} {-d}
        * mounted: {} or port position that has been mounted like {l 4 A}
        * pin_lost: Number of pins lost.
        * pin_mounted: Number of pins mounted since last reset.
        """
        # {0} is this RobotDHS instance (for the derived properties);
        # {robot} is the underlying robot for raw attribute values.
        msg = ('htos_set_string_completed robot_status '
               'normal '  # Always "normal"
               'status: {0.robot.status} '
               'need_reset: {0.needs_reset:d} '
               'need_cal: {0.needs_calibration:d} '
               'state: {{{0.state}}} '
               'warning: {{{0.warning}}} '
               'cal_msg: {{{robot.task_message}}} '
               'cal_step: {{{robot.task_progress}}} '  # TODO: should validate
               'mounted: {{{0.mounted}}} '
               'pin_lost: {0.robot.pins_lost} '
               'pin_mounted: {0.robot.pins_mounted} '
               'manual_mode: {0.manual_mode:d} '
               'need_mag_cal: {0.needs_toolset_calibration:d} '
               'need_cas_cal: {0.needs_cassette_calibration:d} '
               'need_clear: {0.needs_clear:d}').format(self, robot=self.robot)
        self.send_xos3(msg)
def send_set_state_string(self):
    """Send the robot_state string to the DCSS.

    Eg:

        htos_set_string_completed robot_state normal
        {on gonio} {in cradle}
        P18 no {m 2 A} 110 1 1 1 0 0
        {invalid} {invalid} {invalid}
        27 27 0 0

    Components of string are:

    * sample_state: no / on tong / on placer / on picker / \
        on gonio / bad state
    * dumbbell_state: out / raised / in cradle / in tong / bad state
    * current_point: P0 / P1 / ... / Wrong
    * ln2: no / yes / wrong
    * current_port: m 2 A / invalid
    * pins_mounted pins_lost pins_mounted_before_lost
    * sample_on_goni: 1, 0
    * pins_stripped pins_stripped_short_trip
    * tong_port: m 2 A / invalid
    * picker_port: m 2 A / invalid
    * placer_port: m 2 A / invalid
    * num_puck_pin_mounted num_puck_pin_mounted_short_trip
    * num_pin_moved num_puck_pin_moved
    """
    sample_is_on_goni = bool(self.robot.sample_locations['goniometer'])
    # DCSS port strings ('invalid' when the location holds nothing).
    tong_port = self.port_tuple_to_str(self.robot.sample_locations['cavity'])
    picker_port = self.port_tuple_to_str(self.robot.sample_locations['picker'])
    placer_port = self.port_tuple_to_str(self.robot.sample_locations['placer'])
    msg = (
        'htos_set_string_completed robot_state normal '
        '{{{0.sample_state}}} '
        '{{{0.dumbbell_state}}} '
        'P{robot.closest_point} '
        '{0.ln2} '
        '{{{0.mounted}}} '
        '0 0 0 '  # pins_mounted pins_lost pins_mounted_before_lost (unused)
        '{sample_is_on_goni:d} '
        '0 0 '  # pins_stripped pins_stripped_short_trip (unused)
        '{{{tong_port}}} '
        '{{{picker_port}}} '
        '{{{placer_port}}} '
        '0 0 '  # num_puck_pin_mounted num_puck_pin_mounted_short_trip (unused)
        '0 0'  # num_pin_moved num_puck_pin_moved (unused)
    ).format(self, robot=self.robot, sample_is_on_goni=sample_is_on_goni,
             tong_port=tong_port, picker_port=picker_port,
             placer_port=placer_port)
    self.send_xos3(msg)
def send_set_robot_cassette_string(self):
    """Send DCSS the probe states for all three holder positions."""
    # TODO: Test mounted position
    sample_on_goni = self.robot.sample_locations['goniometer']
    # (position, port) of the currently mounted sample, or (None, None).
    mounted_position, mounted_port = (sample_on_goni
                                      if sample_on_goni else (None, None))
    msg = 'htos_set_string_completed robot_cassette normal'
    for position in ['left', 'middle', 'right']:
        states = [PORT_STATE_MAP[s] for s in self.robot.port_states[position]]
        if mounted_position == position:
            # The mounted port's state is overridden with 'm'.
            states[mounted_port] = 'm'
        msg += ' {type} {states}'.format(
            type=HOLDER_TYPE_MAP[self.robot.holder_types[position]],
            states=' '.join(states)
        )
    self.send_xos3(msg)
def send_calibration_timestamps(self):
    """Send DCSS the ts_robot_cal string: one brace-wrapped timestamp per
    calibration target (empty braces when never calibrated)."""
    robot = self.robot
    timestamps = (
        robot.last_toolset_calibration,
        robot.last_left_calibration,
        robot.last_middle_calibration,
        robot.last_right_calibration,
        robot.last_goniometer_calibration,
    )
    fields = ['{%s}' % ts if ts else '{}' for ts in timestamps]
    msg = ('htos_set_string_completed ts_robot_cal normal ' +
           ' '.join(fields))
    self.send_xos3(msg)
def send_set_robot_force_string(self, position):
    """Send DCSS the force/distance readings for one holder position.

    Unknown distances are reported as 'uuuu'; a missing height error is
    reported as 0.
    """
    readings = ['uuuu' if value is None else '{:.1f}'.format(value)
                for value in self.robot.port_distances[position]]
    msg = (
        'htos_set_string_completed robot_force_{position} normal'
        ' {height_error} {distances}'
    ).format(
        position=position,
        height_error=self.robot.height_errors[position] or 0,
        distances=' '.join(readings)
    )
    self.send_xos3(msg)
# ****************************************************************
# ******************** dcss -> dhs messages **********************
# ****************************************************************
def stoh_register_string(self, *args, **kwargs):
    """Ignored.

    We aren't using this part of the DCS protocol as we hardcode which
    strings and operations the DCSS will receive. This method is only here
    to suppress warnings about unimplemented DCSS functions.
    """
def stoh_register_operation(self, *args, **kwargs):
    """Ignored: see stoh_register_string."""
def stoh_abort_all(self, operation, *args):
    """Called by BluIce Abort button. Currently a no-op."""
    pass  # TODO: Do anything?
def robot_config(self, operation, task, *args):
    """Delegate robot_config operations to the appropriate method.

    Catch DCSS requests such as "robot_config <task>" and if there is a
    method named robot_config_<task> then execute that method.
    """
    try:
        func = getattr(self, 'robot_config_' + task)
    except AttributeError:
        # Unknown tasks are logged, not treated as operation errors.
        self.log.info('Operation robot_config %s is not handled' % task)
    else:
        func(operation, *args)
def robot_config_clear(self, operation):
    """Called by BluIce "Inspected" button.

    The operation is completed (or errored) via operation_callback.
    """
    callback = partial(self.operation_callback, operation)
    self.robot.inspected(callback=callback)
def robot_config_clear_status(self, operation):
    """Clear the robot status flags.

    Called by running "robot_config clear_status" in BluIce Operation View.
    """
    callback = partial(self.operation_callback, operation)
    self.robot.clear('status', callback=callback)
def robot_config_clear_all(self, operation):
    """Clear the robot status and probe information.

    Called by running "robot_config clear_all" in BluIce Operation View.
    """
    callback = partial(self.operation_callback, operation)
    self.robot.clear('all', callback=callback)
def robot_config_hw_output_switch(self, operation, output):
    """Called by the I/O buttons on the BluIce Robot Advanced tab.

    Toggles the selected digital output by writing the inverse
    (``1 - current``) of its commanded value.
    """
    output = int(output)
    if output == Output.gripper:
        func = self.robot.set_gripper
        value = 1 - self.robot.gripper_command
    elif output == Output.lid:
        func = self.robot.set_lid
        value = 1 - self.robot.lid_command
    elif output == Output.heater:
        func = self.robot.set_heater
        value = 1 - self.robot.heater_command
    elif output == Output.heater_air:
        func = self.robot.set_heater_air
        value = 1 - self.robot.heater_air_command
    else:
        return operation.operation_error('Not implemented')
    func(value, callback=partial(self.operation_callback, operation))
def robot_config_reset_cassette(self, operation):
    """Called by the "reset all to unknown" BluIce button.

    Resets all three holder positions at once.
    """
    callback = partial(self.operation_callback, operation)
    self.robot.reset_holders(['left', 'middle', 'right'], callback=callback)
def robot_config_set_index_state(self, operation, start, port_count, state):
    """Called by right-clicking ports in BluIce.

    Examples:

    left cassette column A 1-8 to bad: start='1', port_count='8', state='b'
    middle puck A port 1 to bad: start='98', port_count='1', state='b'
    middle puck B port 1 to bad: start='114', port_count='1', state='b'
    """
    start, port_count = int(start), int(port_count)
    # 'b' marks error; any other state code resets to unknown.
    state = PortState.error if state == 'b' else PortState.unknown
    # Each position block in the flat index space is SAMPLES_PER_POSITION
    # ports plus one leading holder-type element.
    samples_and_type_per_position = SAMPLES_PER_POSITION + 1
    position_index = start // samples_and_type_per_position
    position = ['left', 'middle', 'right'][position_index]
    # Convert the global 1-based index to a 0-based port index within
    # the position.
    start = start % samples_and_type_per_position
    start -= 1
    # If right-clicking a single port we support setting it to error
    # If right-clicked on all for multiple ports only resetting to unknown
    # is supported
    if port_count == 1:
        try:
            column, row = self.column_and_row_from_port_index(position, start)
        except ValueError as e:
            operation.operation_error(str(e))
        else:
            callback = partial(self.operation_callback, operation)
            self.robot.set_port_state(position, column, row, state,
                                      callback=callback)
    else:
        end = start + port_count
        # Build a full three-position bitmap; 1 marks ports to reset.
        ports = {position: [0] * SAMPLES_PER_POSITION
                 for position in ['left', 'middle', 'right']}
        ports[position][start:end] = [1] * port_count
        callback = partial(self.operation_callback, operation)
        self.robot.reset_ports(ports, callback=callback)
def robot_config_set_port_state(self, operation, port, state):
    """Called by the reset cassette status to unknown button in BluIce.

    Only the whole-cassette reset form (eg port 'lX0' with state 'u') is
    supported; anything else is rejected.
    """
    if port.endswith('X0') and state == 'u':
        # NOTE(review): an unrecognised position letter yields None here,
        # which is passed straight to reset_holders — confirm callers can
        # only send 'l'/'m'/'r'.
        position = {'l': 'left', 'm': 'middle', 'r': 'right'}.get(port[0])
        callback = partial(self.operation_callback, operation)
        self.robot.reset_holders([position], callback=callback)
    else:
        operation.operation_error('Not implemented')
def robot_config_reset_mounted_counter(self, operation):
    """Called by the BluIce Reset Counter button.

    NOTE(review): unlike the other robot_config handlers, no callback is
    attached, so `operation` is never completed — confirm intended.
    """
    self.robot.run_operation('reset_mount_counters')
def robot_config_set_mounted(self, operation, arg):
    """Set which sample is mounted.

    Called by running "robot_config set_mounted lA1" in BluIce Operation View.

    Args:
        operation: DCS operation used to report success or failure.
        arg: Port string such as ``lA1`` — position letter (l/m/r),
            column letter, then port number.
    """
    try:
        position, column, port = arg[0], arg[1], arg[2:]
        position = {'l': 'left', 'm': 'middle', 'r': 'right'}[position.lower()]
        column = column.upper()
        port = int(port)
        state = int(SampleState.goniometer)
    except (IndexError, KeyError, ValueError):
        # Previously a bare `except:` which swallowed everything (including
        # KeyboardInterrupt/SystemExit); only argument-parsing failures —
        # too-short arg (IndexError), bad position letter (KeyError), or a
        # non-numeric port (ValueError) — are expected here.
        operation.operation_error('Invalid argument')
    else:
        callback = partial(self.operation_callback, operation)
        self.robot.set_sample_state(position, column, port, state,
                                    callback=callback)
def robot_config_probe(self, operation, *ports):
    """Called by starting a probe from the BluIce Robot Probe tab.

    `ports` is a flat sequence of ints: for each of the three positions,
    one holder-type element followed by SAMPLES_PER_POSITION port flags.
    """
    ports = [int(p) for p in ports]
    n = SAMPLES_PER_POSITION + 1
    spec = {
        'left': ports[1:n],  # Skip the holder type element
        'middle': ports[n+1:2*n],
        'right': ports[2*n+1:3*n],
    }
    self.robot.probe(spec, callback=partial(self.operation_callback, operation))
def robot_calibrate(self, operation, target, *task_args):
    """Called by starting a calibration from the BluIce Robot Calibrate tab."""
    task_args = ' '.join(task_args)
    # BluIce names the toolset calibration "magnet_post".
    if target == 'magnet_post':
        target = 'toolset'
    self.robot.calibrate(target=target, task_args=task_args,
                         callback=partial(self.operation_callback, operation))
def prepare_mount_crystal(self, operation, *args):
    """Instruct the robot to prepare for a sample mount.

    Called by starting a mount from the BluIce Sample tab. This method will
    instruct the robot to go to the cooling point and wait. Meanwhile the
    DCSS will be performing the makesafe routine. Only if the makesafe
    succeeds will the robot be told to mount the sample.

    Note: prepare_dismount_crystal and prepare_mount_next_crystal delegate to
    this method.
    """
    self.log.info('prepare_mount_crystal: %r', args)
    # TODO: Check args
    operation.operation_update('OK to prepare')
    callback = partial(self.operation_callback, operation)
    self.robot.prepare_for_mount(callback=callback)
def prepare_dismount_crystal(self, operation, *args):
    """Called by requesting a dismount from the BluIce Sample tab.

    Delegates to prepare_mount_crystal; args are logged but not used.
    """
    self.log.info('prepare_dismount_crystal: %r', args)
    self.prepare_mount_crystal(operation)
def prepare_mount_next_crystal(self, operation, *args):
    """
    Called by requesting a mount from the BluIce Sample tab when a sample
    is already mounted on the goniometer.

    Delegates to prepare_mount_crystal; args are logged but not used.
    """
    self.log.info('prepare_mount_next_crystal: %r', args)
    self.prepare_mount_crystal(operation)
def mount_crystal(self, operation, cassette, row, column, *args):
    """
    Called by the DCSS after the user has requested a mount and the
    makesafe routine has completed successfully.

    `cassette` is the DCSS position letter ('l'/'m'/'r'); `row` is numeric.

    Note: `mount_next_crystal` delegates to this method.
    """
    self.log.info('mount_crystal: %r %r %r', cassette, row, column)
    cassette = {'r': 'right', 'm': 'middle', 'l': 'left'}[cassette]
    callback = partial(self.operation_callback, operation)
    self.robot.mount(cassette, column, int(row), callback=callback)
def dismount_crystal(self, operation, cassette, row, column, *_):
    """
    Called by the DCSS after the user has requested a dismount and the
    makesafe routine has completed successfully.

    `cassette` is the DCSS position letter ('l'/'m'/'r'); `row` is numeric.
    """
    self.log.info('dismount_crystal: %r %r %r', cassette, row, column)
    cassette = {'r': 'right', 'm': 'middle', 'l': 'left'}[cassette]
    callback = partial(self.operation_callback, operation)
    self.robot.dismount(cassette, column, int(row), callback=callback)
def mount_next_crystal(self, operation,
                       current_cassette, current_row, current_column,
                       cassette, row, column, *args):
    """
    Called by the DCSS after the user has requested a mount when a sample
    is already mounted and the makesafe routine has completed successfully.

    The current_* arguments are logged only; the mount itself is delegated
    to mount_crystal with the new port.
    """
    self.log.info('mount_next_crystal %r %r %r %r %r %r',
                  current_cassette, current_row, current_column,
                  cassette, row, column)
    self.mount_crystal(operation, cassette, row, column)
def robot_standby(self, operation, *args):
    """Delegate to `self.robot.go_to_standby`, completing `operation` via
    the usual operation callback. Extra args are ignored."""
    callback = partial(self.operation_callback, operation)
    self.robot.go_to_standby(callback=callback)
def port_tuple_to_str(self, port_tuple):
    """Convert a `(position, port_index)` tuple to a port string.

    Args:
        port_tuple: tuple of position (`'left'`, `'middle'`, `'right'`) and
            port index (0-97)

    Returns (str): Port string in DCSS format (eg `'l 1 A'`), or
        `'invalid'` when the tuple is falsy or the index cannot be mapped
        (unknown holder type).
    """
    if not port_tuple:
        return 'invalid'
    position, port = port_tuple
    try:
        column, row = self.column_and_row_from_port_index(position, port)
    except ValueError:
        return 'invalid'
    else:
        return '{position} {row} {column}'.format(position=position[0],
                                                  column=column, row=row)
def column_and_row_from_port_index(self, position, port):
    """Map a flat port index to a (column, row) pair for the holder at
    `position`, based on that holder's type.

    Raises:
        ValueError: If the holder type is unknown.
    """
    holder_type = self.robot.holder_types[position]
    if holder_type == HolderType.superpuck:
        # Superpucks: 16 samples per puck.
        return chr(ord('A') + port // 16), port % 16 + 1
    if holder_type in {HolderType.normal, HolderType.calibration}:
        # Cassettes: 8 samples per column.
        return chr(ord('A') + port // 8), port % 8 + 1
    raise ValueError('Cannot determine column, port if type is unknown')
| {
"repo_name": "AustralianSynchrotron/pyrobotdhs",
"path": "pyrobotdhs/dhs.py",
"copies": "1",
"size": "27144",
"license": "mit",
"hash": -8845179518872991000,
"line_mean": 37.5568181818,
"line_max": 84,
"alpha_frac": 0.5679708223,
"autogenerated": false,
"ratio": 3.8545867651235444,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49225575874235444,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from threading import Thread
import wx
from wx.gizmos import TreeListCtrl
from spacq.interface.resources import NotReadable
from spacq.interface.units import IncompatibleDimensions
from ...tool.box import MessageDialog
"""
A tree of subdevices and resources.
"""
class ResourceFetcher(Thread):
    """
    A thread which iterates over a list of items, getting resource values.
    """
    def __init__(self, items, getter, callback, *args, **kwargs):
        """
        items: List of TreeListCtrl items.
        getter: Given an item, returns a Resource or None.
        callback: Is called with the item and the value of the resource.
        """
        Thread.__init__(self, *args, **kwargs)
        self.items = items
        self.getter = getter
        self.callback = callback
    def run(self):
        # Read each readable resource's value off the GUI thread, handing
        # results back via wx.CallAfter.
        try:
            for item in self.items:
                resource = self.getter(item)
                if resource is not None and resource.readable:
                    if resource.slow:
                        # Too expensive to read automatically.
                        wx.CallAfter(self.callback, item, '[N/A]')
                    else:
                        wx.CallAfter(self.callback, item, resource.value)
        except wx.PyDeadObjectError:
            # The values are no longer wanted.
            return
class ItemData(object):
    """Per-node payload stored in a tree item's PyData.

    Records where the node lives in the hierarchy (`path`) and which
    Resource backs it, plus whether its value has been fetched yet.
    """
    def __init__(self, path, resource):
        self.path = path
        self.resource = resource
        # No value has been fetched for display yet.
        self.fetched = False
class ResourceTree(TreeListCtrl):
    """
    A tree list to display an hierarchy of subdevices and resources.
    """
    def __init__(self, parent, *args, **kwargs):
        TreeListCtrl.__init__(self, parent, *args,
                style=wx.TR_DEFAULT_STYLE|wx.TR_FULL_ROW_HIGHLIGHT|wx.TR_HIDE_ROOT,
                **kwargs)
        # Hidden root item; None until build_tree has run.
        self.root = None
        # Labels currently assigned to resources (used for duplicate checks).
        self.resource_labels = []
        # Column indices, paired with their AddColumn calls.
        self.col_name = 0
        self.AddColumn('Name', 200)
        self.col_r = 1
        self.AddColumn('R', 24)
        self.col_w = 2
        self.AddColumn('W', 24)
        self.col_units = 3
        self.AddColumn('Units', 50)
        self.col_label = 4
        self.AddColumn('Label', 200, edit=True)
        self.col_value = 5
        self.AddColumn('Value', 400, edit=True)
        # Extra 50 for nesting.
        self.SetMinSize((950, -1))
        self.Bind(wx.EVT_TREE_ITEM_EXPANDED, self.OnItemExpanded)
        self.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self.OnBeginLabelEdit)
        self.Bind(wx.EVT_TREE_END_LABEL_EDIT, self.OnEndLabelEdit)
        self.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.OnActivated)
    def GetChildren(self, item):
        """
        Non-recursive generator for the children of an item.
        """
        if self.HasChildren(item):
            child, cookie = self.GetFirstChild(item)
            while child:
                yield child
                child, cookie = self.GetNextChild(item, cookie)
    def GetLeaves(self, item=None):
        """
        Recursively collect the leaves under an item.

        Defaults to the whole tree; returns [] when no tree has been built.
        """
        if item is None:
            if self.root is None:
                return []
            else:
                item = self.root
        if not self.HasChildren(item):
            return [item]
        else:
            result = []
            for child in self.GetChildren(item):
                result.extend(self.GetLeaves(child))
            return result
    def fell(self):
        """
        Cut down the tree.
        """
        self.DeleteAllItems()
        self.root = None
    def spawn_fetch_thread(self, items):
        """
        Create a thread to populate the items.
        """
        def fetch(item):
            # Return the item's Resource the first time it is requested;
            # None thereafter (so values are only fetched once).
            pydata = self.GetItemPyData(item)
            if pydata is not None:
                if not pydata.fetched:
                    pydata.fetched = True
                    return pydata.resource
        def set(item, value):
            # Display the fetched value. (Shadows the builtin `set`, but
            # only within this method.)
            try:
                self.SetItemText(item, str(value), self.col_value)
            except wx.PyDeadObjectError:
                # The value isn't wanted anymore.
                return
        thr = ResourceFetcher(items, fetch, set)
        thr.daemon = True
        thr.start()
    def build_tree(self, device, resource_labels, root=None, path=None):
        """
        Recursively append all subdevices and resources.

        `path` is the tuple of names from the device down to this node.
        """
        if root is None:
            # Top-level call: start over from a fresh (hidden) root.
            self.fell()
            self.root = self.AddRoot('')
            root = self.root
            path = ()
        for name, subdev in device.subdevices.items():
            item = self.AppendItem(root, name)
            full_path = path + (name,)
            self.build_tree(subdev, resource_labels, item, full_path)
        for name, resource in device.resources.items():
            item = self.AppendItem(root, name)
            full_path = path + (name,)
            # Flag readability/writability/units in their columns.
            if resource.getter is not None:
                self.SetItemText(item, 'R', self.col_r)
            if resource.setter is not None:
                self.SetItemText(item, 'W', self.col_w)
            if resource.display_units is not None:
                self.SetItemText(item, resource.display_units, self.col_units)
            self.SetItemPyData(item, ItemData(full_path, resource))
            if full_path in resource_labels:
                self.SetItemText(item, resource_labels[full_path], self.col_label)
        self.SortChildren(root)
        if root == self.root:
            # Fetch values for the items visible at the top level.
            self.spawn_fetch_thread(self.GetChildren(self.root))
    def set_value(self, item, value, error_callback=None):
        """
        Set the value of a resource, as well as the displayed value.

        The write happens on a worker thread; errors go to error_callback
        if given, otherwise they propagate on that thread.
        """
        pydata = self.GetItemPyData(item)
        resource = pydata.resource
        def update():
            try:
                resource.value = resource.convert(value)
            except IncompatibleDimensions:
                if error_callback is not None:
                    error_callback(ValueError('Expected dimensions to match "{0}"'.format(resource.units)))
                else:
                    raise
            except Exception as e:
                if error_callback is not None:
                    error_callback(e)
                else:
                    raise
            # NOTE(review): after a failed write reported via error_callback,
            # execution still falls through to this readback — confirm
            # intended.
            try:
                true_value = str(resource.value)
            except NotReadable:
                pass
            else:
                wx.CallAfter(self.SetItemText, item, true_value, self.col_value)
        thr = Thread(target=update)
        thr.daemon = True
        thr.start()
    def OnItemExpanded(self, evt):
        """
        Get any resources which may now be visible.
        """
        self.spawn_fetch_thread(self.GetChildren(evt.Item))
    def OnBeginLabelEdit(self, evt):
        # EVT_TREE_END_LABEL_EDIT does not carry this value.
        self.editing_col = evt.Int
        if evt.Int == self.col_label:
            # Only resources can have labels.
            if not (self.GetItemText(evt.Item, self.col_r) or
                    self.GetItemText(evt.Item, self.col_w)):
                evt.Veto()
            else:
                self.old_label = self.GetItemText(evt.Item, self.col_label)
        elif evt.Int == self.col_value:
            # Can only write to writable resources.
            if not self.GetItemText(evt.Item, self.col_w):
                evt.Veto()
                # NOTE(review): execution continues after this Veto, so the
                # allowed-values dialog below can still appear for a
                # non-writable resource — confirm whether a `return` is
                # missing here.
            pydata = self.GetItemPyData(evt.Item)
            resource = pydata.resource
            if resource.allowed_values is not None:
                # Offer a choice dialog instead of free-text editing.
                options = [str(x) for x in sorted(resource.allowed_values)]
                dlg = wx.SingleChoiceDialog(self, '', 'Choose value', options)
                # Select the current value if possible.
                try:
                    dlg.SetSelection(options.index(self.GetItemText(evt.Item, self.col_value)))
                except ValueError:
                    pass
                if dlg.ShowModal() == wx.ID_OK:
                    try:
                        self.set_value(evt.Item, dlg.GetStringSelection())
                    except ValueError as e:
                        MessageDialog(self, str(e), 'Invalid value').Show()
                        return
                # No need for the editor.
                evt.Veto()
        else:
            evt.Veto()
    def OnEndLabelEdit(self, evt):
        # Dispatch on the column recorded in OnBeginLabelEdit.
        if self.editing_col == self.col_label:
            # Prevent duplicates.
            value = evt.Label
            # Don't do anything if unchanged.
            if value != self.old_label:
                if value not in self.resource_labels:
                    if self.old_label:
                        self.resource_labels.remove(self.old_label)
                    if value:
                        self.resource_labels.append(value)
                else:
                    evt.Veto()
                    MessageDialog(self, str(value), 'Duplicate label').Show()
                    return
        elif self.editing_col == self.col_value:
            # Update the real value.
            value = evt.Label
            def error_callback(e):
                MessageDialog(self, str(e), 'Invalid value').Show()
            # Errors come from a worker thread, so marshal the dialog back
            # onto the GUI thread.
            self.set_value(evt.Item, value, error_callback=partial(wx.CallAfter, error_callback))
    def OnActivated(self, evt):
        """
        Double click to edit.
        """
        self.EditLabel(evt.Item, evt.Int)
class DeviceResourcesPanel(wx.Panel):
    """
    A panel for displaying the subdevices and resources of a device.
    """
    def __init__(self, parent, *args, **kwargs):
        wx.Panel.__init__(self, parent, *args, **kwargs)
        # Panel.
        panel_box = wx.BoxSizer(wx.VERTICAL)
        ## Tree.
        self.tree = ResourceTree(self)
        panel_box.Add(self.tree, proportion=1, flag=wx.EXPAND)
        self.SetSizer(panel_box)
    def set_device(self, device, resource_labels):
        # Rebuild the tree for the given device, or clear it for None.
        if device is None:
            self.tree.fell()
        else:
            self.tree.build_tree(device, resource_labels)
    def GetValue(self):
        # Collect (path -> label) and (label -> resource) mappings from all
        # labelled leaves.
        labels = {}
        resources = {}
        for leaf in self.tree.GetLeaves():
            pydata = self.tree.GetItemPyData(leaf)
            name = self.tree.GetItemText(leaf, self.tree.col_label)
            if name:
                labels[pydata.path] = name
                resources[name] = pydata.resource
        return (labels, resources)
    def SetValue(self, resource_labels, resources):
        # Apply saved labels to matching leaves; `resources` is accepted for
        # symmetry with GetValue but not used here.
        for path, name in resource_labels.items():
            for leaf in self.tree.GetLeaves():
                pydata = self.tree.GetItemPyData(leaf)
                if pydata.path == path:
                    self.tree.SetItemText(leaf, name, self.tree.col_label)
        self.tree.resource_labels = resource_labels.values()
| {
"repo_name": "0/SpanishAcquisition",
"path": "spacq/gui/config/device/resource_tree.py",
"copies": "2",
"size": "8692",
"license": "bsd-2-clause",
"hash": 7072594350741149000,
"line_mean": 22.9449035813,
"line_max": 92,
"alpha_frac": 0.6774045099,
"autogenerated": false,
"ratio": 3.0627202255109234,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4740124735410923,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from threading import Thread
import wx
from wx.lib import filebrowsebutton
from wx.lib.scrolledpanel import ScrolledPanel
from spacq.interface.pulse.parser import PulseError, PulseSyntaxError
from spacq.interface.pulse.program import Program
from spacq.interface.resources import Resource
from spacq.interface.units import IncompatibleDimensions, Quantity
from ..display.waveform import WaveformFrame
from ..tool.box import OK_BACKGROUND_COLOR, determine_wildcard, MessageDialog
class FileBrowseButton(filebrowsebutton.FileBrowseButton):
    """File browse widget with a wx.TextCtrl-like interface."""
    # Alias so callers can use the TextCtrl-style name.
    # NOTE(review): this maps to SetValue, which (unlike
    # wx.TextCtrl.ChangeValue) presumably still fires the change
    # callback — confirm.
    ChangeValue = filebrowsebutton.FileBrowseButton.SetValue
    def SetBackgroundColour(self, colour):
        # Colour the inner text control rather than the composite widget.
        self.textControl.SetBackgroundColour(colour)
def pos_int_converter(x):
    """Parse `x` as a strictly positive integer.

    Raises:
        ValueError: With a uniform message when `x` is not an integer or
            is not positive.
    """
    try:
        value = int(x)
    except ValueError:
        raise ValueError('Expected positive integer')
    if value <= 0:
        raise ValueError('Expected positive integer')
    return value
def quantity_converter(x, symbols='s', dimensions='time', non_negative=True):
    """Parse `x` into a Quantity with the expected dimensions.

    Args:
        x: String to parse.
        symbols: Unit symbols the quantity must be compatible with.
        dimensions: Human-readable dimension name for error messages.
        non_negative: If True, reject negative quantities.

    Raises:
        ValueError: If parsing fails, dimensions mismatch, or the value is
            negative while `non_negative` is set.
    """
    try:
        q = Quantity(x)
        q.assert_dimensions(symbols)
    except (IncompatibleDimensions, ValueError):
        raise ValueError('Expected {0} quantity'.format(dimensions))
    if non_negative and q.value < 0:
        raise ValueError('Expected non-negative quantity')
    return q
class ParameterPanel(ScrolledPanel):
    """
    A generic panel to display parameters of a particular type.

    Subclasses set `type`, `name`, and the layout flags below, and override
    `converter` to validate input values.
    """
    # Parameters are (variable, attribute) tuples rather than 1-tuples.
    attributes = False
    # Omit the variable-name column.
    hide_variables = False
    # Show an extra input binding a resource label to each parameter.
    use_resource_labels = False
    spacer_height = 15
    def extract_variables(self, prog):
        """
        By default, extract the variables which pertain to the current type.
        """
        return [k for k, v in prog.variables.items() if v == self.type]
    def extract_parameters(self, prog):
        """
        By default, extract the parameters which pertain to the current type.
        """
        variables = self.extract_variables(prog)
        return sorted([item for item in prog.all_values for variable in variables if item[0] == variable])
    @property
    def num_cols(self):
        """
        Number of columns per row.
        """
        # Label and input field.
        cols = 2
        # Label includes attribute name.
        if self.attributes:
            cols += 1
        # Label excludes variable name.
        if self.hide_variables:
            cols -= 1
        # Also resource label input field.
        if self.use_resource_labels:
            cols += 1
        return cols
    @property
    def input_col(self):
        """
        The 0-based position of the growable input column.
        """
        if self.use_resource_labels:
            # Second-to-last column.
            return self.num_cols - 2
        else:
            # Last column.
            return self.num_cols - 1
    def get_value(self, parameter):
        """
        Get the value of a parameter as a string, or raise KeyError if not available.
        """
        return str(self.values[parameter])
    def get_resource_label(self, parameter):
        """
        Get the resource label for a parameter, or empty string if not available.
        """
        try:
            return self.resource_labels[parameter]
        except KeyError:
            return ''
    @property
    def posn(self):
        # Current (row, column) insertion point within the grid sizer.
        return (self.cur_row, self.cur_col)
    def add_headings(self):
        """
        Add column headings.
        """
        if self.use_resource_labels:
            # Default value.
            self.parameter_sizer.Add(wx.StaticText(self, label='Default value'), (self.cur_row, self.input_col),
                    flag=wx.EXPAND)
            # Resource label.
            self.parameter_sizer.Add(wx.StaticText(self, label='Resource label'), (self.cur_row, self.input_col + 1),
                    flag=wx.EXPAND)
        else:
            # Empty placeholder so a heading row still exists.
            self.parameter_sizer.Add(wx.StaticText(self, label=''), (self.cur_row, 0),
                    flag=wx.EXPAND)
        self.cur_row += 1
    def add_resource_label(self, parameter):
        """
        Add a resource label input.
        """
        resource_input = wx.TextCtrl(self, style=wx.TE_PROCESS_ENTER)
        self.Bind(wx.EVT_TEXT, partial(self.OnResourceChange, parameter), resource_input)
        self.Bind(wx.EVT_TEXT_ENTER, partial(self.OnResourceInput, parameter), resource_input)
        self.parameter_sizer.Add(resource_input, self.posn, flag=wx.EXPAND)
        self.cur_col += 1
        label = self.get_resource_label(parameter)
        # ChangeValue avoids firing OnResourceChange for the initial value.
        resource_input.ChangeValue(label)
        resource_input.default_background_color = resource_input.BackgroundColour
        if label:
            resource_input.BackgroundColour = OK_BACKGROUND_COLOR
    def add_row(self, parameter, input_type='text', increment_row=True):
        """
        Add a parameter to the sizer and display the value if it is available.
        """
        self.cur_col = 0
        if not self.hide_variables:
            # Only show the variable name on its first row.
            if self.last_variable == parameter[0]:
                label = ''
            else:
                label = parameter[0]
            self.last_variable = parameter[0]
            self.parameter_sizer.Add(wx.StaticText(self, label=label), self.posn,
                    flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
            self.cur_col += 1
        if self.attributes:
            self.parameter_sizer.Add(wx.StaticText(self, label=parameter[1]), self.posn,
                    flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_LEFT)
            self.cur_col += 1
        if input_type == 'text':
            input = wx.TextCtrl(self, style=wx.TE_PROCESS_ENTER)
            self.Bind(wx.EVT_TEXT, partial(self.OnChange, parameter), input)
            self.Bind(wx.EVT_TEXT_ENTER, partial(self.OnInput, parameter), input)
        elif input_type == 'file':
            input = FileBrowseButton(self, labelText='File:', changeCallback=partial(self.OnInput, parameter))
        else:
            raise ValueError('Unrecognized type "{0}"'.format(input_type))
        self.parameter_sizer.Add(input, self.posn, flag=wx.EXPAND)
        self.cur_col += 1
        input.default_background_color = input.BackgroundColour
        try:
            input.ChangeValue(self.get_value(parameter))
        except KeyError:
            # No default value set.
            pass
        else:
            input.SetBackgroundColour(OK_BACKGROUND_COLOR)
        if self.use_resource_labels:
            self.add_resource_label(parameter)
        if increment_row:
            self.cur_row += 1
    def converter(self, parameter, x):
        """
        Identity. Subclasses override to validate/convert input values.
        """
        return x
    def __init__(self, parent, global_store, prog, *args, **kwargs):
        ScrolledPanel.__init__(self, parent, *args, **kwargs)
        self.global_store = global_store
        self.prog = prog
        # Shared directly with the program object, so edits are visible
        # elsewhere.
        self.values = prog.values
        self.resource_labels = prog.resource_labels
        self.resources = prog.resources
        self.last_variable = None
        self.cur_row, self.cur_col = 0, 0
        parameters = self.extract_parameters(prog)
        # Panel.
        self.parameter_sizer = wx.GridBagSizer(hgap=5)
        self.parameter_sizer.AddGrowableCol(self.input_col, 1)
        if self.use_resource_labels:
            self.parameter_sizer.AddGrowableCol(self.input_col + 1, 1)
        ## Headings.
        self.add_headings()
        ## Parameter inputs.
        for parameter in parameters:
            self.add_row(parameter)
        self.SetSizer(self.parameter_sizer)
        self.SetupScrolling()
    def set_value(self, parameter, value):
        self.values[parameter] = value
    def del_value(self, parameter):
        # Missing values are fine; deletion is best-effort.
        try:
            del self.values[parameter]
        except KeyError:
            pass
    def set_resource_label(self, parameter, value, resource):
        self.resource_labels[parameter] = value
        self.resources[parameter] = resource
    def del_resource_label(self, parameter):
        # Remove the label and its resource from both the program and the
        # global store, if present.
        try:
            label = self.resource_labels[parameter]
        except KeyError:
            pass
        else:
            del self.resource_labels[parameter]
            del self.resources[parameter]
            del self.global_store.resources[label]
    def OnChange(self, parameter, evt):
        # Awaiting validation.
        self.del_value(parameter)
        evt.EventObject.BackgroundColour = evt.EventObject.default_background_color
    def OnInput(self, parameter, evt):
        try:
            value = self.converter(parameter, evt.String)
        except ValueError as e:
            MessageDialog(self, str(e), 'Invalid value').Show()
            return
        # Validated.
        self.set_value(parameter, value)
        evt.EventObject.BackgroundColour = OK_BACKGROUND_COLOR
    def OnResourceChange(self, parameter, evt):
        # Awaiting validation.
        self.del_resource_label(parameter)
        evt.EventObject.BackgroundColour = evt.EventObject.default_background_color
    def OnResourceInput(self, parameter, evt):
        label = evt.String
        try:
            # Do nothing if there has not been a change.
            if label == self.resource_labels[parameter]:
                return
        except KeyError:
            pass
        # The actual setter is generated when the program is cloned.
        resource = Resource(setter=lambda x: None)
        try:
            self.global_store.resources[label] = resource
        except KeyError as e:
            # NOTE(review): `e[0]` is Python 2 exception indexing; this
            # would break under Python 3.
            MessageDialog(self, str(e[0]), 'Resource label conflicts').Show()
            return
        # Validated.
        self.set_resource_label(parameter, label, resource)
        evt.EventObject.BackgroundColour = OK_BACKGROUND_COLOR
class AcqMarkerPanel(ParameterPanel):
    """Panel for acquisition-marker parameters plus global acquisition
    settings (times to average, post-trigger delay)."""
    type = 'acq_marker'
    name = 'Acquisition'
    attributes = True
    hide_variables = True
    def converter(self, parameter, x):
        # `parameter` is (variable, attribute); validate per attribute.
        x = ParameterPanel.converter(self, parameter, x)
        if parameter[1] == 'marker_num':
            return pos_int_converter(x)
        elif parameter[1] == 'output':
            try:
                if self.prog.variables[x] == 'output':
                    return x
                else:
                    raise KeyError()
            except KeyError:
                raise ValueError('Expected valid output name')
        # NOTE(review): other attribute names fall through and return None —
        # confirm only 'marker_num' and 'output' can occur here.
    def __init__(self, *args, **kwargs):
        ParameterPanel.__init__(self, *args, **kwargs)
        # Spacer.
        self.parameter_sizer.Add((-1, self.spacer_height), (self.cur_row, 0))
        self.cur_row += 1
        # Times to average.
        self.parameter_sizer.Add(wx.StaticText(self, label='Times to average'), (self.cur_row, 0),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.times_average_input = wx.TextCtrl(self, style=wx.TE_PROCESS_ENTER)
        self.parameter_sizer.Add(self.times_average_input, (self.cur_row, 1), flag=wx.EXPAND)
        self.cur_row += 1
        self.times_average_input.Value = str(self.prog.times_average)
        self.times_average_input.default_background_color = self.times_average_input.BackgroundColour
        self.times_average_input.BackgroundColour = OK_BACKGROUND_COLOR
        self.Bind(wx.EVT_TEXT, self.OnTimesAverageChange, self.times_average_input)
        self.Bind(wx.EVT_TEXT_ENTER, self.OnTimesAverageInput, self.times_average_input)
        # Post-trigger delay.
        self.parameter_sizer.Add(wx.StaticText(self, label='Post-trigger delay'), (self.cur_row, 0),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.delay_input = wx.TextCtrl(self, style=wx.TE_PROCESS_ENTER)
        self.parameter_sizer.Add(self.delay_input, (self.cur_row, 1), flag=wx.EXPAND)
        self.cur_row += 1
        self.delay_input.Value = str(self.prog.acq_delay)
        self.delay_input.default_background_color = self.delay_input.BackgroundColour
        self.delay_input.BackgroundColour = OK_BACKGROUND_COLOR
        self.Bind(wx.EVT_TEXT, self.OnDelayChange, self.delay_input)
        self.Bind(wx.EVT_TEXT_ENTER, self.OnDelayInput, self.delay_input)
    def OnTimesAverageChange(self, evt=None):
        # Awaiting validation: clear the "OK" highlight.
        self.times_average_input.BackgroundColour = self.times_average_input.default_background_color
    def OnTimesAverageInput(self, evt=None):
        try:
            value = pos_int_converter(self.times_average_input.Value)
        except ValueError as e:
            MessageDialog(self, str(e), 'Invalid value').Show()
            return
        self.prog.times_average = value
        self.times_average_input.BackgroundColour = OK_BACKGROUND_COLOR
    def OnDelayChange(self, evt=None):
        # Awaiting validation: clear the "OK" highlight.
        self.delay_input.BackgroundColour = self.delay_input.default_background_color
    def OnDelayInput(self, evt=None):
        try:
            value = quantity_converter(self.delay_input.Value, 's', 'time')
        except ValueError as e:
            MessageDialog(self, str(e), 'Invalid value').Show()
            return
        self.prog.acq_delay = value
        self.delay_input.BackgroundColour = OK_BACKGROUND_COLOR
class DelayPanel(ParameterPanel):
    """Panel for 'delay' variables; values must be non-negative time
    quantities."""
    type = 'delay'
    name = 'Delays'
    use_resource_labels = True
    def converter(self, parameter, x):
        # quantity_converter defaults to seconds/time/non-negative.
        x = ParameterPanel.converter(self, parameter, x)
        return quantity_converter(x)
class IntPanel(ParameterPanel):
    """Panel for 'int' variables; any integer (including negative) is
    accepted."""
    type = 'int'
    name = 'Integers'
    use_resource_labels = True
    def converter(self, parameter, x):
        x = ParameterPanel.converter(self, parameter, x)
        try:
            return int(x)
        except ValueError:
            raise ValueError('Expected integer')
class OutputPanel(ParameterPanel):
    """Parameter panel for the output (waveform) variables of a program.

    Adds a per-output "View" button plus panel-wide sampling rate, AWG,
    and oscilloscope inputs.
    """

    type = 'output'
    name = 'Outputs'

    def extract_parameters(self, prog):
        # One single-element tuple per output variable, sorted by name.
        return sorted([(x,) for x in self.extract_variables(prog)])

    @property
    def num_cols(self):
        # One column beyond the base layout, for the per-row "View" button.
        return ParameterPanel.num_cols.__get__(self) + 1

    @property
    def input_col(self):
        # The value input sits just left of the "View" button column.
        return self.num_cols - 2

    def get_value(self, parameter):
        # NOTE(review): KeyError appears to signal "no value set" to the
        # base class -- confirm against ParameterPanel.
        result = self.prog.output_channels[parameter[0]]
        if result is not None:
            return str(result)
        else:
            raise KeyError(parameter[0])

    def add_row(self, parameter):
        ParameterPanel.add_row(self, parameter, increment_row=False)
        # Append a "View" button to the row before advancing to the next row.
        view_button = wx.Button(self, label='View')
        self.parameter_sizer.Add(view_button, self.posn)
        self.cur_col += 1
        self.Bind(wx.EVT_BUTTON, partial(self.OnView, parameter), view_button)
        self.cur_row += 1

    def converter(self, parameter, x):
        # Blank clears the channel; otherwise it must be a positive integer.
        if x == '':
            return x
        else:
            return pos_int_converter(x)

    def __init__(self, *args, **kwargs):
        ParameterPanel.__init__(self, *args, **kwargs)
        # Spacer.
        self.parameter_sizer.Add((-1, self.spacer_height), (self.cur_row, 0))
        self.cur_row += 1
        # Frequency input.
        self.parameter_sizer.Add(wx.StaticText(self, label='Sampling rate'), (self.cur_row, 0),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.freq_input = wx.TextCtrl(self, style=wx.TE_PROCESS_ENTER)
        self.parameter_sizer.Add(self.freq_input, (self.cur_row, 1), flag=wx.EXPAND)
        self.cur_row += 1
        self.freq_input.Value = str(self.prog.frequency)
        # Remember the stock background so edits can revert the "OK" color.
        self.freq_input.default_background_color = self.freq_input.BackgroundColour
        self.freq_input.BackgroundColour = OK_BACKGROUND_COLOR
        self.Bind(wx.EVT_TEXT, self.OnFrequencyChange, self.freq_input)
        self.Bind(wx.EVT_TEXT_ENTER, self.OnFrequencyInput, self.freq_input)
        # Spacer.
        self.parameter_sizer.Add((-1, self.spacer_height), (self.cur_row, 0))
        self.cur_row += 1
        # AWG input.
        self.parameter_sizer.Add(wx.StaticText(self, label='AWG'), (self.cur_row, 0),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.awg_input = wx.TextCtrl(self, style=wx.TE_PROCESS_ENTER)
        self.parameter_sizer.Add(self.awg_input, (self.cur_row, 1), flag=wx.EXPAND)
        self.cur_row += 1
        self.awg_input.Value = self.prog.awg
        self.awg_input.default_background_color = self.awg_input.BackgroundColour
        self.awg_input.BackgroundColour = OK_BACKGROUND_COLOR
        self.Bind(wx.EVT_TEXT, self.OnAWGChange, self.awg_input)
        self.Bind(wx.EVT_TEXT_ENTER, self.OnAWGInput, self.awg_input)
        # Oscilloscope input.
        self.parameter_sizer.Add(wx.StaticText(self, label='Oscilloscope'), (self.cur_row, 0),
                flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
        self.oscilloscope_input = wx.TextCtrl(self, style=wx.TE_PROCESS_ENTER)
        self.parameter_sizer.Add(self.oscilloscope_input, (self.cur_row, 1), flag=wx.EXPAND)
        self.cur_row += 1
        self.oscilloscope_input.Value = self.prog.oscilloscope
        self.oscilloscope_input.default_background_color = self.oscilloscope_input.BackgroundColour
        self.oscilloscope_input.BackgroundColour = OK_BACKGROUND_COLOR
        self.Bind(wx.EVT_TEXT, self.OnOscilloscopeChange, self.oscilloscope_input)
        self.Bind(wx.EVT_TEXT_ENTER, self.OnOscilloscopeInput, self.oscilloscope_input)

    def set_value(self, parameter, value):
        # An empty string clears the channel assignment.
        if value == '':
            value = None
        self.prog.output_channels[parameter[0]] = value

    def OnFrequencyChange(self, evt=None):
        # Any edit reverts the "validated" background until re-confirmed.
        self.freq_input.BackgroundColour = self.freq_input.default_background_color

    def OnFrequencyInput(self, evt=None):
        # Validate the sampling rate; apply it and flag the field OK.
        try:
            value = quantity_converter(self.freq_input.Value, 'Hz', 'frequency')
        except ValueError as e:
            MessageDialog(self, str(e), 'Invalid value').Show()
            return
        self.prog.frequency = value
        self.freq_input.BackgroundColour = OK_BACKGROUND_COLOR

    def OnAWGChange(self, evt=None):
        self.awg_input.BackgroundColour = self.awg_input.default_background_color

    def OnAWGInput(self, evt=None):
        # Free-form device name; no validation performed.
        self.prog.awg = self.awg_input.Value
        self.awg_input.BackgroundColour = OK_BACKGROUND_COLOR

    def OnOscilloscopeChange(self, evt=None):
        self.oscilloscope_input.BackgroundColour = self.oscilloscope_input.default_background_color

    def OnOscilloscopeInput(self, evt=None):
        # Free-form device name; no validation performed.
        self.prog.oscilloscope = self.oscilloscope_input.Value
        self.oscilloscope_input.BackgroundColour = OK_BACKGROUND_COLOR

    def OnView(self, parameter, evt=None):
        """Generate the program's waveforms off the UI thread and show this output's."""
        def show_frame(waveform, markers, frequency):
            view_frame = WaveformFrame(self, parameter[0])
            view_frame.SetValue(waveform, markers, frequency)
            view_frame.Show()

        def show_error(error, monospace=False):
            MessageDialog(self, error, 'Waveform generation error', monospace=monospace).Show()

        def show_waveform():
            # Runs in a worker thread; all UI access goes through wx.CallAfter.
            try:
                waveforms = self.prog.generate_waveforms()
            except ValueError as e:
                wx.CallAfter(show_error, str(e))
                return
            except PulseError as e:
                wx.CallAfter(show_error, '\n'.join((e[0])), monospace=True)
                return
            waveform, markers = waveforms[parameter[0]]
            wx.CallAfter(show_frame, waveform, markers, self.prog.frequency)

        thr = Thread(target=show_waveform)
        thr.daemon = True
        thr.start()
class PulsePanel(ParameterPanel):
    """Parameter panel for the attributes of pulse variables."""

    type = 'pulse'
    name = 'Pulses'
    attributes = True
    use_resource_labels = True

    def add_resource_label(self, parameter):
        # Shape attributes get no resource label; just skip the column.
        if parameter[1] == 'shape':
            self.cur_col += 1
        else:
            ParameterPanel.add_resource_label(self, parameter)

    def add_row(self, parameter):
        # Shape attributes are edited through a file picker.
        extra = {'input_type': 'file'} if parameter[1] == 'shape' else {}
        return ParameterPanel.add_row(self, parameter, **extra)

    def converter(self, parameter, x):
        x = ParameterPanel.converter(self, parameter, x)
        attribute = parameter[1]
        if attribute == 'amplitude':
            return quantity_converter(x, 'V', 'voltage', False)
        if attribute == 'length':
            return quantity_converter(x)
        if attribute == 'shape':
            return x
class PulseProgramPanel(wx.Panel):
    """
    A panel to display and change all the parameters of a program.
    """

    # Mapping of variable type name to the panel class that edits it.
    panel_types = {'acq_marker': AcqMarkerPanel, 'delay': DelayPanel, 'int': IntPanel,
            'output': OutputPanel, 'pulse': PulsePanel}

    def __init__(self, parent, global_store, *args, **kwargs):
        wx.Panel.__init__(self, parent, *args, **kwargs)

        self.global_store = global_store
        # Currently-loaded program, if any.
        self.prog = None

        # Panel.
        panel_box = wx.BoxSizer(wx.VERTICAL)

        ## Notebook.
        self.parameter_notebook = wx.Notebook(self)
        self.parameter_notebook.SetMinSize((600, 400))
        panel_box.Add(self.parameter_notebook, proportion=1, flag=wx.EXPAND|wx.ALL, border=5)

        self.SetSizerAndFit(panel_box)

    def create_parameter_panels(self, prog):
        """Add one notebook page per variable type used by prog."""
        types = set(prog.variables.values())

        for var_type in sorted(types):
            # Look up the panel class outside the constructor call so a
            # KeyError raised while *building* a panel is not mistaken for
            # an unknown type.  (Also avoid shadowing the builtin `type`.)
            try:
                panel_cls = self.panel_types[var_type]
            except KeyError:
                # Bug fix: MessageDialog was previously called without the
                # parent and title arguments that every other call site uses.
                MessageDialog(self, 'Unrecognized variable type "{0}"'.format(var_type),
                        'Unrecognized type').Show()
                return

            result = panel_cls(self.parameter_notebook, self.global_store, prog)
            self.parameter_notebook.AddPage(result, result.name)

    def OnOpen(self, prog):
        self.prog = prog
        self.create_parameter_panels(self.prog)

    def OnClose(self):
        self.prog = None
        self.parameter_notebook.DeleteAllPages()
class PulseProgramFrame(wx.Frame):
    """Top-level frame for loading, editing, and closing a pulse program."""

    default_title = 'Pulse program'

    def __init__(self, parent, global_store, close_callback, *args, **kwargs):
        # A caller-supplied title becomes the new default.
        if 'title' not in kwargs:
            kwargs['title'] = self.default_title
        else:
            self.default_title = kwargs['title']

        wx.Frame.__init__(self, parent, *args, **kwargs)

        self.global_store = global_store
        # Invoked when the frame closes, so the owner can clean up.
        self.close_callback = close_callback

        # Menu.
        menuBar = wx.MenuBar()

        ## File.
        menu = wx.Menu()
        menuBar.Append(menu, '&File')
        item = menu.Append(wx.ID_OPEN, '&Open...')
        self.Bind(wx.EVT_MENU, self.OnMenuFileOpen, item)
        item = menu.Append(wx.ID_CLOSE, '&Close')
        self.Bind(wx.EVT_MENU, self.OnMenuFileClose, item)

        self.SetMenuBar(menuBar)

        # Frame.
        frame_box = wx.BoxSizer(wx.VERTICAL)
        self.pulse_panel = PulseProgramPanel(self, self.global_store)
        frame_box.Add(self.pulse_panel, proportion=1, flag=wx.EXPAND)
        self.SetSizerAndFit(frame_box)

        self.Bind(wx.EVT_CLOSE, self.OnClose)

        # Reload existing program.
        if self.global_store.pulse_program is not None:
            self.load_program(self.global_store.pulse_program)

    def load_program(self, prog):
        # Show the loaded file in the title bar and hand it to the panel.
        self.Title = '{0} - {1}'.format(prog.filename, self.default_title)
        self.pulse_panel.OnOpen(prog)

    def OnMenuFileOpen(self, evt=None):
        """Prompt for a pulse program file, compile it, and load it."""
        wildcard = determine_wildcard('pulse', 'Pulse program')
        dlg = wx.FileDialog(parent=self, message='Load...', wildcard=wildcard,
                style=wx.FD_OPEN)

        if dlg.ShowModal() == wx.ID_OK:
            path = dlg.GetPath()
            try:
                prog = Program.from_file(path)
            except PulseSyntaxError as e:
                MessageDialog(self, '\n'.join(e[0]), 'Compilation error', monospace=True).Show()
                return

            # Only purge the previous file if this one has been opened successfully.
            self.OnMenuFileClose()
            self.load_program(prog)
            self.global_store.pulse_program = prog

    def OnMenuFileClose(self, evt=None):
        """Unload the current program and release its resource labels."""
        if self.global_store.pulse_program is None:
            return

        self.pulse_panel.OnClose()
        self.Title = self.default_title

        # Remove the program's resources from the shared store.
        for label in self.global_store.pulse_program.resource_labels.values():
            del self.global_store.resources[label]
        self.global_store.pulse_program = None

    def OnClose(self, evt):
        self.close_callback()
        evt.Skip()
| {
"repo_name": "ghwatson/SpanishAcquisitionIQC",
"path": "spacq/gui/config/pulse.py",
"copies": "2",
"size": "20928",
"license": "bsd-2-clause",
"hash": -7103058238978541000,
"line_mean": 26.25,
"line_max": 108,
"alpha_frac": 0.7163608563,
"autogenerated": false,
"ratio": 3.0735790865031576,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47899399428031575,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from threading import Thread
import wx
from spacq.iteration.variables import OutputVariable
from spacq.tool.box import sift
from ..tool.box import MessageDialog
class SmoothResetPanel(wx.Panel):
    """
    A panel to change variables smoothly to and from preset values.
    """

    def __init__(self, parent, global_store, *args, **kwargs):
        wx.Panel.__init__(self, parent, *args, **kwargs)

        self.global_store = global_store

        # Panel.
        panel_box = wx.BoxSizer(wx.VERTICAL)

        ## Reset.
        reset_static_box = wx.StaticBox(self, label='Smooth reset')
        reset_box = wx.StaticBoxSizer(reset_static_box, wx.VERTICAL)
        panel_box.Add(reset_box, flag=wx.CENTER|wx.ALL, border=10)

        ### To zero.
        self.to_button = wx.Button(self, label='To zero')
        self.Bind(wx.EVT_BUTTON, self.OnResetToZero, self.to_button)
        reset_box.Add(self.to_button, flag=wx.EXPAND)

        ### From zero.
        self.from_button = wx.Button(self, label='From zero')
        self.Bind(wx.EVT_BUTTON, self.OnResetFromZero, self.from_button)
        reset_box.Add(self.from_button, flag=wx.EXPAND)

        ### Steps.
        steps_static_box = wx.StaticBox(self, label='Steps')
        steps_box = wx.StaticBoxSizer(steps_static_box, wx.VERTICAL)
        reset_box.Add(steps_box, flag=wx.EXPAND)

        # Number of intermediate steps used by each sweep.
        self.reset_steps_input = wx.SpinCtrl(self, min=1, initial=10)
        steps_box.Add(self.reset_steps_input)

        self.SetSizer(panel_box)

    def choose_variables(self):
        """
        Return all the selected variables, ensuring that their resources are valid.
        """
        # Only enabled output variables with a constant value and a resource name.
        all_vars = sift(self.global_store.variables.values(), OutputVariable)
        vars = [var for var in all_vars if var.enabled and var.use_const and var.resource_name]

        missing_resources = []
        unwritable_resources = []
        for var in vars:
            try:
                if not self.global_store.resources[var.resource_name].writable:
                    unwritable_resources.append(var.resource_name)
            except KeyError:
                missing_resources.append(var.resource_name)

        if missing_resources:
            MessageDialog(self, ', '.join(missing_resources), 'Missing resources').Show()
        if unwritable_resources:
            MessageDialog(self, ', '.join(unwritable_resources), 'Unwritable resources').Show()
        # Refuse to sweep anything unless every resource checks out.
        if missing_resources or unwritable_resources:
            return None

        return vars

    def reset(self, from_zero):
        """
        Sweep every selected variable between zero and its constant value.

        from_zero=True sweeps 0 -> const; from_zero=False sweeps const -> 0.
        """
        vars = self.choose_variables()
        if vars is None:
            return

        # Disable the buttons while sweeps are in flight.
        self.to_button.Disable()
        self.from_button.Disable()

        def exception_callback(e):
            MessageDialog(self, str(e), 'Error writing to resource').Show()

        def sweep_all_vars():
            # Runs in a background thread: start one sweep thread per
            # variable, wait for all of them, then re-enable the buttons.
            try:
                thrs = []
                for var in vars:
                    resource = self.global_store.resources[var.resource_name]
                    if from_zero:
                        value_from, value_to = 0, var.with_type(var.const)
                    else:
                        value_from, value_to = var.with_type(var.const), 0
                    thr = Thread(target=resource.sweep, args=(value_from, value_to, self.reset_steps_input.Value),
                            kwargs={'exception_callback': partial(wx.CallAfter, exception_callback)})
                    thr.daemon = True
                    thrs.append(thr)
                for thr in thrs:
                    thr.start()
                for thr in thrs:
                    thr.join()
            finally:
                # Touch wx only via CallAfter from this worker thread.
                if self:
                    wx.CallAfter(self.to_button.Enable)
                    wx.CallAfter(self.from_button.Enable)

        thr = Thread(target=sweep_all_vars)
        thr.daemon = True
        thr.start()

    def OnResetToZero(self, evt=None):
        self.reset(False)

    def OnResetFromZero(self, evt=None):
        self.reset(True)
| {
"repo_name": "0/SpanishAcquisition",
"path": "spacq/gui/action/smooth_reset.py",
"copies": "1",
"size": "3352",
"license": "bsd-2-clause",
"hash": -3676192610894515700,
"line_mean": 27.1680672269,
"line_max": 99,
"alpha_frac": 0.6989856802,
"autogenerated": false,
"ratio": 3.036231884057971,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9057469863860936,
"avg_score": 0.035549540079406756,
"num_lines": 119
} |
from functools import partial
from time import sleep
from mock import call, Mock
from scrapy.crawler import Crawler
from scrapy.http import Request
from scrapy import log, signals
from scrapy.settings import Settings
from scrapy.spider import BaseSpider
from scrapy.xlib.pydispatch import dispatcher
from twisted.internet import reactor
from scrapy_webdriver.http import WebdriverRequest
# Scrapy settings shared by every test: route both HTTP schemes through the
# webdriver download handler and install the webdriver spider middleware.
BASE_SETTINGS = dict(
    DOWNLOAD_HANDLERS={
        'http': 'scrapy_webdriver.download.WebdriverDownloadHandler',
        'https': 'scrapy_webdriver.download.WebdriverDownloadHandler',
    },
    SPIDER_MIDDLEWARES={
        'scrapy_webdriver.middlewares.WebdriverSpiderMiddleware': 543,
    })
class TestRequestQueue:
    """End-to-end check that webdriver requests are processed in priority order."""

    @classmethod
    def setup_class(cls):
        cls._settings = BASE_SETTINGS

    def settings(self, **options):
        # Copy the shared base settings, overlaying any per-test options.
        settings = self._settings.copy()
        settings.update(**options)
        return settings

    def _stop_reactor(self):
        reactor.stop()

    def _wait(self, url, *args, **kwargs):
        # Stand-in for webdriver.get: simulate a slow page load.
        sleep(0.1)

    def test_priorization(self):
        webdriver = Mock()
        settings = self.settings(WEBDRIVER_BROWSER=webdriver)
        webdriver.get.side_effect = self._wait
        webdriver.page_source = u''
        # Stop the reactor once the spider has finished crawling.
        dispatcher.connect(self._stop_reactor, signal=signals.spider_closed)
        crawler = Crawler(Settings(values=settings))
        crawler.configure()
        spider = self.Spider(name='test', domain='testdomain')
        crawler.crawl(spider)
        crawler.start()
        log.start(loglevel='ERROR')
        reactor.run()
        # The recorded call order encodes the expected scheduling: initial
        # requests and their actions before the follow-up requests.
        assert webdriver.get.mock_calls == [
            call('http://testdomain/path?wr=0'),
            call('http://testdomain/path?wr=0&wa=0'),
            call('http://testdomain/path?wr=0&wa=1'),
            call('http://testdomain/path?wr=1'),
            call('http://testdomain/path?wr=1&wa=0'),
            call('http://testdomain/path?wr=1&wa=1'),
            call('http://testdomain/path?wr=0&wa=0&wr=0'),
            call('http://testdomain/path?wr=0&wa=1&wr=0'),
            call('http://testdomain/path?wr=1&wa=0&wr=0'),
            call('http://testdomain/path?wr=1&wa=1&wr=0')]

    class Spider(BaseSpider):
        # Minimal spider driving the request pattern asserted above.

        def start_requests(self):
            for i in xrange(2):
                yield WebdriverRequest('http://testdomain/path?wr=%d' % i)
                yield Request('http://testdomain/path?r=%d' % i)

        def parse(self, response):
            def get(url):
                response.webdriver.get(url)
            for i in xrange(2):
                fake_url = '%s&wa=%d' % (response.url, i)
                request = response.action_request(url=fake_url,
                        callback=self.parse_action)
                # Leave a trace in the webdriver instance mock so we can look
                # at the request processing order.
                request.actions = Mock()
                request.actions.perform.side_effect = partial(get, fake_url)
                yield request

        def parse_action(self, response):
            yield WebdriverRequest('%s&wr=%d' % (response.url, 0),
                    callback=self.parse_nothing)

        def parse_nothing(self, response):
            pass
| {
"repo_name": "jc0n/scrapy-webdriver",
"path": "scrapy_webdriver/tests/test_request_queue.py",
"copies": "6",
"size": "3272",
"license": "mit",
"hash": -9063896609300627000,
"line_mean": 33.8085106383,
"line_max": 77,
"alpha_frac": 0.5968826406,
"autogenerated": false,
"ratio": 3.985383678440926,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 94
} |
from functools import partial
from time import time
class Scheduler:
    """Holds pending tasks and runs each once its scheduled time has passed."""

    def __init__(self):
        # Pending tasks; each must provide should_run() and run().
        self.tasks = []

    def loop(self):
        """
        Run tasks if they're due to run.

        Each due task is run exactly once and then removed from the queue.

        :return: None
        :rtype: NoneType
        """
        # Bug fix: the original lazily filtered self.tasks while running the
        # tasks, so a task that re-schedules itself (calls add_task) during
        # run() mutated the list mid-iteration.  Snapshot the due tasks first.
        due = [task for task in self.tasks if task.should_run()]
        for task in due:
            task.run()
        for task in due:
            self.tasks.remove(task)

    def add_task(self, task):
        """
        Add a task to the scheduler.

        :param task: Task to run some time in the future.
        :type task: Task
        :return: None
        :rtype: NoneType
        """
        self.tasks.append(task)
class Task:
    """A single deferred function call, runnable once a given time is reached."""

    def __init__(self, run_after, function, *args, **kwargs):
        """
        Initialise a new task to run after a given time.

        :param run_after: Unix timestamp (as from time()) after which the
            task may run.
        :type run_after: float
        :param function: Function to call with *args and **kwargs.
        :type function: Function
        """
        self.run_after = run_after
        # Pre-bind the arguments so run() is a zero-argument call.
        self.function = partial(function, *args, **kwargs)

    def should_run(self):
        """
        Report whether the task is due.

        :return: True once the current time has reached self.run_after.
        :rtype: bool
        """
        return self.run_after <= time()

    def run(self):
        """
        Execute the task.

        :return: The return value of the wrapped function.
        """
        return self.function()
| {
"repo_name": "flyte/pi-mqtt-gpio",
"path": "pi_mqtt_gpio/scheduler.py",
"copies": "1",
"size": "1544",
"license": "mit",
"hash": -4741047967444476000,
"line_mean": 25.6206896552,
"line_max": 75,
"alpha_frac": 0.5414507772,
"autogenerated": false,
"ratio": 4.13941018766756,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5180860964867561,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from toolz.compatibility import range, map
def hashable(x):
    """Return True when *x* can be hashed, False otherwise."""
    try:
        hash(x)
    except TypeError:
        return False
    return True
def transitive_get(key, d):
    """ Transitive dict.get

    Follow chains of keys through *d* until reaching a value that is not
    itself a key (or is unhashable).

    >>> d = {1: 2, 2: 3, 3: 4}
    >>> d.get(1)
    2
    >>> transitive_get(1, d)
    4
    """
    while True:
        try:
            present = key in d
        except TypeError:
            # Unhashable values terminate the chain.
            return key
        if not present:
            return key
        key = d[key]
def raises(err, lamda):
    """Return True if calling *lamda* raises *err*; False if it returns.

    Other exception types propagate to the caller.
    """
    try:
        lamda()
    except err:
        return True
    return False
# Taken from theano/theano/gof/sched.py
# Avoids licensing issues because this was written by Matthew Rocklin
def _toposort(edges):
""" Topological sort algorithm by Kahn [1] - O(nodes + vertices)
inputs:
edges - a dict of the form {a: {b, c}} where b and c depend on a
outputs:
L - an ordered list of nodes that satisfy the dependencies of edges
>>> _toposort({1: (2, 3), 2: (3, )})
[1, 2, 3]
Closely follows the wikipedia page [2]
[1] Kahn, Arthur B. (1962), "Topological sorting of large networks",
Communications of the ACM
[2] http://en.wikipedia.org/wiki/Toposort#Algorithms
"""
incoming_edges = reverse_dict(edges)
incoming_edges = dict((k, set(val)) for k, val in incoming_edges.items())
S = set((v for v in edges if v not in incoming_edges))
L = []
while S:
n = S.pop()
L.append(n)
for m in edges.get(n, ()):
assert n in incoming_edges[m]
incoming_edges[m].remove(n)
if not incoming_edges[m]:
S.add(m)
if any(incoming_edges.get(v, None) for v in edges):
raise ValueError("Input has cycles")
return L
def reverse_dict(d):
    """Reverses direction of dependence dict

    >>> d = {'a': (1, 2), 'b': (2, 3), 'c':()}
    >>> reverse_dict(d)  # doctest: +SKIP
    {1: ('a',), 2: ('a', 'b'), 3: ('b',)}

    :note: dict order are not deterministic. As we iterate on the
        input dict, it make the output of this function depend on the
        dict order. So this function output order should be considered
        as undeterministic.
    """
    result = {}
    for source, targets in d.items():
        for target in targets:
            result[target] = result.get(target, tuple()) + (source,)
    return result
def xfail(func):
    """Assert that *func* raises; raise if it unexpectedly passes.

    Bug fix: the "XFailed test passed" exception was previously raised
    inside the try block and therefore swallowed by the bare except, so a
    passing test was never reported.
    """
    try:
        func()
    except Exception:
        return
    raise Exception("XFailed test passed")  # pragma:nocover
def freeze(d):
    """ Freeze container to hashable form

    >>> freeze(1)
    1

    >>> freeze([1, 2])
    (1, 2)

    >>> freeze({1: 2})  # doctest: +SKIP
    frozenset([(1, 2)])
    """
    if isinstance(d, dict):
        return frozenset(freeze(item) for item in d.items())
    if isinstance(d, set):
        return frozenset(freeze(item) for item in d)
    if isinstance(d, (tuple, list)):
        return tuple(freeze(item) for item in d)
    # Anything else is assumed to be hashable already.
    return d
| {
"repo_name": "mrocklin/unification",
"path": "unification/utils.py",
"copies": "1",
"size": "2844",
"license": "bsd-3-clause",
"hash": -734053786170655100,
"line_mean": 23.1016949153,
"line_max": 77,
"alpha_frac": 0.5654008439,
"autogenerated": false,
"ratio": 3.4556500607533414,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45210509046533415,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from tornado import stack_context
from tornado.ioloop import IOLoop
from slacker.postpone import safe_proceed
class DummyWorker(object):
    """ Dummy worker for local immediate execution """

    def proceed(self, postponed, callback = None):
        # safe_proceed is called instead of _proceed
        # for consistent error handling
        outcome = safe_proceed(postponed)
        if callback:
            callback(outcome)
class ThreadWorker(object):
    """
    Executes code in a thread from a ThreadPool.

    .. warning::

        Postponed code shouldn't interact with tornado because
        tornado is not thread-safe.

    .. warning::

        I'm bad at threads so this can be broken ;)
    """

    # Lazily-created ThreadPool shared by all instances that don't
    # supply their own pool.
    _default_pool = None

    def __init__(self, pool=None, ioloop=None):
        """
        Initializes ThreadWorker.

        'pool' is a multiprocessing.pool.ThreadPool instance,
        'ioloop' is a tornado.ioloop.IOLoop instance.
        """
        self.ioloop = ioloop or IOLoop.instance()

        # create default pool only if necessary
        if not pool and not self.__class__._default_pool:
            from multiprocessing.pool import ThreadPool
            self.__class__._default_pool = ThreadPool(5)

        self.pool = pool or self.__class__._default_pool

    def proceed(self, postponed, callback=None):
        # Execute the postponed chain on the pool; deliver the result to
        # 'callback' (if any) back on the IOLoop thread.
        _proceed = partial(safe_proceed, postponed)

        if callback is None:
            self.pool.apply_async(_proceed)
            return

        # Without stack_context.wrap exceptions will not be propagated,
        # they'll be catched by tornado. Hours of debugging ;)
        @stack_context.wrap
        def on_response(result):
            self.ioloop.add_callback(partial(callback, result))

        self.pool.apply_async(_proceed, callback = on_response)
| {
"repo_name": "kmike/tornado-slacker",
"path": "slacker/workers/local.py",
"copies": "1",
"size": "1835",
"license": "mit",
"hash": 6022899078491079000,
"line_mean": 29.0819672131,
"line_max": 71,
"alpha_frac": 0.6370572207,
"autogenerated": false,
"ratio": 4.277389277389277,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002766019887007626,
"num_lines": 61
} |
from functools import partial
from types import MethodType
def sampleSpy(spyObj, methodName, *args, **kwargs):
    """Record the most recent call (name and arguments) on *spyObj*."""
    spyObj.lastCall, spyObj.lastArgs, spyObj.lastKwargs = methodName, args, kwargs
class RedisSpy(object):
    """ Report on how "redis" is being called
    in testing"""

    # Every redis client method the spy mimics, in the original order.
    _SPIED_METHODS = (
        "__getitem__", "__setitem__", "append", "bitcount", "blpop",
        "brpop", "debug_object", "decr", "delete", "exists",
        "expire", "expireat", "get", "getbit", "getrange",
        "getset", "hdel", "hexists", "hget", "hgetall",
        "hincrby", "hincrbyfloat", "hkeys", "hlen", "hmget",
        "hmset", "hset", "hsetnx", "hvals", "incr",
        "incrbyfloat", "lindex", "linsert", "llen", "lock",
        "lpop", "lpush", "lpushx", "lrange", "lrem",
        "lset", "ltrim", "mget", "persist", "pexpire",
        "pexpireat", "ping", "pipeline", "pttl", "rpop",
        "rpush", "rpushx", "sadd", "scard", "set",
        "setbit", "setex", "setnx", "setrange", "sinter",
        "sismember", "smembers", "spop", "srandmember", "srem",
        "substr", "sunion", "time", "ttl", "type",
        "zadd", "zcard", "zincrby", "zinterstore", "zrange",
        "zrangebyscore", "zrank", "zrem", "zremrangebyrank", "zremrangebyscore",
        "zrevrange", "zrevrangebyscore", "zrevrank", "zscore",
    )

    def __init__(self):
        for method_name in self._SPIED_METHODS:
            self.addSpy(method_name)

    def addSpy(self, methodName):
        # Bind the shared recorder to this instance and method name; give
        # the partial a __name__ so it introspects like the real method.
        spy = partial(sampleSpy, self, methodName)
        spy.__name__ = methodName
        setattr(self, methodName, spy)
| {
"repo_name": "softwaredoug/subredis",
"path": "tests/redisspy.py",
"copies": "1",
"size": "3112",
"license": "apache-2.0",
"hash": 8857762162149493000,
"line_mean": 28.9230769231,
"line_max": 55,
"alpha_frac": 0.5623393316,
"autogenerated": false,
"ratio": 3.102691924227318,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4165031255827318,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from types import TracebackType
try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
if False:
from .promise import Promise
from typing import (
Any,
Optional,
Tuple,
Union,
List,
Type,
Collection,
) # flake8: noqa
class PromiseList(object):
    """Resolution tracker for a collection of (possibly thenable) values.

    Wraps a collection -- or a promise of a collection -- and exposes a
    single `promise` that fulfills with the list of resolved values, or
    rejects with the first rejection reason.
    """

    __slots__ = ("_values", "_length", "_total_resolved", "promise", "_promise_class")

    def __init__(self, values, promise_class):
        # type: (Union[Collection, Promise[Collection]], Type[Promise]) -> None
        self._promise_class = promise_class
        # Aggregate result promise exposed to the caller.
        self.promise = self._promise_class()

        self._length = 0
        self._total_resolved = 0
        self._values = None  # type: Optional[Collection]
        Promise = self._promise_class
        # The collection itself may be a thenable that yields the collection.
        if Promise.is_thenable(values):
            values_as_promise = Promise._try_convert_to_promise(
                values
            )._target()  # type: ignore
            self._init_promise(values_as_promise)
        else:
            self._init(values)  # type: ignore

    def __len__(self):
        # type: () -> int
        return self._length

    def _init_promise(self, values):
        # type: (Promise[Collection]) -> None
        # The input was itself a promise: unwrap or wait for it first.
        if values.is_fulfilled:
            values = values._value()
        elif values.is_rejected:
            self._reject(values._reason())
            return

        self.promise._is_async_guaranteed = True
        values._then(self._init, self._reject)
        return

    def _init(self, values):
        # type: (Collection) -> None
        self._values = values
        if not isinstance(values, Iterable):
            err = Exception(
                "PromiseList requires an iterable. Received {}.".format(repr(values))
            )
            self.promise._reject_callback(err, False)
            return

        if not values:
            # An empty input fulfills immediately with an empty list.
            self._resolve([])
            return

        self._iterate(values)
        return

    def _iterate(self, values):
        # type: (Collection[Any]) -> None
        Promise = self._promise_class
        is_resolved = False

        self._length = len(values)
        self._values = [None] * self._length

        result = self.promise

        for i, val in enumerate(values):
            if Promise.is_thenable(val):
                maybe_promise = Promise._try_convert_to_promise(val)._target()

                if maybe_promise.is_pending:
                    # Still pending: register callbacks that slot the
                    # eventual value/reason into index i.
                    maybe_promise._add_callbacks(
                        partial(self._promise_fulfilled, i=i),
                        partial(self._promise_rejected, promise=maybe_promise),
                        None,
                    )
                    self._values[i] = maybe_promise
                elif maybe_promise.is_fulfilled:
                    is_resolved = self._promise_fulfilled(maybe_promise._value(), i)
                elif maybe_promise.is_rejected:
                    is_resolved = self._promise_rejected(maybe_promise._reason(), promise=maybe_promise)

            else:
                # Plain value: counts as immediately fulfilled.
                is_resolved = self._promise_fulfilled(val, i)

            # A rejection (or final fulfillment) makes further work moot.
            if is_resolved:
                break

        if not is_resolved:
            result._is_async_guaranteed = True

    def _promise_fulfilled(self, value, i):
        # type: (Any, int) -> bool
        # Returns True when this fulfillment resolved the whole list.
        if self.is_resolved:
            return False

        self._values[i] = value  # type: ignore
        self._total_resolved += 1
        if self._total_resolved >= self._length:
            self._resolve(self._values)  # type: ignore
            return True
        return False

    def _promise_rejected(self, reason, promise):
        # type: (Exception, Promise) -> bool
        # First rejection wins; later callbacks become no-ops.
        if self.is_resolved:
            return False

        self._total_resolved += 1
        self._reject(reason, traceback=promise._target()._traceback)
        return True

    @property
    def is_resolved(self):
        # type: () -> bool
        # _values doubles as the state flag; it is cleared on both
        # fulfillment and rejection.
        return self._values is None

    def _resolve(self, value):
        # type: (Collection[Any]) -> None
        assert not self.is_resolved
        assert not isinstance(value, self._promise_class)

        self._values = None
        self.promise._fulfill(value)

    def _reject(self, reason, traceback=None):
        # type: (Exception, Optional[TracebackType]) -> None
        assert not self.is_resolved

        self._values = None
        self.promise._reject_callback(reason, False, traceback=traceback)
| {
"repo_name": "syrusakbary/pypromise",
"path": "promise/promise_list.py",
"copies": "2",
"size": "4853",
"license": "mit",
"hash": -3042642463662816000,
"line_mean": 30.9276315789,
"line_max": 104,
"alpha_frac": 0.5528539048,
"autogenerated": false,
"ratio": 4.169243986254296,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002921754951083544,
"num_lines": 152
} |
from functools import partial
from typing import Any, Callable, Dict, Generator, List, Optional
from django.http import HttpRequest, HttpResponse
from zerver.decorator import webhook_view
from zerver.lib.exceptions import UnsupportedWebhookEventType
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
# Markdown templates used to build the webhook notification messages.
# {name_template} placeholders are themselves filled from the *_NAME_TEMPLATE
# values below.
EPIC_NAME_TEMPLATE = "**{name}**"
STORY_NAME_TEMPLATE = "[{name}]({app_url})"

# Comments and descriptions.
COMMENT_ADDED_TEMPLATE = (
    "New comment added to the {entity} {name_template}:\n``` quote\n{text}\n```"
)
NEW_DESC_ADDED_TEMPLATE = (
    "New description added to the {entity} {name_template}:\n``` quote\n{new}\n```"
)
DESC_CHANGED_TEMPLATE = (
    "Description for the {entity} {name_template} was changed from:\n"
    "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```"
)
DESC_REMOVED_TEMPLATE = "Description for the {entity} {name_template} was removed."

# State, name, and archival changes.
STATE_CHANGED_TEMPLATE = (
    "State of the {entity} {name_template} was changed from **{old}** to **{new}**."
)
NAME_CHANGED_TEMPLATE = (
    "The name of the {entity} {name_template} was changed from:\n"
    "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```"
)
ARCHIVED_TEMPLATE = "The {entity} {name_template} was {operation}."

# Story tasks, epics, and estimates.
STORY_TASK_TEMPLATE = "Task **{task_description}** was {operation} the story {name_template}."
STORY_TASK_COMPLETED_TEMPLATE = (
    "Task **{task_description}** ({name_template}) was completed. :tada:"
)
STORY_ADDED_REMOVED_EPIC_TEMPLATE = (
    "The story {story_name_template} was {operation} the epic {epic_name_template}."
)
STORY_EPIC_CHANGED_TEMPLATE = "The story {story_name_template} was moved from {old_epic_name_template} to {new_epic_name_template}."
STORY_ESTIMATE_TEMPLATE = "The estimate for the story {story_name_template} was set to {estimate}."

# Attachments, labels, and project/type/owner updates.
FILE_ATTACHMENT_TEMPLATE = (
    "A {type} attachment `{file_name}` was added to the story {name_template}."
)
LABEL_TEMPLATE = "**{name}**"
STORY_LABEL_TEMPLATE = "The label {labels} was added to the story {name_template}."
STORY_LABEL_PLURAL_TEMPLATE = "The labels {labels} were added to the story {name_template}."
STORY_UPDATE_PROJECT_TEMPLATE = (
    "The story {name_template} was moved from the **{old}** project to **{new}**."
)
STORY_UPDATE_TYPE_TEMPLATE = (
    "The type of the story {name_template} was changed from **{old_type}** to **{new_type}**."
)
DELETE_TEMPLATE = "The {entity_type} **{name}** was deleted."
STORY_UPDATE_OWNER_TEMPLATE = "New owner added to the story {name_template}."

# GitHub integration events; the trailing workflow-state suffix is appended
# when a PR/branch moves a story between workflow states.
TRAILING_WORKFLOW_STATE_CHANGE_TEMPLATE = " ({old} -> {new})"
STORY_GITHUB_PR_TEMPLATE = (
    "New GitHub PR [#{name}]({url}) opened for story {name_template}{workflow_state_template}."
)
STORY_GITHUB_COMMENT_PR_TEMPLATE = "Existing GitHub PR [#{name}]({url}) associated with story {name_template}{workflow_state_template}."
STORY_GITHUB_BRANCH_TEMPLATE = "New GitHub branch [{name}]({url}) associated with story {name_template}{workflow_state_template}."

# Batch updates covering several changes to one story in one message.
STORY_UPDATE_BATCH_TEMPLATE = "The story {name_template} {templates}{workflow_state_template}."
STORY_UPDATE_BATCH_CHANGED_TEMPLATE = "{operation} from {sub_templates}"
STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE = "{entity_type} **{old}** to **{new}**"
STORY_UPDATE_BATCH_ADD_REMOVE_TEMPLATE = "{operation} with {entity}"
def get_action_with_primary_id(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Return the payload action whose ``id`` matches the payload's ``primary_id``."""
    primary_id = payload["primary_id"]
    for candidate in payload["actions"]:
        if candidate["id"] == primary_id:
            primary_action = candidate
    return primary_action
def get_event(payload: Dict[str, Any], action: Dict[str, Any]) -> Optional[str]:
    """Derive the internal event name for an action, or None if it is ignored."""
    event = "{}_{}".format(action["entity_type"], action["action"])
    # We only consider the change to be a batch update if there are multiple
    # stories (thus there is no primary_id).
    if event == "story_update" and payload.get("primary_id") is None:
        return "story_update_batch"
    if event in IGNORED_EVENTS:
        return None
    changes = action.get("changes")
    if changes is None:
        return event
    # The first changed attribute below (in priority order) refines the event.
    refinements = (
        ("description", "description"),
        ("state", "state"),
        ("workflow_state_id", "state"),
        ("name", "name"),
        ("archived", "archived"),
        ("complete", "complete"),
        ("epic_id", "epic"),
        ("estimate", "estimate"),
        ("file_ids", "attachment"),
        ("label_ids", "label"),
        ("project_id", "project"),
        ("story_type", "type"),
        ("owner_ids", "owner"),
    )
    for attribute, suffix in refinements:
        if changes.get(attribute) is not None:
            return "{}_{}".format(event, suffix)
    return event
def get_topic_function_based_on_type(payload: Dict[str, Any], action: Dict[str, Any]) -> Any:
    """Look up the topic-building function for this action's entity type."""
    return EVENT_TOPIC_FUNCTION_MAPPER.get(action["entity_type"])
def get_delete_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a deleted story or epic."""
    template = DELETE_TEMPLATE
    return template.format(**action)
def get_story_create_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a newly created story."""
    epic_id = action.get("epic_id")
    if epic_id is None:
        template = "New story [{name}]({app_url}) of type **{story_type}** was created."
        return template.format(**action)
    template = "New story [{name}]({app_url}) was created and added to the epic **{epic_name}**."
    format_args = {"name": action["name"], "app_url": action["app_url"]}
    # Resolve the epic's display name from the payload's reference list.
    for reference in payload["references"]:
        if reference["id"] == epic_id:
            format_args["epic_name"] = reference["name"]
    return template.format(**format_args)
def get_epic_create_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a newly created epic."""
    return "New epic **{name}**({state}) was created.".format(**action)
def get_comment_added_body(payload: Dict[str, Any], action: Dict[str, Any], entity: str) -> str:
    """Render the notification for a comment added to a story or epic."""
    format_args: Dict[str, Any] = {"entity": entity}
    primary_id = payload["primary_id"]
    # The comment text lives on the primary action; the commented-on entity's
    # name/link comes from the sibling action of the matching entity type.
    for current in payload["actions"]:
        if current["id"] == primary_id:
            format_args["text"] = current["text"]
        elif current["entity_type"] == entity:
            format_args["name_template"] = get_name_template(entity).format(
                name=current["name"],
                app_url=current.get("app_url"),
            )
    return COMMENT_ADDED_TEMPLATE.format(**format_args)
def get_update_description_body(
    payload: Dict[str, Any], action: Dict[str, Any], entity: str
) -> str:
    """Render the notification for a changed, added, or removed description."""
    change = action["changes"]["description"]
    format_args = {
        "entity": entity,
        "new": change["new"],
        "old": change["old"],
        "name_template": get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    }
    # Pick the template based on which sides of the change are non-empty.
    if change["new"] and change["old"]:
        template = DESC_CHANGED_TEMPLATE
    elif change["new"]:
        template = NEW_DESC_ADDED_TEMPLATE
    else:
        template = DESC_REMOVED_TEMPLATE
    return template.format(**format_args)
def get_epic_update_state_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for an epic's state change."""
    change = action["changes"]["state"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="epic",
        new=change["new"],
        old=change["old"],
        name_template=EPIC_NAME_TEMPLATE.format(name=action["name"]),
    )
def get_story_update_state_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a story's workflow-state change."""
    state_change = action["changes"]["workflow_state_id"]
    resolved: Dict[str, str] = {}
    # Workflow-state ids are resolved to display names via the references list.
    for reference in payload["references"]:
        if reference["id"] == state_change["new"]:
            resolved["new"] = reference["name"]
        if reference["id"] == state_change["old"]:
            resolved["old"] = reference["name"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="story",
        new=resolved["new"],
        old=resolved["old"],
        name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    )
def get_update_name_body(payload: Dict[str, Any], action: Dict[str, Any], entity: str) -> str:
    """Render the notification for a renamed story or epic."""
    change = action["changes"]["name"]
    return NAME_CHANGED_TEMPLATE.format(
        entity=entity,
        new=change["new"],
        old=change["old"],
        name_template=get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    )
def get_update_archived_body(payload: Dict[str, Any], action: Dict[str, Any], entity: str) -> str:
    """Render the notification for an archived or unarchived story/epic."""
    operation = "archived" if action["changes"]["archived"]["new"] else "unarchived"
    return ARCHIVED_TEMPLATE.format(
        entity=entity,
        name_template=get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
        operation=operation,
    )
def get_story_task_body(payload: Dict[str, Any], action: Dict[str, Any], operation: str) -> str:
    """Render the notification for a task added to / removed from a story."""
    format_args = {
        "task_description": action["description"],
        "operation": operation,
    }
    # The owning story is a sibling action; it supplies the name/link.
    for sibling in payload["actions"]:
        if sibling["entity_type"] == "story":
            format_args["name_template"] = STORY_NAME_TEMPLATE.format(
                name=sibling["name"],
                app_url=sibling["app_url"],
            )
    return STORY_TASK_TEMPLATE.format(**format_args)
def get_story_task_completed_body(payload: Dict[str, Any], action: Dict[str, Any]) -> Optional[str]:
    """Render the task-completed notification, or None when the task was un-completed."""
    format_args = {"task_description": action["description"]}
    story_id = action["story_id"]
    # The owning story's name/link comes from the references list.
    for reference in payload["references"]:
        if reference["id"] == story_id:
            format_args["name_template"] = STORY_NAME_TEMPLATE.format(
                name=reference["name"],
                app_url=reference["app_url"],
            )
    if not action["changes"]["complete"]["new"]:
        return None
    return STORY_TASK_COMPLETED_TEMPLATE.format(**format_args)
def get_story_update_epic_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a story moved between, added to, or removed from epics."""
    format_args = {
        "story_name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    epic_change = action["changes"]["epic_id"]
    new_epic_id = epic_change.get("new")
    old_epic_id = epic_change.get("old")
    # Resolve the involved epic ids to display names via the references list.
    for reference in payload["references"]:
        if reference["id"] == new_epic_id:
            format_args["new_epic_name_template"] = EPIC_NAME_TEMPLATE.format(name=reference["name"])
        if reference["id"] == old_epic_id:
            format_args["old_epic_name_template"] = EPIC_NAME_TEMPLATE.format(name=reference["name"])
    if new_epic_id and old_epic_id:
        return STORY_EPIC_CHANGED_TEMPLATE.format(**format_args)
    if new_epic_id:
        format_args["epic_name_template"] = format_args["new_epic_name_template"]
        format_args["operation"] = "added to"
    else:
        format_args["epic_name_template"] = format_args["old_epic_name_template"]
        format_args["operation"] = "removed from"
    return STORY_ADDED_REMOVED_EPIC_TEMPLATE.format(**format_args)
def get_story_update_estimate_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a story's estimate being set or cleared."""
    new_estimate = action["changes"]["estimate"].get("new")
    estimate_text = f"{new_estimate} points" if new_estimate else "*Unestimated*"
    return STORY_ESTIMATE_TEMPLATE.format(
        story_name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
        estimate=estimate_text,
    )
def get_reference_by_id(payload: Dict[str, Any], ref_id: int) -> Dict[str, Any]:
    """Return the reference entry matching ``ref_id``, or an empty dict if absent."""
    matched: Dict[str, Any] = {}
    for candidate in payload["references"]:
        if candidate["id"] == ref_id:
            matched = candidate
    return matched
def get_secondary_actions_with_param(
    payload: Dict[str, Any], entity: str, changed_attr: str
) -> Generator[Dict[str, Any], None, None]:
    """Yield secondary actions of the given entity type whose changes include
    ``changed_attr`` — e.g. a "story" that had "pull_request_ids" changed."""
    for candidate in payload["actions"]:
        if candidate["entity_type"] != entity:
            continue
        if candidate["changes"].get(changed_attr) is None:
            continue
        yield candidate
def get_story_create_github_entity_body(
    payload: Dict[str, Any], action: Dict[str, Any], entity: str
) -> str:
    """Render the notification for a GitHub PR, PR comment, or branch linked to a story.

    ``action`` is the secondary story action; the PR/branch details come from
    the payload's primary action.
    """
    pull_request_action: Dict[str, Any] = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(**action),
        # PRs are identified by number; branches by name.
        "name": pull_request_action.get("number")
        if entity == "pull-request" or entity == "pull-request-comment"
        else pull_request_action.get("name"),
        "url": pull_request_action["url"],
        "workflow_state_template": "",
    }
    # Sometimes the workflow state of the story will not be changed when linking to a PR.
    if action["changes"].get("workflow_state_id") is not None:
        new_state_id = action["changes"]["workflow_state_id"]["new"]
        old_state_id = action["changes"]["workflow_state_id"]["old"]
        # Resolve the state ids to display names via the references list.
        new_state = get_reference_by_id(payload, new_state_id)["name"]
        old_state = get_reference_by_id(payload, old_state_id)["name"]
        kwargs["workflow_state_template"] = TRAILING_WORKFLOW_STATE_CHANGE_TEMPLATE.format(
            new=new_state, old=old_state
        )
    if entity == "pull-request":
        template = STORY_GITHUB_PR_TEMPLATE
    elif entity == "pull-request-comment":
        template = STORY_GITHUB_COMMENT_PR_TEMPLATE
    else:
        template = STORY_GITHUB_BRANCH_TEMPLATE
    return template.format(**kwargs)
def get_story_update_attachment_body(
    payload: Dict[str, Any], action: Dict[str, Any]
) -> Optional[str]:
    """Render the notification for a file attached to a story, or None on removal."""
    format_args = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    added_file_ids = action["changes"]["file_ids"].get("adds")
    # Attachment-removal payloads carry no "adds"; they are ignored.
    if not added_file_ids:
        return None
    attachment_id = added_file_ids[0]
    for reference in payload["references"]:
        if reference["id"] == attachment_id:
            format_args.update(
                type=reference["entity_type"],
                file_name=reference["name"],
            )
    return FILE_ATTACHMENT_TEMPLATE.format(**format_args)
def get_story_joined_label_list(
    payload: Dict[str, Any], action: Dict[str, Any], label_ids_added: List[int]
) -> str:
    """Return a comma-joined, bold-formatted list of label names for the given ids.

    Each label name is looked up first among the payload's actions (a label
    created by this event) and then among its references (a pre-existing label).
    """
    labels = []
    for label_id in label_ids_added:
        label_name = ""
        # Fix: the loop variable used to shadow the `action` parameter; use a
        # distinct name so the parameter stays intact for future use.
        for label_action in payload["actions"]:
            if label_action.get("id") == label_id:
                label_name = label_action.get("name", "")
        if label_name == "":
            label_name = get_reference_by_id(payload, label_id).get("name", "")
        labels.append(LABEL_TEMPLATE.format(name=label_name))
    return ", ".join(labels)
def get_story_label_body(payload: Dict[str, Any], action: Dict[str, Any]) -> Optional[str]:
    """Render the notification for labels added to a story, or None if none were added."""
    format_args = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    added_label_ids = action["changes"]["label_ids"].get("adds")
    # Label-removal payloads carry no "adds"; they are ignored.
    if not added_label_ids:
        return None
    format_args["labels"] = get_story_joined_label_list(payload, action, added_label_ids)
    template = STORY_LABEL_TEMPLATE if len(added_label_ids) == 1 else STORY_LABEL_PLURAL_TEMPLATE
    return template.format(**format_args)
def get_story_update_project_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a story moved between projects."""
    project_change = action["changes"]["project_id"]
    format_args = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    # Resolve both project ids to display names via the references list.
    for reference in payload["references"]:
        if reference["id"] == project_change["new"]:
            format_args.update(new=reference["name"])
        if reference["id"] == project_change["old"]:
            format_args.update(old=reference["name"])
    return STORY_UPDATE_PROJECT_TEMPLATE.format(**format_args)
def get_story_update_type_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a story's type change."""
    type_change = action["changes"]["story_type"]
    return STORY_UPDATE_TYPE_TEMPLATE.format(
        name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
        new_type=type_change["new"],
        old_type=type_change["old"],
    )
def get_story_update_owner_body(payload: Dict[str, Any], action: Dict[str, Any]) -> str:
    """Render the notification for a new owner added to a story."""
    name_template = STORY_NAME_TEMPLATE.format(
        name=action["name"],
        app_url=action["app_url"],
    )
    return STORY_UPDATE_OWNER_TEMPLATE.format(name_template=name_template)
def get_story_update_batch_body(payload: Dict[str, Any], action: Dict[str, Any]) -> Optional[str]:
    """Render one message summarizing a multi-attribute batch update to a story.

    When the user selects one or more stories with the checkbox, they can
    perform a batch update on multiple stories while changing multiple
    attributes at the same time.  Returns None when none of the changed
    attributes can be rendered.
    """
    changes = action["changes"]
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
        "workflow_state_template": "",
    }
    templates = []
    last_change = "other"
    move_sub_templates = []
    if "epic_id" in changes:
        last_change = "epic"
        move_sub_templates.append(
            STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                entity_type="Epic",
                old=get_reference_by_id(payload, changes["epic_id"].get("old")).get("name"),
                new=get_reference_by_id(payload, changes["epic_id"].get("new")).get("name"),
            )
        )
    if "project_id" in changes:
        last_change = "project"
        move_sub_templates.append(
            STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                entity_type="Project",
                old=get_reference_by_id(payload, changes["project_id"].get("old")).get("name"),
                new=get_reference_by_id(payload, changes["project_id"].get("new")).get("name"),
            )
        )
    # Epic and project moves are merged into a single "was moved" clause.
    if len(move_sub_templates) > 0:
        templates.append(
            STORY_UPDATE_BATCH_CHANGED_TEMPLATE.format(
                operation="was moved",
                sub_templates=", ".join(move_sub_templates),
            )
        )
    if "story_type" in changes:
        last_change = "type"
        templates.append(
            STORY_UPDATE_BATCH_CHANGED_TEMPLATE.format(
                operation="{} changed".format("was" if len(templates) == 0 else "and"),
                sub_templates=STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                    entity_type="type",
                    old=changes["story_type"].get("old"),
                    new=changes["story_type"].get("new"),
                ),
            )
        )
    if "label_ids" in changes:
        last_change = "label"
        label_ids_added = changes["label_ids"].get("adds")
        labels = get_story_joined_label_list(payload, action, label_ids_added)
        templates.append(
            STORY_UPDATE_BATCH_ADD_REMOVE_TEMPLATE.format(
                operation="{} added".format("was" if len(templates) == 0 else "and"),
                entity="the new label{plural} {labels}".format(
                    # Bug fix: pluralize by the number of labels added.  The old
                    # check `len(changes["label_ids"]) > 1` counted the keys of
                    # the change dict ("adds"/"removes"), not the labels.
                    plural="s" if len(label_ids_added) > 1 else "",
                    labels=labels,
                ),
            )
        )
    if "workflow_state_id" in changes:
        last_change = "state"
        kwargs.update(
            workflow_state_template=TRAILING_WORKFLOW_STATE_CHANGE_TEMPLATE.format(
                old=get_reference_by_id(payload, changes["workflow_state_id"].get("old")).get(
                    "name"
                ),
                new=get_reference_by_id(payload, changes["workflow_state_id"].get("new")).get(
                    "name"
                ),
            )
        )
    # Use the default template for the change if at most one was rendered.
    # (The old extra clause `len(templates) == 0 and last_change == "state"`
    # was redundant: it already implied `len(templates) <= 1`.)
    if len(templates) <= 1:
        event: str = "{}_{}".format("story_update", last_change)
        alternative_body_func = EVENT_BODY_FUNCTION_MAPPER.get(event)
        # If last_change is not one of "epic", "project", "type", "label" and "state"
        # we should ignore the action as there is no way for us to render the changes.
        if alternative_body_func is None:
            return None
        return alternative_body_func(payload, action)
    kwargs.update(templates=", ".join(templates))
    return STORY_UPDATE_BATCH_TEMPLATE.format(**kwargs)
def get_entity_name(
    payload: Dict[str, Any], action: Dict[str, Any], entity: Optional[str] = None
) -> Optional[str]:
    """Return the topic name for an event: the action's own name, or — for
    branches and name-less actions — the matching entity's name found among
    the payload's actions or references."""
    name = action.get("name")
    if name is None or action["entity_type"] == "branch":
        for sibling in payload["actions"]:
            if sibling["entity_type"] == entity:
                name = sibling["name"]
    if name is None:
        for reference in payload["references"]:
            if reference["entity_type"] == entity:
                name = reference["name"]
    return name
def get_name_template(entity: str) -> str:
    """Return the markdown name template for the given entity type."""
    return STORY_NAME_TEMPLATE if entity == "story" else EPIC_NAME_TEMPLATE
def send_stream_messages_for_actions(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: Dict[str, Any],
    action: Dict[str, Any],
    event: str,
) -> None:
    """Build the topic and body for one action and send the webhook message.

    Raises UnsupportedWebhookEventType when no body or topic builder is
    registered for this event.
    """
    body_func = EVENT_BODY_FUNCTION_MAPPER.get(event)
    topic_func = get_topic_function_based_on_type(payload, action)
    if body_func is None or topic_func is None:
        raise UnsupportedWebhookEventType(event)
    topic = topic_func(payload, action)
    body = body_func(payload, action)
    # Body/topic builders may return None/empty for payloads that should be ignored.
    if topic and body:
        check_send_webhook_message(request, user_profile, topic, body)
# Maps event name -> function rendering the message body (builders may return
# None for payloads that should be ignored).
EVENT_BODY_FUNCTION_MAPPER: Dict[str, Callable[[Dict[str, Any], Dict[str, Any]], Optional[str]]] = {
    "story_update_archived": partial(get_update_archived_body, entity="story"),
    "epic_update_archived": partial(get_update_archived_body, entity="epic"),
    "story_create": get_story_create_body,
    "pull-request_create": partial(get_story_create_github_entity_body, entity="pull-request"),
    "pull-request_comment": partial(
        get_story_create_github_entity_body, entity="pull-request-comment"
    ),
    "branch_create": partial(get_story_create_github_entity_body, entity="branch"),
    "story_delete": get_delete_body,
    "epic_delete": get_delete_body,
    "story-task_create": partial(get_story_task_body, operation="added to"),
    "story-task_delete": partial(get_story_task_body, operation="removed from"),
    "story-task_update_complete": get_story_task_completed_body,
    "story_update_epic": get_story_update_epic_body,
    "story_update_estimate": get_story_update_estimate_body,
    "story_update_attachment": get_story_update_attachment_body,
    "story_update_label": get_story_label_body,
    "story_update_owner": get_story_update_owner_body,
    "story_update_project": get_story_update_project_body,
    "story_update_type": get_story_update_type_body,
    "epic_create": get_epic_create_body,
    "epic-comment_create": partial(get_comment_added_body, entity="epic"),
    "story-comment_create": partial(get_comment_added_body, entity="story"),
    "epic_update_description": partial(get_update_description_body, entity="epic"),
    "story_update_description": partial(get_update_description_body, entity="story"),
    "epic_update_state": get_epic_update_state_body,
    "story_update_state": get_story_update_state_body,
    "epic_update_name": partial(get_update_name_body, entity="epic"),
    "story_update_name": partial(get_update_name_body, entity="story"),
    "story_update_batch": get_story_update_batch_body,
}
# Maps entity type -> function deriving the message topic (the related story
# or epic name).
EVENT_TOPIC_FUNCTION_MAPPER = {
    "story": partial(get_entity_name, entity="story"),
    "pull-request": partial(get_entity_name, entity="story"),
    "branch": partial(get_entity_name, entity="story"),
    "story-comment": partial(get_entity_name, entity="story"),
    "story-task": partial(get_entity_name, entity="story"),
    "epic": partial(get_entity_name, entity="epic"),
    "epic-comment": partial(get_entity_name, entity="epic"),
}
# Events that never produce a notification.
IGNORED_EVENTS = {
    "story-comment_update",
}
# Events rendered once per affected secondary action (e.g. one message per
# story linked to a new PR or branch).
EVENTS_SECONDARY_ACTIONS_FUNCTION_MAPPER: Dict[
    str, Callable[[Dict[str, Any]], Generator[Dict[str, Any], None, None]]
] = {
    "pull-request_create": partial(
        get_secondary_actions_with_param, entity="story", changed_attr="pull_request_ids"
    ),
    "branch_create": partial(
        get_secondary_actions_with_param, entity="story", changed_attr="branch_ids"
    ),
    "pull-request_comment": partial(
        get_secondary_actions_with_param, entity="story", changed_attr="pull_request_ids"
    ),
}
@webhook_view("ClubHouse")
@has_request_variables
def api_clubhouse_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: Optional[Dict[str, Any]] = REQ(argument_type="body"),
) -> HttpResponse:
    """Webhook entry point: route each payload action to a stream message."""
    # Clubhouse has a tendency to send empty POST requests to
    # third-party endpoints. It is unclear as to which event type
    # such requests correspond to. So, it is best to ignore such
    # requests for now.
    if payload is None:
        return json_success()
    # With a primary_id only that action is rendered; otherwise every action
    # is treated as primary (multi-story batch updates have no primary_id).
    if payload.get("primary_id") is not None:
        action = get_action_with_primary_id(payload)
        primary_actions = [action]
    else:
        primary_actions = payload["actions"]
    for primary_action in primary_actions:
        event = get_event(payload, primary_action)
        if event is None:
            continue
        if event in EVENTS_SECONDARY_ACTIONS_FUNCTION_MAPPER:
            # These events send one message per affected secondary (story) action.
            sec_actions_func = EVENTS_SECONDARY_ACTIONS_FUNCTION_MAPPER[event]
            for sec_action in sec_actions_func(payload):
                send_stream_messages_for_actions(request, user_profile, payload, sec_action, event)
        else:
            send_stream_messages_for_actions(request, user_profile, payload, primary_action, event)
    return json_success()
| {
"repo_name": "eeshangarg/zulip",
"path": "zerver/webhooks/clubhouse/view.py",
"copies": "4",
"size": "27208",
"license": "apache-2.0",
"hash": -1667618916099025400,
"line_mean": 35.7179487179,
"line_max": 136,
"alpha_frac": 0.6091590709,
"autogenerated": false,
"ratio": 3.5743562795585917,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6183515350458593,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, Callable, Dict, Optional
from django.http import HttpRequest, HttpResponse
from zerver.decorator import webhook_view
from zerver.lib.exceptions import UnsupportedWebhookEventType
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
# Markdown name templates: epics render bold, stories render as links.
EPIC_NAME_TEMPLATE = "**{name}**"
STORY_NAME_TEMPLATE = "[{name}]({app_url})"
# Comment / description change templates.
COMMENT_ADDED_TEMPLATE = (
    "New comment added to the {entity} {name_template}:\n``` quote\n{text}\n```"
)
NEW_DESC_ADDED_TEMPLATE = (
    "New description added to the {entity} {name_template}:\n``` quote\n{new}\n```"
)
DESC_CHANGED_TEMPLATE = (
    "Description for the {entity} {name_template} was changed from:\n"
    "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```"
)
DESC_REMOVED_TEMPLATE = "Description for the {entity} {name_template} was removed."
# State / name / archive change templates.
STATE_CHANGED_TEMPLATE = (
    "State of the {entity} {name_template} was changed from **{old}** to **{new}**."
)
NAME_CHANGED_TEMPLATE = (
    "The name of the {entity} {name_template} was changed from:\n"
    "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```"
)
ARCHIVED_TEMPLATE = "The {entity} {name_template} was {action}."
# Story task / epic membership / estimate / attachment / label templates.
STORY_TASK_TEMPLATE = "Task **{task_description}** was {action} the story {name_template}."
STORY_TASK_COMPLETED_TEMPLATE = (
    "Task **{task_description}** ({name_template}) was completed. :tada:"
)
STORY_ADDED_REMOVED_EPIC_TEMPLATE = (
    "The story {story_name_template} was {action} the epic {epic_name_template}."
)
STORY_EPIC_CHANGED_TEMPLATE = "The story {story_name_template} was moved from {old_epic_name_template} to {new_epic_name_template}."
STORY_ESTIMATE_TEMPLATE = "The estimate for the story {story_name_template} was set to {estimate}."
FILE_ATTACHMENT_TEMPLATE = (
    "A {type} attachment `{file_name}` was added to the story {name_template}."
)
STORY_LABEL_TEMPLATE = "The label **{label_name}** was added to the story {name_template}."
# Project / type / deletion / owner templates.
STORY_UPDATE_PROJECT_TEMPLATE = (
    "The story {name_template} was moved from the **{old}** project to **{new}**."
)
STORY_UPDATE_TYPE_TEMPLATE = (
    "The type of the story {name_template} was changed from **{old_type}** to **{new_type}**."
)
DELETE_TEMPLATE = "The {entity_type} **{name}** was deleted."
STORY_UPDATE_OWNER_TEMPLATE = "New owner added to the story {name_template}."
# GitHub PR / branch link templates.
STORY_GITHUB_PR_TEMPLATE = (
    "New GitHub PR [#{name}]({url}) opened for story {name_template} ({old} -> {new})."
)
STORY_GITHUB_BRANCH_TEMPLATE = (
    "New GitHub branch [{name}]({url}) associated with story {name_template} ({old} -> {new})."
)
def get_action_with_primary_id(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Return the payload action whose id equals the payload's primary_id."""
    target_id = payload["primary_id"]
    for entry in payload["actions"]:
        if entry["id"] == target_id:
            matched = entry
    return matched
def get_event(payload: Dict[str, Any]) -> Optional[str]:
    """Derive the internal event name for the payload's primary action."""
    action = get_action_with_primary_id(payload)
    event = "{}_{}".format(action["entity_type"], action["action"])
    if event in IGNORED_EVENTS:
        return None
    changes = action.get("changes")
    if changes is None:
        return event
    # The first changed attribute below (in priority order) refines the event.
    attribute_suffixes = (
        ("description", "description"),
        ("state", "state"),
        ("workflow_state_id", "state"),
        ("name", "name"),
        ("archived", "archived"),
        ("complete", "complete"),
        ("epic_id", "epic"),
        ("estimate", "estimate"),
        ("file_ids", "attachment"),
        ("label_ids", "label"),
        ("project_id", "project"),
        ("story_type", "type"),
        ("owner_ids", "owner"),
    )
    for attribute, suffix in attribute_suffixes:
        if changes.get(attribute) is not None:
            return "{}_{}".format(event, suffix)
    return event
def get_topic_function_based_on_type(payload: Dict[str, Any]) -> Any:
    """Look up the topic-building function for the primary action's entity type."""
    action = get_action_with_primary_id(payload)
    return EVENT_TOPIC_FUNCTION_MAPPER.get(action["entity_type"])
def get_delete_body(payload: Dict[str, Any]) -> str:
    """Render the notification for a deleted story or epic."""
    return DELETE_TEMPLATE.format(**get_action_with_primary_id(payload))
def get_story_create_body(payload: Dict[str, Any]) -> str:
    """Render the notification for a newly created story."""
    action = get_action_with_primary_id(payload)
    epic_id = action.get("epic_id")
    if epic_id is None:
        template = "New story [{name}]({app_url}) of type **{story_type}** was created."
        return template.format(**action)
    template = "New story [{name}]({app_url}) was created and added to the epic **{epic_name}**."
    format_args = {"name": action["name"], "app_url": action["app_url"]}
    # Resolve the epic's display name from the payload's reference list.
    for reference in payload["references"]:
        if reference["id"] == epic_id:
            format_args["epic_name"] = reference["name"]
    return template.format(**format_args)
def get_epic_create_body(payload: Dict[str, Any]) -> str:
    """Render the notification for a newly created epic."""
    action = get_action_with_primary_id(payload)
    return "New epic **{name}**({state}) was created.".format(**action)
def get_comment_added_body(payload: Dict[str, Any], entity: str) -> str:
    """Render the notification for a comment added to a story or epic."""
    format_args: Dict[str, Any] = {"entity": entity}
    primary_id = payload["primary_id"]
    # The comment text lives on the primary action; the commented-on entity's
    # name/link comes from the sibling action of the matching entity type.
    for current in payload["actions"]:
        if current["id"] == primary_id:
            format_args["text"] = current["text"]
        elif current["entity_type"] == entity:
            format_args["name_template"] = get_name_template(entity).format(
                name=current["name"],
                app_url=current.get("app_url"),
            )
    return COMMENT_ADDED_TEMPLATE.format(**format_args)
def get_update_description_body(payload: Dict[str, Any], entity: str) -> str:
    """Render the notification for a changed, added, or removed description."""
    action = get_action_with_primary_id(payload)
    change = action["changes"]["description"]
    format_args = {
        "entity": entity,
        "new": change["new"],
        "old": change["old"],
        "name_template": get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    }
    # Pick the template based on which sides of the change are non-empty.
    if change["new"] and change["old"]:
        template = DESC_CHANGED_TEMPLATE
    elif change["new"]:
        template = NEW_DESC_ADDED_TEMPLATE
    else:
        template = DESC_REMOVED_TEMPLATE
    return template.format(**format_args)
def get_epic_update_state_body(payload: Dict[str, Any]) -> str:
    """Render the notification for an epic's state change."""
    action = get_action_with_primary_id(payload)
    change = action["changes"]["state"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="epic",
        new=change["new"],
        old=change["old"],
        name_template=EPIC_NAME_TEMPLATE.format(name=action["name"]),
    )
def get_story_update_state_body(payload: Dict[str, Any]) -> str:
    """Render the notification for a story's workflow-state change."""
    action = get_action_with_primary_id(payload)
    state_change = action["changes"]["workflow_state_id"]
    resolved: Dict[str, str] = {}
    # Workflow-state ids are resolved to display names via the references list.
    for reference in payload["references"]:
        if reference["id"] == state_change["new"]:
            resolved["new"] = reference["name"]
        if reference["id"] == state_change["old"]:
            resolved["old"] = reference["name"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="story",
        new=resolved["new"],
        old=resolved["old"],
        name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    )
def get_update_name_body(payload: Dict[str, Any], entity: str) -> str:
    """Render the notification for a renamed story or epic."""
    action = get_action_with_primary_id(payload)
    change = action["changes"]["name"]
    return NAME_CHANGED_TEMPLATE.format(
        entity=entity,
        new=change["new"],
        old=change["old"],
        name_template=get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    )
def get_update_archived_body(payload: Dict[str, Any], entity: str) -> str:
    """Render the notification for an archived or unarchived story/epic."""
    primary_action = get_action_with_primary_id(payload)
    operation = "archived" if primary_action["changes"]["archived"]["new"] else "unarchived"
    return ARCHIVED_TEMPLATE.format(
        entity=entity,
        name_template=get_name_template(entity).format(
            name=primary_action["name"],
            app_url=primary_action.get("app_url"),
        ),
        action=operation,
    )
def get_story_task_body(payload: Dict[str, Any], action: str) -> str:
    """Render the notification for a task added to / removed from a story."""
    primary_action = get_action_with_primary_id(payload)
    format_args = {
        "task_description": primary_action["description"],
        "action": action,
    }
    # The owning story is a sibling action; it supplies the name/link.
    for sibling in payload["actions"]:
        if sibling["entity_type"] == "story":
            format_args["name_template"] = STORY_NAME_TEMPLATE.format(
                name=sibling["name"],
                app_url=sibling["app_url"],
            )
    return STORY_TASK_TEMPLATE.format(**format_args)
def get_story_task_completed_body(payload: Dict[str, Any]) -> Optional[str]:
    """Render the task-completed notification, or None when the task was un-completed."""
    action = get_action_with_primary_id(payload)
    format_args = {"task_description": action["description"]}
    story_id = action["story_id"]
    # The owning story's name/link comes from the references list.
    for reference in payload["references"]:
        if reference["id"] == story_id:
            format_args["name_template"] = STORY_NAME_TEMPLATE.format(
                name=reference["name"],
                app_url=reference["app_url"],
            )
    if action["changes"]["complete"]["new"]:
        return STORY_TASK_COMPLETED_TEMPLATE.format(**format_args)
    return None
def get_story_update_epic_body(payload: Dict[str, Any]) -> str:
    """Render the notification for a story moved between, added to, or removed from epics."""
    action = get_action_with_primary_id(payload)
    format_args = {
        "story_name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    epic_change = action["changes"]["epic_id"]
    new_epic_id = epic_change.get("new")
    old_epic_id = epic_change.get("old")
    # Resolve the involved epic ids to display names via the references list.
    for reference in payload["references"]:
        if reference["id"] == new_epic_id:
            format_args["new_epic_name_template"] = EPIC_NAME_TEMPLATE.format(name=reference["name"])
        if reference["id"] == old_epic_id:
            format_args["old_epic_name_template"] = EPIC_NAME_TEMPLATE.format(name=reference["name"])
    if new_epic_id and old_epic_id:
        return STORY_EPIC_CHANGED_TEMPLATE.format(**format_args)
    if new_epic_id:
        format_args["epic_name_template"] = format_args["new_epic_name_template"]
        format_args["action"] = "added to"
    else:
        format_args["epic_name_template"] = format_args["old_epic_name_template"]
        format_args["action"] = "removed from"
    return STORY_ADDED_REMOVED_EPIC_TEMPLATE.format(**format_args)
def get_story_update_estimate_body(payload: Dict[str, Any]) -> str:
    """Render the notification for a story's estimate being set or cleared."""
    action = get_action_with_primary_id(payload)
    new_estimate = action["changes"]["estimate"].get("new")
    estimate_text = f"{new_estimate} points" if new_estimate else "*Unestimated*"
    return STORY_ESTIMATE_TEMPLATE.format(
        story_name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
        estimate=estimate_text,
    )
def get_reference_by_id(payload: Dict[str, Any], ref_id: int) -> Dict[str, Any]:
    """Return the reference entry whose id matches ``ref_id``, or {} if none does."""
    matched: Dict[str, Any] = {}
    for candidate in payload["references"]:
        if candidate["id"] == ref_id:
            matched = candidate
    return matched
def get_story_create_github_entity_body(payload: Dict[str, Any], entity: str) -> str:
    """Render the notification for a GitHub PR or branch linked to a story.

    The primary action carries the PR/branch details; the story whose
    workflow state changed is found among the sibling actions.
    """
    action = get_action_with_primary_id(payload)
    story: Dict[str, Any] = {}
    # NOTE(review): if no sibling story action has a "workflow_state_id" change,
    # `story` stays empty and the lookups below raise KeyError — presumably such
    # payloads never reach this handler; confirm against the event routing.
    for a in payload["actions"]:
        if a["entity_type"] == "story" and a["changes"].get("workflow_state_id") is not None:
            story = a
    new_state_id = story["changes"]["workflow_state_id"]["new"]
    old_state_id = story["changes"]["workflow_state_id"]["old"]
    # Resolve the workflow-state ids to display names via the references list.
    new_state = get_reference_by_id(payload, new_state_id)["name"]
    old_state = get_reference_by_id(payload, old_state_id)["name"]
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(**story),
        # PRs are identified by number; branches by name.
        "name": action.get("number") if entity == "pull-request" else action.get("name"),
        "url": action["url"],
        "new": new_state,
        "old": old_state,
    }
    template = (
        STORY_GITHUB_PR_TEMPLATE if entity == "pull-request" else STORY_GITHUB_BRANCH_TEMPLATE
    )
    return template.format(**kwargs)
def get_story_update_attachment_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the notification for a file being attached to a story.

    Returns None for attachment-removal payloads, which are ignored.
    """
    action = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    added_ids = action["changes"]["file_ids"].get("adds")
    if not added_ids:
        # No additions means this payload describes a removal.
        return None
    first_added = added_ids[0]
    for ref in payload["references"]:
        if ref["id"] == first_added:
            kwargs["type"] = ref["entity_type"]
            kwargs["file_name"] = ref["name"]
    return FILE_ATTACHMENT_TEMPLATE.format(**kwargs)
def get_story_label_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the notification for a label being added to a story.

    Returns None for label-removal payloads, which are ignored.

    Fix: the lookup loops previously reused the name ``action``, shadowing
    the primary action fetched at the top; the loop variables are renamed
    so each name refers to exactly one thing.
    """
    primary = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=primary["name"],
            app_url=primary["app_url"],
        ),
    }
    label_ids_added = primary["changes"]["label_ids"].get("adds")
    # If this is a payload for when a label is removed, ignore it
    if not label_ids_added:
        return None
    label_id = label_ids_added[0]
    # Resolve the label's name: newer payloads carry it in "actions",
    # older ones in "references".
    label_name = ""
    for label_action in payload["actions"]:
        if label_action["id"] == label_id:
            label_name = label_action.get("name", "")
    if not label_name:
        for reference in payload["references"]:
            if reference["id"] == label_id:
                label_name = reference.get("name", "")
    kwargs.update(label_name=label_name)
    return STORY_LABEL_TEMPLATE.format(**kwargs)
def get_story_update_project_body(payload: Dict[str, Any]) -> str:
    """Build the notification for a story moving between projects."""
    action = get_action_with_primary_id(payload)
    project_change = action["changes"]["project_id"]
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    # Project ids are resolved to names via the references list.
    for ref in payload["references"]:
        if ref["id"] == project_change["new"]:
            kwargs["new"] = ref["name"]
        if ref["id"] == project_change["old"]:
            kwargs["old"] = ref["name"]
    return STORY_UPDATE_PROJECT_TEMPLATE.format(**kwargs)
def get_story_update_type_body(payload: Dict[str, Any]) -> str:
    """Build the notification for a story's type being changed."""
    action = get_action_with_primary_id(payload)
    type_change = action["changes"]["story_type"]
    name_template = STORY_NAME_TEMPLATE.format(
        name=action["name"],
        app_url=action["app_url"],
    )
    return STORY_UPDATE_TYPE_TEMPLATE.format(
        name_template=name_template,
        new_type=type_change["new"],
        old_type=type_change["old"],
    )
def get_story_update_owner_body(payload: Dict[str, Any]) -> str:
    """Build the notification for a story's owner being updated."""
    action = get_action_with_primary_id(payload)
    name_template = STORY_NAME_TEMPLATE.format(
        name=action["name"],
        app_url=action["app_url"],
    )
    return STORY_UPDATE_OWNER_TEMPLATE.format(name_template=name_template)
def get_entity_name(payload: Dict[str, Any], entity: Optional[str] = None) -> Optional[str]:
    """Resolve the display name for the given entity type in the payload.

    Falls back from the primary action to the sibling actions and then to
    the references list when the primary action lacks a usable name.

    Fix: the first lookup loop previously reused the name ``action``,
    shadowing the primary action variable; renamed for clarity.
    """
    primary = get_action_with_primary_id(payload)
    name = primary.get("name")
    if name is None or primary["entity_type"] == "branch":
        # Branch actions carry the branch name rather than the story name,
        # so search the other actions for the requested entity type.
        for sibling in payload["actions"]:
            if sibling["entity_type"] == entity:
                name = sibling["name"]
    if name is None:
        for ref in payload["references"]:
            if ref["entity_type"] == entity:
                name = ref["name"]
    return name
def get_name_template(entity: str) -> str:
    """Return the name template matching the entity type (story vs epic)."""
    return STORY_NAME_TEMPLATE if entity == "story" else EPIC_NAME_TEMPLATE
# Maps "<entity_type>_<action>[_<changed_field>]" event keys (as computed by
# get_body_function_based_on_type) to the function that renders the message
# body. A renderer may return None to suppress the notification.
EVENT_BODY_FUNCTION_MAPPER: Dict[str, Callable[[Dict[str, Any]], Optional[str]]] = {
    "story_update_archived": partial(get_update_archived_body, entity="story"),
    "epic_update_archived": partial(get_update_archived_body, entity="epic"),
    "story_create": get_story_create_body,
    "pull-request_create": partial(get_story_create_github_entity_body, entity="pull-request"),
    "branch_create": partial(get_story_create_github_entity_body, entity="branch"),
    "story_delete": get_delete_body,
    "epic_delete": get_delete_body,
    "story-task_create": partial(get_story_task_body, action="added to"),
    "story-task_delete": partial(get_story_task_body, action="removed from"),
    "story-task_update_complete": get_story_task_completed_body,
    "story_update_epic": get_story_update_epic_body,
    "story_update_estimate": get_story_update_estimate_body,
    "story_update_attachment": get_story_update_attachment_body,
    "story_update_label": get_story_label_body,
    "story_update_owner": get_story_update_owner_body,
    "story_update_project": get_story_update_project_body,
    "story_update_type": get_story_update_type_body,
    "epic_create": get_epic_create_body,
    "epic-comment_create": partial(get_comment_added_body, entity="epic"),
    "story-comment_create": partial(get_comment_added_body, entity="story"),
    "epic_update_description": partial(get_update_description_body, entity="epic"),
    "story_update_description": partial(get_update_description_body, entity="story"),
    "epic_update_state": get_epic_update_state_body,
    "story_update_state": get_story_update_state_body,
    "epic_update_name": partial(get_update_name_body, entity="epic"),
    "story_update_name": partial(get_update_name_body, entity="story"),
}
# Maps an action's entity_type to the function that derives the topic;
# every topic is named after the parent story or epic.
EVENT_TOPIC_FUNCTION_MAPPER = {
    "story": partial(get_entity_name, entity="story"),
    "pull-request": partial(get_entity_name, entity="story"),
    "branch": partial(get_entity_name, entity="story"),
    "story-comment": partial(get_entity_name, entity="story"),
    "story-task": partial(get_entity_name, entity="story"),
    "epic": partial(get_entity_name, entity="epic"),
    "epic-comment": partial(get_entity_name, entity="epic"),
}
# Event types we deliberately drop without notifying.
IGNORED_EVENTS = {
    "story-comment_update",
}
@webhook_view("ClubHouse")
@has_request_variables
def api_clubhouse_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: Optional[Dict[str, Any]] = REQ(argument_type="body"),
) -> HttpResponse:
    """Webhook entry point: classify the Clubhouse payload, render the
    topic and body, and send the message."""
    # Clubhouse has a tendency to send empty POST requests to
    # third-party endpoints. It is unclear as to which event type
    # such requests correspond to. So, it is best to ignore such
    # requests for now.
    if payload is None:
        return json_success()
    event = get_event(payload)
    if event is None:
        # Ignored or unclassifiable event; acknowledge silently.
        return json_success()
    body_func = EVENT_BODY_FUNCTION_MAPPER.get(event)
    topic_func = get_topic_function_based_on_type(payload)
    if body_func is None or topic_func is None:
        raise UnsupportedWebhookEventType(event)
    topic = topic_func(payload)
    body = body_func(payload)
    # Renderers may return None/"" to suppress a notification.
    if topic and body:
        check_send_webhook_message(request, user_profile, topic, body)
    return json_success()
| {
"repo_name": "hackerkid/zulip",
"path": "zerver/webhooks/clubhouse/view.py",
"copies": "2",
"size": "20140",
"license": "apache-2.0",
"hash": -8247584183439700000,
"line_mean": 33.3100511073,
"line_max": 132,
"alpha_frac": 0.6104766634,
"autogenerated": false,
"ratio": 3.4959208470751606,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005308197962241581,
"num_lines": 587
} |
from functools import partial
from typing import Any, Callable, Generic, List
from typing import Optional as Opt
from typing import overload
from .types import Built, FieldMap, SaveHooks, Seed, T
class Blueprint(Generic[T]):
    """A reusable recipe for creating instances of ``model`` via the faker
    factory.

    Stores default field values, pre/post-save hooks and a seed; every
    builder method lets call-time arguments override these defaults.
    """

    def __init__(self, model, fields=None, pre_save=None, post_save=None, seed=None):
        # type: (T, Opt[FieldMap], Opt[SaveHooks], Opt[SaveHooks], Opt[Seed]) -> None
        # Imported lazily to avoid a circular import at module load time.
        from .faker_factory import factory
        self.factory = factory
        self._model = model
        self._fields = fields or {}
        self.seed = seed
        self.pre_save = pre_save
        self.post_save = post_save
        self.pk = -1

    def fields(self, **kwargs):
        # type: (**Any) -> "Blueprint"
        """Return a new Blueprint with ``kwargs`` merged over the defaults."""
        return Blueprint(
            model=self._model,
            fields=dict(self._fields, **kwargs),
            pre_save=self.pre_save,
            post_save=self.post_save,
            seed=self.seed,
        )

    def make_one(
        self, fields=None, pre_save=None, post_save=None, seed=None, iteration=None
    ):
        # type: (Opt[FieldMap], Opt[SaveHooks], Opt[SaveHooks], Opt[Seed], Opt[int]) -> T
        """Create and save one instance; unset arguments fall back to the
        blueprint's defaults."""
        _fields = self._fields.copy()
        if fields:
            _fields.update(fields)
        if seed is None:
            seed = self.seed
        if pre_save is None:
            pre_save = self.pre_save
        if post_save is None:
            post_save = self.post_save
        return self.factory.make_one(
            self._model, _fields, pre_save, post_save, seed, iteration
        )

    @overload
    def make(self, fields, pre_save, post_save, seed, quantity):  # pragma: no cover
        # type: (Opt[FieldMap], Opt[SaveHooks], Opt[SaveHooks], Opt[Seed], None) -> T
        pass

    @overload
    def make(self, fields, pre_save, post_save, seed, quantity):  # pragma: no cover
        # type: (Opt[FieldMap], Opt[SaveHooks], Opt[SaveHooks], Opt[Seed], int) -> List[T]
        pass

    def make(
        self, fields=None, pre_save=None, post_save=None, seed=None, quantity=None
    ):
        """Create and save one instance (quantity=None) or a list of them."""
        _fields = self._fields.copy()
        if fields:
            _fields.update(fields)
        if seed is None:
            seed = self.seed
        if pre_save is None:
            pre_save = self.pre_save
        if post_save is None:
            post_save = self.post_save
        return self.factory.make(
            self._model, _fields, pre_save, post_save, seed, quantity
        )

    @overload
    def build(self, fields, pre_save, seed, quantity, make_fks):  # pragma: no cover
        # type: (Opt[FieldMap], Opt[SaveHooks], Opt[Seed], None, bool) -> Built
        pass

    @overload
    def build(self, fields, pre_save, seed, quantity, make_fks):  # pragma: no cover
        # type: (Opt[FieldMap], Opt[SaveHooks], Opt[Seed], int, bool) -> List[Built]
        pass

    def build(
        self, fields=None, pre_save=None, seed=None, quantity=None, make_fks=False
    ):
        """Build unsaved instance(s); ``make_fks`` controls whether foreign
        keys are created."""
        _fields = self._fields.copy()
        if fields:
            _fields.update(fields)
        if seed is None:
            seed = self.seed
        if pre_save is None:
            pre_save = self.pre_save
        return self.factory.build(
            self._model, _fields, pre_save, seed, quantity, make_fks
        )

    @overload
    def m(self, pre_save, post_save, seed, quantity):  # pragma: no cover
        # type: (Opt[SaveHooks], Opt[SaveHooks], Opt[Seed], None) -> Callable[..., T]
        pass

    @overload
    def m(self, pre_save, post_save, seed, quantity):  # pragma: no cover
        # type: (Opt[SaveHooks], Opt[SaveHooks], Opt[Seed], int) -> Callable[..., List[T]]
        pass

    def m(self, pre_save=None, post_save=None, seed=None, quantity=None):
        """Return a shorthand callable: ``fn(**fields)`` -> ``make(...)``."""
        make = partial(
            self.make,
            pre_save=pre_save,
            post_save=post_save,
            seed=seed,
            quantity=quantity,
        )

        def fn(**kwargs):
            return make(fields=kwargs)
        return fn

    @overload
    def b(self, pre_save, seed, quantity, make_fks):  # pragma: no cover
        # type: (Opt[SaveHooks], Opt[Seed], None, bool) -> Callable[..., Built]
        pass

    @overload
    def b(self, pre_save, seed, quantity, make_fks):  # pragma: no cover
        # type: (Opt[SaveHooks], Opt[Seed], int, bool) -> Callable[..., List[Built]]
        pass

    def b(self, pre_save=None, seed=None, quantity=None, make_fks=False):
        """Return a shorthand callable: ``fn(**fields)`` -> ``build(...)``."""
        build = partial(
            self.build,
            pre_save=pre_save,
            seed=seed,
            quantity=quantity,
            make_fks=make_fks,
        )

        def fn(**kwargs):
            return build(fields=kwargs)
        return fn
| {
"repo_name": "fcurella/django-fakery",
"path": "django_fakery/blueprint.py",
"copies": "1",
"size": "4698",
"license": "mit",
"hash": 3987016501298142700,
"line_mean": 29.3096774194,
"line_max": 90,
"alpha_frac": 0.5589612601,
"autogenerated": false,
"ratio": 3.4955357142857144,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45544969743857144,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, Dict, Iterable, Optional
from inspect import signature
import re
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message, \
validate_extract_webhook_http_header, UnexpectedWebhookEventType
from zerver.lib.webhooks.git import EMPTY_SHA, \
TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE, \
get_commits_comment_action_message, get_issue_event_message, \
get_pull_request_event_message, get_push_commits_event_message, \
get_push_tag_event_message, get_remove_branch_event_message
from zerver.models import UserProfile
def get_push_event_body(payload: Dict[str, Any]) -> str:
    """Dispatch a push payload to the branch-removal or normal-push renderer."""
    # An all-zero "after" SHA signals the branch was deleted.
    branch_removed = payload.get('after') == EMPTY_SHA
    if branch_removed:
        return get_remove_branch_event_body(payload)
    return get_normal_push_event_body(payload)
def get_normal_push_event_body(payload: Dict[str, Any]) -> str:
    """Render a push of one or more commits to a branch."""
    compare_url = u'{}/compare/{}...{}'.format(
        get_repository_homepage(payload),
        payload['before'],
        payload['after'],
    )
    commits = []
    for commit in payload['commits']:
        commits.append({
            'name': commit.get('author').get('name'),
            'sha': commit.get('id'),
            'message': commit.get('message'),
            'url': commit.get('url'),
        })
    return get_push_commits_event_message(
        get_user_name(payload),
        compare_url,
        get_branch_name(payload),
        commits,
    )
def get_remove_branch_event_body(payload: Dict[str, Any]) -> str:
    """Render a branch deletion."""
    user = get_user_name(payload)
    branch = get_branch_name(payload)
    return get_remove_branch_event_message(user, branch)
def get_tag_push_event_body(payload: Dict[str, Any]) -> str:
    """Render a tag being pushed or removed."""
    # A missing checkout_sha means the tag was deleted.
    tag_action = "pushed" if payload.get('checkout_sha') else "removed"
    return get_push_tag_event_message(
        get_user_name(payload),
        get_tag_name(payload),
        action=tag_action,
    )
def get_issue_created_event_body(payload: Dict[str, Any],
                                 include_title: Optional[bool]=False) -> str:
    """Render a newly opened issue, stripping hidden HTML comments from
    its description."""
    description = payload['object_attributes'].get('description')
    # Filter out multiline hidden comments
    if description is not None:
        description = re.sub('<!--.*?-->', '', description, 0, re.DOTALL)
        description = description.rstrip()
    return get_issue_event_message(
        get_issue_user_name(payload),
        'created',
        get_object_url(payload),
        payload['object_attributes'].get('iid'),
        description,
        get_objects_assignee(payload),
        payload.get('assignees'),
        # The title is only shown when the user overrode the topic.
        title=payload['object_attributes'].get('title') if include_title else None
    )
def get_issue_event_body(payload: Dict[str, Any], action: str,
                         include_title: Optional[bool]=False) -> str:
    """Render a non-create issue event (close/reopen/update)."""
    attrs = payload['object_attributes']
    title = attrs.get('title') if include_title else None
    return get_issue_event_message(
        get_issue_user_name(payload),
        action,
        get_object_url(payload),
        attrs.get('iid'),
        title=title,
    )
def get_merge_request_updated_event_body(payload: Dict[str, Any],
                                         include_title: Optional[bool]=False) -> str:
    """Render an MR update, distinguishing new commits from metadata edits."""
    # "oldrev" is only present when commits were added to the MR.
    commits_added = payload['object_attributes'].get('oldrev')
    if commits_added:
        return get_merge_request_event_body(
            payload, "added commit(s) to",
            include_title=include_title,
        )
    return get_merge_request_open_or_updated_body(
        payload, "updated",
        include_title=include_title,
    )
def get_merge_request_event_body(payload: Dict[str, Any], action: str,
                                 include_title: Optional[bool]=False) -> str:
    """Render a short merge-request event (approve/merge/close/reopen)."""
    mr = payload['object_attributes']
    title = mr.get('title') if include_title else None
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        mr.get('url'),
        mr.get('iid'),
        type='MR',
        title=title,
    )
def get_merge_request_open_or_updated_body(payload: Dict[str, Any], action: str,
                                           include_title: Optional[bool]=False) -> str:
    """Render an MR open/update with branches, description and assignee."""
    mr = payload['object_attributes']
    title = mr.get('title') if include_title else None
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        mr.get('url'),
        mr.get('iid'),
        mr.get('source_branch'),
        mr.get('target_branch'),
        mr.get('description'),
        get_objects_assignee(payload),
        type='MR',
        title=title,
    )
def get_objects_assignee(payload: Dict[str, Any]) -> Optional[str]:
    """Return the assignee's name from a GitLab payload, if any.

    Prefers the singular 'assignee' object; otherwise falls back to the
    first entry of the plural 'assignees' list. Returns None when neither
    yields a name.

    Fix: the original fetched payload['assignees'] twice and used a loop
    whose body unconditionally returned on the first iteration.
    """
    assignee_object = payload.get('assignee')
    if assignee_object:
        return assignee_object.get('name')
    assignees = payload.get('assignees')
    if assignees:
        # Only the first assignee is reported, matching prior behavior.
        return assignees[0]['name']
    return None
def get_commented_commit_event_body(payload: Dict[str, Any]) -> str:
    """Render a comment left on a commit."""
    comment = payload['object_attributes']
    commit = payload['commit']
    return get_commits_comment_action_message(
        get_issue_user_name(payload),
        u'[commented]({})'.format(comment['url']),
        commit.get('url'),
        commit.get('id'),
        comment['note'],
    )
def get_commented_merge_request_event_body(payload: Dict[str, Any],
                                           include_title: Optional[bool]=False) -> str:
    """Render a comment left on a merge request."""
    comment = payload['object_attributes']
    mr = payload['merge_request']
    url = u'{}/merge_requests/{}'.format(
        payload['project'].get('web_url'),
        mr.get('iid'),
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        url,
        mr.get('iid'),
        message=comment['note'],
        type='MR',
        title=mr.get('title') if include_title else None,
    )
def get_commented_issue_event_body(payload: Dict[str, Any],
                                   include_title: Optional[bool]=False) -> str:
    """Render a comment left on an issue."""
    comment = payload['object_attributes']
    issue = payload['issue']
    url = u'{}/issues/{}'.format(
        payload['project'].get('web_url'),
        issue.get('iid'),
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        url,
        issue.get('iid'),
        message=comment['note'],
        type='Issue',
        title=issue.get('title') if include_title else None,
    )
def get_commented_snippet_event_body(payload: Dict[str, Any],
                                     include_title: Optional[bool]=False) -> str:
    """Render a comment left on a snippet."""
    comment = payload['object_attributes']
    snippet = payload['snippet']
    url = u'{}/snippets/{}'.format(
        payload['project'].get('web_url'),
        snippet.get('id'),
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        url,
        snippet.get('id'),
        message=comment['note'],
        type='Snippet',
        title=snippet.get('title') if include_title else None,
    )
def get_wiki_page_event_body(payload: Dict[str, Any], action: str) -> str:
    """Render a wiki page being created or updated."""
    attrs = payload['object_attributes']
    return u"{} {} [Wiki Page \"{}\"]({}).".format(
        get_issue_user_name(payload),
        action,
        attrs.get('title'),
        attrs.get('url'),
    )
def get_build_hook_event_body(payload: Dict[str, Any]) -> str:
    """Render a build (job) status change."""
    build_status = payload.get('build_status')
    # Map the two special statuses; everything else is reported verbatim.
    phrases = {'created': 'was created', 'running': 'started'}
    action = phrases.get(build_status, 'changed status to {}'.format(build_status))
    return u"Build {} from {} stage {}.".format(
        payload.get('build_name'),
        payload.get('build_stage'),
        action,
    )
def get_test_event_body(payload: Dict[str, Any]) -> str:
    """Render the confirmation message for GitLab's 'Test Hook' ping."""
    repo = get_repo_name(payload)
    return u"Webhook for **{repo}** has been configured successfully! :tada:".format(repo=repo)
def get_pipeline_event_body(payload: Dict[str, Any]) -> str:
    """Render a pipeline status change with a bullet list of its builds."""
    pipeline_status = payload['object_attributes'].get('status')
    if pipeline_status == 'pending':
        action = 'was created'
    elif pipeline_status == 'running':
        action = 'started'
    else:
        action = 'changed status to {}'.format(pipeline_status)
    build_lines = [
        u"* {} - {}".format(build.get('name'), build.get('status'))
        for build in payload['builds']
    ]
    return u"Pipeline {} with build(s):\n{}.".format(action, u"\n".join(build_lines))
# Thin accessors that centralize where each field lives in GitLab payloads.
def get_repo_name(payload: Dict[str, Any]) -> str:
    return payload['project']['name']
def get_user_name(payload: Dict[str, Any]) -> str:
    return payload['user_name']
def get_issue_user_name(payload: Dict[str, Any]) -> str:
    return payload['user']['name']
def get_repository_homepage(payload: Dict[str, Any]) -> str:
    return payload['repository']['homepage']
def get_branch_name(payload: Dict[str, Any]) -> str:
    # 'ref' looks like "refs/heads/<branch>".
    return payload['ref'].replace('refs/heads/', '')
def get_tag_name(payload: Dict[str, Any]) -> str:
    # 'ref' looks like "refs/tags/<tag>".
    return payload['ref'].replace('refs/tags/', '')
def get_object_url(payload: Dict[str, Any]) -> str:
    return payload['object_attributes']['url']
# Maps the event key derived by get_event() (HTTP header value, optionally
# suffixed with the payload's action or noteable type) to its body renderer.
EVENT_FUNCTION_MAPPER = {
    'Push Hook': get_push_event_body,
    'Tag Push Hook': get_tag_push_event_body,
    'Test Hook': get_test_event_body,
    'Issue Hook open': get_issue_created_event_body,
    'Issue Hook close': partial(get_issue_event_body, action='closed'),
    'Issue Hook reopen': partial(get_issue_event_body, action='reopened'),
    'Issue Hook update': partial(get_issue_event_body, action='updated'),
    'Confidential Issue Hook open': get_issue_created_event_body,
    'Confidential Issue Hook close': partial(get_issue_event_body, action='closed'),
    'Confidential Issue Hook reopen': partial(get_issue_event_body, action='reopened'),
    'Confidential Issue Hook update': partial(get_issue_event_body, action='updated'),
    'Note Hook Commit': get_commented_commit_event_body,
    'Note Hook MergeRequest': get_commented_merge_request_event_body,
    'Note Hook Issue': get_commented_issue_event_body,
    'Confidential Note Hook Issue': get_commented_issue_event_body,
    'Note Hook Snippet': get_commented_snippet_event_body,
    'Merge Request Hook approved': partial(get_merge_request_event_body, action='approved'),
    'Merge Request Hook open': partial(get_merge_request_open_or_updated_body, action='created'),
    'Merge Request Hook update': get_merge_request_updated_event_body,
    'Merge Request Hook merge': partial(get_merge_request_event_body, action='merged'),
    'Merge Request Hook close': partial(get_merge_request_event_body, action='closed'),
    'Merge Request Hook reopen': partial(get_merge_request_event_body, action='reopened'),
    'Wiki Page Hook create': partial(get_wiki_page_event_body, action='created'),
    'Wiki Page Hook update': partial(get_wiki_page_event_body, action='updated'),
    'Job Hook': get_build_hook_event_body,
    'Build Hook': get_build_hook_event_body,
    'Pipeline Hook': get_pipeline_event_body,
}
@api_key_only_webhook_view("Gitlab")
@has_request_variables
def api_gitlab_webhook(request: HttpRequest, user_profile: UserProfile,
                       payload: Dict[str, Any]=REQ(argument_type='body'),
                       branches: Optional[str]=REQ(default=None),
                       user_specified_topic: Optional[str]=REQ("topic", default=None)) -> HttpResponse:
    """Webhook entry point: derive the GitLab event, render topic and
    body, and send the message."""
    event = get_event(request, payload, branches)
    if event is not None:
        event_body_function = get_body_based_on_event(event)
        # Only pass include_title to renderers whose signature accepts it.
        if 'include_title' in signature(event_body_function).parameters:
            body = event_body_function(
                payload,
                include_title=user_specified_topic is not None
            )
        else:
            body = event_body_function(payload)
        topic = get_subject_based_on_event(event, payload)
        check_send_webhook_message(request, user_profile, topic, body)
    return json_success()
def get_body_based_on_event(event: str) -> Any:
    """Look up the body renderer for an event already validated by get_event()."""
    return EVENT_FUNCTION_MAPPER[event]
def get_subject_based_on_event(event: str, payload: Dict[str, Any]) -> str:
    """Derive the message topic for the event.

    Push/build/pipeline events use "<repo> / <branch>"; issue/MR/snippet
    events embed the item's id and title; everything else falls back to
    the repository name.
    """
    if event == 'Push Hook':
        return u"{} / {}".format(get_repo_name(payload), get_branch_name(payload))
    elif event == 'Job Hook' or event == 'Build Hook':
        return u"{} / {}".format(payload['repository'].get('name'), get_branch_name(payload))
    elif event == 'Pipeline Hook':
        return u"{} / {}".format(
            get_repo_name(payload),
            payload['object_attributes'].get('ref').replace('refs/heads/', ''))
    elif event.startswith('Merge Request Hook'):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='MR',
            id=payload['object_attributes'].get('iid'),
            title=payload['object_attributes'].get('title')
        )
    elif event.startswith('Issue Hook') or event.startswith('Confidential Issue Hook'):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Issue',
            id=payload['object_attributes'].get('iid'),
            title=payload['object_attributes'].get('title')
        )
    elif event == 'Note Hook Issue' or event == 'Confidential Note Hook Issue':
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Issue',
            id=payload['issue'].get('iid'),
            title=payload['issue'].get('title')
        )
    elif event == 'Note Hook MergeRequest':
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='MR',
            id=payload['merge_request'].get('iid'),
            title=payload['merge_request'].get('title')
        )
    elif event == 'Note Hook Snippet':
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Snippet',
            id=payload['snippet'].get('id'),
            title=payload['snippet'].get('title')
        )
    return get_repo_name(payload)
def get_event(request: HttpRequest, payload: Dict[str, Any], branches: Optional[str]) -> Optional[str]:
    """Derive the EVENT_FUNCTION_MAPPER key for this request.

    Returns None for push events on branches the integration was not
    configured to report; raises UnexpectedWebhookEventType when the
    event has no handler.

    Fixes: replaced the non-idiomatic ``branches.find(branch) == -1``
    with a membership test (same substring semantics) and dropped the
    needless ``list(...keys())`` materialization.
    """
    event = validate_extract_webhook_http_header(request, 'X_GITLAB_EVENT', 'GitLab')
    if event in ['Confidential Issue Hook', 'Issue Hook', 'Merge Request Hook', 'Wiki Page Hook']:
        action = payload['object_attributes'].get('action')
        event = "{} {}".format(event, action)
    elif event in ['Confidential Note Hook', 'Note Hook']:
        action = payload['object_attributes'].get('noteable_type')
        event = "{} {}".format(event, action)
    elif event == 'Push Hook':
        if branches is not None:
            branch = get_branch_name(payload)
            # Substring match, preserving the historical find() behavior.
            if branch not in branches:
                return None
    if event in EVENT_FUNCTION_MAPPER:
        return event
    raise UnexpectedWebhookEventType('GitLab', event)
| {
"repo_name": "dhcrzf/zulip",
"path": "zerver/webhooks/gitlab/view.py",
"copies": "1",
"size": "15569",
"license": "apache-2.0",
"hash": 2523232086889400300,
"line_mean": 38.7168367347,
"line_max": 103,
"alpha_frac": 0.6193075984,
"autogenerated": false,
"ratio": 3.7335731414868105,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9838596655928329,
"avg_score": 0.0028568167916962705,
"num_lines": 392
} |
from functools import partial
from typing import Any, Dict, Iterable, Optional
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile
# Message templates for Clubhouse notifications. {name_template} expands to
# either a linked story name (STORY_NAME_TEMPLATE) or a bolded epic name
# (EPIC_NAME_TEMPLATE).
EPIC_NAME_TEMPLATE = "**{name}**"
STORY_NAME_TEMPLATE = "[{name}]({app_url})"
COMMENT_ADDED_TEMPLATE = "New comment added to the {entity} {name_template}:\n``` quote\n{text}\n```"
NEW_DESC_ADDED_TEMPLATE = "New description added to the {entity} {name_template}:\n``` quote\n{new}\n```"
DESC_CHANGED_TEMPLATE = ("Description for the {entity} {name_template} was changed from:\n"
                         "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```")
DESC_REMOVED_TEMPLATE = "Description for the {entity} {name_template} was removed."
STATE_CHANGED_TEMPLATE = "State of the {entity} {name_template} was changed from **{old}** to **{new}**."
NAME_CHANGED_TEMPLATE = ("The name of the {entity} {name_template} was changed from:\n"
                         "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```")
STORY_ARCHIVED_TEMPLATE = "The story {name_template} was {action}."
STORY_TASK_TEMPLATE = "Task **{task_description}** was {action} the story {name_template}."
STORY_TASK_COMPLETED_TEMPLATE = "Task **{task_description}** ({name_template}) was completed. :tada:"
STORY_ADDED_REMOVED_EPIC_TEMPLATE = ("The story {story_name_template} was {action} the"
                                     " epic {epic_name_template}.")
STORY_EPIC_CHANGED_TEMPLATE = ("The story {story_name_template} was moved from {old_epic_name_template}"
                               " to {new_epic_name_template}.")
STORY_ESTIMATE_TEMPLATE = "The estimate for the story {story_name_template} was set to {estimate}."
FILE_ATTACHMENT_TEMPLATE = "A {type} attachment `{file_name}` was added to the story {name_template}."
STORY_LABEL_TEMPLATE = "The label **{label_name}** was added to the story {name_template}."
STORY_UPDATE_PROJECT_TEMPLATE = ("The story {name_template} was moved from"
                                 " the **{old}** project to **{new}**.")
STORY_UPDATE_TYPE_TEMPLATE = ("The type of the story {name_template} was changed"
                              " from **{old_type}** to **{new_type}**.")
def get_action_with_primary_id(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Return the action entry whose id matches the payload's primary_id."""
    primary_id = payload["primary_id"]
    for candidate in payload["actions"]:
        if candidate["id"] == primary_id:
            selected = candidate
    # NOTE(review): like the original, this raises if no action matches.
    return selected
def get_body_function_based_on_type(payload: Dict[str, Any]) -> Any:
    """Build the "<entity_type>_<action>[_<changed_field>]" event key for
    the primary action and return its body renderer (or None)."""
    action = get_action_with_primary_id(payload)
    event = "{}_{}".format(action["entity_type"], action["action"])
    changes = action.get("changes")
    if changes is not None:
        # Refine update events with the first recognized changed field.
        if changes.get("description") is not None:
            event = "{}_{}".format(event, "description")
        elif changes.get("state") is not None:
            event = "{}_{}".format(event, "state")
        elif changes.get("workflow_state_id") is not None:
            event = "{}_{}".format(event, "state")
        elif changes.get("name") is not None:
            event = "{}_{}".format(event, "name")
        elif changes.get("archived") is not None:
            event = "{}_{}".format(event, "archived")
        elif changes.get("complete") is not None:
            event = "{}_{}".format(event, "complete")
        elif changes.get("epic_id") is not None:
            event = "{}_{}".format(event, "epic")
        elif changes.get("estimate") is not None:
            event = "{}_{}".format(event, "estimate")
        elif changes.get("file_ids") is not None:
            event = "{}_{}".format(event, "attachment")
        elif changes.get("label_ids") is not None:
            event = "{}_{}".format(event, "label")
        elif changes.get("project_id") is not None:
            event = "{}_{}".format(event, "project")
        elif changes.get("story_type") is not None:
            event = "{}_{}".format(event, "type")
    return EVENT_BODY_FUNCTION_MAPPER.get(event)
def get_topic_function_based_on_type(payload: Dict[str, Any]) -> Any:
    """Return the topic-deriving function for the primary action's entity."""
    entity_type = get_action_with_primary_id(payload)["entity_type"]
    return EVENT_TOPIC_FUNCTION_MAPPER.get(entity_type)
def get_story_create_body(payload: Dict[str, Any]) -> str:
    """Build the notification for a newly created story, mentioning its
    epic when the story was created inside one."""
    action = get_action_with_primary_id(payload)
    if action.get("epic_id") is None:
        message = "New story [{name}]({app_url}) of type **{story_type}** was created."
        kwargs = action
    else:
        message = "New story [{name}]({app_url}) was created and added to the epic **{epic_name}**."
        kwargs = {
            "name": action["name"],
            "app_url": action["app_url"],
        }
        epic_id = action["epic_id"]
        refs = payload["references"]
        # Resolve the epic's name from the references list.
        for ref in refs:
            if ref["id"] == epic_id:
                kwargs["epic_name"] = ref["name"]
    return message.format(**kwargs)
def get_epic_create_body(payload: Dict[str, Any]) -> str:
    """Build the notification announcing a newly created epic."""
    action = get_action_with_primary_id(payload)
    template = "New epic **{name}**({state}) was created."
    return template.format(name=action["name"], state=action["state"])
def get_comment_added_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the notification for a comment added to a story or epic.

    The comment text comes from the primary action; the parent entity's
    name comes from a sibling action of the matching entity_type.
    """
    actions = payload["actions"]
    kwargs = {"entity": entity}
    for action in actions:
        if action["id"] == payload["primary_id"]:
            kwargs["text"] = action["text"]
        elif action["entity_type"] == entity:
            name_template = get_name_template(entity).format(
                name=action["name"],
                app_url=action.get("app_url")
            )
            kwargs["name_template"] = name_template
    return COMMENT_ADDED_TEMPLATE.format(**kwargs)
def get_update_description_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the notification for a story/epic description change."""
    action = get_action_with_primary_id(payload)
    desc = action["changes"]["description"]
    kwargs = {
        "entity": entity,
        "new": desc["new"],
        "old": desc["old"],
        "name_template": get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url")
        )
    }
    # Choose the template by whether the description was edited, added,
    # or removed.
    if kwargs["new"] and kwargs["old"]:
        template = DESC_CHANGED_TEMPLATE
    elif kwargs["new"]:
        template = NEW_DESC_ADDED_TEMPLATE
    else:
        template = DESC_REMOVED_TEMPLATE
    return template.format(**kwargs)
def get_epic_update_state_body(payload: Dict[str, Any]) -> str:
    """Build the notification for an epic's state change."""
    action = get_action_with_primary_id(payload)
    state_change = action["changes"]["state"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="epic",
        new=state_change["new"],
        old=state_change["old"],
        name_template=EPIC_NAME_TEMPLATE.format(name=action["name"]),
    )
def get_story_update_state_body(payload: Dict[str, Any]) -> str:
    """Build the notification for a story's workflow-state change."""
    action = get_action_with_primary_id(payload)
    state_change = action["changes"]["workflow_state_id"]
    # Workflow-state ids are resolved to names via the references list.
    names = {}
    for ref in payload["references"]:
        if ref["id"] == state_change["new"]:
            names["new"] = ref["name"]
        if ref["id"] == state_change["old"]:
            names["old"] = ref["name"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="story",
        new=names["new"],
        old=names["old"],
        name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    )
def get_update_name_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the notification for a story/epic being renamed."""
    action = get_action_with_primary_id(payload)
    name_change = action["changes"]["name"]
    return NAME_CHANGED_TEMPLATE.format(
        entity=entity,
        new=name_change["new"],
        old=name_change["old"],
        name_template=get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url")
        ),
    )
def get_story_update_archived_body(payload: Dict[str, Any]) -> str:
    """Build the notification for a story being archived or unarchived."""
    primary_action = get_action_with_primary_id(payload)
    archived_now = primary_action["changes"]["archived"]["new"]
    return STORY_ARCHIVED_TEMPLATE.format(
        name_template=STORY_NAME_TEMPLATE.format(
            name=primary_action["name"],
            app_url=primary_action.get("app_url")
        ),
        action="archived" if archived_now else "unarchived",
    )
def get_story_task_body(payload: Dict[str, Any], action: str) -> str:
    """Build the notification for a task added to / removed from a story."""
    primary_action = get_action_with_primary_id(payload)
    kwargs = {
        "task_description": primary_action["description"],
        "action": action,
    }
    # The parent story's details live in a sibling action entry.
    for sibling in payload["actions"]:
        if sibling["entity_type"] == "story":
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=sibling["name"],
                app_url=sibling["app_url"],
            )
    return STORY_TASK_TEMPLATE.format(**kwargs)
def get_story_task_completed_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the message for a task being checked off.

    Returns None when the change un-completed the task.
    """
    action = get_action_with_primary_id(payload)
    if not action["changes"]["complete"]["new"]:
        # Only completions are reported.
        return None
    format_args = {"task_description": action["description"]}
    story_id = action["story_id"]
    # Resolve the parent story's name/url via the references list.
    for reference in payload["references"]:
        if reference["id"] == story_id:
            format_args["name_template"] = STORY_NAME_TEMPLATE.format(
                name=reference["name"],
                app_url=reference["app_url"],
            )
    return STORY_TASK_COMPLETED_TEMPLATE.format(**format_args)
def get_story_update_epic_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story moving between epics, or being added
    to / removed from a single epic."""
    action = get_action_with_primary_id(payload)
    format_args = {
        "story_name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    epic_change = action["changes"]["epic_id"]
    new_epic_id = epic_change.get("new")
    old_epic_id = epic_change.get("old")
    # Resolve epic ids to names via the references list.
    for reference in payload["references"]:
        if reference["id"] == new_epic_id:
            format_args["new_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=reference["name"])
        if reference["id"] == old_epic_id:
            format_args["old_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=reference["name"])
    if new_epic_id and old_epic_id:
        return STORY_EPIC_CHANGED_TEMPLATE.format(**format_args)
    if new_epic_id:
        format_args["epic_name_template"] = format_args["new_epic_name_template"]
        format_args["action"] = "added to"
    else:
        format_args["epic_name_template"] = format_args["old_epic_name_template"]
        format_args["action"] = "removed from"
    return STORY_ADDED_REMOVED_EPIC_TEMPLATE.format(**format_args)
def get_story_update_estimate_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story's point estimate being changed."""
    action = get_action_with_primary_id(payload)
    new_estimate = action["changes"]["estimate"].get("new")
    # A missing/zero estimate is rendered as "Unestimated".
    estimate_text = "{} points".format(new_estimate) if new_estimate else "*Unestimated*"
    return STORY_ESTIMATE_TEMPLATE.format(
        story_name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
        estimate=estimate_text,
    )
def get_story_update_attachment_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the message for a file being attached to a story.

    Returns None for attachment-removal payloads, which are ignored.
    """
    action = get_action_with_primary_id(payload)
    added_file_ids = action["changes"]["file_ids"].get("adds")
    # Removal payloads carry no "adds" list; ignore them.
    if not added_file_ids:
        return None
    format_args = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    first_file_id = added_file_ids[0]
    for reference in payload["references"]:
        if reference["id"] == first_file_id:
            format_args["type"] = reference["entity_type"]
            format_args["file_name"] = reference["name"]
    return FILE_ATTACHMENT_TEMPLATE.format(**format_args)
def get_story_label_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the message for a label being added to a story.

    Returns None for label-removal payloads, which are deliberately ignored.
    """
    action = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"]
        )
    }
    label_ids_added = action["changes"]["label_ids"].get("adds")
    # If this is a payload for when a label is removed, ignore it
    if not label_ids_added:
        return None
    label_id = label_ids_added[0]
    # Use a distinct loop variable: the original rebound `action`, shadowing
    # the primary action used above and inviting confusion on later edits.
    for label_action in payload["actions"]:
        if label_action["id"] == label_id:
            kwargs.update({"label_name": label_action["name"]})
    return STORY_LABEL_TEMPLATE.format(**kwargs)
def get_story_update_project_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story moving between projects."""
    action = get_action_with_primary_id(payload)
    project_change = action["changes"]["project_id"]
    format_args = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    # Resolve project ids to names via the references list.
    for reference in payload["references"]:
        if reference["id"] == project_change["new"]:
            format_args["new"] = reference["name"]
        if reference["id"] == project_change["old"]:
            format_args["old"] = reference["name"]
    return STORY_UPDATE_PROJECT_TEMPLATE.format(**format_args)
def get_story_update_type_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story's type (feature/bug/chore) changing."""
    action = get_action_with_primary_id(payload)
    type_change = action["changes"]["story_type"]
    name_template = STORY_NAME_TEMPLATE.format(
        name=action["name"],
        app_url=action["app_url"],
    )
    return STORY_UPDATE_TYPE_TEMPLATE.format(
        name_template=name_template,
        new_type=type_change["new"],
        old_type=type_change["old"],
    )
def get_entity_name(payload: Dict[str, Any], entity: Optional[str]=None) -> Optional[str]:
    """Return the display name of the affected entity.

    Looks at the primary action first, then the other actions, then the
    references; later matches within a list override earlier ones.
    """
    name = get_action_with_primary_id(payload).get("name")
    if name is None:
        for candidate in payload["actions"]:
            if candidate["entity_type"] == entity:
                name = candidate["name"]
    if name is None:
        for candidate in payload["references"]:
            if candidate["entity_type"] == entity:
                name = candidate["name"]
    return name
def get_name_template(entity: str) -> str:
    """Return the markdown name template for the given entity type."""
    # Stories link to the app; anything else is treated as an epic.
    return STORY_NAME_TEMPLATE if entity == "story" else EPIC_NAME_TEMPLATE
# Maps "<entity>_<operation>[_<field>]" event keys to the function that
# renders the message body for that event.
EVENT_BODY_FUNCTION_MAPPER = {
    "story_update_archived": get_story_update_archived_body,
    "story_create": get_story_create_body,
    "story-task_create": partial(get_story_task_body, action="added to"),
    "story-task_delete": partial(get_story_task_body, action="removed from"),
    "story-task_update_complete": get_story_task_completed_body,
    "story_update_epic": get_story_update_epic_body,
    "story_update_estimate": get_story_update_estimate_body,
    "story_update_attachment": get_story_update_attachment_body,
    "story_update_label": get_story_label_body,
    "story_update_project": get_story_update_project_body,
    "story_update_type": get_story_update_type_body,
    "epic_create": get_epic_create_body,
    "epic-comment_create": partial(get_comment_added_body, entity='epic'),
    "story-comment_create": partial(get_comment_added_body, entity='story'),
    "epic_update_description": partial(get_update_description_body, entity='epic'),
    "story_update_description": partial(get_update_description_body, entity='story'),
    "epic_update_state": get_epic_update_state_body,
    "story_update_state": get_story_update_state_body,
    "epic_update_name": partial(get_update_name_body, entity='epic'),
    "story_update_name": partial(get_update_name_body, entity='story'),
}
# Maps the payload's entity type to the function that derives the message
# topic (the affected entity's name).
EVENT_TOPIC_FUNCTION_MAPPER = {
    "story": partial(get_entity_name, entity='story'),
    "story-comment": partial(get_entity_name, entity='story'),
    "story-task": partial(get_entity_name, entity='story'),
    "epic": partial(get_entity_name, entity='epic'),
    "epic-comment": partial(get_entity_name, entity='epic'),
}
@api_key_only_webhook_view('ClubHouse')
@has_request_variables
def api_clubhouse_webhook(
        request: HttpRequest, user_profile: UserProfile,
        payload: Dict[str, Any]=REQ(argument_type='body')
) -> HttpResponse:
    """Entry point for Clubhouse webhooks.

    Derives the topic and body from the payload and forwards the message;
    events that produce no topic or no body are silently ignored.
    """
    body_func = get_body_function_based_on_type(payload)
    topic_func = get_topic_function_based_on_type(payload)
    topic = topic_func(payload)
    body = body_func(payload)
    if topic and body:
        check_send_webhook_message(request, user_profile, topic, body)
    return json_success()
| {
"repo_name": "jackrzhang/zulip",
"path": "zerver/webhooks/clubhouse/view.py",
"copies": "1",
"size": "16633",
"license": "apache-2.0",
"hash": 271797017044313470,
"line_mean": 36.1272321429,
"line_max": 105,
"alpha_frac": 0.6018757891,
"autogenerated": false,
"ratio": 3.5389361702127657,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46408119593127656,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, Dict, Iterable, Optional, Text
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_stream_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.git import EMPTY_SHA, \
SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE, \
get_commits_comment_action_message, get_issue_event_message, \
get_pull_request_event_message, get_push_commits_event_message, \
get_push_tag_event_message, get_remove_branch_event_message
from zerver.models import UserProfile
class UnknownEventType(Exception):
    """Raised when the webhook receives an event it has no handler for."""
    pass
def get_push_event_body(payload: Dict[str, Any]) -> Text:
    """Dispatch a push payload to the branch-removal or normal-push handler."""
    # An 'after' sha of all zeros marks a branch deletion.
    branch_was_deleted = payload.get('after') == EMPTY_SHA
    handler = get_remove_branch_event_body if branch_was_deleted else get_normal_push_event_body
    return handler(payload)
def get_normal_push_event_body(payload: Dict[str, Any]) -> Text:
    """Build the pushed-commits message, including a compare URL."""
    compare_url = u'{}/compare/{}...{}'.format(
        get_repository_homepage(payload),
        payload['before'],
        payload['after'],
    )
    commits = [
        dict(
            name=commit.get('author').get('name'),
            sha=commit.get('id'),
            message=commit.get('message'),
            url=commit.get('url'),
        )
        for commit in payload['commits']
    ]
    return get_push_commits_event_message(
        get_user_name(payload),
        compare_url,
        get_branch_name(payload),
        commits,
    )
def get_remove_branch_event_body(payload: Dict[str, Any]) -> Text:
    """Build the message for a branch being deleted."""
    user = get_user_name(payload)
    branch = get_branch_name(payload)
    return get_remove_branch_event_message(user, branch)
def get_tag_push_event_body(payload: Dict[str, Any]) -> Text:
    """Build the message for a tag being pushed or removed."""
    # A missing 'checkout_sha' indicates the tag was removed.
    tag_action = "pushed" if payload.get('checkout_sha') else "removed"
    return get_push_tag_event_message(
        get_user_name(payload),
        get_tag_name(payload),
        action=tag_action,
    )
def get_issue_created_event_body(payload: Dict[str, Any]) -> Text:
    """Build the message for a newly opened issue."""
    attributes = payload['object_attributes']
    return get_issue_event_message(
        get_issue_user_name(payload),
        'created',
        get_object_url(payload),
        attributes.get('iid'),
        attributes.get('description'),
        get_objects_assignee(payload),
    )
def get_issue_event_body(payload: Dict[str, Any], action: Text) -> Text:
    """Build the message for an issue being closed, reopened, or updated."""
    user = get_issue_user_name(payload)
    url = get_object_url(payload)
    issue_iid = payload['object_attributes'].get('iid')
    return get_issue_event_message(user, action, url, issue_iid)
def get_merge_request_updated_event_body(payload: Dict[str, Any]) -> Text:
    """Choose the commit-push wording when 'oldrev' is present, otherwise
    the generic MR-updated wording."""
    if payload['object_attributes'].get('oldrev'):
        return get_merge_request_event_body(payload, "added commit(s) to")
    return get_merge_request_open_or_updated_body(payload, "updated")
def get_merge_request_event_body(payload: Dict[str, Any], action: Text) -> Text:
    """Build the short message for a merge-request lifecycle event."""
    merge_request = payload['object_attributes']
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        merge_request.get('url'),
        merge_request.get('iid'),
        type='MR',
    )
def get_merge_request_open_or_updated_body(payload: Dict[str, Any], action: Text) -> Text:
    """Build the detailed message for an MR being opened or updated,
    including branches, description, and assignee."""
    merge_request = payload['object_attributes']
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        merge_request.get('url'),
        merge_request.get('iid'),
        merge_request.get('source_branch'),
        merge_request.get('target_branch'),
        merge_request.get('description'),
        get_objects_assignee(payload),
        type='MR',
    )
def get_objects_assignee(payload: Dict[str, Any]) -> Optional[Text]:
    """Return the assignee's name, or None when no assignee is set."""
    assignee = payload.get('assignee')
    return assignee.get('name') if assignee else None
def get_commented_commit_event_body(payload: Dict[str, Any]) -> Text:
    """Build the message for a comment left on a commit."""
    comment = payload['object_attributes']
    commit = payload['commit']
    comment_action = u'[commented]({})'.format(comment['url'])
    return get_commits_comment_action_message(
        get_issue_user_name(payload),
        comment_action,
        commit.get('url'),
        commit.get('id'),
        comment['note'],
    )
def get_commented_merge_request_event_body(payload: Dict[str, Any]) -> Text:
    """Build the message for a comment left on a merge request."""
    comment = payload['object_attributes']
    mr_iid = payload['merge_request'].get('iid')
    mr_url = u'{}/merge_requests/{}'.format(
        payload['project'].get('web_url'),
        mr_iid,
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        mr_url,
        mr_iid,
        message=comment['note'],
        type='MR'
    )
def get_commented_issue_event_body(payload: Dict[str, Any]) -> Text:
    """Build the message for a comment left on an issue."""
    comment = payload['object_attributes']
    issue_iid = payload['issue'].get('iid')
    issue_url = u'{}/issues/{}'.format(
        payload['project'].get('web_url'),
        issue_iid,
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        issue_url,
        issue_iid,
        message=comment['note'],
        type='Issue'
    )
def get_commented_snippet_event_body(payload: Dict[str, Any]) -> Text:
    """Build the message for a comment left on a snippet."""
    comment = payload['object_attributes']
    snippet_id = payload['snippet'].get('id')
    snippet_url = u'{}/snippets/{}'.format(
        payload['project'].get('web_url'),
        snippet_id,
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        snippet_url,
        snippet_id,
        message=comment['note'],
        type='Snippet'
    )
def get_wiki_page_event_body(payload: Dict[str, Any], action: Text) -> Text:
    """Build the message for a wiki page being created or updated."""
    attributes = payload['object_attributes']
    return u"{} {} [Wiki Page \"{}\"]({}).".format(
        get_issue_user_name(payload),
        action,
        attributes.get('title'),
        attributes.get('url'),
    )
def get_build_hook_event_body(payload: Dict[str, Any]) -> Text:
    """Describe a CI build's status transition."""
    status = payload.get('build_status')
    # 'created' and 'running' get friendlier phrasing; every other status
    # is reported verbatim.
    status_phrases = {
        'created': 'was created',
        'running': 'started',
    }
    action = status_phrases.get(status, 'changed status to {}'.format(status))
    return u"Build {} from {} stage {}.".format(
        payload.get('build_name'),
        payload.get('build_stage'),
        action,
    )
def get_test_event_body(payload: Dict[str, Any]) -> Text:
    """Confirmation message sent when the webhook is first configured."""
    repo_name = get_repo_name(payload)
    return u"Webhook for **{repo}** has been configured successfully! :tada:".format(
        repo=repo_name)
def get_pipeline_event_body(payload: Dict[str, Any]) -> Text:
    """Describe a pipeline status change, listing each build's status."""
    status = payload['object_attributes'].get('status')
    if status == 'pending':
        action = 'was created'
    elif status == 'running':
        action = 'started'
    else:
        action = 'changed status to {}'.format(status)
    build_lines = [
        u"* {} - {}".format(build.get('name'), build.get('status'))
        for build in payload['builds']
    ]
    return u"Pipeline {} with build(s):\n{}.".format(action, u"\n".join(build_lines))
def get_repo_name(payload: Dict[str, Any]) -> Text:
    """Return the project's name."""
    return payload['project']['name']
def get_user_name(payload: Dict[str, Any]) -> Text:
    """Return the pushing user's name (push-style payloads)."""
    return payload['user_name']
def get_issue_user_name(payload: Dict[str, Any]) -> Text:
    """Return the acting user's name (issue/MR/note-style payloads)."""
    return payload['user']['name']
def get_repository_homepage(payload: Dict[str, Any]) -> Text:
    """Return the repository's homepage URL."""
    return payload['repository']['homepage']
def get_branch_name(payload: Dict[str, Any]) -> Text:
    """Return the branch name, stripped of its 'refs/heads/' prefix."""
    return payload['ref'].replace('refs/heads/', '')
def get_tag_name(payload: Dict[str, Any]) -> Text:
    """Return the tag name, stripped of its 'refs/tags/' prefix."""
    return payload['ref'].replace('refs/tags/', '')
def get_object_iid(payload: Dict[str, Any]) -> Text:
    """Return the issue/MR internal id."""
    return payload['object_attributes']['iid']
def get_object_url(payload: Dict[str, Any]) -> Text:
    """Return the issue/MR web URL."""
    return payload['object_attributes']['url']
# Maps the (action-qualified) GitLab event name to the function that renders
# its message body.  Keys must match what get_event() produces.
EVENT_FUNCTION_MAPPER = {
    'Push Hook': get_push_event_body,
    'Tag Push Hook': get_tag_push_event_body,
    'Test Hook': get_test_event_body,
    'Issue Hook open': get_issue_created_event_body,
    'Issue Hook close': partial(get_issue_event_body, action='closed'),
    'Issue Hook reopen': partial(get_issue_event_body, action='reopened'),
    'Issue Hook update': partial(get_issue_event_body, action='updated'),
    'Note Hook Commit': get_commented_commit_event_body,
    'Note Hook MergeRequest': get_commented_merge_request_event_body,
    'Note Hook Issue': get_commented_issue_event_body,
    'Note Hook Snippet': get_commented_snippet_event_body,
    'Merge Request Hook open': partial(get_merge_request_open_or_updated_body, action='created'),
    'Merge Request Hook update': get_merge_request_updated_event_body,
    'Merge Request Hook merge': partial(get_merge_request_event_body, action='merged'),
    'Merge Request Hook close': partial(get_merge_request_event_body, action='closed'),
    'Merge Request Hook reopen': partial(get_merge_request_event_body, action='reopened'),
    'Wiki Page Hook create': partial(get_wiki_page_event_body, action='created'),
    'Wiki Page Hook update': partial(get_wiki_page_event_body, action='updated'),
    'Build Hook': get_build_hook_event_body,
    'Pipeline Hook': get_pipeline_event_body,
}
@api_key_only_webhook_view("Gitlab")
@has_request_variables
def api_gitlab_webhook(request, user_profile,
                       stream=REQ(default='gitlab'),
                       payload=REQ(argument_type='body'),
                       branches=REQ(default=None)):
    # type: (HttpRequest, UserProfile, Text, Dict[str, Any], Optional[Text]) -> HttpResponse
    """Entry point for GitLab webhooks.

    Resolves the event type, renders body and subject, and sends the
    message to the configured stream.  A None event (a push to a branch
    not listed in `branches`) is silently ignored.
    """
    event = get_event(request, payload, branches)
    if event is not None:
        body = get_body_based_on_event(event)(payload)
        subject = get_subject_based_on_event(event, payload)
        check_send_stream_message(user_profile, request.client, stream, subject, body)
    return json_success()
def get_body_based_on_event(event: str) -> Any:
    """Look up the body-rendering function for the given event key."""
    return EVENT_FUNCTION_MAPPER[event]
def get_subject_based_on_event(event: str, payload: Dict[str, Any]) -> Text:
    """Derive the message subject (topic) for the given event.

    Falls back to the bare repository name for events without a more
    specific subject format.
    """
    if event == 'Push Hook':
        return u"{} / {}".format(get_repo_name(payload), get_branch_name(payload))
    elif event == 'Build Hook':
        return u"{} / {}".format(payload['repository'].get('name'), get_branch_name(payload))
    elif event == 'Pipeline Hook':
        return u"{} / {}".format(
            get_repo_name(payload),
            payload['object_attributes'].get('ref').replace('refs/heads/', ''))
    elif event.startswith('Merge Request Hook'):
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='MR',
            id=payload['object_attributes'].get('iid'),
            title=payload['object_attributes'].get('title')
        )
    elif event.startswith('Issue Hook'):
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Issue',
            id=payload['object_attributes'].get('iid'),
            title=payload['object_attributes'].get('title')
        )
    elif event == 'Note Hook Issue':
        # Note events carry the commented-on object in its own key, not in
        # object_attributes (which holds the comment).
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Issue',
            id=payload['issue'].get('iid'),
            title=payload['issue'].get('title')
        )
    elif event == 'Note Hook MergeRequest':
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='MR',
            id=payload['merge_request'].get('iid'),
            title=payload['merge_request'].get('title')
        )
    elif event == 'Note Hook Snippet':
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Snippet',
            id=payload['snippet'].get('id'),
            title=payload['snippet'].get('title')
        )
    return get_repo_name(payload)
def get_event(request: HttpRequest, payload: Dict[str, Any], branches: Optional[Text]) -> Optional[str]:
    """Map the request header and payload to a key of EVENT_FUNCTION_MAPPER.

    Returns None when the push targets a branch the caller chose not to
    follow; raises UnknownEventType for events with no handler.
    """
    event = request.META['HTTP_X_GITLAB_EVENT']
    if event in ['Issue Hook', 'Merge Request Hook', 'Wiki Page Hook']:
        action = payload['object_attributes'].get('action')
        # If there is no 'action' attribute, then this is a test payload.
        if action is None:
            return 'Test Hook'
        event = "{} {}".format(event, action)
    elif event == 'Note Hook':
        action = payload['object_attributes'].get('noteable_type')
        event = "{} {}".format(event, action)
    elif event == 'Push Hook':
        # Only notify for branches the integration was configured to follow.
        if branches is not None:
            branch = get_branch_name(payload)
            if branch not in branches:
                return None
    # Direct membership test; the original built a throwaway list from
    # EVENT_FUNCTION_MAPPER.keys() on every call.
    if event in EVENT_FUNCTION_MAPPER:
        return event
    raise UnknownEventType(u'Event {} is unknown and cannot be handled'.format(event))
| {
"repo_name": "mahim97/zulip",
"path": "zerver/webhooks/gitlab/view.py",
"copies": "2",
"size": "13100",
"license": "apache-2.0",
"hash": 8053351938490649000,
"line_mean": 36.8612716763,
"line_max": 104,
"alpha_frac": 0.6238167939,
"autogenerated": false,
"ratio": 3.6197844708483005,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5243601264748301,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, Dict, Optional
from inspect import signature
import re
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message, \
validate_extract_webhook_http_header, UnexpectedWebhookEventType
from zerver.lib.webhooks.git import EMPTY_SHA, \
TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE, \
get_commits_comment_action_message, get_issue_event_message, \
get_pull_request_event_message, get_push_commits_event_message, \
get_push_tag_event_message, get_remove_branch_event_message
from zerver.models import UserProfile
def fixture_to_headers(fixture_name: str) -> Dict[str, Any]:
    """Recover the HTTP_X_GITLAB_EVENT header from a test fixture's name."""
    if fixture_name.startswith("build"):
        # Ambiguous: build fixtures match two possible event types.
        return {}
    # e.g. "push_hook__push_commits_more_than_limit.json" -> "Push Hook"
    event_title = fixture_name.split("__")[0].replace("_", " ").title()
    return {"HTTP_X_GITLAB_EVENT": event_title}
def get_push_event_body(payload: Dict[str, Any]) -> str:
    """Dispatch a push payload to the right handler.

    An 'after' sha of all zeros marks a branch deletion.
    """
    if payload.get('after') != EMPTY_SHA:
        return get_normal_push_event_body(payload)
    return get_remove_branch_event_body(payload)
def get_normal_push_event_body(payload: Dict[str, Any]) -> str:
    """Build the pushed-commits message, including a compare URL."""
    homepage = get_repository_homepage(payload)
    compare_url = u'{}/compare/{}...{}'.format(
        homepage, payload['before'], payload['after'])
    commits = [
        dict(
            name=commit.get('author').get('name'),
            sha=commit.get('id'),
            message=commit.get('message'),
            url=commit.get('url'),
        )
        for commit in payload['commits']
    ]
    return get_push_commits_event_message(
        get_user_name(payload),
        compare_url,
        get_branch_name(payload),
        commits,
    )
def get_remove_branch_event_body(payload: Dict[str, Any]) -> str:
    """Build the message for a branch being deleted."""
    user = get_user_name(payload)
    branch = get_branch_name(payload)
    return get_remove_branch_event_message(user, branch)
def get_tag_push_event_body(payload: Dict[str, Any]) -> str:
    """Build the message for a tag being pushed or removed."""
    # A missing 'checkout_sha' indicates the tag was removed.
    tag_action = "pushed" if payload.get('checkout_sha') else "removed"
    return get_push_tag_event_message(
        get_user_name(payload),
        get_tag_name(payload),
        action=tag_action,
    )
def get_issue_created_event_body(payload: Dict[str, Any],
                                 include_title: Optional[bool]=False) -> str:
    """Build the message for a newly opened issue, optionally including
    its title (used when a fixed topic was user-specified)."""
    attributes = payload['object_attributes']
    description = attributes.get('description')
    if description is not None:
        # Strip multiline hidden HTML comments, then trailing whitespace.
        description = re.sub('<!--.*?-->', '', description, 0, re.DOTALL).rstrip()
    return get_issue_event_message(
        get_issue_user_name(payload),
        'created',
        get_object_url(payload),
        attributes.get('iid'),
        description,
        get_objects_assignee(payload),
        payload.get('assignees'),
        title=attributes.get('title') if include_title else None
    )
def get_issue_event_body(payload: Dict[str, Any], action: str,
                         include_title: Optional[bool]=False) -> str:
    """Build the message for an issue being closed, reopened, or updated."""
    attributes = payload['object_attributes']
    return get_issue_event_message(
        get_issue_user_name(payload),
        action,
        get_object_url(payload),
        attributes.get('iid'),
        title=attributes.get('title') if include_title else None
    )
def get_merge_request_updated_event_body(payload: Dict[str, Any],
                                         include_title: Optional[bool]=False) -> str:
    """Choose the commit-push wording when 'oldrev' is present, otherwise
    the generic MR-updated wording."""
    commits_were_pushed = bool(payload['object_attributes'].get('oldrev'))
    if commits_were_pushed:
        return get_merge_request_event_body(
            payload, "added commit(s) to",
            include_title=include_title
        )
    return get_merge_request_open_or_updated_body(
        payload, "updated",
        include_title=include_title
    )
def get_merge_request_event_body(payload: Dict[str, Any], action: str,
                                 include_title: Optional[bool]=False) -> str:
    """Build the short message for a merge-request lifecycle event."""
    merge_request = payload['object_attributes']
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        merge_request.get('url'),
        merge_request.get('iid'),
        type='MR',
        title=merge_request.get('title') if include_title else None
    )
def get_merge_request_open_or_updated_body(payload: Dict[str, Any], action: str,
                                           include_title: Optional[bool]=False) -> str:
    """Build the detailed message for an MR being opened or updated,
    including branches, description, and assignee."""
    merge_request = payload['object_attributes']
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        merge_request.get('url'),
        merge_request.get('iid'),
        merge_request.get('source_branch'),
        merge_request.get('target_branch'),
        merge_request.get('description'),
        get_objects_assignee(payload),
        type='MR',
        title=merge_request.get('title') if include_title else None
    )
def get_objects_assignee(payload: Dict[str, Any]) -> Optional[str]:
    """Return the assignee's name.

    Checks the singular 'assignee' field first, then falls back to the
    first entry of the plural 'assignees' list; returns None when neither
    yields a name.
    """
    assignee_object = payload.get('assignee')
    if assignee_object:
        return assignee_object.get('name')
    # The original re-fetched payload.get('assignees') and used a for-loop
    # that returned on its first iteration; index the first entry directly.
    assignees = payload.get('assignees')
    if assignees:
        return assignees[0]['name']
    return None
def get_commented_commit_event_body(payload: Dict[str, Any]) -> str:
    """Build the message for a comment left on a commit."""
    comment = payload['object_attributes']
    commit = payload['commit']
    comment_action = u'[commented]({})'.format(comment['url'])
    return get_commits_comment_action_message(
        get_issue_user_name(payload),
        comment_action,
        commit.get('url'),
        commit.get('id'),
        comment['note'],
    )
def get_commented_merge_request_event_body(payload: Dict[str, Any],
                                           include_title: Optional[bool]=False) -> str:
    """Build the message for a comment left on a merge request."""
    comment = payload['object_attributes']
    merge_request = payload['merge_request']
    mr_url = u'{}/merge_requests/{}'.format(
        payload['project'].get('web_url'),
        merge_request.get('iid'),
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        mr_url,
        merge_request.get('iid'),
        message=comment['note'],
        type='MR',
        title=merge_request.get('title') if include_title else None
    )
def get_commented_issue_event_body(payload: Dict[str, Any],
                                   include_title: Optional[bool]=False) -> str:
    """Build the message for a comment left on an issue."""
    comment = payload['object_attributes']
    issue = payload['issue']
    issue_url = u'{}/issues/{}'.format(
        payload['project'].get('web_url'),
        issue.get('iid'),
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        issue_url,
        issue.get('iid'),
        message=comment['note'],
        type='Issue',
        title=issue.get('title') if include_title else None
    )
def get_commented_snippet_event_body(payload: Dict[str, Any],
                                     include_title: Optional[bool]=False) -> str:
    """Build the message for a comment left on a snippet."""
    comment = payload['object_attributes']
    snippet = payload['snippet']
    snippet_url = u'{}/snippets/{}'.format(
        payload['project'].get('web_url'),
        snippet.get('id'),
    )
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        u'[commented]({}) on'.format(comment['url']),
        snippet_url,
        snippet.get('id'),
        message=comment['note'],
        type='Snippet',
        title=snippet.get('title') if include_title else None
    )
def get_wiki_page_event_body(payload: Dict[str, Any], action: str) -> str:
    """Build the message for a wiki page being created or updated."""
    attributes = payload['object_attributes']
    return u"{} {} [Wiki Page \"{}\"]({}).".format(
        get_issue_user_name(payload),
        action,
        attributes.get('title'),
        attributes.get('url'),
    )
def get_build_hook_event_body(payload: Dict[str, Any]) -> str:
    """Describe a CI build/job status transition."""
    status = payload.get('build_status')
    # 'created' and 'running' get friendlier phrasing; every other status
    # is reported verbatim.
    if status == 'created':
        verb = 'was created'
    elif status == 'running':
        verb = 'started'
    else:
        verb = 'changed status to {}'.format(status)
    return u"Build {} from {} stage {}.".format(
        payload.get('build_name'),
        payload.get('build_stage'),
        verb,
    )
def get_test_event_body(payload: Dict[str, Any]) -> str:
    """Confirmation message sent when the webhook is first configured."""
    repo_name = get_repo_name(payload)
    return u"Webhook for **{repo}** has been configured successfully! :tada:".format(
        repo=repo_name)
def get_pipeline_event_body(payload: Dict[str, Any]) -> str:
    """Describe a pipeline status change, listing each build's status."""
    status = payload['object_attributes'].get('status')
    status_phrases = {'pending': 'was created', 'running': 'started'}
    action = status_phrases.get(status, 'changed status to {}'.format(status))
    build_lines = [
        u"* {} - {}".format(build.get('name'), build.get('status'))
        for build in payload['builds']
    ]
    return u"Pipeline {} with build(s):\n{}.".format(action, u"\n".join(build_lines))
def get_repo_name(payload: Dict[str, Any]) -> str:
    """Return the project's name."""
    return payload['project']['name']
def get_user_name(payload: Dict[str, Any]) -> str:
    """Return the pushing user's name (push-style payloads)."""
    return payload['user_name']
def get_issue_user_name(payload: Dict[str, Any]) -> str:
    """Return the acting user's name (issue/MR/note-style payloads)."""
    return payload['user']['name']
def get_repository_homepage(payload: Dict[str, Any]) -> str:
    """Return the repository's homepage URL."""
    return payload['repository']['homepage']
def get_branch_name(payload: Dict[str, Any]) -> str:
    """Return the branch name, stripped of its 'refs/heads/' prefix."""
    return payload['ref'].replace('refs/heads/', '')
def get_tag_name(payload: Dict[str, Any]) -> str:
    """Return the tag name, stripped of its 'refs/tags/' prefix."""
    return payload['ref'].replace('refs/tags/', '')
def get_object_url(payload: Dict[str, Any]) -> str:
    """Return the issue/MR web URL."""
    return payload['object_attributes']['url']
# Maps the (action-qualified) GitLab event name to the function that renders
# its message body.  Keys must match what get_event() produces.
EVENT_FUNCTION_MAPPER = {
    'Push Hook': get_push_event_body,
    'Tag Push Hook': get_tag_push_event_body,
    'Test Hook': get_test_event_body,
    'Issue Hook open': get_issue_created_event_body,
    'Issue Hook close': partial(get_issue_event_body, action='closed'),
    'Issue Hook reopen': partial(get_issue_event_body, action='reopened'),
    'Issue Hook update': partial(get_issue_event_body, action='updated'),
    'Confidential Issue Hook open': get_issue_created_event_body,
    'Confidential Issue Hook close': partial(get_issue_event_body, action='closed'),
    'Confidential Issue Hook reopen': partial(get_issue_event_body, action='reopened'),
    'Confidential Issue Hook update': partial(get_issue_event_body, action='updated'),
    'Note Hook Commit': get_commented_commit_event_body,
    'Note Hook MergeRequest': get_commented_merge_request_event_body,
    'Note Hook Issue': get_commented_issue_event_body,
    'Confidential Note Hook Issue': get_commented_issue_event_body,
    'Note Hook Snippet': get_commented_snippet_event_body,
    'Merge Request Hook approved': partial(get_merge_request_event_body, action='approved'),
    'Merge Request Hook open': partial(get_merge_request_open_or_updated_body, action='created'),
    'Merge Request Hook update': get_merge_request_updated_event_body,
    'Merge Request Hook merge': partial(get_merge_request_event_body, action='merged'),
    'Merge Request Hook close': partial(get_merge_request_event_body, action='closed'),
    'Merge Request Hook reopen': partial(get_merge_request_event_body, action='reopened'),
    'Wiki Page Hook create': partial(get_wiki_page_event_body, action='created'),
    'Wiki Page Hook update': partial(get_wiki_page_event_body, action='updated'),
    'Job Hook': get_build_hook_event_body,
    'Build Hook': get_build_hook_event_body,
    'Pipeline Hook': get_pipeline_event_body,
}
@api_key_only_webhook_view("Gitlab")
@has_request_variables
def api_gitlab_webhook(request: HttpRequest, user_profile: UserProfile,
                       payload: Dict[str, Any]=REQ(argument_type='body'),
                       branches: Optional[str]=REQ(default=None),
                       user_specified_topic: Optional[str]=REQ("topic", default=None)) -> HttpResponse:
    """Entry point for GitLab webhooks.

    Resolves the event type, renders body and topic, and forwards the
    message.  A None event (a push to a branch not listed in `branches`)
    is silently ignored.
    """
    event = get_event(request, payload, branches)
    if event is not None:
        event_body_function = get_body_based_on_event(event)
        # Only handlers that accept include_title can embed the title in the
        # body when the user pinned a fixed topic.
        if 'include_title' in signature(event_body_function).parameters:
            body = event_body_function(
                payload,
                include_title=user_specified_topic is not None
            )
        else:
            body = event_body_function(payload)
        topic = get_subject_based_on_event(event, payload)
        check_send_webhook_message(request, user_profile, topic, body)
    return json_success()
def get_body_based_on_event(event: str) -> Any:
    """Look up the body-rendering function for the given event key."""
    return EVENT_FUNCTION_MAPPER[event]
def get_subject_based_on_event(event: str, payload: Dict[str, Any]) -> str:
    """Derive the message topic for the given event.

    Falls back to the bare repository name for events without a more
    specific topic format.
    """
    if event == 'Push Hook':
        return u"{} / {}".format(get_repo_name(payload), get_branch_name(payload))
    elif event == 'Job Hook' or event == 'Build Hook':
        return u"{} / {}".format(payload['repository'].get('name'), get_branch_name(payload))
    elif event == 'Pipeline Hook':
        return u"{} / {}".format(
            get_repo_name(payload),
            payload['object_attributes'].get('ref').replace('refs/heads/', ''))
    elif event.startswith('Merge Request Hook'):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='MR',
            id=payload['object_attributes'].get('iid'),
            title=payload['object_attributes'].get('title')
        )
    elif event.startswith('Issue Hook') or event.startswith('Confidential Issue Hook'):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Issue',
            id=payload['object_attributes'].get('iid'),
            title=payload['object_attributes'].get('title')
        )
    elif event == 'Note Hook Issue' or event == 'Confidential Note Hook Issue':
        # Note events carry the commented-on object in its own key, not in
        # object_attributes (which holds the comment).
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Issue',
            id=payload['issue'].get('iid'),
            title=payload['issue'].get('title')
        )
    elif event == 'Note Hook MergeRequest':
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='MR',
            id=payload['merge_request'].get('iid'),
            title=payload['merge_request'].get('title')
        )
    elif event == 'Note Hook Snippet':
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Snippet',
            id=payload['snippet'].get('id'),
            title=payload['snippet'].get('title')
        )
    return get_repo_name(payload)
def get_event(request: HttpRequest, payload: Dict[str, Any], branches: Optional[str]) -> Optional[str]:
    """Extract and normalize the GitLab event name from the request headers.

    For issue/MR/wiki hooks the payload's ``action`` is appended, for note
    hooks the ``noteable_type``.  Push events are filtered to ``None`` when
    the branch is not in the caller-supplied ``branches`` whitelist string.
    Raises UnexpectedWebhookEventType for events with no registered handler.
    """
    event = validate_extract_webhook_http_header(request, 'X_GITLAB_EVENT', 'GitLab')
    if event in ['Confidential Issue Hook', 'Issue Hook', 'Merge Request Hook', 'Wiki Page Hook']:
        action = payload['object_attributes'].get('action')
        event = "{} {}".format(event, action)
    elif event in ['Confidential Note Hook', 'Note Hook']:
        action = payload['object_attributes'].get('noteable_type')
        event = "{} {}".format(event, action)
    elif event == 'Push Hook':
        if branches is not None:
            branch = get_branch_name(payload)
            # Substring membership test; clearer than branches.find(...) == -1.
            if branch not in branches:
                return None
    # Membership test directly on the dict avoids building a throwaway key list.
    if event in EVENT_FUNCTION_MAPPER:
        return event
    raise UnexpectedWebhookEventType('GitLab', event)
| {
"repo_name": "tommyip/zulip",
"path": "zerver/webhooks/gitlab/view.py",
"copies": "1",
"size": "15920",
"license": "apache-2.0",
"hash": -31239028473353344,
"line_mean": 38.8997493734,
"line_max": 103,
"alpha_frac": 0.6197236181,
"autogenerated": false,
"ratio": 3.7239766081871344,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4843700226287134,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, Dict, Optional
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import UnexpectedWebhookEventType, check_send_webhook_message
from zerver.models import UserProfile
# Markdown message templates for Clubhouse webhook notifications.
# {name_template} is filled with one of the two name templates below.
EPIC_NAME_TEMPLATE = "**{name}**"
STORY_NAME_TEMPLATE = "[{name}]({app_url})"
COMMENT_ADDED_TEMPLATE = "New comment added to the {entity} {name_template}:\n``` quote\n{text}\n```"
NEW_DESC_ADDED_TEMPLATE = "New description added to the {entity} {name_template}:\n``` quote\n{new}\n```"
DESC_CHANGED_TEMPLATE = ("Description for the {entity} {name_template} was changed from:\n"
                         "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```")
DESC_REMOVED_TEMPLATE = "Description for the {entity} {name_template} was removed."
STATE_CHANGED_TEMPLATE = "State of the {entity} {name_template} was changed from **{old}** to **{new}**."
NAME_CHANGED_TEMPLATE = ("The name of the {entity} {name_template} was changed from:\n"
                         "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```")
ARCHIVED_TEMPLATE = "The {entity} {name_template} was {action}."
# Story-task lifecycle messages.
STORY_TASK_TEMPLATE = "Task **{task_description}** was {action} the story {name_template}."
STORY_TASK_COMPLETED_TEMPLATE = "Task **{task_description}** ({name_template}) was completed. :tada:"
STORY_ADDED_REMOVED_EPIC_TEMPLATE = ("The story {story_name_template} was {action} the"
                                     " epic {epic_name_template}.")
STORY_EPIC_CHANGED_TEMPLATE = ("The story {story_name_template} was moved from {old_epic_name_template}"
                               " to {new_epic_name_template}.")
STORY_ESTIMATE_TEMPLATE = "The estimate for the story {story_name_template} was set to {estimate}."
FILE_ATTACHMENT_TEMPLATE = "A {type} attachment `{file_name}` was added to the story {name_template}."
STORY_LABEL_TEMPLATE = "The label **{label_name}** was added to the story {name_template}."
STORY_UPDATE_PROJECT_TEMPLATE = ("The story {name_template} was moved from"
                                 " the **{old}** project to **{new}**.")
STORY_UPDATE_TYPE_TEMPLATE = ("The type of the story {name_template} was changed"
                              " from **{old_type}** to **{new_type}**.")
DELETE_TEMPLATE = "The {entity_type} **{name}** was deleted."
STORY_UPDATE_OWNER_TEMPLATE = "New owner added to the story {name_template}."
# GitHub-integration messages (PRs/branches linked to a story).
STORY_GITHUB_PR_TEMPLATE = ("New GitHub PR [#{name}]({url}) opened for story"
                            " {name_template} ({old} -> {new}).")
STORY_GITHUB_BRANCH_TEMPLATE = ("New GitHub branch [{name}]({url})"
                                " associated with story {name_template} ({old} -> {new}).")
def get_action_with_primary_id(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Return the action entry whose id matches the payload's primary_id."""
    primary_id = payload["primary_id"]
    for candidate in payload["actions"]:
        if candidate["id"] == primary_id:
            matched_action = candidate
    return matched_action
def get_event(payload: Dict[str, Any]) -> Optional[str]:
    """Compute the event key (entity_action[_change]) or None if ignored."""
    action = get_action_with_primary_id(payload)
    event = "{}_{}".format(action["entity_type"], action["action"])
    if event in IGNORED_EVENTS:
        return None
    changes = action.get("changes")
    if changes is not None:
        # First matching change key wins; the order below is significant.
        for change_key, suffix in (
            ("description", "description"),
            ("state", "state"),
            ("workflow_state_id", "state"),
            ("name", "name"),
            ("archived", "archived"),
            ("complete", "complete"),
            ("epic_id", "epic"),
            ("estimate", "estimate"),
            ("file_ids", "attachment"),
            ("label_ids", "label"),
            ("project_id", "project"),
            ("story_type", "type"),
            ("owner_ids", "owner"),
        ):
            if changes.get(change_key) is not None:
                event = "{}_{}".format(event, suffix)
                break
    return event
def get_topic_function_based_on_type(payload: Dict[str, Any]) -> Any:
    """Look up the topic-building function for the primary action's entity type."""
    primary_action = get_action_with_primary_id(payload)
    return EVENT_TOPIC_FUNCTION_MAPPER.get(primary_action["entity_type"])
def get_delete_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a deleted story or epic."""
    primary_action = get_action_with_primary_id(payload)
    return DELETE_TEMPLATE.format(**primary_action)
def get_story_create_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a newly created story."""
    action = get_action_with_primary_id(payload)
    epic_id = action.get("epic_id")
    if epic_id is None:
        message = "New story [{name}]({app_url}) of type **{story_type}** was created."
        kwargs = action
    else:
        message = "New story [{name}]({app_url}) was created and added to the epic **{epic_name}**."
        kwargs = {
            "name": action["name"],
            "app_url": action["app_url"],
        }
        # Resolve the epic's display name from the payload references.
        for ref in payload["references"]:
            if ref["id"] == epic_id:
                kwargs["epic_name"] = ref["name"]
    return message.format(**kwargs)
def get_epic_create_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a newly created epic."""
    primary_action = get_action_with_primary_id(payload)
    return "New epic **{name}**({state}) was created.".format(**primary_action)
def get_comment_added_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message body for a comment added to a story or epic."""
    kwargs = {"entity": entity}
    primary_id = payload["primary_id"]
    for action in payload["actions"]:
        if action["id"] == primary_id:
            # The primary action carries the comment text itself.
            kwargs["text"] = action["text"]
        elif action["entity_type"] == entity:
            kwargs["name_template"] = get_name_template(entity).format(
                name=action["name"],
                app_url=action.get("app_url"),
            )
    return COMMENT_ADDED_TEMPLATE.format(**kwargs)
def get_update_description_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message body for a description change on a story or epic."""
    action = get_action_with_primary_id(payload)
    desc_change = action["changes"]["description"]
    kwargs = {
        "entity": entity,
        "new": desc_change["new"],
        "old": desc_change["old"],
        "name_template": get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    }
    # Pick the template by whether the description was edited, added or removed.
    if kwargs["new"] and kwargs["old"]:
        return DESC_CHANGED_TEMPLATE.format(**kwargs)
    if kwargs["new"]:
        return NEW_DESC_ADDED_TEMPLATE.format(**kwargs)
    return DESC_REMOVED_TEMPLATE.format(**kwargs)
def get_epic_update_state_body(payload: Dict[str, Any]) -> str:
    """Build the message body for an epic state change."""
    action = get_action_with_primary_id(payload)
    state_change = action["changes"]["state"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="epic",
        new=state_change["new"],
        old=state_change["old"],
        name_template=EPIC_NAME_TEMPLATE.format(name=action["name"]),
    )
def get_story_update_state_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story workflow-state change."""
    action = get_action_with_primary_id(payload)
    state_change = action["changes"]["workflow_state_id"]
    # Resolve state ids to display names via the payload references.
    state_names = {}
    for ref in payload["references"]:
        if ref["id"] == state_change["new"]:
            state_names["new"] = ref["name"]
        if ref["id"] == state_change["old"]:
            state_names["old"] = ref["name"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="story",
        new=state_names["new"],
        old=state_names["old"],
        name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    )
def get_update_name_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message body for a story or epic rename."""
    action = get_action_with_primary_id(payload)
    name_change = action["changes"]["name"]
    return NAME_CHANGED_TEMPLATE.format(
        entity=entity,
        new=name_change["new"],
        old=name_change["old"],
        name_template=get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    )
def get_update_archived_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message body for an entity being archived or unarchived."""
    primary_action = get_action_with_primary_id(payload)
    verb = "archived" if primary_action["changes"]["archived"]["new"] else "unarchived"
    return ARCHIVED_TEMPLATE.format(
        entity=entity,
        name_template=get_name_template(entity).format(
            name=primary_action["name"],
            app_url=primary_action.get("app_url"),
        ),
        action=verb,
    )
def get_story_task_body(payload: Dict[str, Any], action: str) -> str:
    """Build the message body for a task added to or removed from a story."""
    primary_action = get_action_with_primary_id(payload)
    kwargs = {
        "task_description": primary_action["description"],
        "action": action,
    }
    # The owning story's name arrives as a sibling action in the payload.
    for sibling in payload["actions"]:
        if sibling["entity_type"] == "story":
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=sibling["name"],
                app_url=sibling["app_url"],
            )
    return STORY_TASK_TEMPLATE.format(**kwargs)
def get_story_task_completed_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the body for a completed story task; None if it was un-completed."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "task_description": action["description"],
    }
    story_id = action["story_id"]
    # Resolve the owning story's name from the references.
    for ref in payload["references"]:
        if ref["id"] == story_id:
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=ref["name"],
                app_url=ref["app_url"],
            )
    if not action["changes"]["complete"]["new"]:
        return None
    return STORY_TASK_COMPLETED_TEMPLATE.format(**kwargs)
def get_story_update_epic_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story moved between / into / out of epics."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "story_name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    epic_change = action["changes"]["epic_id"]
    new_epic_id = epic_change.get("new")
    old_epic_id = epic_change.get("old")
    for ref in payload["references"]:
        if ref["id"] == new_epic_id:
            kwargs["new_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=ref["name"])
        if ref["id"] == old_epic_id:
            kwargs["old_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=ref["name"])
    # Both ids present = moved; only one = added to / removed from an epic.
    if new_epic_id and old_epic_id:
        return STORY_EPIC_CHANGED_TEMPLATE.format(**kwargs)
    if new_epic_id:
        kwargs["epic_name_template"] = kwargs["new_epic_name_template"]
        kwargs["action"] = "added to"
    else:
        kwargs["epic_name_template"] = kwargs["old_epic_name_template"]
        kwargs["action"] = "removed from"
    return STORY_ADDED_REMOVED_EPIC_TEMPLATE.format(**kwargs)
def get_story_update_estimate_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story estimate change."""
    action = get_action_with_primary_id(payload)
    new_estimate = action["changes"]["estimate"].get("new")
    estimate = f"{new_estimate} points" if new_estimate else "*Unestimated*"
    return STORY_ESTIMATE_TEMPLATE.format(
        story_name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
        estimate=estimate,
    )
def get_reference_by_id(payload: Dict[str, Any], ref_id: int) -> Dict[str, Any]:
    """Return the reference entry matching ref_id, or an empty dict if absent."""
    found: Dict[str, Any] = {}
    for candidate in payload['references']:
        if candidate['id'] == ref_id:
            found = candidate
    return found
def get_story_create_github_entity_body(payload: Dict[str, Any],
                                        entity: str) -> str:
    """Build the message body for a GitHub PR or branch linked to a story."""
    action = get_action_with_primary_id(payload)
    # Find the sibling story action that carries the workflow-state change.
    story: Dict[str, Any] = {}
    for candidate in payload['actions']:
        if (candidate['entity_type'] == 'story' and
                candidate['changes'].get('workflow_state_id') is not None):
            story = candidate
    state_change = story['changes']['workflow_state_id']
    is_pull_request = entity == 'pull-request'
    kwargs = {
        'name_template': STORY_NAME_TEMPLATE.format(**story),
        'name': action.get('number') if is_pull_request else action.get('name'),
        'url': action['url'],
        'new': get_reference_by_id(payload, state_change['new'])['name'],
        'old': get_reference_by_id(payload, state_change['old'])['name'],
    }
    template = STORY_GITHUB_PR_TEMPLATE if is_pull_request else STORY_GITHUB_BRANCH_TEMPLATE
    return template.format(**kwargs)
def get_story_update_attachment_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the body for a file attached to a story; None for removals."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    added_file_ids = action["changes"]["file_ids"].get("adds")
    # Attachment-removal payloads are deliberately ignored.
    if not added_file_ids:
        return None
    first_file_id = added_file_ids[0]
    for ref in payload["references"]:
        if ref["id"] == first_file_id:
            kwargs["type"] = ref["entity_type"]
            kwargs["file_name"] = ref["name"]
    return FILE_ATTACHMENT_TEMPLATE.format(**kwargs)
def get_story_label_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the body for a label added to a story; None for removals."""
    primary_action = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=primary_action["name"],
            app_url=primary_action["app_url"],
        ),
    }
    added_label_ids = primary_action["changes"]["label_ids"].get("adds")
    # Label-removal payloads are deliberately ignored.
    if not added_label_ids:
        return None
    label_id = added_label_ids[0]
    # The label name may appear either as an action or as a reference.
    label_name = ''
    for other_action in payload["actions"]:
        if other_action['id'] == label_id:
            label_name = other_action.get('name', '')
    if not label_name:
        for reference in payload["references"]:
            if reference["id"] == label_id:
                label_name = reference.get('name', '')
    kwargs["label_name"] = label_name
    return STORY_LABEL_TEMPLATE.format(**kwargs)
def get_story_update_project_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story moved between projects."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    project_change = action["changes"]["project_id"]
    # Resolve both project ids to display names via the references.
    for ref in payload["references"]:
        if ref["id"] == project_change["new"]:
            kwargs["new"] = ref["name"]
        if ref["id"] == project_change["old"]:
            kwargs["old"] = ref["name"]
    return STORY_UPDATE_PROJECT_TEMPLATE.format(**kwargs)
def get_story_update_type_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story type change."""
    action = get_action_with_primary_id(payload)
    type_change = action["changes"]["story_type"]
    return STORY_UPDATE_TYPE_TEMPLATE.format(
        name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
        new_type=type_change["new"],
        old_type=type_change["old"],
    )
def get_story_update_owner_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a new owner added to a story."""
    action = get_action_with_primary_id(payload)
    name_template = STORY_NAME_TEMPLATE.format(
        name=action["name"],
        app_url=action["app_url"],
    )
    return STORY_UPDATE_OWNER_TEMPLATE.format(name_template=name_template)
def get_entity_name(payload: Dict[str, Any], entity: Optional[str]=None) -> Optional[str]:
    """Return a display name for the entity, searching actions then references."""
    primary_action = get_action_with_primary_id(payload)
    name = primary_action.get("name")
    # Branch actions carry the branch name, not the story name; keep searching.
    if name is None or primary_action['entity_type'] == 'branch':
        for other_action in payload["actions"]:
            if other_action["entity_type"] == entity:
                name = other_action["name"]
    if name is None:
        for ref in payload["references"]:
            if ref["entity_type"] == entity:
                name = ref["name"]
    return name
def get_name_template(entity: str) -> str:
    """Return the markdown name template for the given entity kind."""
    return STORY_NAME_TEMPLATE if entity == "story" else EPIC_NAME_TEMPLATE
# Maps the normalized event key produced by get_event() to the function that
# renders the message body for that event.
EVENT_BODY_FUNCTION_MAPPER = {
    "story_update_archived": partial(get_update_archived_body, entity='story'),
    "epic_update_archived": partial(get_update_archived_body, entity='epic'),
    "story_create": get_story_create_body,
    "pull-request_create": partial(get_story_create_github_entity_body, entity='pull-request'),
    "branch_create": partial(get_story_create_github_entity_body, entity='branch'),
    "story_delete": get_delete_body,
    "epic_delete": get_delete_body,
    "story-task_create": partial(get_story_task_body, action="added to"),
    "story-task_delete": partial(get_story_task_body, action="removed from"),
    "story-task_update_complete": get_story_task_completed_body,
    "story_update_epic": get_story_update_epic_body,
    "story_update_estimate": get_story_update_estimate_body,
    "story_update_attachment": get_story_update_attachment_body,
    "story_update_label": get_story_label_body,
    "story_update_owner": get_story_update_owner_body,
    "story_update_project": get_story_update_project_body,
    "story_update_type": get_story_update_type_body,
    "epic_create": get_epic_create_body,
    "epic-comment_create": partial(get_comment_added_body, entity='epic'),
    "story-comment_create": partial(get_comment_added_body, entity='story'),
    "epic_update_description": partial(get_update_description_body, entity='epic'),
    "story_update_description": partial(get_update_description_body, entity='story'),
    "epic_update_state": get_epic_update_state_body,
    "story_update_state": get_story_update_state_body,
    "epic_update_name": partial(get_update_name_body, entity='epic'),
    "story_update_name": partial(get_update_name_body, entity='story'),
}
# Maps the primary action's entity_type to the function that derives the
# message topic (stream subject) for that entity.
EVENT_TOPIC_FUNCTION_MAPPER = {
    "story": partial(get_entity_name, entity='story'),
    "pull-request": partial(get_entity_name, entity='story'),
    "branch": partial(get_entity_name, entity='story'),
    "story-comment": partial(get_entity_name, entity='story'),
    "story-task": partial(get_entity_name, entity='story'),
    "epic": partial(get_entity_name, entity='epic'),
    "epic-comment": partial(get_entity_name, entity='epic'),
}
# Event keys that get_event() silently drops (no message is sent).
IGNORED_EVENTS = {
    'story-comment_update',
}
@api_key_only_webhook_view('ClubHouse')
@has_request_variables
def api_clubhouse_webhook(
        request: HttpRequest, user_profile: UserProfile,
        payload: Optional[Dict[str, Any]]=REQ(argument_type='body'),
) -> HttpResponse:
    """Dispatch an incoming Clubhouse webhook to its body/topic builders."""
    # Clubhouse has a tendency to send empty POST requests to
    # third-party endpoints. It is unclear as to which event type
    # such requests correspond to. So, it is best to ignore such
    # requests for now.
    if payload is None:
        return json_success()
    event = get_event(payload)
    if event is not None:
        topic_func = get_topic_function_based_on_type(payload)
        body_func: Any = EVENT_BODY_FUNCTION_MAPPER.get(event)
        if body_func is None or topic_func is None:
            raise UnexpectedWebhookEventType('Clubhouse', event)
        topic = topic_func(payload)
        body = body_func(payload)
        # Some builders return None for payloads we deliberately ignore.
        if topic and body:
            check_send_webhook_message(request, user_profile, topic, body)
    return json_success()
| {
"repo_name": "brainwane/zulip",
"path": "zerver/webhooks/clubhouse/view.py",
"copies": "4",
"size": "20329",
"license": "apache-2.0",
"hash": -5022054431634078000,
"line_mean": 36.438305709,
"line_max": 105,
"alpha_frac": 0.6026858183,
"autogenerated": false,
"ratio": 3.5527787486892697,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.615546456698927,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, Dict, Optional
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.webhooks.common import check_send_webhook_message, \
UnexpectedWebhookEventType
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile
# Markdown message templates for Clubhouse webhook notifications.
# {name_template} is filled with one of the two name templates below.
EPIC_NAME_TEMPLATE = "**{name}**"
STORY_NAME_TEMPLATE = "[{name}]({app_url})"
COMMENT_ADDED_TEMPLATE = "New comment added to the {entity} {name_template}:\n``` quote\n{text}\n```"
NEW_DESC_ADDED_TEMPLATE = "New description added to the {entity} {name_template}:\n``` quote\n{new}\n```"
DESC_CHANGED_TEMPLATE = ("Description for the {entity} {name_template} was changed from:\n"
                         "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```")
DESC_REMOVED_TEMPLATE = "Description for the {entity} {name_template} was removed."
STATE_CHANGED_TEMPLATE = "State of the {entity} {name_template} was changed from **{old}** to **{new}**."
NAME_CHANGED_TEMPLATE = ("The name of the {entity} {name_template} was changed from:\n"
                         "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```")
ARCHIVED_TEMPLATE = "The {entity} {name_template} was {action}."
# Story-task lifecycle messages.
STORY_TASK_TEMPLATE = "Task **{task_description}** was {action} the story {name_template}."
STORY_TASK_COMPLETED_TEMPLATE = "Task **{task_description}** ({name_template}) was completed. :tada:"
STORY_ADDED_REMOVED_EPIC_TEMPLATE = ("The story {story_name_template} was {action} the"
                                     " epic {epic_name_template}.")
STORY_EPIC_CHANGED_TEMPLATE = ("The story {story_name_template} was moved from {old_epic_name_template}"
                               " to {new_epic_name_template}.")
STORY_ESTIMATE_TEMPLATE = "The estimate for the story {story_name_template} was set to {estimate}."
FILE_ATTACHMENT_TEMPLATE = "A {type} attachment `{file_name}` was added to the story {name_template}."
STORY_LABEL_TEMPLATE = "The label **{label_name}** was added to the story {name_template}."
STORY_UPDATE_PROJECT_TEMPLATE = ("The story {name_template} was moved from"
                                 " the **{old}** project to **{new}**.")
STORY_UPDATE_TYPE_TEMPLATE = ("The type of the story {name_template} was changed"
                              " from **{old_type}** to **{new_type}**.")
DELETE_TEMPLATE = "The {entity_type} **{name}** was deleted."
STORY_UPDATE_OWNER_TEMPLATE = "New owner added to the story {name_template}."
# GitHub-integration messages (PRs/branches linked to a story).
STORY_GITHUB_PR_TEMPLATE = ("New GitHub PR [#{name}]({url}) opened for story"
                            " {name_template} ({old} -> {new}).")
STORY_GITHUB_BRANCH_TEMPLATE = ("New GitHub branch [{name}]({url})"
                                " associated with story {name_template} ({old} -> {new}).")
def get_action_with_primary_id(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Return the action entry whose id matches the payload's primary_id."""
    target_id = payload["primary_id"]
    for entry in payload["actions"]:
        if entry["id"] == target_id:
            primary = entry
    return primary
def get_event(payload: Dict[str, Any]) -> Optional[str]:
    """Compute the event key (entity_action[_change]) or None if ignored."""
    action = get_action_with_primary_id(payload)
    event = "{}_{}".format(action["entity_type"], action["action"])
    if event in IGNORED_EVENTS:
        return None
    changes = action.get("changes")
    if changes is not None:
        # First matching change key wins; the order below is significant.
        for key, suffix in (
            ("description", "description"),
            ("state", "state"),
            ("workflow_state_id", "state"),
            ("name", "name"),
            ("archived", "archived"),
            ("complete", "complete"),
            ("epic_id", "epic"),
            ("estimate", "estimate"),
            ("file_ids", "attachment"),
            ("label_ids", "label"),
            ("project_id", "project"),
            ("story_type", "type"),
            ("owner_ids", "owner"),
        ):
            if changes.get(key) is not None:
                event = "{}_{}".format(event, suffix)
                break
    return event
def get_topic_function_based_on_type(payload: Dict[str, Any]) -> Any:
    """Look up the topic-building function for the primary action's entity type."""
    primary = get_action_with_primary_id(payload)
    return EVENT_TOPIC_FUNCTION_MAPPER.get(primary["entity_type"])
def get_delete_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a deleted story or epic."""
    primary = get_action_with_primary_id(payload)
    return DELETE_TEMPLATE.format(**primary)
def get_story_create_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a newly created story."""
    action = get_action_with_primary_id(payload)
    epic_id = action.get("epic_id")
    if epic_id is None:
        message = "New story [{name}]({app_url}) of type **{story_type}** was created."
        kwargs = action
    else:
        message = "New story [{name}]({app_url}) was created and added to the epic **{epic_name}**."
        kwargs = {
            "name": action["name"],
            "app_url": action["app_url"],
        }
        # Resolve the epic's display name from the payload references.
        for reference in payload["references"]:
            if reference["id"] == epic_id:
                kwargs["epic_name"] = reference["name"]
    return message.format(**kwargs)
def get_epic_create_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a newly created epic."""
    primary = get_action_with_primary_id(payload)
    return "New epic **{name}**({state}) was created.".format(**primary)
def get_comment_added_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message body for a comment added to a story or epic."""
    kwargs = {"entity": entity}
    primary_id = payload["primary_id"]
    for action in payload["actions"]:
        if action["id"] == primary_id:
            # The primary action carries the comment text itself.
            kwargs["text"] = action["text"]
        elif action["entity_type"] == entity:
            kwargs["name_template"] = get_name_template(entity).format(
                name=action["name"],
                app_url=action.get("app_url")
            )
    return COMMENT_ADDED_TEMPLATE.format(**kwargs)
def get_update_description_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message body for a description change on a story or epic."""
    action = get_action_with_primary_id(payload)
    desc_change = action["changes"]["description"]
    kwargs = {
        "entity": entity,
        "new": desc_change["new"],
        "old": desc_change["old"],
        "name_template": get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url")
        )
    }
    # Pick the template by whether the description was edited, added or removed.
    if kwargs["new"] and kwargs["old"]:
        return DESC_CHANGED_TEMPLATE.format(**kwargs)
    if kwargs["new"]:
        return NEW_DESC_ADDED_TEMPLATE.format(**kwargs)
    return DESC_REMOVED_TEMPLATE.format(**kwargs)
def get_epic_update_state_body(payload: Dict[str, Any]) -> str:
    """Build the message body for an epic state change."""
    action = get_action_with_primary_id(payload)
    state_change = action["changes"]["state"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="epic",
        new=state_change["new"],
        old=state_change["old"],
        name_template=EPIC_NAME_TEMPLATE.format(name=action["name"]),
    )
def get_story_update_state_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story workflow-state change."""
    action = get_action_with_primary_id(payload)
    state_change = action["changes"]["workflow_state_id"]
    # Resolve state ids to display names via the payload references.
    state_names = {}
    for reference in payload["references"]:
        if reference["id"] == state_change["new"]:
            state_names["new"] = reference["name"]
        if reference["id"] == state_change["old"]:
            state_names["old"] = reference["name"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="story",
        new=state_names["new"],
        old=state_names["old"],
        name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    )
def get_update_name_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message body for a story or epic rename."""
    action = get_action_with_primary_id(payload)
    name_change = action["changes"]["name"]
    return NAME_CHANGED_TEMPLATE.format(
        entity=entity,
        new=name_change["new"],
        old=name_change["old"],
        name_template=get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url")
        )
    )
def get_update_archived_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message body for an entity being archived or unarchived."""
    primary_action = get_action_with_primary_id(payload)
    verb = "archived" if primary_action["changes"]["archived"]["new"] else "unarchived"
    return ARCHIVED_TEMPLATE.format(
        entity=entity,
        name_template=get_name_template(entity).format(
            name=primary_action["name"],
            app_url=primary_action.get("app_url")
        ),
        action=verb,
    )
def get_story_task_body(payload: Dict[str, Any], action: str) -> str:
    """Build the message body for a task added to or removed from a story."""
    primary_action = get_action_with_primary_id(payload)
    kwargs = {
        "task_description": primary_action["description"],
        "action": action,
    }
    # The owning story's name arrives as a sibling action in the payload.
    for sibling in payload["actions"]:
        if sibling["entity_type"] == "story":
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=sibling["name"],
                app_url=sibling["app_url"],
            )
    return STORY_TASK_TEMPLATE.format(**kwargs)
def get_story_task_completed_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the body for a completed story task; None if it was un-completed."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "task_description": action["description"],
    }
    story_id = action["story_id"]
    # Resolve the owning story's name from the references.
    for reference in payload["references"]:
        if reference["id"] == story_id:
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=reference["name"],
                app_url=reference["app_url"],
            )
    if not action["changes"]["complete"]["new"]:
        return None
    return STORY_TASK_COMPLETED_TEMPLATE.format(**kwargs)
def get_story_update_epic_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story moved between / into / out of epics."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "story_name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"]
        ),
    }
    epic_change = action["changes"]["epic_id"]
    new_epic_id = epic_change.get("new")
    old_epic_id = epic_change.get("old")
    for reference in payload["references"]:
        if reference["id"] == new_epic_id:
            kwargs["new_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=reference["name"])
        if reference["id"] == old_epic_id:
            kwargs["old_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=reference["name"])
    # Both ids present = moved; only one = added to / removed from an epic.
    if new_epic_id and old_epic_id:
        return STORY_EPIC_CHANGED_TEMPLATE.format(**kwargs)
    if new_epic_id:
        kwargs["epic_name_template"] = kwargs["new_epic_name_template"]
        kwargs["action"] = "added to"
    else:
        kwargs["epic_name_template"] = kwargs["old_epic_name_template"]
        kwargs["action"] = "removed from"
    return STORY_ADDED_REMOVED_EPIC_TEMPLATE.format(**kwargs)
def get_story_update_estimate_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story estimate change."""
    action = get_action_with_primary_id(payload)
    new_estimate = action["changes"]["estimate"].get("new")
    if new_estimate:
        estimate = "{} points".format(new_estimate)
    else:
        estimate = "*Unestimated*"
    return STORY_ESTIMATE_TEMPLATE.format(
        story_name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"]
        ),
        estimate=estimate,
    )
def get_reference_by_id(payload: Dict[str, Any], ref_id: int) -> Dict[str, Any]:
    """Return the reference entry matching ref_id, or an empty dict if absent."""
    matched = {}  # type: Dict[str, Any]
    for entry in payload['references']:
        if entry['id'] == ref_id:
            matched = entry
    return matched
def get_story_create_github_entity_body(payload: Dict[str, Any],
                                        entity: str) -> str:
    """Build the message body for a GitHub PR or branch linked to a story."""
    action = get_action_with_primary_id(payload)
    # Find the sibling story action that carries the workflow-state change.
    story = {}  # type: Dict[str, Any]
    for candidate in payload['actions']:
        if (candidate['entity_type'] == 'story' and
                candidate['changes'].get('workflow_state_id') is not None):
            story = candidate
    state_change = story['changes']['workflow_state_id']
    is_pull_request = entity == 'pull-request'
    kwargs = {
        'name_template': STORY_NAME_TEMPLATE.format(**story),
        'name': action.get('number') if is_pull_request else action.get('name'),
        'url': action['url'],
        'new': get_reference_by_id(payload, state_change['new'])['name'],
        'old': get_reference_by_id(payload, state_change['old'])['name'],
    }
    template = STORY_GITHUB_PR_TEMPLATE if is_pull_request else STORY_GITHUB_BRANCH_TEMPLATE
    return template.format(**kwargs)
def get_story_update_attachment_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the body for a file attached to a story; None for removals."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"]
        )
    }
    added_file_ids = action["changes"]["file_ids"].get("adds")
    # Attachment-removal payloads are deliberately ignored.
    if not added_file_ids:
        return None
    first_file_id = added_file_ids[0]
    for reference in payload["references"]:
        if reference["id"] == first_file_id:
            kwargs["type"] = reference["entity_type"]
            kwargs["file_name"] = reference["name"]
    return FILE_ATTACHMENT_TEMPLATE.format(**kwargs)
def get_story_label_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the body for a label added to a story; None for removals."""
    primary_action = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=primary_action["name"],
            app_url=primary_action["app_url"]
        )
    }
    added_label_ids = primary_action["changes"]["label_ids"].get("adds")
    # Label-removal payloads are deliberately ignored.
    if not added_label_ids:
        return None
    label_id = added_label_ids[0]
    # The label name may appear either as an action or as a reference.
    label_name = ''
    for other_action in payload["actions"]:
        if other_action['id'] == label_id:
            label_name = other_action.get('name', '')
    if not label_name:
        for reference in payload["references"]:
            if reference["id"] == label_id:
                label_name = reference.get('name', '')
    kwargs["label_name"] = label_name
    return STORY_LABEL_TEMPLATE.format(**kwargs)
def get_story_update_project_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a story moved between projects."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"]
        )
    }
    project_change = action["changes"]["project_id"]
    # Resolve both project ids to display names via the references.
    for reference in payload["references"]:
        if reference["id"] == project_change["new"]:
            kwargs["new"] = reference["name"]
        if reference["id"] == project_change["old"]:
            kwargs["old"] = reference["name"]
    return STORY_UPDATE_PROJECT_TEMPLATE.format(**kwargs)
def get_story_update_type_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story's type being changed."""
    primary = get_action_with_primary_id(payload)
    type_change = primary["changes"]["story_type"]
    return STORY_UPDATE_TYPE_TEMPLATE.format(
        name_template=STORY_NAME_TEMPLATE.format(
            name=primary["name"],
            app_url=primary["app_url"],
        ),
        new_type=type_change["new"],
        old_type=type_change["old"],
    )
def get_story_update_owner_body(payload: Dict[str, Any]) -> str:
    """Build the message for a new owner being added to a story."""
    primary = get_action_with_primary_id(payload)
    name_template = STORY_NAME_TEMPLATE.format(
        name=primary["name"],
        app_url=primary["app_url"],
    )
    return STORY_UPDATE_OWNER_TEMPLATE.format(name_template=name_template)
def get_entity_name(payload: Dict[str, Any], entity: Optional[str]=None) -> Optional[str]:
    """Return the name of the entity the payload concerns, or None if unknown."""
    primary = get_action_with_primary_id(payload)
    name = primary.get("name")
    # For branch actions (and nameless ones) look up the owning entity instead.
    if name is None or primary['entity_type'] == 'branch':
        for candidate in payload["actions"]:
            if candidate["entity_type"] == entity:
                name = candidate["name"]
    if name is None:
        for reference in payload["references"]:
            if reference["entity_type"] == entity:
                name = reference["name"]
    return name
def get_name_template(entity: str) -> str:
    """Return the linked-name template for stories, the bold one for epics."""
    return STORY_NAME_TEMPLATE if entity == "story" else EPIC_NAME_TEMPLATE
# Maps the "<entity>_<action>[_<change>]" event names produced by get_event()
# to the function that renders the message body for that event.
EVENT_BODY_FUNCTION_MAPPER = {
    "story_update_archived": partial(get_update_archived_body, entity='story'),
    "epic_update_archived": partial(get_update_archived_body, entity='epic'),
    "story_create": get_story_create_body,
    "pull-request_create": partial(get_story_create_github_entity_body, entity='pull-request'),
    "branch_create": partial(get_story_create_github_entity_body, entity='branch'),
    "story_delete": get_delete_body,
    "epic_delete": get_delete_body,
    "story-task_create": partial(get_story_task_body, action="added to"),
    "story-task_delete": partial(get_story_task_body, action="removed from"),
    "story-task_update_complete": get_story_task_completed_body,
    "story_update_epic": get_story_update_epic_body,
    "story_update_estimate": get_story_update_estimate_body,
    "story_update_attachment": get_story_update_attachment_body,
    "story_update_label": get_story_label_body,
    "story_update_owner": get_story_update_owner_body,
    "story_update_project": get_story_update_project_body,
    "story_update_type": get_story_update_type_body,
    "epic_create": get_epic_create_body,
    "epic-comment_create": partial(get_comment_added_body, entity='epic'),
    "story-comment_create": partial(get_comment_added_body, entity='story'),
    "epic_update_description": partial(get_update_description_body, entity='epic'),
    "story_update_description": partial(get_update_description_body, entity='story'),
    "epic_update_state": get_epic_update_state_body,
    "story_update_state": get_story_update_state_body,
    "epic_update_name": partial(get_update_name_body, entity='epic'),
    "story_update_name": partial(get_update_name_body, entity='story'),
}

# Maps the payload's entity type to the topic-deriving function; PRs, branches,
# comments and tasks are all filed under the topic of their story or epic.
EVENT_TOPIC_FUNCTION_MAPPER = {
    "story": partial(get_entity_name, entity='story'),
    "pull-request": partial(get_entity_name, entity='story'),
    "branch": partial(get_entity_name, entity='story'),
    "story-comment": partial(get_entity_name, entity='story'),
    "story-task": partial(get_entity_name, entity='story'),
    "epic": partial(get_entity_name, entity='epic'),
    "epic-comment": partial(get_entity_name, entity='epic'),
}

# Event names that are deliberately dropped (get_event() returns None for them).
IGNORED_EVENTS = {
    'story-comment_update',
}
@api_key_only_webhook_view('ClubHouse')
@has_request_variables
def api_clubhouse_webhook(
    request: HttpRequest, user_profile: UserProfile,
    payload: Optional[Dict[str, Any]]=REQ(argument_type='body')
) -> HttpResponse:
    """Webhook entry point: derive the Clubhouse event, render it, and send it."""
    # Clubhouse has a tendency to send empty POST requests to
    # third-party endpoints. It is unclear as to which event type
    # such requests correspond to. So, it is best to ignore such
    # requests for now.
    if payload is None:
        return json_success()

    event = get_event(payload)
    # get_event() returns None for deliberately ignored events.
    if event is None:
        return json_success()

    body_func = EVENT_BODY_FUNCTION_MAPPER.get(event)  # type: Any
    topic_func = get_topic_function_based_on_type(payload)
    if body_func is None or topic_func is None:
        raise UnexpectedWebhookEventType('Clubhouse', event)

    topic = topic_func(payload)
    body = body_func(payload)

    # Some body builders return None (e.g. label/attachment removals); send
    # nothing in that case.
    if topic and body:
        check_send_webhook_message(request, user_profile, topic, body)

    return json_success()
| {
"repo_name": "tommyip/zulip",
"path": "zerver/webhooks/clubhouse/view.py",
"copies": "1",
"size": "20347",
"license": "apache-2.0",
"hash": -131021427025417120,
"line_mean": 36.3339449541,
"line_max": 105,
"alpha_frac": 0.6029881555,
"autogenerated": false,
"ratio": 3.548482734565748,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9644676539064156,
"avg_score": 0.0013588702003184308,
"num_lines": 545
} |
from functools import partial
from typing import Any, Dict, Optional
from django.http import HttpRequest, HttpResponse
from zerver.decorator import webhook_view
from zerver.lib.exceptions import UnsupportedWebhookEventType
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
# Message templates used to render the notification bodies below.
# {name_template} is filled with EPIC_NAME_TEMPLATE or STORY_NAME_TEMPLATE.
EPIC_NAME_TEMPLATE = "**{name}**"
STORY_NAME_TEMPLATE = "[{name}]({app_url})"
COMMENT_ADDED_TEMPLATE = "New comment added to the {entity} {name_template}:\n``` quote\n{text}\n```"
NEW_DESC_ADDED_TEMPLATE = "New description added to the {entity} {name_template}:\n``` quote\n{new}\n```"
DESC_CHANGED_TEMPLATE = ("Description for the {entity} {name_template} was changed from:\n"
                         "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```")
DESC_REMOVED_TEMPLATE = "Description for the {entity} {name_template} was removed."
STATE_CHANGED_TEMPLATE = "State of the {entity} {name_template} was changed from **{old}** to **{new}**."
NAME_CHANGED_TEMPLATE = ("The name of the {entity} {name_template} was changed from:\n"
                         "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```")
ARCHIVED_TEMPLATE = "The {entity} {name_template} was {action}."
STORY_TASK_TEMPLATE = "Task **{task_description}** was {action} the story {name_template}."
STORY_TASK_COMPLETED_TEMPLATE = "Task **{task_description}** ({name_template}) was completed. :tada:"
STORY_ADDED_REMOVED_EPIC_TEMPLATE = ("The story {story_name_template} was {action} the"
                                     " epic {epic_name_template}.")
STORY_EPIC_CHANGED_TEMPLATE = ("The story {story_name_template} was moved from {old_epic_name_template}"
                               " to {new_epic_name_template}.")
STORY_ESTIMATE_TEMPLATE = "The estimate for the story {story_name_template} was set to {estimate}."
FILE_ATTACHMENT_TEMPLATE = "A {type} attachment `{file_name}` was added to the story {name_template}."
STORY_LABEL_TEMPLATE = "The label **{label_name}** was added to the story {name_template}."
STORY_UPDATE_PROJECT_TEMPLATE = ("The story {name_template} was moved from"
                                 " the **{old}** project to **{new}**.")
STORY_UPDATE_TYPE_TEMPLATE = ("The type of the story {name_template} was changed"
                              " from **{old_type}** to **{new_type}**.")
DELETE_TEMPLATE = "The {entity_type} **{name}** was deleted."
STORY_UPDATE_OWNER_TEMPLATE = "New owner added to the story {name_template}."
STORY_GITHUB_PR_TEMPLATE = ("New GitHub PR [#{name}]({url}) opened for story"
                            " {name_template} ({old} -> {new}).")
STORY_GITHUB_BRANCH_TEMPLATE = ("New GitHub branch [{name}]({url})"
                                " associated with story {name_template} ({old} -> {new}).")
def get_action_with_primary_id(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Return the action dict whose id matches the payload's primary_id.

    If several actions share the primary id, the last match wins (preserving
    the original scanning behavior).

    Raises:
        ValueError: if no action carries the primary id. Previously this case
            surfaced as an accidental NameError on an unbound local.
    """
    action_with_primary_id = None
    for action in payload["actions"]:
        if payload["primary_id"] == action["id"]:
            action_with_primary_id = action
    if action_with_primary_id is None:
        raise ValueError("No action matching the payload's primary_id was found")
    return action_with_primary_id
def get_event(payload: Dict[str, Any]) -> Optional[str]:
    """Derive the "<entity>_<action>[_<change>]" event name from the payload.

    Returns None for events listed in IGNORED_EVENTS.
    """
    action = get_action_with_primary_id(payload)
    event = "{}_{}".format(action["entity_type"], action["action"])
    if event in IGNORED_EVENTS:
        return None

    changes = action.get("changes")
    if changes is not None:
        # Only the FIRST matching change key determines the event suffix, so
        # the order of these checks is significant.
        if changes.get("description") is not None:
            event = "{}_{}".format(event, "description")
        elif changes.get("state") is not None:
            event = "{}_{}".format(event, "state")
        elif changes.get("workflow_state_id") is not None:
            # Stories express state changes via workflow state ids.
            event = "{}_{}".format(event, "state")
        elif changes.get("name") is not None:
            event = "{}_{}".format(event, "name")
        elif changes.get("archived") is not None:
            event = "{}_{}".format(event, "archived")
        elif changes.get("complete") is not None:
            event = "{}_{}".format(event, "complete")
        elif changes.get("epic_id") is not None:
            event = "{}_{}".format(event, "epic")
        elif changes.get("estimate") is not None:
            event = "{}_{}".format(event, "estimate")
        elif changes.get("file_ids") is not None:
            event = "{}_{}".format(event, "attachment")
        elif changes.get("label_ids") is not None:
            event = "{}_{}".format(event, "label")
        elif changes.get("project_id") is not None:
            event = "{}_{}".format(event, "project")
        elif changes.get("story_type") is not None:
            event = "{}_{}".format(event, "type")
        elif changes.get("owner_ids") is not None:
            event = "{}_{}".format(event, "owner")

    return event
def get_topic_function_based_on_type(payload: Dict[str, Any]) -> Any:
    """Look up the topic-building function keyed by the primary action's entity type."""
    primary_action = get_action_with_primary_id(payload)
    return EVENT_TOPIC_FUNCTION_MAPPER.get(primary_action["entity_type"])
def get_delete_body(payload: Dict[str, Any]) -> str:
    """Build the message for a deleted story or epic."""
    primary_action = get_action_with_primary_id(payload)
    # The action dict itself carries the entity_type and name placeholders.
    return DELETE_TEMPLATE.format(**primary_action)
def get_story_create_body(payload: Dict[str, Any]) -> str:
    """Build the message announcing a newly created story."""
    action = get_action_with_primary_id(payload)
    epic_id = action.get("epic_id")
    if epic_id is None:
        # No epic: the action dict itself supplies name/app_url/story_type.
        message = "New story [{name}]({app_url}) of type **{story_type}** was created."
        return message.format(**action)
    kwargs = {
        "name": action["name"],
        "app_url": action["app_url"],
    }
    # The epic's display name has to be resolved via the references list.
    for ref in payload["references"]:
        if ref["id"] == epic_id:
            kwargs["epic_name"] = ref["name"]
    message = "New story [{name}]({app_url}) was created and added to the epic **{epic_name}**."
    return message.format(**kwargs)
def get_epic_create_body(payload: Dict[str, Any]) -> str:
    """Build the message announcing a newly created epic."""
    primary_action = get_action_with_primary_id(payload)
    return "New epic **{name}**({state}) was created.".format(**primary_action)
def get_comment_added_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message for a comment added to a story or epic."""
    kwargs = {"entity": entity}
    for action in payload["actions"]:
        if action["id"] == payload["primary_id"]:
            # The primary action carries the comment text itself.
            kwargs["text"] = action["text"]
        elif action["entity_type"] == entity:
            # A sibling action describes the commented story/epic.
            kwargs["name_template"] = get_name_template(entity).format(
                name=action["name"],
                app_url=action.get("app_url"),
            )
    return COMMENT_ADDED_TEMPLATE.format(**kwargs)
def get_update_description_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message for a description that was added, edited, or removed."""
    action = get_action_with_primary_id(payload)
    desc = action["changes"]["description"]
    kwargs = {
        "entity": entity,
        "new": desc["new"],
        "old": desc["old"],
        "name_template": get_name_template(entity).format(
            name=action["name"],
            app_url=action.get("app_url"),
        ),
    }
    # Pick the template from which of old/new description text is non-empty.
    if not kwargs["new"]:
        return DESC_REMOVED_TEMPLATE.format(**kwargs)
    if kwargs["old"]:
        return DESC_CHANGED_TEMPLATE.format(**kwargs)
    return NEW_DESC_ADDED_TEMPLATE.format(**kwargs)
def get_epic_update_state_body(payload: Dict[str, Any]) -> str:
    """Build the message for an epic's state change."""
    primary = get_action_with_primary_id(payload)
    state_change = primary["changes"]["state"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="epic",
        new=state_change["new"],
        old=state_change["old"],
        name_template=EPIC_NAME_TEMPLATE.format(name=primary["name"]),
    )
def get_story_update_state_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story moving between workflow states."""
    primary = get_action_with_primary_id(payload)
    state_change = primary["changes"]["workflow_state_id"]
    # Workflow state ids are resolved to names via the references list.
    state_names = {}
    for reference in payload["references"]:
        if reference["id"] == state_change["new"]:
            state_names["new"] = reference["name"]
        if reference["id"] == state_change["old"]:
            state_names["old"] = reference["name"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="story",
        new=state_names["new"],
        old=state_names["old"],
        name_template=STORY_NAME_TEMPLATE.format(
            name=primary["name"],
            app_url=primary.get("app_url"),
        ),
    )
def get_update_name_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message for a story/epic rename."""
    primary = get_action_with_primary_id(payload)
    name_change = primary["changes"]["name"]
    return NAME_CHANGED_TEMPLATE.format(
        entity=entity,
        new=name_change["new"],
        old=name_change["old"],
        name_template=get_name_template(entity).format(
            name=primary["name"],
            app_url=primary.get("app_url"),
        ),
    )
def get_update_archived_body(payload: Dict[str, Any], entity: str) -> str:
    """Build the message for an entity being archived or unarchived."""
    primary_action = get_action_with_primary_id(payload)
    archived_now = primary_action["changes"]["archived"]["new"]
    return ARCHIVED_TEMPLATE.format(
        entity=entity,
        name_template=get_name_template(entity).format(
            name=primary_action["name"],
            app_url=primary_action.get("app_url"),
        ),
        action="archived" if archived_now else "unarchived",
    )
def get_story_task_body(payload: Dict[str, Any], action: str) -> str:
    """Build the message for a task being added to or removed from a story."""
    kwargs = {
        "task_description": get_action_with_primary_id(payload)["description"],
        "action": action,
    }
    # The owning story is described by a sibling action.
    for story_action in payload["actions"]:
        if story_action["entity_type"] == "story":
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=story_action["name"],
                app_url=story_action["app_url"],
            )
    return STORY_TASK_TEMPLATE.format(**kwargs)
def get_story_task_completed_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the message for a completed task, or None when a task is un-completed."""
    primary = get_action_with_primary_id(payload)
    if not primary["changes"]["complete"]["new"]:
        return None
    kwargs = {"task_description": primary["description"]}
    # The owning story is resolved via the references list.
    story_id = primary["story_id"]
    for reference in payload["references"]:
        if reference["id"] == story_id:
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=reference["name"],
                app_url=reference["app_url"],
            )
    return STORY_TASK_COMPLETED_TEMPLATE.format(**kwargs)
def get_story_update_epic_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story added to / removed from / moved between epics."""
    action = get_action_with_primary_id(payload)
    kwargs = {
        "story_name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"],
            app_url=action["app_url"],
        ),
    }
    # Both ids are present when the story moves between epics; exactly one is
    # present for a plain add or remove.
    new_id = action["changes"]["epic_id"].get("new")
    old_id = action["changes"]["epic_id"].get("old")
    for ref in payload["references"]:
        if ref["id"] == new_id:
            kwargs["new_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=ref["name"])
        if ref["id"] == old_id:
            kwargs["old_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=ref["name"])
    if new_id and old_id:
        return STORY_EPIC_CHANGED_TEMPLATE.format(**kwargs)
    elif new_id:
        # NOTE(review): assumes the epic id appears in payload["references"];
        # otherwise the lookup above never fires and this raises KeyError.
        kwargs["epic_name_template"] = kwargs["new_epic_name_template"]
        kwargs["action"] = "added to"
    else:
        kwargs["epic_name_template"] = kwargs["old_epic_name_template"]
        kwargs["action"] = "removed from"
    return STORY_ADDED_REMOVED_EPIC_TEMPLATE.format(**kwargs)
def get_story_update_estimate_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story's estimate being set or cleared."""
    primary = get_action_with_primary_id(payload)
    new_estimate = primary["changes"]["estimate"].get("new")
    estimate = f"{new_estimate} points" if new_estimate else "*Unestimated*"
    return STORY_ESTIMATE_TEMPLATE.format(
        story_name_template=STORY_NAME_TEMPLATE.format(
            name=primary["name"],
            app_url=primary["app_url"],
        ),
        estimate=estimate,
    )
def get_reference_by_id(payload: Dict[str, Any], ref_id: int) -> Dict[str, Any]:
    """Return the last reference whose id equals ref_id, or {} if none matches."""
    match: Dict[str, Any] = {}
    for candidate in payload['references']:
        if candidate['id'] == ref_id:
            match = candidate
    return match
def get_story_create_github_entity_body(payload: Dict[str, Any],
                                        entity: str) -> str:
    """Build the message for a GitHub PR or branch being linked to a story.

    `entity` is either 'pull-request' or 'branch' (see EVENT_BODY_FUNCTION_MAPPER).
    """
    action = get_action_with_primary_id(payload)
    # The workflow-state transition lives on a sibling story action, not on
    # the primary (PR/branch) action.
    # NOTE(review): assumes such a story action is always present; if not,
    # `story` stays {} and the lookups below raise KeyError.
    story: Dict[str, Any] = {}
    for a in payload['actions']:
        if (a['entity_type'] == 'story' and
                a['changes'].get('workflow_state_id') is not None):
            story = a
    new_state_id = story['changes']['workflow_state_id']['new']
    old_state_id = story['changes']['workflow_state_id']['old']
    new_state = get_reference_by_id(payload, new_state_id)['name']
    old_state = get_reference_by_id(payload, old_state_id)['name']
    kwargs = {
        'name_template': STORY_NAME_TEMPLATE.format(**story),
        # PRs are identified by number, branches by name.
        'name': action.get('number') if entity == 'pull-request' else action.get('name'),
        'url': action['url'],
        'new': new_state,
        'old': old_state,
    }
    template = STORY_GITHUB_PR_TEMPLATE if entity == 'pull-request' else STORY_GITHUB_BRANCH_TEMPLATE
    return template.format(**kwargs)
def get_story_update_attachment_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the message for a file attached to a story; None for removals."""
    primary = get_action_with_primary_id(payload)
    added_file_ids = primary["changes"]["file_ids"].get("adds")
    # Removal payloads carry no "adds" list; those events are ignored.
    if not added_file_ids:
        return None
    attached_id = added_file_ids[0]
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=primary["name"],
            app_url=primary["app_url"],
        ),
    }
    # Resolve the attachment's type and file name from the references list.
    for reference in payload["references"]:
        if reference["id"] == attached_id:
            kwargs["type"] = reference["entity_type"]
            kwargs["file_name"] = reference["name"]
    return FILE_ATTACHMENT_TEMPLATE.format(**kwargs)
def get_story_label_body(payload: Dict[str, Any]) -> Optional[str]:
    """Build the message for a label added to a story; None for removals."""
    primary = get_action_with_primary_id(payload)
    added_label_ids = primary["changes"]["label_ids"].get("adds")
    # Removal payloads carry no "adds" list; those events are ignored.
    if not added_label_ids:
        return None
    label_id = added_label_ids[0]
    # The label's name may arrive either as a sibling action or a reference.
    label_name = ''
    for other_action in payload["actions"]:
        if other_action['id'] == label_id:
            label_name = other_action.get('name', '')
    if not label_name:
        for reference in payload["references"]:
            if reference["id"] == label_id:
                label_name = reference.get('name', '')
    return STORY_LABEL_TEMPLATE.format(
        name_template=STORY_NAME_TEMPLATE.format(
            name=primary["name"],
            app_url=primary["app_url"],
        ),
        label_name=label_name,
    )
def get_story_update_project_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story moving between projects."""
    primary = get_action_with_primary_id(payload)
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=primary["name"],
            app_url=primary["app_url"],
        ),
    }
    project_change = primary["changes"]["project_id"]
    # Project ids are resolved to display names via the references list.
    for reference in payload["references"]:
        if reference["id"] == project_change["new"]:
            kwargs["new"] = reference["name"]
        if reference["id"] == project_change["old"]:
            kwargs["old"] = reference["name"]
    return STORY_UPDATE_PROJECT_TEMPLATE.format(**kwargs)
def get_story_update_type_body(payload: Dict[str, Any]) -> str:
    """Build the message for a story's type being changed."""
    primary = get_action_with_primary_id(payload)
    type_change = primary["changes"]["story_type"]
    return STORY_UPDATE_TYPE_TEMPLATE.format(
        name_template=STORY_NAME_TEMPLATE.format(
            name=primary["name"],
            app_url=primary["app_url"],
        ),
        new_type=type_change["new"],
        old_type=type_change["old"],
    )
def get_story_update_owner_body(payload: Dict[str, Any]) -> str:
    """Build the message for a new owner being added to a story."""
    primary = get_action_with_primary_id(payload)
    name_template = STORY_NAME_TEMPLATE.format(
        name=primary["name"],
        app_url=primary["app_url"],
    )
    return STORY_UPDATE_OWNER_TEMPLATE.format(name_template=name_template)
def get_entity_name(payload: Dict[str, Any], entity: Optional[str]=None) -> Optional[str]:
    """Return the name of the entity the payload concerns, or None if unknown."""
    primary = get_action_with_primary_id(payload)
    name = primary.get("name")
    # For branch actions (and nameless ones) look up the owning entity instead.
    if name is None or primary['entity_type'] == 'branch':
        for candidate in payload["actions"]:
            if candidate["entity_type"] == entity:
                name = candidate["name"]
    if name is None:
        for reference in payload["references"]:
            if reference["entity_type"] == entity:
                name = reference["name"]
    return name
def get_name_template(entity: str) -> str:
    """Return the linked-name template for stories, the bold one for epics."""
    return STORY_NAME_TEMPLATE if entity == "story" else EPIC_NAME_TEMPLATE
# Maps the "<entity>_<action>[_<change>]" event names produced by get_event()
# to the function that renders the message body for that event.
EVENT_BODY_FUNCTION_MAPPER = {
    "story_update_archived": partial(get_update_archived_body, entity='story'),
    "epic_update_archived": partial(get_update_archived_body, entity='epic'),
    "story_create": get_story_create_body,
    "pull-request_create": partial(get_story_create_github_entity_body, entity='pull-request'),
    "branch_create": partial(get_story_create_github_entity_body, entity='branch'),
    "story_delete": get_delete_body,
    "epic_delete": get_delete_body,
    "story-task_create": partial(get_story_task_body, action="added to"),
    "story-task_delete": partial(get_story_task_body, action="removed from"),
    "story-task_update_complete": get_story_task_completed_body,
    "story_update_epic": get_story_update_epic_body,
    "story_update_estimate": get_story_update_estimate_body,
    "story_update_attachment": get_story_update_attachment_body,
    "story_update_label": get_story_label_body,
    "story_update_owner": get_story_update_owner_body,
    "story_update_project": get_story_update_project_body,
    "story_update_type": get_story_update_type_body,
    "epic_create": get_epic_create_body,
    "epic-comment_create": partial(get_comment_added_body, entity='epic'),
    "story-comment_create": partial(get_comment_added_body, entity='story'),
    "epic_update_description": partial(get_update_description_body, entity='epic'),
    "story_update_description": partial(get_update_description_body, entity='story'),
    "epic_update_state": get_epic_update_state_body,
    "story_update_state": get_story_update_state_body,
    "epic_update_name": partial(get_update_name_body, entity='epic'),
    "story_update_name": partial(get_update_name_body, entity='story'),
}

# Maps the payload's entity type to the topic-deriving function; PRs, branches,
# comments and tasks are all filed under the topic of their story or epic.
EVENT_TOPIC_FUNCTION_MAPPER = {
    "story": partial(get_entity_name, entity='story'),
    "pull-request": partial(get_entity_name, entity='story'),
    "branch": partial(get_entity_name, entity='story'),
    "story-comment": partial(get_entity_name, entity='story'),
    "story-task": partial(get_entity_name, entity='story'),
    "epic": partial(get_entity_name, entity='epic'),
    "epic-comment": partial(get_entity_name, entity='epic'),
}

# Event names that are deliberately dropped (get_event() returns None for them).
IGNORED_EVENTS = {
    'story-comment_update',
}
@webhook_view('ClubHouse')
@has_request_variables
def api_clubhouse_webhook(
    request: HttpRequest, user_profile: UserProfile,
    payload: Optional[Dict[str, Any]]=REQ(argument_type='body'),
) -> HttpResponse:
    """Webhook entry point: derive the Clubhouse event, render it, and send it."""
    # Clubhouse has a tendency to send empty POST requests to
    # third-party endpoints. It is unclear as to which event type
    # such requests correspond to. So, it is best to ignore such
    # requests for now.
    if payload is None:
        return json_success()

    event = get_event(payload)
    # get_event() returns None for deliberately ignored events.
    if event is None:
        return json_success()

    body_func: Any = EVENT_BODY_FUNCTION_MAPPER.get(event)
    topic_func = get_topic_function_based_on_type(payload)
    if body_func is None or topic_func is None:
        raise UnsupportedWebhookEventType(event)

    topic = topic_func(payload)
    body = body_func(payload)

    # Some body builders return None (e.g. label/attachment removals); send
    # nothing in that case.
    if topic and body:
        check_send_webhook_message(request, user_profile, topic, body)

    return json_success()
| {
"repo_name": "showell/zulip",
"path": "zerver/webhooks/clubhouse/view.py",
"copies": "3",
"size": "20302",
"license": "apache-2.0",
"hash": 1831216648865546000,
"line_mean": 36.3198529412,
"line_max": 105,
"alpha_frac": 0.6035858536,
"autogenerated": false,
"ratio": 3.55489406408685,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.565847991768685,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, NamedTuple, Optional
from . import doc
class Response(
    NamedTuple(
        "Response", [("code", int), ("model", Any), ("description", Optional[str])]
    )
):
    """
    HTTP status code - returned object model pair with optional description.

    If `model` is a class that has a docstring, its docstring will be used
    as description if `description` is not set.
    """

    def __new__(cls, code: int, model: Any, description: Optional[str] = None):
        # Pass-through constructor; exists only to make `description` optional.
        return super().__new__(cls, code, model, description)
class API:
"""
Decorator factory class for documenting routes using `sanic_openapi` and optionally
registering them in a `sanic` application or blueprint.
Supported class attribute names match the corresponding `sanic_openapi.doc` decorator's
name and attribute values work exactly as if they were passed to the given decorator
unless explicitly documented otherwise. The supported class attributes (all of which
are optional) are as follows:
- `summary`: Its value should be the short summary of the route. If neither `summary`
nor `description` is specified, then the first paragraph of the API class'
documentation will be used instead. You may also set it to `None` to disable
automatic `summary` and `description` generation.
- `description`: A longer description of the route. If neither `summary` nor
`description` is specified, then the API class' documentation will be used
except its first paragraph that serves as the default summary. You may also
set it to `None` to disable automatic `summary` and `description` generation.
- `exclude`: Whether to exclude the route (and related models) from the API documentation.
- `consumes`: The model of the data the API route consumes. If `consumes` is a class
that has a docstring, then the docstring will be used as the description of th data.
- `consumes_content_type`: The content type of the data the API route consumes.
- `consumes_location`: The location where the data is expected (`query` or `body`).
- `consumes_required`: Whether the consumed data is required.
- `produces`: The model of the data the API route produces.
- `produces_content_type`: The content type of the data the API route produces.
- `produces_description`: The description of the data the API route produces. If
not specified but `produces` is a class that has a docstring, then the docstring
will be used as the default description.
- `response`: A `Response` instance or a sequence of `Response` instances that describe
the route's response for different HTTP status codes. The value of the `produces`
attribute corresponds to HTTP 200, you don't have to specify that here.
- `tag`: The tags/groups the API route belongs to.
Example:
```Python
class JSONConsumerAPI(API):
consumes_content_type = "application/json"
consumes_location = "body"
consumes_required = True
class JSONProducerAPI(API):
produces_content_type = "application/json"
class MyAPI(JSONConsumerAPI, JSONProducerAPI):
\"\"\"
Route *summary* in first paragraph.
First paragraph of route *description*.
Second paragraph of route *description*.
\"\"\"
class consumes:
foo = str
bar = str
class produces:
result = bool
# Document and register the route at once.
@MyAPI.post(app, "/my_route")
def my_route(request: Request):
return {"result": True}
# Or simply document a route.
@app.post("/my_route")
@MyAPI
def my_route(request: Request):
return {"result": True}
```
Additionally, you may specify a `decorators` class attribute, whose value must be a
sequence of decorators to apply on the decorated routes. These decorators will be
applied *before* the `sanic_openapi` decorators - and the `sanic` routing decorators
if the routing decorators provided by this class are used - in *reverse* order. It
means that the following cases are equivalent:
```Python
class Data(API):
class consumes:
stg = str
class DecoratedData(Data):
decorators = (first, second)
@DecoratedData.get(app, "/data")
def data_all_in_one(request: Request):
return "data"
@app.get("/data")
@DecoratedData
def data_doc_and_decorators_in_one(request: Request):
return "data"
@Data.get(app, "/data")
@first
@second
def data_routing_and_doc_in_one(request: Request):
return "data"
@app.get("/data")
@Data
@first
@second
def data(request: Request):
return "data"
```
It is possible to override all the described class attributes on a per decorator basis
simply by passing the desired custom value to the decorator as a keyword argument:
```Python
class JSONConsumerAPI(API):
consumes_content_type = "application/json"
consumes_location = "body"
consumes_required = True
class consumes:
foo = str
bar = str
# The consumed data is required.
@JSONConsumerAPI.post(app, "/data")
def data(request: Request):
return "data"
# The consumed data is optional.
@app.post("/data_optional")
@JSONConsumerAPI(consumes_required=False)
def data_consumed_not_required(request: Request):
return "data"
```
"""
__MISSING = "__MISSING"
    def __new__(cls, func=None, **kwargs):
        """
        Decorator that automatically documents the decorated route and returns the decorated method.

        Arguments:
            func: The decorated request handler function.
        """
        # Called as `@API(...)` with keyword overrides: return a decorator that
        # re-enters __new__ with `func` bound and the overrides captured.
        if func is None:
            return partial(cls, **kwargs)

        def get_attribute(obj, name, default):
            """
            Specialized attribute getter that checks every attribute name in
            `kwargs` first to allow inline overrides of attributes.

            Arguments:
                obj: The object to get the attribute value from.
                name: The name of the attribute to look up.
                default: The default value to return if the `name` attribute doesn't exist.
            """
            return kwargs[name] if name in kwargs else getattr(obj, name, default)

        # The _add_decorators() call must precede everything else.
        func = cls._add_decorators(func, get_attribute)
        func = cls._add_base_data(func, get_attribute)
        func = cls._add_consumes(func, get_attribute)
        func = cls._add_produces(func, get_attribute)
        func = cls._add_responses(func, get_attribute)
        func = cls._add_tags(func, get_attribute)
        return func
    @classmethod
    def _add_base_data(cls, func, get_attribute):
        """
        Adds basic route documentation such as summary and description.

        Arguments:
            func: The decorated request handler function.
            get_attribute: Attribute getter function to use.
        """
        # __MISSING is a sentinel distinguishing "attribute not set" from an
        # explicit None (which disables automatic doc generation).
        summary = get_attribute(cls, "summary", cls.__MISSING)
        description = get_attribute(cls, "description", cls.__MISSING)

        # If there was no explicit summary or description, determine them from
        # the class documentation if that exists: first paragraph becomes the
        # summary, the rest the description.
        if summary == cls.__MISSING and description == cls.__MISSING and cls.__doc__:
            class_doc_parts = cls.__doc__.strip().split("\n\n")
            if len(class_doc_parts) > 0:
                summary = class_doc_parts[0].strip()
            if len(class_doc_parts) > 1:
                # Preserve paragraphs.
                description = "<br><br>".join(
                    part.strip() for part in class_doc_parts[1:]
                )

        return doc.route(
            summary=summary if summary != cls.__MISSING else None,
            description=description if description != cls.__MISSING else None,
            exclude=cls._exclude(get_attribute),
        )(func)
    @classmethod
    def _add_consumes(cls, func, get_attribute):
        """
        Adds the documentation of the consumed data to the route.

        Returns the (possibly) decorated request handler function.

        Arguments:
            func: The decorated request handler function.
            get_attribute: Attribute getter function to use.
        """
        value = get_attribute(cls, "consumes", None)

        # Don't register the consumed model if the route is excluded.
        if value is None or cls._exclude(get_attribute):
            return func

        # If value is a type (class), convert it to a doc.Object to be able to specify
        # its name to avoid model name conflicts and have a more readable doc.
        if isinstance(value, type):
            value = doc.Object(
                value, object_name=cls.__name__ + "Consumes", description=value.__doc__
            )

        # Use the same default values as in doc.consumes().
        return doc.consumes(
            value,
            content_type=get_attribute(cls, "consumes_content_type", None),
            location=get_attribute(cls, "consumes_location", "query"),
            required=get_attribute(cls, "consumes_required", False),
        )(func)
@classmethod
def _add_decorators(cls, func, get_attribute):
"""
Adds the custom route decorators from the `decorators` class attribute to the route.
Arguments:
func: The decorated request handler function.
get_attribute: Attribute getter function to use.
"""
decorators = get_attribute(cls, "decorators", None)
if decorators is not None:
for decorator in reversed(decorators):
func = decorator(func)
return func
    @classmethod
    def _add_produces(cls, func, get_attribute):
        """
        Adds the documentation of the produced data to the route.

        Arguments:
            func: The decorated request handler function.
            get_attribute: Attribute getter function to use.
        """
        value = get_attribute(cls, "produces", None)

        # Don't register the produced model if the route is excluded.
        if value is None or cls._exclude(get_attribute):
            return func

        # If value is a type (class), convert it to a doc.Object to be able to specify
        # its name to avoid model name conflicts and have a more readable doc.
        if isinstance(value, type):
            # The model class' docstring doubles as the default description.
            produces_doc = value.__doc__.strip() if value.__doc__ else None
            produces_description = get_attribute(
                cls, "produces_description", produces_doc
            )
            value = doc.Object(
                value, object_name=cls.__name__ + "Produces", description=produces_doc
            )
        else:
            produces_description = get_attribute(cls, "produces_description", None)

        # Use the same default values as in doc.produces().
        return doc.produces(
            value,
            content_type=get_attribute(cls, "produces_content_type", None),
            description=produces_description,
        )(func)
@classmethod
def _add_response(cls, func, response):
"""
Adds the documentation of the behavior defined by the given `Response`
instance to the route.
Arguments:
func: The decorated request handler function.
response: The `Response` instance that defines the route's behavior.
"""
description = response.description
if description is None and isinstance(response.model, type):
description = (
response.model.__doc__.strip() if response.model.__doc__ else None
)
return doc.response(response.code, response.model, description=description)(
func
)
@classmethod
def _add_responses(cls, func, get_attribute):
"""
Adds the documentation of responses corresponding to specific HTTP status
codes to the route.
Arguments:
func: The decorated request handler function.
get_attribute: Attribute getter function to use.
"""
response = get_attribute(cls, "response", None)
if response is None:
return func
if isinstance(response, Response):
return cls._add_response(func, response)
if isinstance(response, (list, tuple)):
for item in response:
func = cls._add_response(func, item)
return func
@classmethod
def _add_tags(cls, func, get_attribute):
"""
Adds tags to the route.
Arguments:
func: The decorated request handler function.
get_attribute: Attribute getter function to use.
"""
value = get_attribute(cls, "tag", None)
if isinstance(value, str):
func = doc.tag(value)(func)
elif isinstance(value, (list, tuple)):
for item in value:
func = doc.tag(item)(func)
return func
@classmethod
def _exclude(cls, get_attribute):
"""
Returns whether the route should be excluded from the documentation.
Arguments:
get_attribute: Attribute getter function to use.
"""
return get_attribute(cls, "exclude", None)
@classmethod
def delete(cls, app, uri, **kwargs):
    """
    Register the decorated handler for `DELETE` requests at `uri` on the given
    `sanic` application or blueprint, documenting it via `sanic_openapi`.
    Remaining keyword arguments are forwarded to the app's `delete()` method.

    Arguments:
        app: The `sanic` application or blueprint where the route should be registered.
        uri: The URI the route should be accessible at.
    """
    def decorator(handler):
        documented = cls(handler)
        return app.delete(uri, **kwargs)(documented)

    return decorator
@classmethod
def get(cls, app, uri, **kwargs):
    """
    Register the decorated handler for `GET` requests at `uri` on the given
    `sanic` application or blueprint, documenting it via `sanic_openapi`.
    Remaining keyword arguments are forwarded to the app's `get()` method.

    Arguments:
        app: The `sanic` application or blueprint where the route should be registered.
        uri: The URI the route should be accessible at.
    """
    def decorator(handler):
        documented = cls(handler)
        return app.get(uri, **kwargs)(documented)

    return decorator
@classmethod
def head(cls, app, uri, **kwargs):
    """
    Register the decorated handler for `HEAD` requests at `uri` on the given
    `sanic` application or blueprint, documenting it via `sanic_openapi`.
    Remaining keyword arguments are forwarded to the app's `head()` method.

    Arguments:
        app: The `sanic` application or blueprint where the route should be registered.
        uri: The URI the route should be accessible at.
    """
    def decorator(handler):
        documented = cls(handler)
        return app.head(uri, **kwargs)(documented)

    return decorator
@classmethod
def options(cls, app, uri, **kwargs):
    """
    Register the decorated handler for `OPTIONS` requests at `uri` on the given
    `sanic` application or blueprint, documenting it via `sanic_openapi`.
    Remaining keyword arguments are forwarded to the app's `options()` method.

    Arguments:
        app: The `sanic` application or blueprint where the route should be registered.
        uri: The URI the route should be accessible at.
    """
    def decorator(handler):
        documented = cls(handler)
        return app.options(uri, **kwargs)(documented)

    return decorator
@classmethod
def patch(cls, app, uri, **kwargs):
    """
    Register the decorated handler for `PATCH` requests at `uri` on the given
    `sanic` application or blueprint, documenting it via `sanic_openapi`.
    Remaining keyword arguments are forwarded to the app's `patch()` method.

    Arguments:
        app: The `sanic` application or blueprint where the route should be registered.
        uri: The URI the route should be accessible at.
    """
    def decorator(handler):
        documented = cls(handler)
        return app.patch(uri, **kwargs)(documented)

    return decorator
@classmethod
def post(cls, app, uri, **kwargs):
    """
    Register the decorated handler for `POST` requests at `uri` on the given
    `sanic` application or blueprint, documenting it via `sanic_openapi`.
    Remaining keyword arguments are forwarded to the app's `post()` method.

    Arguments:
        app: The `sanic` application or blueprint where the route should be registered.
        uri: The URI the route should be accessible at.
    """
    def decorator(handler):
        documented = cls(handler)
        return app.post(uri, **kwargs)(documented)

    return decorator
@classmethod
def put(cls, app, uri, **kwargs):
    """
    Register the decorated handler for `PUT` requests at `uri` on the given
    `sanic` application or blueprint, documenting it via `sanic_openapi`.
    Remaining keyword arguments are forwarded to the app's `put()` method.

    Arguments:
        app: The `sanic` application or blueprint where the route should be registered.
        uri: The URI the route should be accessible at.
    """
    def decorator(handler):
        documented = cls(handler)
        return app.put(uri, **kwargs)(documented)

    return decorator
@classmethod
def route(cls, app, uri, *, methods, **kwargs):
    """
    Register the decorated handler at `uri` on the given `sanic` application or
    blueprint for the given HTTP `methods`, documenting it via `sanic_openapi`.
    Remaining keyword arguments are forwarded to the app's `route()` method.

    Arguments:
        app: The `sanic` application or blueprint where the route should be registered.
        uri: The URI the route should be accessible at.
    """
    def decorator(handler):
        documented = cls(handler)
        return app.route(uri, methods=methods, **kwargs)(documented)

    return decorator
| {
"repo_name": "channelcat/sanic-openapi",
"path": "sanic_openapi/api.py",
"copies": "1",
"size": "19691",
"license": "mit",
"hash": 1297248265787035000,
"line_mean": 35.9437148218,
"line_max": 99,
"alpha_frac": 0.6228733939,
"autogenerated": false,
"ratio": 4.6650082918739635,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5787881685773963,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, Optional
from django.db.models import Model
from graphene_django.filter import DjangoFilterConnectionField
from graphql import ResolveInfo
from graphene_django import __version__
from graphene_permissions.permissions import AllowAny
from packaging import version
class AuthNode:
    """
    Permission mixin for queries (nodes).

    Gates node resolution on every permission class attached via the
    `permission_classes` tuple; all must grant access.
    """

    permission_classes = (AllowAny,)

    @classmethod
    def get_node(cls, info: ResolveInfo, id: str) -> Optional[Model]:
        # Every permission class must allow the node; short-circuit on denial.
        for perm in cls.permission_classes:
            if not perm.has_node_permission(info, id):
                return None
        try:
            return cls._meta.model.objects.get(pk=id)  # type: ignore
        except cls._meta.model.DoesNotExist:  # type: ignore
            return None
class AuthMutation:
    """
    Permission mixin for ClientIdMutation.
    """

    permission_classes = (AllowAny,)

    @classmethod
    def has_permission(cls, root: Any, info: ResolveInfo, input: dict) -> bool:
        # True only when every configured permission class grants the mutation.
        checks = (
            permission.has_mutation_permission(root, info, input)
            for permission in cls.permission_classes
        )
        return all(checks)
class AuthFilterPre270(DjangoFilterConnectionField):
    """
    Custom ConnectionField for permission system.

    Targets graphene-django < 2.7, whose ``connection_resolver`` still receives
    ``filterset_class`` / ``filtering_args``.
    """

    permission_classes = (AllowAny,)

    @classmethod
    def has_permission(cls, info: ResolveInfo) -> bool:
        return all(
            (perm.has_filter_permission(info) for perm in cls.permission_classes)
        )

    @classmethod
    def connection_resolver(
        cls,
        resolver,
        connection,
        default_manager,
        max_limit,
        enforce_first_or_last,
        filterset_class,
        filtering_args,
        root,
        info,
        **args
    ):
        # Permission denied: resolve against an empty queryset instead.
        if not cls.has_permission(info):
            return super(DjangoFilterConnectionField, cls).connection_resolver(
                resolver,
                connection,
                default_manager.none(),
                max_limit,
                enforce_first_or_last,
                root,
                info,
                **args,
            )
        # Apply the filterset manually, then defer to the plain ConnectionField.
        filter_kwargs = {k: v for k, v in args.items() if k in filtering_args}
        qs = filterset_class(
            data=filter_kwargs, queryset=default_manager.get_queryset()
        ).qs
        # Bug fix: ConnectionField.connection_resolver (the grandparent reached
        # by skipping DjangoFilterConnectionField) expects
        # (resolver, connection, default_manager, max_limit,
        #  enforce_first_or_last, root, info, **args). The previous call passed
        # filterset_class/filtering_args in the root/info slots and dropped
        # root/info entirely.
        return super(DjangoFilterConnectionField, cls).connection_resolver(
            resolver,
            connection,
            qs,
            max_limit,
            enforce_first_or_last,
            root,
            info,
            **args,
        )
class AuthFilterPost270(DjangoFilterConnectionField):
    """
    Custom ConnectionField for permission system (graphene-django >= 2.7 API).
    """

    permission_classes = (AllowAny,)

    @classmethod
    def has_permission(cls, info: ResolveInfo) -> bool:
        # All configured permission classes must grant filter access.
        for perm in cls.permission_classes:
            if not perm.has_filter_permission(info):
                return False
        return True

    @classmethod
    def connection_resolver(
        cls,
        resolver,
        connection,
        default_manager,
        queryset_resolver,
        max_limit,
        enforce_first_or_last,
        root,
        info,
        **args
    ):
        if not cls.has_permission(info):
            # Denied: resolve an empty connection instead of real data.
            resolve_empty = partial(cls.resolve_connection, connection, args)
            return resolve_empty([])
        return super(DjangoFilterConnectionField, cls).connection_resolver(
            resolver,
            connection,
            default_manager,
            queryset_resolver,
            max_limit,
            enforce_first_or_last,
            root,
            info,
            **args,
        )
# Select the AuthFilter implementation matching the installed graphene-django:
# the connection_resolver signature changed in graphene-django 2.7.0.
if version.parse(__version__) < version.parse("2.7.0"):
    AuthFilter = AuthFilterPre270
else:
    AuthFilter = AuthFilterPost270
| {
"repo_name": "redzej/graphene-permissions",
"path": "graphene_permissions/mixins.py",
"copies": "1",
"size": "3901",
"license": "mit",
"hash": 1662602884201816000,
"line_mean": 25.7191780822,
"line_max": 95,
"alpha_frac": 0.5952319918,
"autogenerated": false,
"ratio": 4.453196347031963,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5548428338831963,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Any, Optional, Sequence, Tuple
import numpy as np
from gym3.types import Discrete, Real, TensorType, ValType, multimap
def concat(xs: Sequence[Any], axis: int = 0) -> Any:
    """
    Concatenate the (leaf) arrays from xs

    :param xs: list of trees with the same shape, where the leaf values are numpy arrays
    :param axis: axis to concatenate along
    """
    def join_leaves(*leaves):
        return np.concatenate(leaves, axis=axis)

    return multimap(join_leaves, *xs)
def stack(xs: Sequence[Any], axis: int = 0) -> Any:
    """
    Stack the (leaf) arrays from xs along a new axis

    :param xs: list of trees with the same shape, where the leaf values are numpy arrays
    :param axis: axis to stack along
    """
    def stack_leaves(*leaves):
        return np.stack(leaves, axis=axis)

    return multimap(stack_leaves, *xs)
def split(x: Any, sections: Sequence[int]) -> Sequence[Any]:
    """
    Split the (leaf) arrays from the tree x

    Examples:
        split([1,2,3,4], [1,2,3,4]) => [[1], [2], [3], [4]]
        split([1,2,3,4], [1,3,4]) => [[1], [2, 3], [4]]

    :param x: a tree where the leaf values are numpy arrays
    :param sections: list of indices to split at (not sizes of each split)
    :returns: list of trees with length `len(sections)` with the same shape as x
            where each leaf is the corresponding section of the leaf in x
    """
    pieces = []
    previous = 0
    for boundary in sections:
        # Bind the bounds eagerly; multimap applies the lambda immediately.
        pieces.append(
            multimap(lambda leaf, lo=previous, hi=boundary: leaf[lo:hi], x)
        )
        previous = boundary
    return pieces
def dtype(tt: TensorType) -> np.dtype:
    """
    Map a TensorType to the numpy dtype used to store its elements.

    :param tt: TensorType to get dtype for
    :returns: numpy.dtype to use for tt
    """
    assert isinstance(tt, TensorType)
    name = tt.eltype.dtype_name
    return np.dtype(name)
def zeros(vt: ValType, bshape: Tuple) -> Any:
    """
    :param vt: ValType to create zeros for
    :param bshape: batch shape to prepend to the shape of each numpy array created by this function
    :returns: tree of numpy arrays matching vt
    """
    def make_zero_leaf(subdt):
        return np.zeros(bshape + subdt.shape, dtype=dtype(subdt))

    return multimap(make_zero_leaf, vt)
def _sample_tensor(
    tt: TensorType, bshape: Tuple, rng: Optional[np.random.RandomState] = None
) -> np.ndarray:
    """
    Draw a random sample for a single TensorType.

    :param tt: TensorType to create sample for
    :param bshape: batch shape to prepend to the shape of each numpy array created by this function
    :param rng: np.random.RandomState to use for sampling
    :returns: numpy array matching tt
    """
    generator = np.random if rng is None else rng
    assert isinstance(tt, TensorType)
    full_shape = bshape + tt.shape
    eltype = tt.eltype
    if isinstance(eltype, Discrete):
        # Uniform integers over [0, n).
        return generator.randint(eltype.n, size=full_shape, dtype=dtype(tt))
    if isinstance(eltype, Real):
        # Standard normal samples cast to the tensor's dtype.
        return generator.randn(*full_shape).astype(dtype(tt))
    raise ValueError(f"Expected ScalarType, got {type(eltype)}")
def sample(
    vt: ValType, bshape: Tuple, rng: Optional[np.random.RandomState] = None
) -> Any:
    """
    :param vt: ValType to create sample for
    :param bshape: batch shape to prepend to the shape of each numpy array created by this function
    :param rng: np.random.RandomState to use for sampling
    :returns: tree of numpy arrays matching vt
    """
    leaf_sampler = partial(_sample_tensor, bshape=bshape, rng=rng)
    return multimap(leaf_sampler, vt)
| {
"repo_name": "openai/gym3",
"path": "gym3/types_np.py",
"copies": "1",
"size": "3316",
"license": "mit",
"hash": 4958617241294901000,
"line_mean": 29.7037037037,
"line_max": 99,
"alpha_frac": 0.652291918,
"autogenerated": false,
"ratio": 3.6280087527352296,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.97772998564239,
"avg_score": 0.0006001628622657892,
"num_lines": 108
} |
from functools import partial
from typing import Callable, Iterable, Tuple
import pywren_ibm_cloud as pywren
from pywren_ibm_cloud.executor import FunctionExecutor
from rechunker.executors.util import chunk_keys, split_into_direct_copies
from rechunker.types import CopySpec, CopySpecExecutor, ReadableArray, WriteableArray
# PywrenExecutor represents delayed execution tasks as functions that require
# a FunctionExecutor.
# A Task therefore runs for its side effects only and returns nothing.
Task = Callable[[FunctionExecutor], None]
class PywrenExecutor(CopySpecExecutor[Task]):
    """An execution engine based on Pywren.

    Supports zarr arrays as inputs. Outputs must be zarr arrays.

    Any Pywren FunctionExecutor can be passed to the constructor. By default
    a Pywren `local_executor` will be used.

    Execution plans for PywrenExecutor are functions that accept no arguments.
    """

    def __init__(self, pywren_function_executor: FunctionExecutor = None):
        self.pywren_function_executor = pywren_function_executor

    def prepare_plan(self, specs: Iterable[CopySpec]) -> Task:
        per_spec_tasks = []
        for spec in specs:
            # Tasks for a single spec must be executed in series.
            serial_steps = [
                partial(_direct_array_copy, *direct_spec)
                for direct_spec in split_into_direct_copies(spec)
            ]
            per_spec_tasks.append(partial(_execute_in_series, serial_steps))
        # TODO: execute tasks for different specs in parallel
        return partial(_execute_in_series, per_spec_tasks)

    def execute_plan(self, plan: Task, **kwargs):
        executor = self.pywren_function_executor
        if executor is not None:
            plan(executor)
            return
        # No executor configured: spin up a local one and shut it down after use.
        with pywren_local_function_executor() as local_executor:
            plan(local_executor)
def pywren_local_function_executor():
    """Build a local Pywren executor usable as a context manager."""
    # Minimal config needed to avoid Pywren error if ~/.pywren_config is missing
    minimal_config = {"pywren": {"storage_bucket": "unused"}}
    return pywren.local_executor(config=minimal_config)
def _direct_array_copy(
    source: ReadableArray,
    target: WriteableArray,
    chunks: Tuple[int, ...],
    pywren_function_executor: FunctionExecutor,
) -> None:
    """Direct copy between arrays using Pywren for parallelism"""

    def copy_chunk(item):
        src, dst, key = item
        dst[key] = src[key]

    # One work item per chunk key; each copies a single chunk.
    iterdata = [(source, target, key) for key in chunk_keys(source.shape, chunks)]
    futures = pywren_function_executor.map(copy_chunk, iterdata)
    pywren_function_executor.get_result(futures)
def _execute_in_series(
    tasks: Iterable[Task], pywren_function_executor: FunctionExecutor
) -> None:
    """Run each task in order, handing every one the same executor."""
    for pending_task in tasks:
        pending_task(pywren_function_executor)
| {
"repo_name": "pangeo-data/rechunker",
"path": "rechunker/executors/pywren.py",
"copies": "1",
"size": "2812",
"license": "mit",
"hash": 1727004838163905500,
"line_mean": 35.5194805195,
"line_max": 95,
"alpha_frac": 0.6973684211,
"autogenerated": false,
"ratio": 3.883977900552486,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005955515882707549,
"num_lines": 77
} |
from functools import partial
from typing import Callable, List, Tuple
from graphql.utilities import build_schema
from graphql.validation import NoDeprecatedCustomRule
from .harness import assert_validation_errors
def build_assertions(
    sdl_str: str,
) -> Tuple[Callable[[str], None], Callable[[str, List], None]]:
    """Build (assert_valid, assert_errors) helpers bound to the given SDL schema."""
    schema = build_schema(sdl_str)
    assert_errors = partial(
        assert_validation_errors, NoDeprecatedCustomRule, schema=schema
    )
    # Valid queries are simply those that produce an empty error list.
    assert_valid = partial(assert_errors, errors=[])
    return assert_valid, assert_errors  # type: ignore
def describe_validate_no_deprecated():
    """Validate: no deprecated fields, arguments, input fields or enum values.

    NOTE(review): the source dump lost all indentation; query-string layout
    below is reconstructed to be consistent with the asserted error
    `locations` columns -- verify against the repository copy.
    """

    def describe_no_deprecated_fields():
        _assert_valid, _assert_errors = build_assertions(
            """
            type Query {
              normalField: String
              deprecatedField: String @deprecated(reason: "Some field reason.")
            }
            """
        )

        def ignores_fields_and_enum_values_that_are_not_deprecated():
            _assert_valid(
                """
                {
                  normalField
                }
                """
            )

        def ignores_unknown_fields():
            _assert_valid(
                """
                {
                  unknownField
                }

                fragment UnknownFragment on UnknownType {
                  deprecatedField
                }
                """
            )

        def reports_error_when_a_deprecated_field_is_selected():
            message = (
                "The field Query.deprecatedField is deprecated. Some field reason."
            )
            _assert_errors(
                """
                {
                  deprecatedField
                }

                fragment QueryFragment on Query {
                  deprecatedField
                }
                """,
                [
                    {
                        "message": message,
                        "locations": [(3, 19)],
                    },
                    {
                        "message": message,
                        "locations": [(7, 19)],
                    },
                ],
            )

    def describe_no_deprecated_arguments_on_fields():
        _assert_valid, _assert_errors = build_assertions(
            """
            type Query {
              someField(
                normalArg: String,
                deprecatedArg: String @deprecated(reason: "Some arg reason."),
              ): String
            }
            """
        )

        def ignores_arguments_that_are_not_deprecated():
            _assert_valid(
                """
                {
                  normalField(normalArg: "")
                }
                """
            )

        def ignores_unknown_arguments():
            _assert_valid(
                """
                {
                  someField(unknownArg: "")
                  unknownField(deprecatedArg: "")
                }
                """
            )

        def reports_error_when_a_deprecated_argument_is_used():
            _assert_errors(
                """
                {
                    someField(deprecatedArg: "")
                }
                """,
                [
                    {
                        "message": "Field 'Query.someField' argument 'deprecatedArg'"
                        " is deprecated. Some arg reason.",
                        "locations": [(3, 31)],
                    }
                ],
            )

    def describe_no_deprecated_arguments_on_directives():
        _assert_valid, _assert_errors = build_assertions(
            """
            type Query {
              someField: String
            }

            directive @someDirective(
              normalArg: String,
              deprecatedArg: String @deprecated(reason: "Some arg reason."),
            ) on FIELD
            """
        )

        def ignores_arguments_that_are_not_deprecated():
            _assert_valid(
                """
                {
                  someField @someDirective(normalArg: "")
                }
                """
            )

        def ignores_unknown_arguments():
            _assert_valid(
                """
                {
                  someField @someDirective(unknownArg: "")
                  someField @unknownDirective(deprecatedArg: "")
                }
                """
            )

        def reports_error_when_a_deprecated_argument_is_used():
            _assert_errors(
                """
                {
                  someField @someDirective(deprecatedArg: "")
                }
                """,
                [
                    {
                        "message": "Directive '@someDirective' argument 'deprecatedArg'"
                        " is deprecated. Some arg reason.",
                        "locations": [(3, 44)],
                    }
                ],
            )

    def describe_no_deprecated_input_fields():
        _assert_valid, _assert_errors = build_assertions(
            """
            input InputType {
              normalField: String
              deprecatedField: String @deprecated(reason: "Some input field reason.")
            }

            type Query {
              someField(someArg: InputType): String
            }

            directive @someDirective(someArg: InputType) on FIELD
            """
        )

        def ignores_input_fields_that_are_not_deprecated():
            _assert_valid(
                """
                {
                  someField(
                    someArg: { normalField: "" }
                  ) @someDirective(someArg: { normalField: "" })
                }
                """
            )

        def ignores_unknown_input_fields():
            _assert_valid(
                """
                {
                  someField(
                    someArg: { unknownField: "" }
                  )
                  someField(
                    unknownArg: { unknownField: "" }
                  )
                  unknownField(
                    unknownArg: { unknownField: "" }
                  )
                }
                """
            )

        def reports_error_when_a_deprecated_input_field_is_used():
            message = (
                "The input field InputType.deprecatedField is deprecated."
                " Some input field reason."
            )
            _assert_errors(
                """
                {
                  someField(
                    someArg: { deprecatedField: "" }
                  ) @someDirective(someArg: { deprecatedField: "" })
                }
                """,
                [
                    {"message": message, "locations": [(4, 32)]},
                    {"message": message, "locations": [(5, 47)]},
                ],
            )

    def describe_no_deprecated_enum_values():
        _assert_valid, _assert_errors = build_assertions(
            """
            enum EnumType {
              NORMAL_VALUE
              DEPRECATED_VALUE @deprecated(reason: "Some enum reason.")
            }

            type Query {
              someField(enumArg: EnumType): String
            }
            """
        )

        def ignores_enum_values_that_are_not_deprecated():
            _assert_valid(
                """
                {
                  normalField(enumArg: NORMAL_VALUE)
                }
                """
            )

        def ignores_unknown_enum_values():
            _assert_valid(
                """
                query (
                  $unknownValue: EnumType = UNKNOWN_VALUE
                  $unknownType: UnknownType = UNKNOWN_VALUE
                ) {
                  someField(enumArg: UNKNOWN_VALUE)
                  someField(unknownArg: UNKNOWN_VALUE)
                  unknownField(unknownArg: UNKNOWN_VALUE)
                }

                fragment SomeFragment on Query {
                  someField(enumArg: UNKNOWN_VALUE)
                }
                """
            )

        def reports_error_when_a_deprecated_enum_value_is_used():
            message = (
                "The enum value 'EnumType.DEPRECATED_VALUE' is deprecated."
                " Some enum reason."
            )
            _assert_errors(
                """
                query (
                  $variable: EnumType = DEPRECATED_VALUE
                ) {
                  someField(enumArg: DEPRECATED_VALUE)
                }
                """,
                [
                    {
                        "message": message,
                        "locations": [(3, 41)],
                    },
                    {
                        "message": message,
                        "locations": [(5, 38)],
                    },
                ],
            )
| {
"repo_name": "graphql-python/graphql-core",
"path": "tests/validation/test_no_deprecated.py",
"copies": "1",
"size": "8895",
"license": "mit",
"hash": 4674486372149879000,
"line_mean": 27.786407767,
"line_max": 88,
"alpha_frac": 0.3975267004,
"autogenerated": false,
"ratio": 5.545511221945137,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6443037922345138,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Callable, List, Tuple
import pytest # type: ignore
import numpy as np
# Read-only zero-d array used as the receiver for the method-based checks below.
AR = np.array(0)
AR.setflags(write=False)

# Sets of `order=` values accepted by each group of callables.
CF = frozenset({None, "C", "F"})
ACF = CF | {"A"}
KACF = ACF | {"K"}
# Pairs of (accepted `order=` values, callable taking an `order=` keyword).
# Each callable is exercised below with every member of its frozenset
# (should succeed) and with every remaining KACF member (should raise).
order_list: List[Tuple[frozenset, Callable]] = [
    (KACF, partial(np.ndarray, 1)),
    (KACF, AR.tobytes),
    (KACF, partial(AR.astype, int)),
    (KACF, AR.copy),
    (ACF, partial(AR.reshape, 1)),
    (KACF, AR.flatten),
    (KACF, AR.ravel),
    (KACF, partial(np.array, 1)),
    (CF, partial(np.zeros, 1)),
    (CF, partial(np.ones, 1)),
    (CF, partial(np.empty, 1)),
    (CF, partial(np.full, 1, 1)),
    (KACF, partial(np.zeros_like, AR)),
    (KACF, partial(np.ones_like, AR)),
    (KACF, partial(np.empty_like, AR)),
    (KACF, partial(np.full_like, AR, 1)),
    (KACF, partial(np.add, 1, 1)),  # i.e. np.ufunc.__call__
    (ACF, partial(np.reshape, AR, 1)),
    (KACF, partial(np.ravel, AR)),
    (KACF, partial(np.asarray, 1)),
    (KACF, partial(np.asanyarray, 1)),
]
# Runtime smoke test: each callable accepts every order in its declared set,
# and raises ValueError for every other KACF order value.
for order_set, func in order_list:
    for order in order_set:
        func(order=order)

    invalid_orders = KACF - order_set
    for order in invalid_orders:
        with pytest.raises(ValueError):
            func(order=order)
| {
"repo_name": "madphysicist/numpy",
"path": "numpy/typing/tests/data/pass/literal.py",
"copies": "10",
"size": "1299",
"license": "bsd-3-clause",
"hash": 723329053126815700,
"line_mean": 27.8666666667,
"line_max": 60,
"alpha_frac": 0.5950731332,
"autogenerated": false,
"ratio": 2.711899791231733,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 45
} |
from functools import partial
from typing import Dict
from csv import DictReader
from vnpy.event import Event, EventEngine
from vnpy.trader.engine import MainEngine
from vnpy.trader.ui import QtCore, QtWidgets
from ..engine import (APP_NAME, EVENT_RADAR_LOG, EVENT_RADAR_RULE,
EVENT_RADAR_UPDATE, RadarEngine)
class RadarManager(QtWidgets.QWidget):
    """Main widget of the market radar app: rule editor, log view and monitor."""

    # Bridges engine-thread log events onto the Qt GUI thread.
    signal_log = QtCore.pyqtSignal(Event)

    def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
        """Store engine references, build the UI and start the radar engine."""
        super().__init__()

        self.main_engine: MainEngine = main_engine
        self.event_engine: EventEngine = event_engine
        self.radar_engine: RadarEngine = main_engine.get_engine(APP_NAME)

        self.init_ui()
        self.register_event()
        self.radar_engine.init()

    def init_ui(self) -> None:
        """Create widgets and lay out the rule form, log view and monitor."""
        self.setWindowTitle("市场雷达")

        self.radar_monitor = RadarMonitor(self.radar_engine)

        self.log_monitor = QtWidgets.QTextEdit()
        self.log_monitor.setReadOnly(True)
        self.log_monitor.setMaximumHeight(300)

        # Rule definition form: name, formula and up to five symbol slots A-E.
        self.name_line = QtWidgets.QLineEdit()
        self.formula_line = QtWidgets.QLineEdit()
        self.a_line = QtWidgets.QLineEdit()
        self.b_line = QtWidgets.QLineEdit()
        self.c_line = QtWidgets.QLineEdit()
        self.d_line = QtWidgets.QLineEdit()
        self.e_line = QtWidgets.QLineEdit()

        self.ndigits_spin = QtWidgets.QSpinBox()
        self.ndigits_spin.setMinimum(0)
        self.ndigits_spin.setValue(2)

        add_button = QtWidgets.QPushButton("添加")
        add_button.clicked.connect(self.add_rule)

        edit_button = QtWidgets.QPushButton("修改")
        edit_button.clicked.connect(self.edit_rule)

        load_button = QtWidgets.QPushButton("导入CSV")
        load_button.clicked.connect(self.load_csv)

        form = QtWidgets.QFormLayout()
        form.addRow("名称", self.name_line)
        form.addRow("公式", self.formula_line)
        form.addRow("A", self.a_line)
        form.addRow("B", self.b_line)
        form.addRow("C", self.c_line)
        form.addRow("D", self.d_line)
        form.addRow("E", self.e_line)
        form.addRow("小数", self.ndigits_spin)
        form.addRow(add_button)
        form.addRow(edit_button)

        vbox = QtWidgets.QVBoxLayout()
        vbox.addWidget(self.log_monitor)
        vbox.addWidget(load_button)

        hbox = QtWidgets.QHBoxLayout()
        hbox.addLayout(form)
        hbox.addStretch()
        hbox.addLayout(vbox)

        vbox2 = QtWidgets.QVBoxLayout()
        vbox2.addWidget(self.radar_monitor)
        vbox2.addLayout(hbox)
        self.setLayout(vbox2)

    def register_event(self) -> None:
        """Route radar log events through the Qt signal onto the GUI thread."""
        self.signal_log.connect(self.process_log_event)
        self.event_engine.register(EVENT_RADAR_LOG, self.signal_log.emit)

    def process_log_event(self, event: Event) -> None:
        """Append a timestamped log record to the log monitor."""
        log = event.data
        time_str = log.time.strftime("%H:%M:%S")
        msg = f"{time_str}\t{log.msg}"
        self.log_monitor.append(msg)

    def add_rule(self) -> None:
        """Create a new rule from the form fields and persist the settings."""
        name, formula, params, ndigits = self.get_rule_setting()
        self.radar_engine.add_rule(name, formula, params, ndigits)
        self.radar_engine.save_setting()

    def edit_rule(self) -> None:
        """Update an existing rule from the form fields and persist the settings."""
        name, formula, params, ndigits = self.get_rule_setting()
        self.radar_engine.edit_rule(name, formula, params, ndigits)
        self.radar_engine.save_setting()

    def get_rule_setting(self) -> tuple:
        """Collect (name, formula, params, ndigits) from the form inputs.

        Only non-empty A-E symbol fields are included in ``params``.
        """
        name = self.name_line.text()
        formula = self.formula_line.text()
        a = self.a_line.text()
        b = self.b_line.text()
        c = self.c_line.text()
        d = self.d_line.text()
        e = self.e_line.text()

        params = {}
        if a:
            params["A"] = a
        if b:
            params["B"] = b
        if c:
            params["C"] = c
        if d:
            params["D"] = d
        if e:
            params["E"] = e

        ndigits = self.ndigits_spin.value()
        return name, formula, params, ndigits

    def show(self):
        """Show the widget maximized."""
        self.showMaximized()

    def load_csv(self):
        """Import rules in bulk from a user-selected CSV file."""
        path, type_ = QtWidgets.QFileDialog.getOpenFileName(
            self,
            u"导入CSV配置",
            "",
            "CSV(*.csv)"
        )
        if not path:
            return

        # Create csv DictReader
        with open(path, "r") as f:
            reader = DictReader(f)

            for row in reader:
                name = row["名称"]
                formula = row["公式"]
                ndigits = int(row["小数"])

                # Only columns A-E with a non-empty vt_symbol become params.
                params = {}
                for param in ["A", "B", "C", "D", "E"]:
                    vt_symbol = row.get(param, "")
                    if vt_symbol:
                        params[param] = vt_symbol

                self.radar_engine.add_rule(name, formula, params, ndigits)

        # Persist once after the whole file has been imported.
        self.radar_engine.save_setting()
class RadarCell(QtWidgets.QTableWidgetItem):
    """Table cell that centers its text, used by RadarMonitor rows."""

    def __init__(self, text: str = ""):
        """Create the cell with optional initial text."""
        super().__init__(text)
        self.setTextAlignment(QtCore.Qt.AlignCenter)
class RadarMonitor(QtWidgets.QTableWidget):
    """Table widget showing one row per radar rule with live computed values."""

    # Bridge engine-thread events onto the Qt GUI thread.
    signal_rule = QtCore.pyqtSignal(Event)
    signal_update = QtCore.pyqtSignal(Event)

    def __init__(self, radar_engine: RadarEngine):
        """Store engine references and set up the table."""
        super().__init__()

        self.radar_engine: RadarEngine = radar_engine
        self.event_engine: EventEngine = radar_engine.event_engine

        # Maps rule name -> {column key -> RadarCell} for in-place updates.
        self.cells: Dict[str, Dict[str, RadarCell]] = {}

        self.init_ui()
        self.register_event()

    def init_ui(self) -> None:
        """Configure table columns and appearance."""
        headers = [
            "名称",
            "数值",
            "时间",
            "公式",
            "A",
            "B",
            "C",
            "D",
            "E",
            "小数",
            " "
        ]
        self.setColumnCount(len(headers))
        self.setHorizontalHeaderLabels(headers)
        self.verticalHeader().setVisible(False)
        self.setEditTriggers(self.NoEditTriggers)
        self.setAlternatingRowColors(True)

        h_header = self.horizontalHeader()
        h_header.setSectionResizeMode(h_header.Stretch)

    def register_event(self) -> None:
        """Route rule and value-update events through Qt signals."""
        self.signal_rule.connect(self.process_rule_event)
        self.signal_update.connect(self.process_update_event)

        self.event_engine.register(EVENT_RADAR_RULE, self.signal_rule.emit)
        self.event_engine.register(EVENT_RADAR_UPDATE, self.signal_update.emit)

    def process_rule_event(self, event: Event) -> None:
        """Insert a new rule row, or refresh an existing row's static columns."""
        rule_data = event.data
        name = rule_data["name"]
        formula = rule_data["formula"]
        params = rule_data["params"]
        ndigits = rule_data["ndigits"]

        if name not in self.cells:
            name_cell = RadarCell(name)
            value_cell = RadarCell()
            time_cell = RadarCell()
            formula_cell = RadarCell(formula)
            a_cell = RadarCell(params.get("A", ""))
            b_cell = RadarCell(params.get("B", ""))
            c_cell = RadarCell(params.get("C", ""))
            d_cell = RadarCell(params.get("D", ""))
            e_cell = RadarCell(params.get("E", ""))
            ndigits_cell = RadarCell(str(ndigits))

            # Bind the rule name into the per-row delete button callback.
            remove_func = partial(self.remove_rule, name)
            remove_button = QtWidgets.QPushButton("删除")
            remove_button.clicked.connect(remove_func)

            # New rules are inserted at the top of the table.
            self.insertRow(0)
            self.setItem(0, 0, name_cell)
            self.setItem(0, 1, value_cell)
            self.setItem(0, 2, time_cell)
            self.setItem(0, 3, formula_cell)
            self.setItem(0, 4, a_cell)
            self.setItem(0, 5, b_cell)
            self.setItem(0, 6, c_cell)
            self.setItem(0, 7, d_cell)
            self.setItem(0, 8, e_cell)
            self.setItem(0, 9, ndigits_cell)
            self.setCellWidget(0, 10, remove_button)

            self.cells[name] = {
                "name": name_cell,
                "value": value_cell,
                "time": time_cell,
                "formula": formula_cell,
                "a": a_cell,
                "b": b_cell,
                "c": c_cell,
                "d": d_cell,
                "e": e_cell,
                "ndigits": ndigits_cell
            }
        else:
            # Existing rule: update editable columns in place.
            row_cells = self.cells[name]
            row_cells["formula"].setText(formula)
            row_cells["a"].setText(params.get("A", ""))
            row_cells["b"].setText(params.get("B", ""))
            row_cells["c"].setText(params.get("C", ""))
            row_cells["d"].setText(params.get("D", ""))
            row_cells["e"].setText(params.get("E", ""))
            row_cells["ndigits"].setText(str(ndigits))

    def process_update_event(self, event: Event) -> None:
        """Refresh the value/time columns when a rule recomputes."""
        radar_data = event.data

        row_cells = self.cells.get(radar_data["name"], None)
        if row_cells:
            row_cells["value"].setText(str(radar_data["value"]))
            row_cells["time"].setText(str(radar_data["time"]))

    def remove_rule(self, name: str) -> None:
        """Remove the rule's table row and unregister it from the engine."""
        # Rows are inserted at the top, so the row index is the position of
        # the name in reversed insertion order.
        rule_names = list(self.cells.keys())
        rule_names.reverse()
        row = rule_names.index(name)

        self.cells.pop(name)
        self.removeRow(row)

        self.radar_engine.remove_rule(name)
        self.radar_engine.save_setting()
| {
"repo_name": "bigdig/vnpy",
"path": "vnpy/app/market_radar/ui/widget.py",
"copies": "1",
"size": "9585",
"license": "mit",
"hash": -3910310709087678500,
"line_mean": 29.0917721519,
"line_max": 79,
"alpha_frac": 0.538647597,
"autogenerated": false,
"ratio": 3.584244251790426,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4622891848790426,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Iterable, Callable, Dict
from pathlib import Path
from .loaders import cmu, dbpedia, imdb
def cmu_reader(
    path: Path = None, *, freq_cutoff: int = 0, limit: int = 0, split=0.9
) -> Dict[str, Callable[["Language"], Iterable["Example"]]]:
    """Build spaCy train/dev corpus readers for the CMU movie-genre data.

    Labels occurring fewer than `freq_cutoff` times are dropped.
    """
    from spacy.training.example import Example

    # Deduce the categories above threshold by inspecting all data.
    full_train, _ = list(cmu(path, limit=0, split=1))
    label_counts = {}
    for _text, cats in full_train:
        for cat in cats:
            label_counts[cat] = label_counts.get(cat, 0) + 1
    # filter labels by frequency
    unique_labels = [
        label for label in sorted(label_counts) if label_counts[label] >= freq_cutoff
    ]

    train_data, dev_data = cmu(
        path, limit=limit, shuffle=False, labels=unique_labels, split=split
    )

    def read_examples(data, nlp):
        for text, cats in data:
            doc = nlp.make_doc(text)
            assert isinstance(cats, list)
            cat_dict = {label: float(label in cats) for label in unique_labels}
            yield Example.from_dict(doc, {"cats": cat_dict})

    return {
        "train": partial(read_examples, train_data),
        "dev": partial(read_examples, dev_data),
    }
def dbpedia_reader(
    path: Path = None, *, train_limit: int = 0, dev_limit: int = 0
) -> Dict[str, Callable[["Language"], Iterable["Example"]]]:
    """Create spaCy train/dev example readers for the DBPedia corpus."""
    from spacy.training.example import Example

    # Scan the full training split once to collect the label inventory.
    all_train_data, _ = dbpedia(path, train_limit=0, dev_limit=1)
    unique_labels = set()
    for _text, gold_label in all_train_data:
        assert isinstance(gold_label, str)
        unique_labels.add(gold_label)

    train_data, dev_data = dbpedia(path, train_limit=train_limit, dev_limit=dev_limit)

    def to_examples(rows, nlp):
        # One-hot "cats" dict over the collected label inventory.
        for text, gold_label in rows:
            doc = nlp.make_doc(text)
            cat_dict = {label: float(gold_label == label) for label in unique_labels}
            yield Example.from_dict(doc, {"cats": cat_dict})

    return {"train": partial(to_examples, train_data),
            "dev": partial(to_examples, dev_data)}
def imdb_reader(
    path: Path = None, *, train_limit: int = 0, dev_limit: int = 0
) -> Dict[str, Callable[["Language"], Iterable["Example"]]]:
    """Create spaCy train/dev example readers for the binary IMDB corpus."""
    from spacy.training.example import Example

    train_data, dev_data = imdb(path, train_limit=train_limit, dev_limit=dev_limit)
    # IMDB is a fixed binary sentiment task.
    unique_labels = ["pos", "neg"]

    def to_examples(rows, nlp):
        for text, gold_label in rows:
            doc = nlp.make_doc(text)
            cat_dict = {label: float(gold_label == label) for label in unique_labels}
            yield Example.from_dict(doc, {"cats": cat_dict})

    return {"train": partial(to_examples, train_data),
            "dev": partial(to_examples, dev_data)}
| {
"repo_name": "explosion/ml-datasets",
"path": "ml_datasets/spacy_readers.py",
"copies": "1",
"size": "2837",
"license": "mit",
"hash": -1347417513032458200,
"line_mean": 34.9113924051,
"line_max": 99,
"alpha_frac": 0.6242509693,
"autogenerated": false,
"ratio": 3.4057623049219687,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.952486478138928,
"avg_score": 0.001029698566538036,
"num_lines": 79
} |
from functools import partial
from typing import Optional, List
from thinc.types import Floats2d
from thinc.api import Model, reduce_mean, Linear, list2ragged, Logistic
from thinc.api import chain, concatenate, clone, Dropout, ParametricAttention
from thinc.api import SparseLinear, Softmax, softmax_activation, Maxout, reduce_sum
from thinc.api import with_cpu, Relu, residual, LayerNorm, resizable
from thinc.layers.chain import init as init_chain
from thinc.layers.resizable import resize_model, resize_linear_weighted
from ...attrs import ORTH
from ...util import registry
from ..extract_ngrams import extract_ngrams
from ..staticvectors import StaticVectors
from ...tokens import Doc
from .tok2vec import get_tok2vec_width
# Fill value for the bias of resized output rows (see the resize_layer
# partials below) — presumably so freshly added label rows start with
# effectively zero activation; NOTE(review): confirm against
# resize_linear_weighted.
NEG_VALUE = -5000
@registry.architectures("spacy.TextCatCNN.v2")
def build_simple_cnn_text_classifier(
tok2vec: Model, exclusive_classes: bool, nO: Optional[int] = None
) -> Model[List[Doc], Floats2d]:
"""
Build a simple CNN text classifier, given a token-to-vector model as inputs.
If exclusive_classes=True, a softmax non-linearity is applied, so that the
outputs sum to 1. If exclusive_classes=False, a logistic non-linearity
is applied instead, so that outputs are in the range [0, 1].
"""
fill_defaults = {"b": 0, "W": 0}
with Model.define_operators({">>": chain}):
cnn = tok2vec >> list2ragged() >> reduce_mean()
nI = tok2vec.maybe_get_dim("nO")
if exclusive_classes:
output_layer = Softmax(nO=nO, nI=nI)
fill_defaults["b"] = NEG_VALUE
resizable_layer = resizable(
output_layer,
resize_layer=partial(
resize_linear_weighted, fill_defaults=fill_defaults
),
)
model = cnn >> resizable_layer
else:
output_layer = Linear(nO=nO, nI=nI)
resizable_layer = resizable(
output_layer,
resize_layer=partial(
resize_linear_weighted, fill_defaults=fill_defaults
),
)
model = cnn >> resizable_layer >> Logistic()
model.set_ref("output_layer", output_layer)
model.attrs["resize_output"] = partial(
resize_and_set_ref,
resizable_layer=resizable_layer,
)
model.set_ref("tok2vec", tok2vec)
model.set_dim("nO", nO)
model.attrs["multi_label"] = not exclusive_classes
return model
def resize_and_set_ref(model, new_nO, resizable_layer):
    """Resize ``resizable_layer`` to ``new_nO`` outputs and refresh the
    parent model's "output_layer" ref and "nO" dim to match."""
    resizable_layer = resize_model(resizable_layer, new_nO)
    model.set_ref("output_layer", resizable_layer.layers[0])
    model.set_dim("nO", new_nO, force=True)
    return model
@registry.architectures("spacy.TextCatBOW.v2")
def build_bow_text_classifier(
exclusive_classes: bool,
ngram_size: int,
no_output_layer: bool,
nO: Optional[int] = None,
) -> Model[List[Doc], Floats2d]:
fill_defaults = {"b": 0, "W": 0}
with Model.define_operators({">>": chain}):
sparse_linear = SparseLinear(nO=nO)
output_layer = None
if not no_output_layer:
fill_defaults["b"] = NEG_VALUE
output_layer = softmax_activation() if exclusive_classes else Logistic()
resizable_layer = resizable(
sparse_linear,
resize_layer=partial(resize_linear_weighted, fill_defaults=fill_defaults),
)
model = extract_ngrams(ngram_size, attr=ORTH) >> resizable_layer
model = with_cpu(model, model.ops)
if output_layer:
model = model >> with_cpu(output_layer, output_layer.ops)
model.set_dim("nO", nO)
model.set_ref("output_layer", sparse_linear)
model.attrs["multi_label"] = not exclusive_classes
model.attrs["resize_output"] = partial(
resize_and_set_ref, resizable_layer=resizable_layer
)
return model
@registry.architectures("spacy.TextCatEnsemble.v2")
def build_text_classifier_v2(
tok2vec: Model[List[Doc], List[Floats2d]],
linear_model: Model[List[Doc], Floats2d],
nO: Optional[int] = None,
) -> Model[List[Doc], Floats2d]:
exclusive_classes = not linear_model.attrs["multi_label"]
with Model.define_operators({">>": chain, "|": concatenate}):
width = tok2vec.maybe_get_dim("nO")
attention_layer = ParametricAttention(width)
maxout_layer = Maxout(nO=width, nI=width)
norm_layer = LayerNorm(nI=width)
cnn_model = (
tok2vec
>> list2ragged()
>> attention_layer
>> reduce_sum()
>> residual(maxout_layer >> norm_layer >> Dropout(0.0))
)
nO_double = nO * 2 if nO else None
if exclusive_classes:
output_layer = Softmax(nO=nO, nI=nO_double)
else:
output_layer = Linear(nO=nO, nI=nO_double) >> Logistic()
model = (linear_model | cnn_model) >> output_layer
model.set_ref("tok2vec", tok2vec)
if model.has_dim("nO") is not False:
model.set_dim("nO", nO)
model.set_ref("output_layer", linear_model.get_ref("output_layer"))
model.set_ref("attention_layer", attention_layer)
model.set_ref("maxout_layer", maxout_layer)
model.set_ref("norm_layer", norm_layer)
model.attrs["multi_label"] = not exclusive_classes
model.init = init_ensemble_textcat
return model
def init_ensemble_textcat(model, X, Y) -> Model:
    """Size the ensemble's inner layers from the tok2vec width, then run
    the normal chain initialization."""
    width = get_tok2vec_width(model)
    # Same dims, same order as setting them one call at a time.
    for ref_name, dim_names in (
        ("attention_layer", ("nO",)),
        ("maxout_layer", ("nO", "nI")),
        ("norm_layer", ("nI", "nO")),
    ):
        layer = model.get_ref(ref_name)
        for dim_name in dim_names:
            layer.set_dim(dim_name, width)
    init_chain(model, X, Y)
    return model
@registry.architectures("spacy.TextCatLowData.v1")
def build_text_classifier_lowdata(
width: int, dropout: Optional[float], nO: Optional[int] = None
) -> Model[List[Doc], Floats2d]:
# Don't document this yet, I'm not sure it's right.
# Note, before v.3, this was the default if setting "low_data" and "pretrained_dims"
with Model.define_operators({">>": chain, "**": clone}):
model = (
StaticVectors(width)
>> list2ragged()
>> ParametricAttention(width)
>> reduce_sum()
>> residual(Relu(width, width)) ** 2
>> Linear(nO, width)
)
if dropout:
model = model >> Dropout(dropout)
model = model >> Logistic()
return model
| {
"repo_name": "explosion/spaCy",
"path": "spacy/ml/models/textcat.py",
"copies": "1",
"size": "6574",
"license": "mit",
"hash": -3680994299074717700,
"line_mean": 37,
"line_max": 88,
"alpha_frac": 0.6285366596,
"autogenerated": false,
"ratio": 3.3764766307139187,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9503315447691687,
"avg_score": 0.0003395685244462631,
"num_lines": 173
} |
from functools import partial
from typing import Optional, List
import base58
from plenum.common.messages.internal_messages import NewViewCheckpointsApplied
from plenum.common.messages.node_messages import PrePrepare, Checkpoint
from plenum.server.consensus.view_change_service import ViewChangeService
from plenum.server.consensus.batch_id import BatchID
from plenum.test.consensus.helper import SimPool
from plenum.test.simulation.sim_random import SimRandom
def some_checkpoint(random: SimRandom, view_no: int, pp_seq_no: int) -> Checkpoint:
    """Build a single-sequence-number checkpoint with a random digest."""
    digest = base58.b58encode(random.string(32)).decode()
    return Checkpoint(instId=0,
                      viewNo=view_no,
                      seqNoStart=pp_seq_no,
                      seqNoEnd=pp_seq_no,
                      digest=digest)
def some_pool(random: SimRandom) -> (SimPool, List):
    """Create a simulated pool with randomized pre-view-change history and
    return it together with the list of batches expected to be committed
    in the next view."""
    pool_size = random.integer(4, 8)
    pool = SimPool(pool_size, random)
    view_no = pool._initial_view_no
    log_size = pool.nodes[0].config.LOG_SIZE

    # Create simulated history
    # TODO: Move into helper?
    faulty = (pool_size - 1) // 3
    seq_no_per_cp = 10
    max_batches = 50

    batches = [BatchID(view_no, view_no, n, random.string(40)) for n in range(1, max_batches)]
    checkpoints = [some_checkpoint(random, view_no, n) for n in range(0, max_batches, seq_no_per_cp)]

    # Preprepares: per-node counts; the (faulty+1)-th smallest bounds what
    # a quorum could have seen.
    pp_count = [random.integer(0, len(batches)) for _ in range(pool_size)]
    max_pp = sorted(pp_count)[faulty]

    # Prepares: each node prepared no more than it preprepared.
    p_count = [random.integer(0, min(max_pp, pp)) for pp in pp_count]
    max_p = sorted(p_count)[faulty]

    # Checkpoints: at least the genesis checkpoint per node.
    cp_count = [1 + random.integer(0, min(max_p, p)) // seq_no_per_cp for p in pp_count]
    max_stable_cp_indx = sorted(cp_count)[faulty]
    stable_cp = [checkpoints[random.integer(0, min(max_stable_cp_indx, cp) - 1)].seqNoEnd for cp in cp_count]

    # Initialize consensus data on every node, capped by its watermark.
    for i, node in enumerate(pool.nodes):
        high_watermark = stable_cp[i] + log_size
        node._data.preprepared = batches[:min(high_watermark, pp_count[i])]
        node._data.prepared = batches[:min(high_watermark, p_count[i])]
        node._data.checkpoints.clear()
        node._data.checkpoints.update(checkpoints[:cp_count[i]])
        node._data.stable_checkpoint = stable_cp[i]

    # Mock Ordering service to update preprepares for new view.
    for node in pool.nodes:
        def update_shared_data(node, msg: NewViewCheckpointsApplied):
            # Re-tag every batch with the new view number.
            x = [
                BatchID(view_no=msg.view_no, pp_view_no=batch_id.pp_view_no, pp_seq_no=batch_id.pp_seq_no,
                        pp_digest=batch_id.pp_digest)
                for batch_id in msg.batches
            ]
            node._orderer._data.preprepared = x

        # partial() binds the current node, avoiding the late-binding
        # closure pitfall inside this loop.
        node._orderer._subscription.subscribe(node._orderer._stasher, NewViewCheckpointsApplied, partial(update_shared_data, node))

    # A batch is expected to commit if a quorum (N - f) of nodes prepared it.
    committed = []
    for i in range(1, max_batches):
        prepare_count = sum(1 for node in pool.nodes if i <= len(node._data.prepared))
        has_prepared_cert = prepare_count >= pool_size - faulty
        if has_prepared_cert:
            batch_id = batches[i - 1]
            committed.append(BatchID(batch_id.view_no + 1, batch_id.pp_view_no, batch_id.pp_seq_no, batch_id.pp_digest))

    return pool, committed
def calc_committed(view_changes, max_pp_seq_no, n, f) -> List[BatchID]:
    """Compute the batches that must be committed given a set of
    view-change messages: a batch counts if at least ``n - f`` messages
    report it in both their prepared and preprepared lists."""

    def _prepared_in(vc, candidate):
        # candidate counts for this view-change only if it appears in both
        # the prepared and the preprepared lists.
        in_prepared = any(candidate == p for p in vc.prepared)
        in_preprepared = any(candidate == pp for pp in vc.preprepared)
        return in_prepared and in_preprepared

    def _certified_batch(pp_seq_no):
        # Find a batch id for this seq no with a quorum of supporters.
        for vc in view_changes:
            for candidate in vc.prepared:
                if candidate[2] != pp_seq_no:
                    continue
                votes = sum(1 for other in view_changes if _prepared_in(other, candidate))
                if votes >= n - f:
                    return candidate
        return None

    committed = []
    for pp_seq_no in range(1, max_pp_seq_no):
        found = _certified_batch(pp_seq_no)
        if found is not None:
            committed.append(BatchID(*found))
    return committed
| {
"repo_name": "evernym/zeno",
"path": "plenum/test/consensus/view_change/helper.py",
"copies": "2",
"size": "4270",
"license": "apache-2.0",
"hash": 6843022131321691000,
"line_mean": 39.2830188679,
"line_max": 131,
"alpha_frac": 0.6351288056,
"autogenerated": false,
"ratio": 3.3516483516483517,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.998210059492945,
"avg_score": 0.0009353124637804087,
"num_lines": 106
} |
from functools import partial
from typing import (Optional,
List,
Dict)
from cetus.queries import (ALL_COLUMNS_ALIAS,
generate_select_query,
generate_group_wise_query)
from cetus.types import (ConnectionType,
RecordType,
ColumnValueType,
FiltersType,
OrderingType)
from .execution import (fetch_row,
fetch_rows)
from .utils import (normalize_pagination,
generate_table_columns_names,
generate_table_columns_aliases)
async def fetch_column_function(
        *,
        column_function_name: str,
        table_name: str,
        column_name: str,
        filters: Optional[FiltersType] = None,
        orderings: Optional[List[OrderingType]] = None,
        is_mysql: bool,
        connection: ConnectionType,
        default: ColumnValueType = None) -> int:
    """Evaluate a single-column aggregate SQL function and return its
    scalar result, or ``default`` when the query yields NULL."""
    # Alias keeps the aggregate addressable in the result row.
    column_alias = f'{column_function_name}_1'
    function_column = f'{column_function_name}({column_name}) AS {column_alias}'
    query = generate_select_query(table_name=table_name,
                                  columns_names=[function_column],
                                  filters=filters,
                                  orderings=orderings)
    value, = await fetch_row(query,
                             is_mysql=is_mysql,
                             connection=connection)
    if value is None:
        return default
    return value
# Convenience aggregates specialized from ``fetch_column_function``.
fetch_max_column_value = partial(fetch_column_function,
                                 column_function_name='MAX')
fetch_min_column_value = partial(fetch_column_function,
                                 column_function_name='MIN')
# COUNT over all columns; defaults to 0 when nothing matches.
fetch_records_count = partial(fetch_column_function,
                              column_function_name='COUNT',
                              column_name=ALL_COLUMNS_ALIAS,
                              default=0)
async def group_wise_fetch_column_function(
        *,
        column_function_name: str,
        table_name: str,
        column_name: str = ALL_COLUMNS_ALIAS,
        target_column_name: str,
        groupings: List[str],
        filters: Optional[FiltersType] = None,
        is_maximum: bool = True,
        is_mysql: bool,
        connection: ConnectionType,
        default: ColumnValueType = 0) -> int:
    """Evaluate an aggregate SQL function in a group-wise (greatest/least
    row per group) query and return its scalar result.

    Returns ``default`` when the query yields no row.
    """
    # Alias keeps the aggregate addressable in the result row.
    column_alias = f'{column_function_name}_1'
    function_column = (f'{column_function_name}({column_name}) '
                       f'AS {column_alias}')
    query = generate_group_wise_query(
        table_name=table_name,
        columns_names=[function_column],
        target_column_name=target_column_name,
        filters=filters,
        groupings=groupings,
        is_maximum=is_maximum,
        is_mysql=is_mysql)
    resp = await fetch_row(query,
                           is_mysql=is_mysql,
                           connection=connection)
    return resp[0] if resp is not None else default
# Group-wise counterparts of the aggregate helpers above.
group_wise_fetch_max_column_value = partial(group_wise_fetch_column_function,
                                            column_function_name='MAX')
group_wise_fetch_records_count = partial(group_wise_fetch_column_function,
                                         column_function_name='COUNT')
async def fetch(
        *, table_name: str,
        columns_names: List[str],
        columns_aliases: Optional[Dict[str, str]] = None,
        filters: Optional[FiltersType] = None,
        orderings: Optional[List[OrderingType]] = None,
        groupings: List[str] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        is_mysql: bool,
        connection: ConnectionType) -> List[RecordType]:
    """Run a plain SELECT against ``table_name`` and return the records."""
    # Normalize pagination for the target SQL dialect.
    limit, offset = normalize_pagination(limit=limit,
                                         offset=offset,
                                         is_mysql=is_mysql)
    # Resolve aliases first, then rewrite the column list with them.
    columns_aliases = generate_table_columns_aliases(
        columns_names=columns_names,
        columns_aliases=columns_aliases)
    columns_names = generate_table_columns_names(
        columns_names=columns_names,
        columns_aliases=columns_aliases)
    query = generate_select_query(table_name=table_name,
                                  columns_names=columns_names,
                                  filters=filters,
                                  orderings=orderings,
                                  groupings=groupings,
                                  limit=limit,
                                  offset=offset)
    return await fetch_rows(query,
                            is_mysql=is_mysql,
                            connection=connection)
async def group_wise_fetch(
        *, table_name: str,
        columns_names: List[str],
        columns_aliases: Optional[Dict[str, str]] = None,
        target_column_name: str,
        groupings: List[str],
        filters: Optional[FiltersType] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        orderings: Optional[List[OrderingType]] = None,
        is_maximum: bool = True,
        is_mysql: bool,
        connection: ConnectionType) -> List[RecordType]:
    """Run a group-wise (greatest/least row per group by
    ``target_column_name``) SELECT and return the matching records."""
    # Normalize pagination for the target SQL dialect.
    limit, offset = normalize_pagination(
        limit=limit,
        offset=offset,
        is_mysql=is_mysql)
    # Resolve aliases first, then rewrite the column list with them.
    columns_aliases = generate_table_columns_aliases(
        columns_names=columns_names,
        columns_aliases=columns_aliases)
    columns_names = generate_table_columns_names(
        columns_names=columns_names,
        columns_aliases=columns_aliases)
    query = generate_group_wise_query(
        table_name=table_name,
        columns_names=columns_names,
        target_column_name=target_column_name,
        filters=filters,
        groupings=groupings,
        limit=limit,
        offset=offset,
        orderings=orderings,
        is_maximum=is_maximum,
        is_mysql=is_mysql)
    resp = await fetch_rows(query,
                            is_mysql=is_mysql,
                            connection=connection)
    return resp
async def fetch_max_connections(*, is_mysql: bool,
                                connection: ConnectionType
                                ) -> int:
    """Return the server's ``max_connections`` setting as an integer."""
    setting_name = 'max_connections'
    # MySQL and Postgres expose settings through different statements.
    fetch_setting = fetch_mysql_setting if is_mysql else fetch_postgres_setting
    resp = await fetch_setting(setting_name=setting_name,
                               connection=connection)
    return int(resp)
async def fetch_mysql_setting(*, setting_name: str,
                              connection: ConnectionType
                              ) -> ColumnValueType:
    """Read a MySQL server variable via ``SHOW VARIABLES``."""
    query = f"SHOW VARIABLES LIKE '{setting_name}'"
    row = await fetch_row(query,
                          is_mysql=True,
                          connection=connection)
    # Row shape is (variable_name, value).
    return row[1]
async def fetch_postgres_setting(*, setting_name: str,
                                 connection: ConnectionType
                                 ) -> str:
    """Read a PostgreSQL runtime setting via ``SHOW``."""
    query = f'SHOW {setting_name}'
    row = await fetch_row(query,
                          is_mysql=False,
                          connection=connection)
    return row[0]
| {
"repo_name": "lycantropos/cetus",
"path": "cetus/data_access/reading.py",
"copies": "1",
"size": "6927",
"license": "mit",
"hash": -4265622967057383000,
"line_mean": 32.6262135922,
"line_max": 77,
"alpha_frac": 0.5633030172,
"autogenerated": false,
"ratio": 4.307835820895522,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5371138838095522,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Tuple
import attr
import numpy as np
import pandas as pd
from scipy.integrate import RK45
from pybaseball.analysis.trajectories.unit_conversions import RPM_TO_RAD_SEC
from pybaseball.analysis.trajectories.utils import spin_components, unit_vector
from pybaseball.datahelpers.postprocessing import check_between_zero_one
from .parameters import BattedBallConstants, DragForceCoefficients, EnvironmentalParameters, LiftForceCoefficients
@attr.s(kw_only=True)
class BattedBallTrajectory:
    """
    Class for a batted ball trajectory. The algorithm is taken from
    Alan Nathan's trajectory calculator,
    http://baseball.physics.illinois.edu/trajectory-calculator-new.html
    """

    # Initial ball position in feet.
    x0: float = attr.ib(default=0, metadata={"units": "ft"})
    y0: float = attr.ib(default=2.0, metadata={"units": "ft"})
    z0: float = attr.ib(default=3.0, metadata={"units": "ft"})
    # Initial spin magnitude and spin-axis angle.
    spin: float = attr.ib(default=2675, metadata={"units": "revs_per_second"})
    spin_phi: float = attr.ib(default=-18.5, metadata={"units": "degrees"})
    # Multipliers in [0, 1] scaling the drag / Magnus forces
    # (1 = full strength, 0 = force disabled).
    drag_strength: float = attr.ib(default=1, validator=check_between_zero_one)
    magnus_strength: float = attr.ib(default=1, validator=check_between_zero_one)
    # Physical-constant and environment bundles.
    batted_ball_constants: BattedBallConstants = attr.ib(default=BattedBallConstants())
    drag_force_coefs: DragForceCoefficients = attr.ib(default=DragForceCoefficients())
    lift_force_coefs: LiftForceCoefficients = attr.ib(default=LiftForceCoefficients())
    env_parameters: EnvironmentalParameters = attr.ib(default=EnvironmentalParameters())

    def __attrs_post_init__(self) -> None:
        # Vector form of the initial position, reused by get_trajectory.
        self.initial_position = np.array((self.x0, self.y0, self.z0))
        # rpm -> rad/s conversion constant.
        self.pi_30 = RPM_TO_RAD_SEC
        # Aerodynamic force prefactor; the literals presumably come from
        # Nathan's spreadsheet (reference mass 5.125, circumference 9.125)
        # — NOTE(review): confirm against the calculator linked above.
        self.c0 = (
            0.07182
            * self.env_parameters.air_density
            * self.env_parameters.unit_conversions.KGM3_TO_LBFT3  # type: ignore
            # TODO: https://github.com/python/mypy/issues/5439 Remove the ^ type: ignore after this is fixed in mypy
            * (5.125 / self.batted_ball_constants.mass)
            * (self.batted_ball_constants.circumference / 9.125) ** 2
        )

    def omega_fun(self, t: float, spin: float) -> float:
        """
        angular speed (rad/s) from spin in rpm.
        :param t: float
        :param spin: float
        :return: float
        """
        return spin * self.pi_30

    def s_fun(self, t: float, vw: float, spin: float) -> float:
        """
        spin. computed as a function of `t`, the time,
        `vw` speed with respect to the wind, and `spin`, the initial spin
        :param t: float
        :param vw: float
        :param spin: float
        :return: float
        """
        omega = self.omega_fun(t, spin)
        romega = self.batted_ball_constants.circumference * omega / (24 * np.pi)
        # Spin decays exponentially with flight time.
        return (romega / vw) * np.exp(-t * vw / (self.lift_force_coefs.tau * 146.7))

    def cl_fun(self, t: float, vw: float, spin: float) -> float:
        """
        coefficient of lift. computed as a function of `t`, the time,
        `vw` speed with respect to the wind, and `spin`, the spin
        :param t: float
        :param vw: float
        :param spin: float
        :return: float
        """
        s = self.s_fun(t, vw, spin)
        return (
            self.lift_force_coefs.cl2
            * s
            / (self.lift_force_coefs.cl0 + self.lift_force_coefs.cl1 * s)
        )

    def cd_fun(self, t: float, vw: float, spin: float) -> float:
        """
        coefficient of drag. computed as a function of `t`, the time,
        `vw`, the speed with respect to the wind, and `spin`, the spin.
        :param t: float
        :param vw: float
        :param spin: float
        :return: float
        """
        return self.drag_force_coefs.cd0 + self.drag_force_coefs.cdspin * (
            spin * 1e-3
        ) * np.exp(-t * vw / (self.lift_force_coefs.tau * 146.7))

    def get_trajectory(
        self,
        initial_speed: float,
        launch_angle: float,
        launch_direction_angle: float,
        initial_spin: float,
        spin_angle: float,
        delta_time: float = 0.01,
    ) -> pd.DataFrame:
        # TODO: make the return value a trajectory object
        """
        computes a batted ball trajectory. speed is in miles-per-hour,
        angles in degrees, and spin in revolutions per minute
        :param initial_speed: float
        :param launch_angle: float
        :param launch_direction_angle: float
        :param initial_spin: float
        :param spin_angle: float
        :param delta_time: float
        :return: pandas data frame
        """
        initial_velocity = (
            initial_speed
            * self.env_parameters.unit_conversions.MPH_TO_FTS  # type: ignore
            # TODO: https://github.com/python/mypy/issues/5439 Remove the ^ type: ignore after this is fixed in mypy
            * unit_vector(np.float64(launch_angle), np.float64(launch_direction_angle))
        )
        # State vector: (x, y, z, vx, vy, vz).
        initial_conditions = np.concatenate(
            (self.initial_position, initial_velocity), axis=0
        )
        rk_solution = RK45(
            partial(
                self.trajectory_fun,
                launch_angle=launch_angle,
                launch_direction_angle=launch_direction_angle,
                spin=initial_spin,
                spin_angle=spin_angle,
            ),
            0,
            initial_conditions,
            t_bound=1000,
            max_step=delta_time,
        )
        # Integrate step by step until the ball reaches the ground (z < 0).
        ans = []
        z = self.initial_position[2]
        while z >= 0:
            rk_solution.step()
            res = rk_solution.y
            z = res[2]
            ans.append([rk_solution.t] + list(res))
        result_df = pd.DataFrame(np.array(ans).reshape(-1, 7))
        result_df.columns = pd.Index(["t", "x", "y", "z", "vx", "vy", "vz"])
        return result_df

    def trajectory_fun(
        self,
        t: float,
        trajectory_vars: Tuple[float, float, float, float, float, float],
        spin: float = 2500,
        spin_angle: float = 0,
        launch_angle: float = 0,
        launch_direction_angle: float = 0,
    ) -> np.ndarray:
        """
        function for computing the trajectory using the 4th-order Runge-Kutta method.
        trajectory vars are the 3 positions and 3 velocity components of the ball.
        returns the derivatives of the input variables, i.e., the 3 velocity components,
        and the 3 acceleration components.
        :param t: float
        :param trajectory_vars: tuple(float)
        :param spin: float
        :param spin_angle: float
        :param launch_angle: float
        :param launch_direction_angle: float
        :return: numpy array
        """
        # trajectory_vars = x, y, z, vx, vy, vz
        _, _, _, vx, vy, vz = trajectory_vars
        v = np.sqrt(vx ** 2 + vy ** 2 + vz ** 2)
        wx, wy, wz = spin_components(spin, spin_angle, launch_angle, launch_direction_angle)
        cd = self.cd_fun(t, v, spin)
        cl = self.cl_fun(t, v, spin)
        # Magnus force: proportional to omega x v; drag opposes velocity.
        magnus_const = self.c0 * cl / self.omega_fun(t, spin) * v
        magnus_const *= self.magnus_strength
        drag_const = self.c0 * cd * v
        drag_const *= self.drag_strength
        fx = -drag_const * vx + magnus_const * (wy * vz - wz * vy)
        fy = -drag_const * vy + magnus_const * (-wx * vz + wz * vx)
        fz = (
            -drag_const * vz
            + magnus_const * (wx * vy - wy * vx)
            - self.env_parameters.g_gravity
        )
        gx = vx
        gy = vy
        gz = vz
        return np.array([gx, gy, gz, fx, fy, fz])
| {
"repo_name": "jldbc/pybaseball",
"path": "pybaseball/analysis/trajectories/batted_balls/calculator.py",
"copies": "1",
"size": "7615",
"license": "mit",
"hash": -3520908764377798700,
"line_mean": 34.5841121495,
"line_max": 116,
"alpha_frac": 0.5848982272,
"autogenerated": false,
"ratio": 3.538568773234201,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46234670004342004,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Tuple
import matplotlib
# More info at
# http://matplotlib.org/faq/usage_faq.html#what-is-a-backend for details
# TODO: use this: https://stackoverflow.com/a/37605654/7851470
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
from .utils import (bolometric_indexer,
bolometric_magnitude,
nan_array)
# Reference histogram of observed star counts per bolometric-magnitude bin
# (starting at ``min_observed_magnitude``; see ``plot`` below).
OBSERVATIONAL_STARS_COUNTS = np.array(
    [3, 4, 5, 7, 12, 17, 17, 12, 20, 19, 37, 42, 52, 72, 96, 62, 20, 3, 1])
# TODO: replace by # 2 * pi * radius ** 3 / 3
# Equals 2*pi*40**3/3, i.e. a hemisphere of radius 40 pc, in pc^3.
FORTY_PARSEC_NORTHERN_HEMISPHERE_VOLUME = 134041.29
def plot(stars: pd.DataFrame,
         *,
         min_bolometric_magnitude: float = 6.,
         max_bolometric_magnitude: float = 21.,
         bin_size: float = 0.5,
         min_observed_magnitude: float = 7.75,
         observed_stars_counts: np.ndarray = OBSERVATIONAL_STARS_COUNTS,
         # We choose these bins because they have many objects
         # and don't lie in problematic regions
         trusted_bins: frozenset = frozenset([15, 16, 17]),
         filename: str = 'luminosity_function.ps',
         figure_size: Tuple[float, float] = (7, 7),
         ratio: float = 10 / 13,
         xlabel: str = '$M_{bol}$',
         # Raw string: '\l' in the TeX macro is not a valid Python escape
         # sequence (same value, avoids the DeprecationWarning).
         ylabel: str = r'$\log N (pc^{-3}M_{bol}^{-1})$',
         xlimits: Tuple[float, float] = (7, 19),
         ylimits: Tuple[float, float] = (-6, -2),
         line_color: str = 'k',
         marker: str = 's',
         capsize: float = 5,
         observational_line_color: str = 'r') -> None:
    """Plot the synthetic luminosity function of ``stars`` against the
    observational one and save the figure to ``filename``.

    The synthetic counts are normalized to the observed counts over
    ``trusted_bins`` before both curves are drawn with error bars.
    """
    bolometric_index = bolometric_indexer(
        min_magnitude=min_bolometric_magnitude,
        stars_bin_size=bin_size)
    # ``np.asscalar`` was deprecated and then removed from NumPy;
    # ``ndarray.item()`` is its documented replacement.
    stars_bins_count = bolometric_index(max_bolometric_magnitude).item()

    # Aligning observed stars counts with the scale
    # defined by min and max bolometric magnitudes
    initial_index = bolometric_index(min_observed_magnitude).item()
    observed_stars_counts = np.insert(arr=observed_stars_counts,
                                      obj=0,
                                      values=np.zeros(shape=initial_index,
                                                      dtype=np.int32))
    observed_stars_counts = np.append(
        arr=observed_stars_counts,
        values=np.zeros(
            shape=stars_bins_count - observed_stars_counts.size,
            dtype=np.int32))
    observational_luminosity_function = luminosity_function(
        max_bolometric_magnitude=max_bolometric_magnitude,
        min_bolometric_magnitude=min_bolometric_magnitude,
        bin_size=bin_size,
        stars_bins_count=stars_bins_count,
        stars_counts=observed_stars_counts)

    # Bin the synthetic stars and rescale them to the observed counts.
    magnitudes = bolometric_magnitude(luminosities=stars['luminosity'])
    bins_indexes = pd.Series(bolometric_index(magnitudes))
    actual_stars_counts = bins_indexes.value_counts()
    observed_stars_counts = pd.Series(observed_stars_counts)
    normalized_stars_counts = actual_stars_counts * normalization_factor(
        actual_stars_counts=actual_stars_counts,
        observed_stars_counts=observed_stars_counts,
        trusted_bins=trusted_bins)
    synthetic_luminosity_function = luminosity_function(
        max_bolometric_magnitude=max_bolometric_magnitude,
        min_bolometric_magnitude=min_bolometric_magnitude,
        bin_size=bin_size,
        stars_bins_count=stars_bins_count,
        stars_counts=normalized_stars_counts)

    figure, subplot = plt.subplots(figsize=figure_size)
    subplot.set(xlabel=xlabel,
                ylabel=ylabel,
                xlim=xlimits,
                ylim=ylimits)
    draw_errorbar = partial(subplot.errorbar,
                            marker=marker,
                            capsize=capsize)
    draw_errorbar(x=synthetic_luminosity_function['magnitude'],
                  y=synthetic_luminosity_function['log_stars_count'],
                  yerr=[synthetic_luminosity_function['lower_errorbar'],
                        synthetic_luminosity_function['upper_errorbar']],
                  color=line_color,
                  zorder=2)
    draw_errorbar(x=observational_luminosity_function['magnitude'],
                  y=observational_luminosity_function['log_stars_count'],
                  yerr=[observational_luminosity_function['lower_errorbar'],
                        observational_luminosity_function['upper_errorbar']],
                  color=observational_line_color,
                  zorder=1)
    plt.minorticks_on()
    subplot.xaxis.set_ticks_position('both')
    subplot.yaxis.set_ticks_position('both')
    subplot.set_aspect(ratio / subplot.get_data_ratio())
    plt.savefig(filename)
def luminosity_function(*,
                        min_bolometric_magnitude: float,
                        max_bolometric_magnitude: float,
                        bin_size: float,
                        stars_bins_count: int,
                        stars_counts: np.ndarray,
                        max_errorbar_len: float = 6.) -> pd.DataFrame:
    """Build a luminosity-function table (bin-center magnitude, log number
    density and asymmetric error bars) from per-bin star counts.

    Bins whose counts produce infinite error bars are clamped to
    ``max_errorbar_len`` via ``replace_nans``.
    """
    luminosity_function_template = dict(
        # Bin centers: half a bin above each lower edge.
        magnitude=np.arange(min_bolometric_magnitude + bin_size / 2,
                            max_bolometric_magnitude,
                            bin_size),
        log_stars_count=nan_array(stars_bins_count),
        upper_errorbar=nan_array(stars_bins_count),
        lower_errorbar=nan_array(stars_bins_count))
    res = pd.DataFrame(data=luminosity_function_template)
    # Number density per pc^3 on a log10 scale.
    res['log_stars_count'] = np.log10(stars_counts /
                                      FORTY_PARSEC_NORTHERN_HEMISPHERE_VOLUME)
    # sqrt(N)-style asymmetric errors in log space.
    stars_counts_sqrt = np.sqrt(stars_counts)
    inverse_stars_counts_sqrt = 1. / stars_counts_sqrt
    res['upper_errorbar'] = np.log10(1. + inverse_stars_counts_sqrt)
    res['lower_errorbar'] = -np.log10(1. - inverse_stars_counts_sqrt)
    # Empty bins yield +/-inf; convert to NaN so they can be clamped below.
    res.replace(to_replace=[np.inf, -np.inf],
                value=np.nan,
                inplace=True)
    return replace_nans(df=res,
                        replacement=max_errorbar_len)
def trusted_bins_stars_count(stars_counts: pd.Series,
                             *,
                             trusted_bins: frozenset) -> float:
    """Sum the counts whose bin index belongs to ``trusted_bins``."""
    in_trusted = stars_counts.index.isin(trusted_bins)
    return stars_counts[in_trusted].sum()
def normalization_factor(*,
                         trusted_bins: frozenset,
                         observed_stars_counts: pd.Series,
                         actual_stars_counts: pd.Series) -> float:
    """Ratio of observed to synthetic star counts over the trusted bins."""
    observed_total = trusted_bins_stars_count(observed_stars_counts,
                                              trusted_bins=trusted_bins)
    actual_total = trusted_bins_stars_count(actual_stars_counts,
                                            trusted_bins=trusted_bins)
    return observed_total / actual_total
def replace_nans(df: pd.DataFrame,
                 replacement: float) -> pd.DataFrame:
    """Return a copy of ``df`` where NaNs are filled with ``replacement``,
    but only in rows that still have a valid ``log_stars_count``."""
    result = df.copy()
    # Rows with some missing value AND a defined log count get fixed;
    # rows with no log count at all are left untouched.
    rows_to_fix = (result.isnull().any(axis=1)
                   & result['log_stars_count'].notnull())
    result.loc[rows_to_fix, :] = (result.loc[rows_to_fix, :]
                                  .fillna(value=replacement))
    return result
| {
"repo_name": "lycantropos/alcor",
"path": "alcor/services/plots/luminosity_function.py",
"copies": "2",
"size": "7454",
"license": "mit",
"hash": -2452175654236313600,
"line_mean": 38.0261780105,
"line_max": 79,
"alpha_frac": 0.5914944996,
"autogenerated": false,
"ratio": 3.666502705361535,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5257997204961535,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import TypeVar, Generic, Callable
from .typing import Functor, Monad, Applicative
# Generic type parameters for the wrapped value and mapped results.
TSource = TypeVar('TSource')
TResult = TypeVar('TResult')
class Identity(Generic[TSource]):
    """Identity monad.

    The Identity monad is the simplest monad, which attaches no
    information to values.
    """

    def __init__(self, value: TSource) -> None:
        self._value = value

    @classmethod
    def unit(cls, value: TSource) -> 'Identity[TSource]':
        """Initialize a new identity."""
        return Identity(value)

    def map(self, mapper: Callable[[TSource], TResult]) -> 'Identity[TResult]':
        """Map a function over wrapped values."""
        return Identity(mapper(self._value))

    def bind(self, func: Callable[[TSource], 'Identity[TResult]']) -> 'Identity[TResult]':
        """Chain a monadic function over the wrapped value."""
        return func(self._value)

    @classmethod
    def pure(cls, value: TSource):
        """Lift a plain value into the applicative."""
        return Identity(value)

    def apply(self: 'Identity[Callable[[TSource], TResult]]', something: 'Identity[TSource]') -> 'Identity[TResult]':
        """Apply the wrapped function to another wrapped value."""
        def call_or_curry(inner):
            # Fall back to partial application when the wrapped callable
            # still expects further arguments.
            try:
                return self._value(inner)
            except TypeError:
                return partial(self._value, inner)

        return something.map(call_or_curry)

    def run(self) -> TSource:
        """Unwrap and return the contained value."""
        return self._value

    def __call__(self) -> TSource:
        return self.run()

    def __eq__(self, other) -> bool:
        # Compare against whatever calling the other wrapper yields.
        return self._value == other()

    def __str__(self) -> str:
        return "Identity(%s)" % self._value

    def __repr__(self) -> str:
        return str(self)
# Sanity checks that Identity conforms to the package's abstract interfaces.
# NOTE(review): isinstance on the *class* object relies on Functor /
# Applicative / Monad customizing __instancecheck__ (e.g. ABC/Protocol
# metaclass) -- confirm against .typing; otherwise issubclass was intended.
assert isinstance(Identity, Functor)
assert isinstance(Identity, Applicative)
assert isinstance(Identity, Monad)
| {
"repo_name": "dbrattli/OSlash",
"path": "oslash/identity.py",
"copies": "1",
"size": "1761",
"license": "apache-2.0",
"hash": 4263747685006135300,
"line_mean": 26.9523809524,
"line_max": 117,
"alpha_frac": 0.6223736513,
"autogenerated": false,
"ratio": 3.9220489977728286,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5044422649072828,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from unittest import TestCase
from mock import Mock, MagicMock, patch
from cloudshell.cp.openstack.domain.services.waiters.instance import InstanceWaiter
from cloudshell.cp.openstack.models.exceptions import InstanceErrorStateException
class TestTaskWaiterService(TestCase):
    """Tests for InstanceWaiter.wait() polling behavior."""
    def setUp(self):
        self.cancellation_service = Mock()
        self.instance_waiter_service = InstanceWaiter(cancellation_service=self.cancellation_service)
    @patch("cloudshell.cp.openstack.domain.services.waiters.instance.time.sleep")
    def test_wait_for_instance_active(self, sleep):
        """wait() polls until the instance reaches ACTIVE and returns it."""
        def instance_status_changer(instance_obj, new_state):
            # Side effect for instance.get(): flip the status on first poll.
            instance_obj.status = new_state
        # Arrange
        cancellation_context = Mock()
        instance = Mock()
        instance.status = None
        instance_status_changer_command = partial(instance_status_changer,
                                                  instance,
                                                  self.instance_waiter_service.ACTIVE)
        instance.get = MagicMock(side_effect=instance_status_changer_command)
        # Act
        # BUGFIX: pass a Mock *instance* as the logger (was the Mock class
        # itself, which has no auto-created attributes like .info/.error).
        result = self.instance_waiter_service.wait(instance=instance,
                                                   state=self.instance_waiter_service.ACTIVE,
                                                   cancellation_context=cancellation_context,
                                                   logger=Mock())
        # Verify
        instance.get.assert_called_once()
        self.cancellation_service.check_if_cancelled.assert_called_once_with(cancellation_context=cancellation_context)
        sleep.assert_called_once_with(self.instance_waiter_service._delay)
        # assertEqual: assertEquals is a deprecated alias, removed in 3.12.
        self.assertEqual(result, instance)
    @patch("cloudshell.cp.openstack.domain.services.waiters.instance.time.sleep")
    def test_wait_for_instance_error(self, sleep):
        """wait() raises InstanceErrorStateException on ERROR state."""
        def instance_status_changer(instance_obj, new_state):
            # Side effect for instance.get(): flip the status on first poll.
            instance_obj.status = new_state
        # Arrange
        cancellation_context = Mock()
        instance = MagicMock()
        instance.fault['message'] = "bb"
        instance.status = None
        instance_status_changer_command = partial(instance_status_changer,
                                                  instance,
                                                  self.instance_waiter_service.ERROR)
        instance.get = MagicMock(side_effect=instance_status_changer_command)
        with self.assertRaises(InstanceErrorStateException) as context:
            self.instance_waiter_service.wait(instance=instance,
                                              state=self.instance_waiter_service.ERROR,
                                              cancellation_context=cancellation_context,
                                              logger=Mock())
        # Verify
        instance.get.assert_called_once()
        self.assertEqual(context.exception.message, str(instance.fault['message']))
| {
"repo_name": "QualiSystems/OpenStack-Shell",
"path": "package/tests/test_cp/test_openstack/test_domain/test_services/test_waiter/test_instance_waiter.py",
"copies": "1",
"size": "3020",
"license": "isc",
"hash": -5916519722047662000,
"line_mean": 46.1875,
"line_max": 119,
"alpha_frac": 0.6003311258,
"autogenerated": false,
"ratio": 4.763406940063091,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002239128189388921,
"num_lines": 64
} |
from functools import partial
from unittest.mock import MagicMock, patch
from pip._internal.models.direct_url import (
DIRECT_URL_METADATA_NAME,
ArchiveInfo,
DirectUrl,
DirInfo,
VcsInfo,
)
from pip._internal.models.link import Link
from pip._internal.utils.direct_url_helpers import (
direct_url_as_pep440_direct_reference,
direct_url_from_link,
dist_get_direct_url,
)
from pip._internal.utils.urls import path_to_url
def test_as_pep440_requirement_archive():
    """Archive direct URLs render as 'pkg @ <url>'; subdirectory and hash
    are appended as URL-fragment parts (hash first) when present.
    Note: the same DirectUrl object is mutated between assertions, so
    assertion order matters.
    """
    direct_url = DirectUrl(
        url="file:///home/user/archive.tgz",
        info=ArchiveInfo(),
    )
    direct_url.validate()
    assert (
        direct_url_as_pep440_direct_reference(direct_url, "pkg") ==
        "pkg @ file:///home/user/archive.tgz"
    )
    direct_url.subdirectory = "subdir"
    direct_url.validate()
    assert (
        direct_url_as_pep440_direct_reference(direct_url, "pkg") ==
        "pkg @ file:///home/user/archive.tgz#subdirectory=subdir"
    )
    # Hash and subdirectory are combined in one fragment, hash first.
    direct_url.info.hash = "sha1=1b8c5bc61a86f377fea47b4276c8c8a5842d2220"
    direct_url.validate()
    assert (
        direct_url_as_pep440_direct_reference(direct_url, "pkg") ==
        "pkg @ file:///home/user/archive.tgz"
        "#sha1=1b8c5bc61a86f377fea47b4276c8c8a5842d2220&subdirectory=subdir"
    )
def test_as_pep440_requirement_dir():
    """A non-editable local directory renders as 'pkg @ <file-url>'."""
    url = "file:///home/user/project"
    direct_url = DirectUrl(url=url, info=DirInfo(editable=False))
    direct_url.validate()
    expected = "pkg @ " + url
    assert direct_url_as_pep440_direct_reference(direct_url, "pkg") == expected
def test_as_pep440_requirement_editable_dir():
    """Editable and non-editable dirs render identically.
    direct_url_as_pep440_direct_reference behaves the same irrespective of
    the editable flag; it's the responsibility of callers to render it as
    editable.
    """
    url = "file:///home/user/project"
    direct_url = DirectUrl(url=url, info=DirInfo(editable=True))
    direct_url.validate()
    assert direct_url_as_pep440_direct_reference(direct_url, "pkg") == "pkg @ " + url
def test_as_pep440_requirement_vcs():
    """VCS direct URLs render as 'pkg @ <vcs>+<url>@<commit_id>', with an
    optional '#subdirectory=' fragment. The DirectUrl is mutated between
    assertions, so order matters.
    """
    direct_url = DirectUrl(
        url="https:///g.c/u/p.git",
        info=VcsInfo(
            vcs="git", commit_id="1b8c5bc61a86f377fea47b4276c8c8a5842d2220"
        )
    )
    direct_url.validate()
    assert (
        direct_url_as_pep440_direct_reference(direct_url, "pkg") ==
        "pkg @ git+https:///g.c/u/p.git"
        "@1b8c5bc61a86f377fea47b4276c8c8a5842d2220"
    )
    direct_url.subdirectory = "subdir"
    direct_url.validate()
    assert (
        direct_url_as_pep440_direct_reference(direct_url, "pkg") ==
        "pkg @ git+https:///g.c/u/p.git"
        "@1b8c5bc61a86f377fea47b4276c8c8a5842d2220#subdirectory=subdir"
    )
@patch("pip._internal.vcs.git.Git.get_revision")
def test_from_link_vcs(mock_get_backend_for_scheme):
_direct_url_from_link = partial(direct_url_from_link, source_dir="...")
direct_url = _direct_url_from_link(Link("git+https://g.c/u/p.git"))
assert direct_url.url == "https://g.c/u/p.git"
assert isinstance(direct_url.info, VcsInfo)
assert direct_url.info.vcs == "git"
direct_url = _direct_url_from_link(Link("git+https://g.c/u/p.git#egg=pkg"))
assert direct_url.url == "https://g.c/u/p.git"
direct_url = _direct_url_from_link(
Link("git+https://g.c/u/p.git#egg=pkg&subdirectory=subdir")
)
assert direct_url.url == "https://g.c/u/p.git"
assert direct_url.subdirectory == "subdir"
direct_url = _direct_url_from_link(Link("git+https://g.c/u/p.git@branch"))
assert direct_url.url == "https://g.c/u/p.git"
assert direct_url.info.requested_revision == "branch"
direct_url = _direct_url_from_link(
Link("git+https://g.c/u/p.git@branch#egg=pkg")
)
assert direct_url.url == "https://g.c/u/p.git"
assert direct_url.info.requested_revision == "branch"
direct_url = _direct_url_from_link(
Link("git+https://token@g.c/u/p.git")
)
assert direct_url.to_dict()["url"] == "https://g.c/u/p.git"
def test_from_link_vcs_with_source_dir_obtains_commit_id(script, tmpdir):
    """When a source_dir is given, the commit id is read from the actual
    git checkout rather than from the link itself.
    Uses the `script` fixture to run real git commands in a temp repo.
    """
    repo_path = tmpdir / 'test-repo'
    repo_path.mkdir()
    repo_dir = str(repo_path)
    script.run('git', 'init', cwd=repo_dir)
    (repo_path / "somefile").touch()
    script.run('git', 'add', '.', cwd=repo_dir)
    script.run('git', 'commit', '-m', 'commit msg', cwd=repo_dir)
    commit_id = script.run(
        'git', 'rev-parse', 'HEAD', cwd=repo_dir
    ).stdout.strip()
    direct_url = direct_url_from_link(
        Link("git+https://g.c/u/p.git"), source_dir=repo_dir
    )
    assert direct_url.url == "https://g.c/u/p.git"
    assert direct_url.info.commit_id == commit_id
def test_from_link_vcs_without_source_dir(script, tmpdir):
    """Without a source dir, the pinned revision from the link is taken as
    the commit id (valid only for links coming from the wheel cache)."""
    cached_link = Link("git+https://g.c/u/p.git@1")
    direct_url = direct_url_from_link(cached_link, link_is_in_wheel_cache=True)
    assert direct_url.url == "https://g.c/u/p.git"
    assert direct_url.info.commit_id == "1"
def test_from_link_archive():
    """Archive links produce ArchiveInfo; a hash fragment is captured."""
    plain = direct_url_from_link(Link("https://g.c/archive.tgz"))
    assert plain.url == "https://g.c/archive.tgz"
    assert isinstance(plain.info, ArchiveInfo)
    hashed = direct_url_from_link(
        Link(
            "https://g.c/archive.tgz"
            "#sha1=1b8c5bc61a86f377fea47b4276c8c8a5842d2220"
        )
    )
    assert isinstance(hashed.info, ArchiveInfo)
    assert hashed.info.hash == "sha1=1b8c5bc61a86f377fea47b4276c8c8a5842d2220"
def test_from_link_dir(tmpdir):
    """A local directory link produces DirInfo with the directory's URL."""
    url_of_dir = path_to_url(tmpdir)
    result = direct_url_from_link(Link(url_of_dir))
    assert isinstance(result.info, DirInfo)
    assert result.url == url_of_dir
def test_from_link_hide_user_password():
    """Credentials are stripped from the serialized URL.
    Basic test only here, other variants are covered by
    direct_url.redact_url tests.
    """
    cases = [
        # user:password credentials are removed entirely
        ("git+https://user:password@g.c/u/p.git@branch#egg=pkg",
         "https://g.c/u/p.git"),
        # the conventional 'git' ssh user is preserved
        ("git+ssh://git@g.c/u/p.git@branch#egg=pkg",
         "ssh://git@g.c/u/p.git"),
    ]
    for link_url, expected in cases:
        direct_url = direct_url_from_link(
            Link(link_url), link_is_in_wheel_cache=True
        )
        assert direct_url.to_dict()["url"] == expected
def test_dist_get_direct_url_no_metadata():
    """Return None when the distribution has no direct_url.json metadata."""
    mock_dist = MagicMock()
    mock_dist.has_metadata.return_value = False
    assert dist_get_direct_url(mock_dist) is None
    mock_dist.has_metadata.assert_called()
def test_dist_get_direct_url_bad_metadata():
    """Malformed direct_url.json is swallowed and None is returned."""
    mock_dist = MagicMock()
    mock_dist.has_metadata.return_value = True
    mock_dist.get_metadata.return_value = "{}"  # invalid direct_url.json
    assert dist_get_direct_url(mock_dist) is None
    mock_dist.get_metadata.assert_called_with(DIRECT_URL_METADATA_NAME)
def test_dist_get_direct_url_valid_metadata():
    """Well-formed direct_url.json is parsed into a DirectUrl object."""
    mock_dist = MagicMock()
    mock_dist.has_metadata.return_value = True
    mock_dist.get_metadata.return_value = (
        '{"url": "https://e.c/p.tgz", "archive_info": {}}'
    )
    parsed = dist_get_direct_url(mock_dist)
    mock_dist.get_metadata.assert_called_with(DIRECT_URL_METADATA_NAME)
    assert parsed.url == "https://e.c/p.tgz"
    assert isinstance(parsed.info, ArchiveInfo)
| {
"repo_name": "sbidoul/pip",
"path": "tests/unit/test_direct_url_helpers.py",
"copies": "4",
"size": "7206",
"license": "mit",
"hash": -1818698417693829400,
"line_mean": 33.3142857143,
"line_max": 79,
"alpha_frac": 0.6408548432,
"autogenerated": false,
"ratio": 2.9304595363969095,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 210
} |
from functools import partial
from unittest.mock import mock_open, ANY, call
from raptiformica.actions.mesh import configure_raptiformica_conf
from tests.testcase import TestCase
class TestConfigureRaptiformicaConf(TestCase):
    """Tests for configure_raptiformica_conf (systemd unit handling)."""
    def setUp(self):
        # path.exists defaults to False: simulate a system without systemd.
        self.exists = self.set_up_patch(
            'raptiformica.actions.mesh.path.exists'
        )
        self.exists.return_value = False
        self.mock_open = mock_open()
        self.set_up_patch('raptiformica.actions.mesh.open', self.mock_open)
        self.run_command_print_ready = self.set_up_patch(
            'raptiformica.actions.mesh.run_command_print_ready'
        )
    def test_configure_raptiformica_conf_checks_if_path_exists(self):
        """The systemd unit directory is probed to detect systemd."""
        configure_raptiformica_conf()
        self.exists.assert_called_once_with(
            '/usr/lib/systemd/system/'
        )
    def test_configure_raptiformica_conf_does_not_write_to_service_file(self):
        """No unit file is written when systemd is absent."""
        configure_raptiformica_conf()
        self.assertFalse(self.mock_open().write.called)
    def test_configure_conf_writes_to_service_file_if_systemd(self):
        """A unit file is written when the systemd directory exists."""
        self.exists.return_value = True
        configure_raptiformica_conf()
        self.mock_open().write.assert_called_once_with(
            ANY
        )
    def test_configure_conf_enables_oneshot_if_systemd_and_new_unit_file(self):
        """daemon-reload + enable run only for a newly created unit file."""
        # First exists(): systemd dir present; second: unit file absent.
        self.exists.side_effect = (True, False)
        configure_raptiformica_conf()
        expected_calls = map(
            partial(call, shell=True, buffered=False),
            ("systemctl daemon-reload",
             "systemctl enable raptiformica")
        )
        self.assertCountEqual(
            expected_calls, self.run_command_print_ready.mock_calls
        )
    def test_configure_conf_does_not_enable_oneshot_if_systemd_and_existing_unit_file(self):
        """No systemctl calls when the unit file already existed."""
        # First exists(): systemd dir present; second: unit file present.
        self.exists.side_effect = (True, True)
        configure_raptiformica_conf()
        self.assertFalse(self.run_command_print_ready.called)
| {
"repo_name": "vdloo/raptiformica",
"path": "tests/unit/raptiformica/actions/mesh/test_configure_raptiformica_conf.py",
"copies": "1",
"size": "1951",
"license": "mit",
"hash": -2685761938856855000,
"line_mean": 31.5166666667,
"line_max": 92,
"alpha_frac": 0.6586365966,
"autogenerated": false,
"ratio": 3.5732600732600734,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9731000612512404,
"avg_score": 0.0001792114695340502,
"num_lines": 60
} |
from functools import partial
from urllib import urlencode
class Fields(object):
    """
    Abstract class that shouldn't be initiated.
    Field classes should inherit this class and run _init_values in their
    __init__ method.
    """
    def _init_values(self, simple_fields, complex_fields=None):
        """Register selectable fields and generate add_<field> methods.

        simple_fields -- iterable of field names toggled on/off.
        complex_fields -- dict mapping field name to the Fields subclass
            used for nested field selections.
        Registration order is significant: it is the order fields appear
        in the URL produced by get_url().
        """
        if complex_fields is None: complex_fields = {}
        self._values = dict()
        for key in simple_fields:
            self._values[key] = False
            method_key = "add_" + key.replace("-", "_")
            function = self._get_simple_lambda(key)
            function.__doc__ = "Add " + key
            function.__name__ = method_key
            setattr(self, method_key, function)
        for key, class_type in complex_fields.items():
            self._values[key] = False
            method_key = "add_" + key.replace("-", "_")
            function = self._get_complex_lambda(key, class_type)
            function.__doc__ = "Add " + key
            function.__name__ = method_key
            setattr(self, method_key, function)
    def _get_simple_lambda(self, key):
        # Separate helper so each lambda closes over its own `key`
        # (avoids the late-binding-closure pitfall in the loops above).
        return lambda: self._set_field(key)
    def _get_complex_lambda(self, key, class_type):
        # See _get_simple_lambda for why this is a separate helper.
        return lambda value=None: self._set_complex_field(key, class_type, value)
    def __repr__(self):
        rep = []
        for key, value in self._values.items():
            if value is True:
                rep.append(key)
            elif isinstance(value, Fields):
                rep.append("{key}:({value})".format(key=key, value=repr(value)))
            elif value:
                rep.append(value)
        return self.__class__.__name__ + " : " + repr(rep)
    def _set_field(self, key):
        """Mark a simple field as selected; return self for chaining."""
        self._check_key_valid(key)
        self._values[key] = True
        return self
    def _check_key_valid(self, key):
        """Raise ValueError if `key` was not registered in _init_values.

        BUGFIX/portability: dict.has_key() is Python-2-only (removed in
        Python 3); the `in` operator behaves identically on both.
        """
        if key not in self._values:
            raise ValueError("{0} is not a valid field".format(key))
    def _set_complex_field(self, key, class_type, value=None):
        """Select a nested field, optionally with a sub-selection `value`."""
        if not value:
            return self._set_field(key)
        self._check_key_valid(key)
        if not isinstance(value, class_type):
            raise ValueError("{0} is not of type {1}".format(value, class_type))
        self._values[key] = value
        return self
    def get_url(self):
        """Render the selected fields as a LinkedIn field-selector string."""
        url_values = [key for key in self._values.keys() if self._values[key] is True]
        for key, value in self._values.items():
            if isinstance(value, Fields):
                url_values.append("{key}:({value})".format(key=key, value=value.get_url()))
            elif not isinstance(value, bool):
                url_values.append(value)
        return ",".join(url_values)
    def all(self):
        """Select every registered field (non-nested); return self."""
        for key in self._values.keys():
            self._values[key] = True
        return self
class Location(Fields):
    """Member location field selector (name, country)."""
    def __init__(self):
        self._init_values(("name", "country"))
    def all_with_nested(self):
        """Select all fields; Location has no nested field groups."""
        return self.all()
class RelationToViewer(Fields):
    """Relation-to-viewer field selector (distance, connection info)."""
    def __init__(self):
        self._init_values(("distance", "num-related-connections", "related-connections"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class MemberUrl(Fields):
    """Member URL resource field selector (url, name)."""
    def __init__(self):
        self._init_values(("url", "name"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class HttpHeader(Fields):
    """HTTP header field selector (name, value)."""
    def __init__(self):
        self._init_values(("name", "value"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class HttpRequest(Fields):
    """HTTP request field selector with nested header fields."""
    def __init__(self):
        self._init_values(("url",), {"headers" : HttpHeader})
    def all_with_nested(self):
        """Select url plus fully-expanded header fields."""
        return self.add_url().add_headers(HttpHeader().all_with_nested())
class Company(Fields):
    """Company field selector (id, name, type, size, industry, ticker)."""
    def __init__(self):
        self._init_values(("id", "name", "type", "size", "industry", "ticker"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class Position(Fields):
    """Position field selector with nested company fields."""
    def __init__(self):
        self._init_values(("id", "title", "summary", "start-date",
                           "end-date", "is-current"),
                          {"company" : Company})
    def all_with_nested(self):
        """Select all fields including fully-expanded company fields."""
        return self.all().add_company(Company().all_with_nested())
class Author(Fields):
    """Publication author field selector (id, name, person)."""
    def __init__(self):
        self._init_values(("id", "name", "person"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class Publication(Fields):
    """Publication field selector with nested author fields."""
    def __init__(self):
        self._init_values(("id", "title", "publisher", "date", "url", "summary"),
                          {"authors" : Author})
    def all_with_nested(self):
        """Select all fields including fully-expanded author fields.

        BUGFIX: the method generated for the "authors" key is add_authors;
        the previous call to add_author raised AttributeError at runtime.
        """
        return self.all().add_authors(Author().all_with_nested())
class PatentStatus(Fields):
    """Patent status field selector (id, name)."""
    def __init__(self):
        self._init_values(("id", "name"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class Investor(Fields):
    """Patent investor field selector; `person` nests a full Profile."""
    def __init__(self):
        self._init_values(("id", "name"), {"person" : Profile})
    def all_with_nested(self):
        # We can't nest person because it will cause an infinite loop
        # (Profile -> Patent -> Investor -> Profile ...).
        return self.all()
class Patent(Fields):
    """Patent field selector with nested status and investor fields."""
    def __init__(self):
        self._init_values(("id", "title", "summary", "number", "office", "date", "url"),
                          {"status" : PatentStatus,
                           "investors" : Investor})
    def all_with_nested(self):
        """Select all fields including expanded status/investor fields."""
        return self.all().add_status(PatentStatus().all_with_nested()). \
               add_investors(Investor().all_with_nested())
class Proficiency(Fields):
    """Proficiency field selector (level, name)."""
    def __init__(self):
        self._init_values(("level", "name"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class Language(Fields):
    """Language field selector with nested proficiency fields."""
    def __init__(self):
        self._init_values(("id", "language"), {"proficiency" : Proficiency})
    def all_with_nested(self):
        """Select all fields including expanded proficiency fields."""
        return self.all().add_proficiency(Proficiency().all_with_nested())
class Year(Fields):
    """Years-of-experience field selector (id, name)."""
    def __init__(self):
        self._init_values(("id", "name"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class Skill(Fields):
    """Skill field selector with nested proficiency and years fields."""
    def __init__(self):
        self._init_values(("id", "skill"),
                          {"proficiency" : Proficiency,
                           "years" : Year})
    def all_with_nested(self):
        """Select all fields including expanded proficiency/years fields."""
        return self.all().add_proficiency(Proficiency().all_with_nested()). \
               add_years(Year().all_with_nested())
class Certification(Fields):
    """Certification field selector (id, name, authority, number, dates)."""
    def __init__(self):
        self._init_values(("id", "name", "authority", "number", "start-date", "end-date"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class Education(Fields):
    """Education field selector (school, study field, dates, degree, ...)."""
    def __init__(self):
        self._init_values(("id", "school-name", "field-of-study", "start-date", "end-date",
                           "degree", "activities", "notes"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class Recommendation(Fields):
    """Recommendation field selector (id, type, recommender)."""
    def __init__(self):
        self._init_values(("id", "recommendation-type", "recommender"))
    def all_with_nested(self):
        """Select all fields; no nested field groups."""
        return self.all()
class Profile(Fields):
# TODO Dont forget about these params: https://developer.linkedin.com/thread/2286
def __init__(self):
simple_fields = ("id",
"first-name",
"last-name",
"headline",
"distance",
"current-share",
"connections",
"num-connections",
"num-connections-capped",
"summary",
"specialties",
"proposal-comments",
"associations",
"honors",
"interests",
"patents",
"num-recommenders",
"phone-numbers",
"im-accounts",
"twitter-accounts",
"date-of-birth",
"main-address",
"picture-url",
"public-profile-url",
"site-standard-profile-request",
"api-public-profile-request",
"site-public-profile-request",
)
complex_fields = {
"location" : Location,
"relation-to-viewer" : RelationToViewer,
"member-url-resources" : MemberUrl,
"api-standard-profile-request" : HttpRequest,
"positions" : Position,
"three-current-positions" : Position,
"three-past-positions" : Position,
"publications" : Publication,
"languages" : Language,
"skills" : Skill,
"certifications" : Certification,
"educations" : Education,
"recommendations-received" : Recommendation}
self._init_values(simple_fields, complex_fields)
self._id = None
self._url = None
self._public = False
def me(self):
self._id = None
self._url = None
return self
def set_url(self, url):
self._id = None
self._url = url
return self
def set_id(self, _id):
self._url = None
self._id = _id
return self
def public(self):
self._public = True
return self
def private(self):
self._public = False
return self
def all_with_nested(self):
self.all()
self._values["location"] = Location().all_with_nested()
self._values["relation-to-viewer"] = RelationToViewer().all_with_nested()
self._values["member-url-resources"] = MemberUrl().all_with_nested()
self._values["api-standard-profile-request"] = HttpRequest.all_with_nested()
return self
def default(self):
return self.add_first_name().add_last_name().add_headline() \
.add_site_standard_profile_request()
def get_url_for_api(self):
url = ""
if self._id:
url = urlencode({"id" : self._id})
elif self._url:
url = urlencode({"url" : self._url})
else:
url = "~"
if self._public:
url += ":public"
fields = self.get_url()
if fields:
url += ":(" + fields + ")"
return url | {
"repo_name": "openthejob/python-linkedin",
"path": "linkedin/params/__init__.py",
"copies": "7",
"size": "10405",
"license": "mit",
"hash": -8898560944143014000,
"line_mean": 30.4380664653,
"line_max": 91,
"alpha_frac": 0.522729457,
"autogenerated": false,
"ratio": 4.050214091086025,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8072943548086025,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib import urlencode
from everyplay.resource import wrapped_resource
from everyplay.request import make_request
class Client(object):
    """A client for interacting with Everyplay resources.

    Depending on the constructor kwargs, one of three OAuth2 flows is
    prepared: authorization-code, resource-owner credentials, or token
    refresh.  Passing an existing ``access_token`` skips OAuth entirely.
    """
    use_ssl = True
    host = 'everyplay.com'
    site = 'api.everyplay.com'
    def __init__(self, **kwargs):
        """Create a client instance with the provided options. Options should
        be passed in as kwargs.
        """
        self.use_ssl = kwargs.get('use_ssl', self.use_ssl)
        self.host = kwargs.get('host', self.host)
        self.site = kwargs.get('site', self.site)
        self.scheme = self.use_ssl and 'https://' or 'http://'
        self.options = kwargs
        self._authorize_url = None
        self.client_id = kwargs.get('client_id')
        if 'access_token' in kwargs:
            # Pre-authenticated: no OAuth flow needed.
            self.access_token = kwargs.get('access_token')
            return
        if 'client_id' not in kwargs:
            raise TypeError("At least a client_id must be provided.")
        if 'scope' in kwargs:
            self.scope = kwargs.get('scope')
        # decide which protocol flow to follow based on the arguments
        # provided by the caller.
        if self._options_for_authorization_code_flow_present():
            self._authorization_code_flow()
        elif self._options_for_credentials_flow_present():
            self._credentials_flow()
        elif self._options_for_token_refresh_present():
            self._refresh_token_flow()
    def exchange_token(self, code):
        """Given the value of the code parameter, request an access token."""
        url = '%s%s/oauth2/access_token' % (self.scheme, self.site)
        options = {
            'grant_type': 'authorization_code',
            'redirect_uri': self.options.get('redirect_uri'),
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'code': code,
        }
        options.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        self.token = wrapped_resource(
            make_request('post', url, options))
        self.access_token = self.token.access_token
        return self.token
    def authorize_url(self):
        """Return the authorization URL for OAuth2 authorization code flow."""
        return self._authorize_url
    def _authorization_code_flow(self):
        """Build the auth URL so the user can authorize the app."""
        options = {
            'scope': getattr(self, 'scope', 'basic'),
            'client_id': self.options.get('client_id'),
            'response_type': 'code',
            'redirect_uri': self.options.get('redirect_uri')
        }
        url = '%s%s/connect' % (self.scheme, self.host)
        self._authorize_url = '%s?%s' % (url, urlencode(options))
    def _refresh_token_flow(self):
        """Given a refresh token, obtain a new access token."""
        url = '%s%s/oauth2/token' % (self.scheme, self.site)
        options = {
            'grant_type': 'refresh_token',
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'refresh_token': self.options.get('refresh_token')
        }
        options.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        self.token = wrapped_resource(
            make_request('post', url, options))
        self.access_token = self.token.access_token
    def _credentials_flow(self):
        """Given a username and password, obtain an access token."""
        url = '%s%s/oauth2/token' % (self.scheme, self.site)
        options = {
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'username': self.options.get('username'),
            'password': self.options.get('password'),
            'scope': getattr(self, 'scope', ''),
            'grant_type': 'password'
        }
        options.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        self.token = wrapped_resource(
            make_request('post', url, options))
        self.access_token = self.token.access_token
    def _request(self, method, resource, **kwargs):
        """Given an HTTP method, a resource name and kwargs, construct a
        request and return the response.
        """
        url = self._resolve_resource_name(resource)
        if hasattr(self, 'access_token'):
            kwargs.update(dict(access_token=self.access_token))
        if hasattr(self, 'client_id'):
            kwargs.update(dict(client_id=self.client_id))
        kwargs.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        return wrapped_resource(make_request(method, url, kwargs))
    def __getattr__(self, name):
        """Translate an HTTP verb into a request method."""
        if name not in ['get', 'post', 'put', 'head', 'delete']:
            raise AttributeError
        return partial(self._request, name)
    def _resolve_resource_name(self, name):
        """Convert a resource name (e.g. videos) into a URI."""
        if name[:4] == 'http':  # already a url
            # BUGFIX: the suffix checks were broken -- `name[:4] != 'json'`
            # re-tested the (known) 'http' prefix instead of the suffix, so
            # '.json' was appended even to URLs already ending in 'json';
            # and `name[-8:] not in [...]` could never match 'stream'
            # (a 6-char suffix compared against an 8-char slice).
            if not name.endswith('json') and \
                    not name.endswith(('download', 'stream')):
                return '%s.json' % (name,)
            return name
        name = name.rstrip('/').lstrip('/')
        return '%s%s/%s.json' % (self.scheme, self.site, name)
    # Helper functions for testing arguments provided to the constructor.
    def _options_present(self, options, kwargs):
        return all(map(lambda k: k in kwargs, options))
    def _options_for_credentials_flow_present(self):
        required = ('client_id', 'client_secret', 'username', 'password')
        return self._options_present(required, self.options)
    def _options_for_authorization_code_flow_present(self):
        required = ('client_id', 'redirect_uri')
        return self._options_present(required, self.options)
    def _options_for_token_refresh_present(self):
        required = ('client_id', 'client_secret', 'refresh_token')
        return self._options_present(required, self.options)
| {
"repo_name": "Everyplay/everyplay-python",
"path": "everyplay/client.py",
"copies": "1",
"size": "6463",
"license": "bsd-2-clause",
"hash": -1538121086608656400,
"line_mean": 38.4085365854,
"line_max": 78,
"alpha_frac": 0.5851771623,
"autogenerated": false,
"ratio": 3.9895061728395063,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5074683335139506,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib import urlencode
from soundcloud.resource import wrapped_resource
from soundcloud.request import make_request
class Client(object):
    """A client for interacting with Soundcloud resources.

    Depending on the constructor kwargs, one of three OAuth2 flows is
    prepared: authorization-code, resource-owner credentials, or token
    refresh.  Passing an existing ``access_token`` skips OAuth entirely.
    """
    use_ssl = True
    host = 'api.soundcloud.com'
    def __init__(self, **kwargs):
        """Create a client instance with the provided options. Options should
        be passed in as kwargs.
        """
        self.use_ssl = kwargs.get('use_ssl', self.use_ssl)
        self.host = kwargs.get('host', self.host)
        self.scheme = self.use_ssl and 'https://' or 'http://'
        self.options = kwargs
        self._authorize_url = None
        self.client_id = kwargs.get('client_id')
        if 'access_token' in kwargs:
            # Pre-authenticated: no OAuth flow needed.
            self.access_token = kwargs.get('access_token')
            return
        if 'client_id' not in kwargs:
            raise TypeError("At least a client_id must be provided.")
        if 'scope' in kwargs:
            self.scope = kwargs.get('scope')
        # decide which protocol flow to follow based on the arguments
        # provided by the caller.
        if self._options_for_authorization_code_flow_present():
            self._authorization_code_flow()
        elif self._options_for_credentials_flow_present():
            self._credentials_flow()
        elif self._options_for_token_refresh_present():
            self._refresh_token_flow()
    def exchange_token(self, code):
        """Given the value of the code parameter, request an access token."""
        url = '%s%s/oauth2/token' % (self.scheme, self.host)
        options = {
            'grant_type': 'authorization_code',
            'redirect_uri': self._redirect_uri(),
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'code': code,
        }
        options.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        self.token = wrapped_resource(
            make_request('post', url, options))
        self.access_token = self.token.access_token
        return self.token
    def authorize_url(self):
        """Return the authorization URL for OAuth2 authorization code flow."""
        return self._authorize_url
    def _authorization_code_flow(self):
        """Build the auth URL so the user can authorize the app."""
        options = {
            'scope': getattr(self, 'scope', 'non-expiring'),
            'client_id': self.options.get('client_id'),
            'response_type': 'code',
            'redirect_uri': self._redirect_uri()
        }
        url = '%s%s/connect' % (self.scheme, self.host)
        self._authorize_url = '%s?%s' % (url, urlencode(options))
    def _refresh_token_flow(self):
        """Given a refresh token, obtain a new access token."""
        url = '%s%s/oauth2/token' % (self.scheme, self.host)
        options = {
            'grant_type': 'refresh_token',
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'refresh_token': self.options.get('refresh_token')
        }
        options.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        self.token = wrapped_resource(
            make_request('post', url, options))
        self.access_token = self.token.access_token
    def _credentials_flow(self):
        """Given a username and password, obtain an access token."""
        url = '%s%s/oauth2/token' % (self.scheme, self.host)
        options = {
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'username': self.options.get('username'),
            'password': self.options.get('password'),
            'scope': getattr(self, 'scope', ''),
            'grant_type': 'password'
        }
        options.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        self.token = wrapped_resource(
            make_request('post', url, options))
        self.access_token = self.token.access_token
    def _request(self, method, resource, **kwargs):
        """Given an HTTP method, a resource name and kwargs, construct a
        request and return the response.
        """
        url = self._resolve_resource_name(resource)
        if hasattr(self, 'access_token'):
            kwargs.update(dict(oauth_token=self.access_token))
        if hasattr(self, 'client_id'):
            kwargs.update(dict(client_id=self.client_id))
        kwargs.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        return wrapped_resource(make_request(method, url, kwargs))
    def __getattr__(self, name, **kwargs):
        """Translate an HTTP verb into a request method."""
        if name not in ('get', 'post', 'put', 'head', 'delete'):
            raise AttributeError
        return partial(self._request, name, **kwargs)
    def _resolve_resource_name(self, name):
        """Convert a resource name (e.g. tracks) into a URI."""
        if name[:4] == 'http':  # already a url
            # BUGFIX: the suffix checks were broken -- `name[:4] != 'json'`
            # re-tested the (known) 'http' prefix instead of the suffix, so
            # '.json' was appended even to URLs already ending in 'json';
            # and `name[-8:] not in (...)` could never match 'stream'
            # (a 6-char suffix compared against an 8-char slice).
            if not name.endswith('json') and \
                    not name.endswith(('download', 'stream')):
                return '%s.json' % (name,)
            return name
        name = name.rstrip('/').lstrip('/')
        if name[-13:] == 'contributions':
            # the contributions endpoint does not take the .json suffix
            return '%s%s/%s' % (self.scheme, self.host, name)
        return '%s%s/%s.json' % (self.scheme, self.host, name)
    def _redirect_uri(self):
        """
        Return the redirect uri. Checks for ``redirect_uri`` or common typo,
        ``redirect_url``
        """
        return self.options.get(
            'redirect_uri',
            self.options.get('redirect_url', None))
    # Helper functions for testing arguments provided to the constructor.
    def _options_present(self, options, kwargs):
        return all(map(lambda k: k in kwargs, options))
    def _options_for_credentials_flow_present(self):
        required = ('client_id', 'client_secret', 'username', 'password')
        return self._options_present(required, self.options)
    def _options_for_authorization_code_flow_present(self):
        required = ('client_id', 'redirect_uri')
        or_required = ('client_id', 'redirect_url')
        return (self._options_present(required, self.options) or
                self._options_present(or_required, self.options))
    def _options_for_token_refresh_present(self):
        required = ('client_id', 'client_secret', 'refresh_token')
        return self._options_present(required, self.options)
| {
"repo_name": "phijor/soundcloud-python",
"path": "soundcloud/client.py",
"copies": "8",
"size": "6879",
"license": "bsd-2-clause",
"hash": -4775925116435053000,
"line_mean": 38.5344827586,
"line_max": 78,
"alpha_frac": 0.5807530164,
"autogenerated": false,
"ratio": 4.006406523005242,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8587159539405242,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib import urlencode
from twisted.internet.defer import inlineCallbacks, returnValue
from txsoundcloud.resource import wrapped_resource
from txsoundcloud.request import make_request
class Client(object):
    """A client for interacting with Soundcloud resources.

    Constructor keyword arguments select how the client authenticates:
    pass ``access_token`` directly, or supply the options for one of the
    OAuth2 flows (authorization-code, resource-owner credentials, or
    refresh-token).
    """
    use_ssl = True
    host = 'api.soundcloud.com'

    def __init__(self, **kwargs):
        """Create a client instance with the provided options. Options should
        be passed in as kwargs.

        :raises TypeError: if neither ``access_token`` nor ``client_id``
            is provided.
        """
        self.use_ssl = kwargs.get('use_ssl', self.use_ssl)
        self.host = kwargs.get('host', self.host)
        self.scheme = 'https://' if self.use_ssl else 'http://'
        self.options = kwargs
        self._authorize_url = None
        self.client_id = kwargs.get('client_id')
        if 'access_token' in kwargs:
            # A ready-made token short-circuits all OAuth2 flows.
            self.access_token = kwargs.get('access_token')
            return
        if 'client_id' not in kwargs:
            raise TypeError("At least a client_id must be provided.")
        if 'scope' in kwargs:
            self.scope = kwargs.get('scope')
        # Decide which protocol flow to follow based on the arguments
        # provided by the caller.
        if self._options_for_authorization_code_flow_present():
            self._authorization_code_flow()
        elif self._options_for_credentials_flow_present():
            self._credentials_flow()
        elif self._options_for_token_refresh_present():
            self._refresh_token_flow()

    def _transport_options(self, options):
        """Merge SSL-verification and proxy settings into *options*.

        Previously this update was duplicated in four call sites.
        """
        options.update({
            'verify_ssl': self.options.get('verify_ssl', True),
            'proxies': self.options.get('proxies', None)
        })
        return options

    def _request_token(self, options):
        """POST *options* to the token endpoint; store and return the token."""
        url = '%s%s/oauth2/token' % (self.scheme, self.host)
        self.token = wrapped_resource(
            make_request('post', url, self._transport_options(options)))
        self.access_token = self.token.access_token
        return self.token

    def exchange_token(self, code):
        """Given the value of the code parameter, request an access token."""
        return self._request_token({
            'grant_type': 'authorization_code',
            'redirect_uri': self._redirect_uri(),
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'code': code,
        })

    def authorize_url(self):
        """Return the authorization URL for OAuth2 authorization code flow."""
        return self._authorize_url

    def _authorization_code_flow(self):
        """Build the auth URL so the user can authorize the app."""
        options = {
            'scope': getattr(self, 'scope', 'non-expiring'),
            'client_id': self.options.get('client_id'),
            'response_type': 'code',
            'redirect_uri': self._redirect_uri()
        }
        url = '%s%s/connect' % (self.scheme, self.host)
        self._authorize_url = '%s?%s' % (url, urlencode(options))

    def _refresh_token_flow(self):
        """Given a refresh token, obtain a new access token."""
        self._request_token({
            'grant_type': 'refresh_token',
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'refresh_token': self.options.get('refresh_token')
        })

    def _credentials_flow(self):
        """Given a username and password, obtain an access token."""
        self._request_token({
            'client_id': self.options.get('client_id'),
            'client_secret': self.options.get('client_secret'),
            'username': self.options.get('username'),
            'password': self.options.get('password'),
            'scope': getattr(self, 'scope', ''),
            'grant_type': 'password'
        })

    @inlineCallbacks
    def _request(self, method, resource, **kwargs):
        """Given an HTTP method, a resource name and kwargs, construct a
        request and return the response (as a Deferred).
        """
        url = self._resolve_resource_name(resource)
        if hasattr(self, 'access_token'):
            kwargs.update(dict(oauth_token=self.access_token))
        if hasattr(self, 'client_id'):
            kwargs.update(dict(client_id=self.client_id))
        response = yield make_request(
            method, url, self._transport_options(kwargs))
        returnValue(wrapped_resource(response))

    def __getattr__(self, name, **kwargs):
        """Translate an HTTP verb into a request method."""
        if name not in ('get', 'post', 'put', 'head', 'delete'):
            raise AttributeError
        return partial(self._request, name, **kwargs)

    def _resolve_resource_name(self, name):
        """Convert a resource name (e.g. tracks) into a URI."""
        if name[:4] == 'http':  # already a url
            # Bug fix: test the *suffix* -- the prefix is known to be
            # 'http' here, so ``name[:4] != 'json'`` was always True and
            # '.json' was appended even to URLs already carrying it.
            if name[-4:] != 'json' and name[-8:] not in ('download', 'stream'):
                return '%s.json' % (name,)
            return name
        name = name.strip('/')
        if name[-13:] == 'contributions':
            # The contributions endpoint takes no '.json' extension.
            return '%s%s/%s' % (self.scheme, self.host, name)
        return '%s%s/%s.json' % (self.scheme, self.host, name)

    def _redirect_uri(self):
        """
        Return the redirect uri. Checks for ``redirect_uri`` or common typo,
        ``redirect_url``
        """
        return self.options.get(
            'redirect_uri',
            self.options.get('redirect_url', None))

    # Helper functions for testing arguments provided to the constructor.
    def _options_present(self, options, kwargs):
        """True when every name in *options* is a key of *kwargs*."""
        return all(k in kwargs for k in options)

    def _options_for_credentials_flow_present(self):
        required = ('client_id', 'client_secret', 'username', 'password')
        return self._options_present(required, self.options)

    def _options_for_authorization_code_flow_present(self):
        required = ('client_id', 'redirect_uri')
        or_required = ('client_id', 'redirect_url')
        return (self._options_present(required, self.options) or
                self._options_present(or_required, self.options))

    def _options_for_token_refresh_present(self):
        required = ('client_id', 'client_secret', 'refresh_token')
        return self._options_present(required, self.options)
| {
"repo_name": "dpnova/txsoundcloud",
"path": "txsoundcloud/client.py",
"copies": "1",
"size": "7009",
"license": "bsd-2-clause",
"hash": 7085080497406726000,
"line_mean": 38.3764044944,
"line_max": 78,
"alpha_frac": 0.5843914966,
"autogenerated": false,
"ratio": 4.018922018348624,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 178
} |
from functools import partial
from urllib.parse import quote, urlencode
from geopy.adapters import AdapterHTTPError
from geopy.exc import GeocoderQuotaExceeded
from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder
from geopy.location import Location
from geopy.util import logger
__all__ = ("TomTom", )
class TomTom(Geocoder):
    """Geocoder backed by the TomTom Search API.

    Documentation at:
        https://developer.tomtom.com/search-api/search-api-documentation
    """

    geocode_path = '/search/2/geocode/%(query)s.json'
    reverse_path = '/search/2/reverseGeocode/%(position)s.json'

    def __init__(
            self,
            api_key,
            *,
            scheme=None,
            timeout=DEFAULT_SENTINEL,
            proxies=DEFAULT_SENTINEL,
            user_agent=None,
            ssl_context=DEFAULT_SENTINEL,
            adapter_factory=None,
            domain='api.tomtom.com'
    ):
        """
        :param str api_key: TomTom API key.

        :param str scheme:
            See :attr:`geopy.geocoders.options.default_scheme`.

        :param int timeout:
            See :attr:`geopy.geocoders.options.default_timeout`.

        :param dict proxies:
            See :attr:`geopy.geocoders.options.default_proxies`.

        :param str user_agent:
            See :attr:`geopy.geocoders.options.default_user_agent`.

        :type ssl_context: :class:`ssl.SSLContext`
        :param ssl_context:
            See :attr:`geopy.geocoders.options.default_ssl_context`.

        :param callable adapter_factory:
            See :attr:`geopy.geocoders.options.default_adapter_factory`.

            .. versionadded:: 2.0

        :param str domain: Domain hosting the target TomTom service.
        """
        super().__init__(
            scheme=scheme,
            timeout=timeout,
            proxies=proxies,
            user_agent=user_agent,
            ssl_context=ssl_context,
            adapter_factory=adapter_factory,
        )
        self.api_key = api_key
        base = "%s://%s" % (self.scheme, domain)
        self.api = base + self.geocode_path
        self.api_reverse = base + self.reverse_path

    def geocode(
            self,
            query,
            *,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL,
            limit=None,
            typeahead=False,
            language=None
    ):
        """
        Return a location point by address.

        :param str query: The address or query you wish to geocode.

        :param bool exactly_one: Return a single result rather than a list
            of results.

        :param int timeout: Per-call override (in seconds) for the timeout
            configured at geocoder initialization; raises
            :class:`geopy.exc.GeocoderTimedOut` on expiry.

        :param int limit: Maximum number of results to request. Forced to 1
            unless ``exactly_one`` is False.

        :param bool typeahead: Treat the query as a partial input and put
            the search into predictive mode.

        :param str language: Language for the results; the service falls
            back to its default language for fields unavailable in the
            requested one. Supported languages (case-insensitive):
            https://developer.tomtom.com/online-search/online-search-documentation/supported-languages

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        params = self._geocode_params(query)
        params['typeahead'] = self._boolean_value(typeahead)
        if limit:
            params['limit'] = str(int(limit))
        if exactly_one:
            # exactly_one wins over any caller-supplied limit.
            params['limit'] = '1'
        if language:
            params['language'] = language
        path = self.api % {'query': quote(query.encode('utf-8'))}
        url = "?".join((path, urlencode(params)))
        logger.debug("%s.geocode: %s", self.__class__.__name__, url)
        parse = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, parse, timeout=timeout)

    def reverse(
            self,
            query,
            *,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL,
            language=None
    ):
        """
        Return an address by location point.

        :param query: The coordinates for which you wish to obtain the
            closest human-readable addresses.
        :type query: :class:`geopy.point.Point`, list or tuple of ``(latitude,
            longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.

        :param bool exactly_one: Return a single result rather than a list
            of results.

        :param int timeout: Per-call override (in seconds) for the timeout
            configured at geocoder initialization; raises
            :class:`geopy.exc.GeocoderTimedOut` on expiry.

        :param str language: Language for the results; the service falls
            back to its default language for fields unavailable in the
            requested one. Supported languages (case-insensitive):
            https://developer.tomtom.com/online-search/online-search-documentation/supported-languages

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        position = self._coerce_point_to_string(query)
        params = self._reverse_params(position)
        if language:
            params['language'] = language
        path = self.api_reverse % {'position': quote(position.encode('utf-8'))}
        url = "?".join((path, urlencode(params)))
        logger.debug("%s.reverse: %s", self.__class__.__name__, url)
        parse = partial(self._parse_reverse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, parse, timeout=timeout)

    def _boolean_value(self, bool_value):
        # TomTom expects literal lowercase 'true'/'false' strings.
        return 'true' if bool_value else 'false'

    def _geocode_params(self, formatted_query):
        return {
            'key': self.api_key,
        }

    def _reverse_params(self, position):
        return {
            'key': self.api_key,
        }

    def _parse_json(self, resources, exactly_one):
        if not resources or not resources['results']:
            return None
        results = resources['results']
        if exactly_one:
            return self._parse_search_result(results[0])
        return [self._parse_search_result(entry) for entry in results]

    def _parse_search_result(self, result):
        position = result['position']
        return Location(
            result['address']['freeformAddress'],
            (position['lat'], position['lon']),
            result,
        )

    def _parse_reverse_json(self, resources, exactly_one):
        if not resources or not resources['addresses']:
            return None
        addresses = resources['addresses']
        if exactly_one:
            return self._parse_reverse_result(addresses[0])
        return [self._parse_reverse_result(entry) for entry in addresses]

    def _parse_reverse_result(self, result):
        lat, lon = result['position'].split(',')
        return Location(
            result['address']['freeformAddress'], (lat, lon), result)

    def _geocoder_exception_handler(self, error):
        if not isinstance(error, AdapterHTTPError):
            return
        status, body = error.status_code, error.text
        if status is None or body is None:
            return
        if status >= 400 and "Developer Over Qps" in body:
            # Map the rate-limit HTTP failure onto geopy's exception type.
            raise GeocoderQuotaExceeded("Developer Over Qps") from error
| {
"repo_name": "geopy/geopy",
"path": "geopy/geocoders/tomtom.py",
"copies": "1",
"size": "8205",
"license": "mit",
"hash": -8172562757850457000,
"line_mean": 35.1453744493,
"line_max": 102,
"alpha_frac": 0.596221816,
"autogenerated": false,
"ratio": 4.309348739495798,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5405570555495798,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib.parse import quote, urlencode
from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder
from geopy.location import Location
from geopy.point import Point
from geopy.util import logger
__all__ = ("MapBox", )
class MapBox(Geocoder):
    """Geocoder using the Mapbox API.

    Documentation at:
        https://www.mapbox.com/api-documentation/
    """

    api_path = '/geocoding/v5/mapbox.places/%(query)s.json/'

    def __init__(
            self,
            api_key,
            *,
            scheme=None,
            timeout=DEFAULT_SENTINEL,
            proxies=DEFAULT_SENTINEL,
            user_agent=None,
            ssl_context=DEFAULT_SENTINEL,
            adapter_factory=None,
            domain='api.mapbox.com'
    ):
        """
        :param str api_key: Access token required by Mapbox for geocoding
            requests. Tokens are managed through Mapbox's account page
            (https://www.mapbox.com/account/access-tokens).

        :param str scheme:
            See :attr:`geopy.geocoders.options.default_scheme`.

        :param int timeout:
            See :attr:`geopy.geocoders.options.default_timeout`.

        :param dict proxies:
            See :attr:`geopy.geocoders.options.default_proxies`.

        :param str user_agent:
            See :attr:`geopy.geocoders.options.default_user_agent`.

        :type ssl_context: :class:`ssl.SSLContext`
        :param ssl_context:
            See :attr:`geopy.geocoders.options.default_ssl_context`.

        :param callable adapter_factory:
            See :attr:`geopy.geocoders.options.default_adapter_factory`.

            .. versionadded:: 2.0

        :param str domain: Base API domain for Mapbox.
        """
        super().__init__(
            scheme=scheme,
            timeout=timeout,
            proxies=proxies,
            user_agent=user_agent,
            ssl_context=ssl_context,
            adapter_factory=adapter_factory,
        )
        self.api_key = api_key
        self.domain = domain.strip('/')
        self.api = "%s://%s%s" % (self.scheme, self.domain, self.api_path)

    def _parse_json(self, json, exactly_one=True):
        """Extract Location object(s) from a geocoding response body."""
        features = json['features']
        if features == []:
            return None

        def feature_to_location(feature):
            place = feature['place_name']
            lon = feature['geometry']['coordinates'][0]
            lat = feature['geometry']['coordinates'][1]
            return Location(place, (lat, lon), feature)

        if exactly_one:
            return feature_to_location(features[0])
        return [feature_to_location(feature) for feature in features]

    def geocode(
            self,
            query,
            *,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL,
            proximity=None,
            country=None,
            bbox=None
    ):
        """
        Return a location point by address.

        :param str query: The address or query you wish to geocode.

        :param bool exactly_one: Return a single result rather than a list
            of results.

        :param int timeout: Per-call override (in seconds) for the timeout
            configured at geocoder initialization; raises
            :class:`geopy.exc.GeocoderTimedOut` on expiry.

        :param proximity: A coordinate used to bias results towards a
            location.
        :type proximity: :class:`geopy.point.Point`, list or tuple of ``(latitude,
            longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.

        :param country: Country filter as ISO 3166-1 alpha-2 code(s)
            (e.g. ``FR``); may be a list of strings.
        :type country: str or list

        :param bbox: Bounding box of the viewport within which to bias
            geocode results more prominently.
            Example: ``[Point(22, 180), Point(-22, -180)]``.
        :type bbox: list or tuple of 2 items of :class:`geopy.point.Point` or
            ``(latitude, longitude)`` or ``"%(latitude)s, %(longitude)s"``.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        params = {'access_token': self.api_key}
        if bbox:
            params['bbox'] = self._format_bounding_box(
                bbox, "%(lon1)s,%(lat1)s,%(lon2)s,%(lat2)s")
        countries = country or []
        if isinstance(countries, str):
            countries = [countries]
        if countries:
            params['country'] = ",".join(countries)
        if proximity:
            center = Point(proximity)
            # Mapbox expects "longitude,latitude" ordering.
            params['proximity'] = "%s,%s" % (center.longitude, center.latitude)
        path = self.api % {'query': quote(query.encode('utf-8'))}
        url = "?".join((path, urlencode(params)))
        logger.debug("%s.geocode: %s", self.__class__.__name__, url)
        parse = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, parse, timeout=timeout)

    def reverse(
            self,
            query,
            *,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL
    ):
        """
        Return an address by location point.

        :param query: The coordinates for which you wish to obtain the
            closest human-readable addresses.
        :type query: :class:`geopy.point.Point`, list or tuple of ``(latitude,
            longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.

        :param bool exactly_one: Return a single result rather than a list
            of results.

        :param int timeout: Per-call override (in seconds) for the timeout
            configured at geocoder initialization; raises
            :class:`geopy.exc.GeocoderTimedOut` on expiry.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        params = {'access_token': self.api_key}
        point = self._coerce_point_to_string(query, "%(lon)s,%(lat)s")
        path = self.api % {'query': quote(point.encode('utf-8'))}
        url = "?".join((path, urlencode(params)))
        logger.debug("%s.reverse: %s", self.__class__.__name__, url)
        parse = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, parse, timeout=timeout)
| {
"repo_name": "geopy/geopy",
"path": "geopy/geocoders/mapbox.py",
"copies": "1",
"size": "6835",
"license": "mit",
"hash": 4587883792547882000,
"line_mean": 34.5989583333,
"line_max": 82,
"alpha_frac": 0.573518654,
"autogenerated": false,
"ratio": 4.117469879518072,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00025565223858396545,
"num_lines": 192
} |
from functools import partial
from urllib.parse import urlencode
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.test import TestCase
from django.http import HttpResponse
from generic_admin import views
from tests.models import MockEntityWithRelations, RelatedEntity, AnotherRelatedEntity
def related_entity_name_strategy(**kwargs):
    """Echo the keyword arguments back as a plain dict (test strategy stub)."""
    return dict(kwargs)
class GenericTableEditor:
    # Declarative configuration shared by the table-editor test doubles.
    # ``field_controller`` below consumes these attributes at class-creation
    # time, so their values must stay in sync with what the mixin expects.

    # Model under edit.
    model = MockEntityWithRelations
    # Relations exposed by the editor alongside the model's own fields.
    relation_field_names = ['related_entity', 'another_related_entity']
    # Fields of ``model`` hidden from the editor.
    excluded_model_fields = ['id', 'is_active']
    # Per-relation blacklist: fields of a related model to hide.
    excluded_related_model_fields = {
        'another_related_entity': ['is_active'],
    }
    # Per-relation whitelist: only these related-model fields are exposed.
    included_related_model_fields = {
        'related_entity': ['name'],
    }
    field_controller = views.TableEditorFieldsControlMixin(
        model,
        relation_field_names=relation_field_names,
        excluded_model_fields=excluded_model_fields,
        included_related_model_fields=included_related_model_fields,
        excluded_related_model_fields=excluded_related_model_fields,
    )
class TableEditorAPI(GenericTableEditor, views.TableEditorAPI):
    # Maps related-model name -> field name -> update strategy callable;
    # consulted when a PUT request touches a related entity's field.
    pattern_to_update_related_model = {
        'related_entity': {
            'name': related_entity_name_strategy
        }
    }
class TestTableEditorFieldsControl(TestCase):
    """Unit tests for TableEditorFieldsControlMixin field introspection."""

    @classmethod
    def setUpClass(cls):
        super(TestTableEditorFieldsControl, cls).setUpClass()
        # Mirror the configuration of the API test double for convenience.
        cls.model = TableEditorAPI.model
        cls.field_controller = TableEditorAPI.field_controller
        cls.relation_field_names = TableEditorAPI.relation_field_names
        cls.excluded_model_fields = TableEditorAPI.excluded_model_fields
        cls.included_related_model_fields = TableEditorAPI.included_related_model_fields
        cls.excluded_related_model_fields = TableEditorAPI.excluded_related_model_fields

    def test_get_all_field(self):
        field_count = len(list(self.field_controller._get_all_fields(self.model)))
        self.assertEqual(field_count, 5)

    def test_get_field(self):
        """Get only one field from model."""
        field_name = self.field_controller._get_field(self.model, 'name')
        # assertIsInstance reports the offending type on failure, unlike
        # assertTrue(isinstance(...)).
        self.assertIsInstance(field_name, models.Field)
        self.assertEqual(field_name.model, self.model)

    def test_get_not_excluded_field(self):
        fields = list(self.field_controller._get_not_excluded_fields(
            self.model, self.excluded_model_fields))
        self.assertEqual(len(fields), 3)
        self.assertTrue(all(
            field.name not in self.excluded_model_fields
            for field in fields
        ))

    def test_get_related_model_fields(self):
        related_entity, another_related_entity = list(
            self.field_controller.get_related_model_fields()
        )
        self.assertEqual(len(related_entity), 2)
        self.assertEqual(len(list(related_entity[1])), 1)
        self.assertEqual(len(another_related_entity), 2)
        self.assertEqual(len(list(another_related_entity[1])), 3)
        self.assertEqual(related_entity[0].name, self.relation_field_names[0])
        self.assertEqual(another_related_entity[0].name, self.relation_field_names[1])
        self.assertTrue(all(
            field.name not in self.excluded_related_model_fields
            for field in related_entity[1]
        ))
        self.assertTrue(all(
            field.name not in self.included_related_model_fields
            for field in another_related_entity[1]
        ))

    def test_get_model_fields(self):
        fields = list(self.field_controller.get_model_fields())
        self.assertEqual(len(fields), 3)
        self.assertTrue(all(
            field.name not in self.excluded_model_fields
            for field in fields
        ))

    def test_value_to_python(self):
        get_val = partial(self.field_controller.value_to_python, self.model, 'is_active')
        first_falsy = get_val('0')
        second_falsy = get_val('False')
        first_truthy = get_val('1')
        second_truthy = get_val('True')
        # Bug fix: the old assertions passed False/True as the *msg*
        # argument of assertIs, so the converted boolean values were never
        # actually checked.  Assert each result explicitly instead.
        self.assertIs(first_falsy, False)
        self.assertIs(second_falsy, False)
        self.assertIs(first_truthy, True)
        self.assertIs(second_truthy, True)
class TestTableEditorApi(TestCase):
    """End-to-end tests for the table-editor HTTP API (GET/PUT/DELETE)."""

    @classmethod
    def setUpClass(cls):
        super(TestTableEditorApi, cls).setUpClass()
        cls.username = 'admin'
        cls.email = 'admin@admin.com'
        cls.password = 'asdfjkl'
        cls.entities_count = 10
        # Build a deterministic fixture: every second entity is active,
        # each with its own pair of related rows.
        for i in range(cls.entities_count):
            name = 'test entity #{}'.format(i)
            is_active = bool(i % 2)
            MockEntityWithRelations.objects.create(
                name=name,
                is_active=is_active,
                related_entity=RelatedEntity.objects.create(
                    name='related {}'.format(name),
                    is_active=is_active
                ),
                another_related_entity=AnotherRelatedEntity.objects.create(
                    name='another related {}'.format(name),
                    is_active=is_active,
                )
            )
        cls.urlconf_name = 'test_table_editor_api'

    @classmethod
    def tearDownClass(cls):
        super(TestTableEditorApi, cls).tearDownClass()
        MockEntityWithRelations.objects.all().delete()
        RelatedEntity.objects.all().delete()
        AnotherRelatedEntity.objects.all().delete()

    def setUp(self):
        # The API requires an authenticated superuser.
        self.user = User.objects.create_superuser(self.username, self.email, self.password)
        self.client.login(username=self.username, password=self.password)

    def tearDown(self):
        self.user.delete()

    def test_get(self):
        response = self.client.get(reverse(self.urlconf_name))
        self.assertEqual(self.entities_count, len(response.json()))
        # assertIsInstance gives a clearer failure message than
        # assertTrue(isinstance(...)).
        self.assertIsInstance(response, HttpResponse)

    def test_put(self):
        entity = MockEntityWithRelations.objects.all().order_by('id').first()
        new_name = 'Newly come up name'
        new_is_active = not entity.another_related_entity.is_active
        # Django's test client does not form-encode PUT bodies itself.
        response = self.client.put(
            reverse(self.urlconf_name),
            data=urlencode({
                'id': entity.id,
                'name': new_name,
                'another_related_entity_is_active': new_is_active
            })
        )
        self.assertEqual(response.status_code, 200)
        updated_entity = MockEntityWithRelations.objects.get(id=entity.id)
        self.assertNotEqual(entity.name, updated_entity.name)
        self.assertNotEqual(
            entity.another_related_entity.is_active,
            updated_entity.another_related_entity.is_active
        )
        self.assertEqual(new_name, updated_entity.name)
        self.assertEqual(
            new_is_active,
            updated_entity.another_related_entity.is_active
        )

    def test_delete(self):
        entity_id = MockEntityWithRelations.objects.all().order_by('id').first().id
        response = self.client.delete(
            reverse(self.urlconf_name),
            data=urlencode({
                'id': entity_id,
            })
        )
        self.assertEqual(response.status_code, 200)
        # .exists() avoids fetching rows just to check emptiness.
        self.assertFalse(
            MockEntityWithRelations.objects.filter(id=entity_id).exists())
| {
"repo_name": "fidals/refarm-site",
"path": "tests/generic_admin/test_views.py",
"copies": "1",
"size": "7268",
"license": "mit",
"hash": 8569865243389379000,
"line_mean": 33.1220657277,
"line_max": 91,
"alpha_frac": 0.6382773803,
"autogenerated": false,
"ratio": 4.011037527593819,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5149314907893819,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib.parse import urlencode
from geopy.adapters import AdapterHTTPError
from geopy.exc import GeocoderQuotaExceeded
from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder
from geopy.location import Location
from geopy.util import logger
__all__ = ("LiveAddress", )
class LiveAddress(Geocoder):
    """Geocoder using the LiveAddress API provided by SmartyStreets.

    Documentation at:
        https://smartystreets.com/docs/cloud/us-street-api
    """

    geocode_path = '/street-address'

    def __init__(
            self,
            auth_id,
            auth_token,
            *,
            timeout=DEFAULT_SENTINEL,
            proxies=DEFAULT_SENTINEL,
            user_agent=None,
            ssl_context=DEFAULT_SENTINEL,
            adapter_factory=None
    ):
        """
        :param str auth_id: Valid `Auth ID` from SmartyStreets.

        :param str auth_token: Valid `Auth Token` from SmartyStreets.

        :param int timeout:
            See :attr:`geopy.geocoders.options.default_timeout`.

        :param dict proxies:
            See :attr:`geopy.geocoders.options.default_proxies`.

        :param str user_agent:
            See :attr:`geopy.geocoders.options.default_user_agent`.

        :type ssl_context: :class:`ssl.SSLContext`
        :param ssl_context:
            See :attr:`geopy.geocoders.options.default_ssl_context`.

        :param callable adapter_factory:
            See :attr:`geopy.geocoders.options.default_adapter_factory`.

            .. versionadded:: 2.0
        """
        # The LiveAddress service is HTTPS-only; scheme is not configurable.
        super().__init__(
            scheme='https',
            timeout=timeout,
            proxies=proxies,
            user_agent=user_agent,
            ssl_context=ssl_context,
            adapter_factory=adapter_factory,
        )
        self.auth_id = auth_id
        self.auth_token = auth_token
        domain = 'api.smartystreets.com'
        self.api = '%s://%s%s' % (self.scheme, domain, self.geocode_path)

    def geocode(
            self,
            query,
            *,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL,
            candidates=1
    ):
        """
        Return a location point by address.

        :param str query: The address or query you wish to geocode.

        :param bool exactly_one: Return a single result rather than a list
            of results.

        :param int timeout: Per-call override (in seconds) for the timeout
            configured at geocoder initialization; raises
            :class:`geopy.exc.GeocoderTimedOut` on expiry.

        :param int candidates: An integer between 1 and 10 giving the
            maximum number of candidate addresses to return for a valid
            address.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        if candidates < 1 or candidates > 10:
            raise ValueError('candidates must be between 1 and 10')
        payload = {
            'auth-id': self.auth_id,
            'auth-token': self.auth_token,
            'street': query,
            'candidates': candidates,
        }
        url = self.api + '?' + urlencode(payload)
        logger.debug("%s.geocode: %s", self.__class__.__name__, url)
        parse = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, parse, timeout=timeout)

    def _geocoder_exception_handler(self, error):
        marker = "no active subscriptions found"
        if not isinstance(error, AdapterHTTPError):
            return
        if marker in str(error).lower():
            raise GeocoderQuotaExceeded(str(error)) from error
        if marker in (error.text or "").lower():
            raise GeocoderQuotaExceeded(error.text) from error

    def _parse_json(self, response, exactly_one=True):
        """
        Parse responses as JSON objects.
        """
        if not len(response):
            return None
        if exactly_one:
            return self._format_structured_address(response[0])
        return [self._format_structured_address(entry) for entry in response]

    def _format_structured_address(self, address):
        """
        Pretty-print address and return lat, lon tuple.
        """
        metadata = address['metadata']
        latitude = metadata.get('latitude')
        longitude = metadata.get('longitude')
        point = (latitude, longitude) if latitude and longitude else None
        label = ", ".join((address['delivery_line_1'], address['last_line']))
        return Location(label, point, address)
| {
"repo_name": "geopy/geopy",
"path": "geopy/geocoders/smartystreets.py",
"copies": "1",
"size": "4739",
"license": "mit",
"hash": -674331825970376300,
"line_mean": 31.9097222222,
"line_max": 80,
"alpha_frac": 0.5889428149,
"autogenerated": false,
"ratio": 4.223707664884135,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5312650479784136,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib.parse import urlencode
from geopy.exc import GeocoderParseError, GeocoderServiceError
from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder
from geopy.location import Location
from geopy.util import logger
__all__ = ("Yandex", )
class Yandex(Geocoder):
"""Yandex geocoder.
Documentation at:
https://tech.yandex.com/maps/doc/geocoder/desc/concepts/input_params-docpage/
"""
api_path = '/1.x/'
def __init__(
self,
api_key,
*,
timeout=DEFAULT_SENTINEL,
proxies=DEFAULT_SENTINEL,
user_agent=None,
scheme=None,
ssl_context=DEFAULT_SENTINEL,
adapter_factory=None
):
"""
:param str api_key: Yandex API key, mandatory.
The key can be created at https://developer.tech.yandex.ru/
:param int timeout:
See :attr:`geopy.geocoders.options.default_timeout`.
:param dict proxies:
See :attr:`geopy.geocoders.options.default_proxies`.
:param str user_agent:
See :attr:`geopy.geocoders.options.default_user_agent`.
:param str scheme:
See :attr:`geopy.geocoders.options.default_scheme`.
:type ssl_context: :class:`ssl.SSLContext`
:param ssl_context:
See :attr:`geopy.geocoders.options.default_ssl_context`.
:param callable adapter_factory:
See :attr:`geopy.geocoders.options.default_adapter_factory`.
.. versionadded:: 2.0
"""
super().__init__(
scheme=scheme,
timeout=timeout,
proxies=proxies,
user_agent=user_agent,
ssl_context=ssl_context,
adapter_factory=adapter_factory,
)
self.api_key = api_key
domain = 'geocode-maps.yandex.ru'
self.api = '%s://%s%s' % (self.scheme, domain, self.api_path)
def geocode(
self,
query,
*,
exactly_one=True,
timeout=DEFAULT_SENTINEL,
lang=None
):
"""
Return a location point by address.
:param str query: The address or query you wish to geocode.
:param bool exactly_one: Return one result or a list of results, if
available.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:param str lang: Language of the response and regional settings
of the map. List of supported values:
- ``tr_TR`` -- Turkish (only for maps of Turkey);
- ``en_RU`` -- response in English, Russian map features;
- ``en_US`` -- response in English, American map features;
- ``ru_RU`` -- Russian (default);
- ``uk_UA`` -- Ukrainian;
- ``be_BY`` -- Belarusian.
:rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
"""
params = {
'geocode': query,
'format': 'json'
}
params['apikey'] = self.api_key
if lang:
params['lang'] = lang
if exactly_one:
params['results'] = 1
url = "?".join((self.api, urlencode(params)))
logger.debug("%s.geocode: %s", self.__class__.__name__, url)
callback = partial(self._parse_json, exactly_one=exactly_one)
return self._call_geocoder(url, callback, timeout=timeout)
def reverse(
self,
query,
*,
exactly_one=True,
timeout=DEFAULT_SENTINEL,
kind=None,
lang=None
):
"""
Return an address by location point.
:param query: The coordinates for which you wish to obtain the
closest human-readable addresses.
:type query: :class:`geopy.point.Point`, list or tuple of ``(latitude,
longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.
:param bool exactly_one: Return one result or a list of results, if
available.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:param str kind: Type of toponym. Allowed values: `house`, `street`, `metro`,
`district`, `locality`.
:param str lang: Language of the response and regional settings
of the map. List of supported values:
- ``tr_TR`` -- Turkish (only for maps of Turkey);
- ``en_RU`` -- response in English, Russian map features;
- ``en_US`` -- response in English, American map features;
- ``ru_RU`` -- Russian (default);
- ``uk_UA`` -- Ukrainian;
- ``be_BY`` -- Belarusian.
:rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
"""
try:
point = self._coerce_point_to_string(query, "%(lon)s,%(lat)s")
except ValueError:
raise ValueError("Must be a coordinate pair or Point")
params = {
'geocode': point,
'format': 'json'
}
params['apikey'] = self.api_key
if lang:
params['lang'] = lang
if kind:
params['kind'] = kind
url = "?".join((self.api, urlencode(params)))
logger.debug("%s.reverse: %s", self.__class__.__name__, url)
callback = partial(self._parse_json, exactly_one=exactly_one)
return self._call_geocoder(url, callback, timeout=timeout)
def _parse_json(self, doc, exactly_one):
"""
Parse JSON response body.
"""
if doc.get('error'):
raise GeocoderServiceError(doc['error']['message'])
try:
places = doc['response']['GeoObjectCollection']['featureMember']
except KeyError:
raise GeocoderParseError('Failed to parse server response')
def parse_code(place):
"""
Parse each record.
"""
try:
place = place['GeoObject']
except KeyError:
raise GeocoderParseError('Failed to parse server response')
longitude, latitude = [
float(_) for _ in place['Point']['pos'].split(' ')
]
name_elements = ['name', 'description']
location = ', '.join([place[k] for k in name_elements if place.get(k)])
return Location(location, (latitude, longitude), place)
if exactly_one:
try:
return parse_code(places[0])
except IndexError:
return None
else:
return [parse_code(place) for place in places]
| {
"repo_name": "geopy/geopy",
"path": "geopy/geocoders/yandex.py",
"copies": "1",
"size": "7221",
"license": "mit",
"hash": -6238954652781168000,
"line_mean": 32.7429906542,
"line_max": 85,
"alpha_frac": 0.551170198,
"autogenerated": false,
"ratio": 4.15,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000225345920122418,
"num_lines": 214
} |
from functools import partial
from urllib.parse import urlencode
from geopy.exc import GeocoderQueryError
from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder
from geopy.location import Location
from geopy.util import logger
__all__ = ("DataBC", )
class DataBC(Geocoder):
    """Geocoder using the Physical Address Geocoder from DataBC.

    Documentation at:
        http://www.data.gov.bc.ca/dbc/geographic/locate/geocoding.page
    """

    geocode_path = '/pub/geocoder/addresses.geojson'

    def __init__(
            self,
            *,
            scheme=None,
            timeout=DEFAULT_SENTINEL,
            proxies=DEFAULT_SENTINEL,
            user_agent=None,
            ssl_context=DEFAULT_SENTINEL,
            adapter_factory=None
    ):
        """
        :param str scheme:
            See :attr:`geopy.geocoders.options.default_scheme`.

        :param int timeout:
            See :attr:`geopy.geocoders.options.default_timeout`.

        :param dict proxies:
            See :attr:`geopy.geocoders.options.default_proxies`.

        :param str user_agent:
            See :attr:`geopy.geocoders.options.default_user_agent`.

        :type ssl_context: :class:`ssl.SSLContext`
        :param ssl_context:
            See :attr:`geopy.geocoders.options.default_ssl_context`.

        :param callable adapter_factory:
            See :attr:`geopy.geocoders.options.default_adapter_factory`.

            .. versionadded:: 2.0
        """
        super().__init__(
            scheme=scheme,
            timeout=timeout,
            proxies=proxies,
            user_agent=user_agent,
            ssl_context=ssl_context,
            adapter_factory=adapter_factory,
        )
        domain = 'apps.gov.bc.ca'
        self.api = '%s://%s%s' % (self.scheme, domain, self.geocode_path)

    def geocode(
            self,
            query,
            *,
            max_results=25,
            set_back=0,
            location_descriptor='any',
            exactly_one=True,
            timeout=DEFAULT_SENTINEL
    ):
        """
        Return a location point by address.

        :param str query: The address or query you wish to geocode.

        :param int max_results: The maximum number of results to request.

        :param float set_back: The distance to move the accessPoint away
            from the curb (in meters) and towards the interior of the parcel.
            location_descriptor must be set to accessPoint for set_back to
            take effect.

        :param str location_descriptor: The type of point requested. It
            can be any, accessPoint, frontDoorPoint, parcelPoint,
            rooftopPoint and routingPoint.

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

        :raises geopy.exc.GeocoderQueryError: If ``location_descriptor`` is
            not one of the values the web service accepts.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        params = {'addressString': query}
        if set_back != 0:
            params['setBack'] = set_back
        # Validate the descriptor against the values accepted by the service.
        if location_descriptor not in ('any',
                                       'accessPoint',
                                       'frontDoorPoint',
                                       'parcelPoint',
                                       'rooftopPoint',
                                       'routingPoint'):
            # Fixed grammar: was "You did not provided ...".
            raise GeocoderQueryError(
                "You did not provide a location_descriptor "
                "the webservice can consume. It should be any, accessPoint, "
                "frontDoorPoint, parcelPoint, rooftopPoint or routingPoint."
            )
        params['locationDescriptor'] = location_descriptor
        if exactly_one:
            # A single result is requested: cap the result count server-side.
            max_results = 1
        params['maxResults'] = max_results
        url = "?".join((self.api, urlencode(params)))
        logger.debug("%s.geocode: %s", self.__class__.__name__, url)
        callback = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, callback, timeout=timeout)

    def _parse_json(self, response, exactly_one):
        # Success; convert from GeoJSON. An empty feature list means no match.
        if not len(response['features']):
            return None
        geocoded = [
            self._parse_feature(feature) for feature in response['features']
        ]
        if exactly_one:
            return geocoded[0]
        return geocoded

    def _parse_feature(self, feature):
        # Build a Location from a GeoJSON feature; the service returns
        # coordinates as (lon, lat), swapped here to geopy's (lat, lon).
        properties = feature['properties']
        coordinates = feature['geometry']['coordinates']
        return Location(
            properties['fullAddress'], (coordinates[1], coordinates[0]),
            properties
        )
| {
"repo_name": "geopy/geopy",
"path": "geopy/geocoders/databc.py",
"copies": "1",
"size": "4965",
"license": "mit",
"hash": 1615163369946739500,
"line_mean": 33.7202797203,
"line_max": 80,
"alpha_frac": 0.5724068479,
"autogenerated": false,
"ratio": 4.370598591549296,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5443005439449295,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib.parse import urlencode
from geopy.exc import GeocoderServiceError
from geopy.geocoders.base import DEFAULT_SENTINEL, ERROR_CODE_MAP, Geocoder
from geopy.location import Location
from geopy.util import logger
__all__ = ("OpenCage", )
class OpenCage(Geocoder):
    """Geocoder using the OpenCageData API.

    Documentation at:
        https://opencagedata.com/api

    .. versionchanged:: 2.2
        Improved error handling by using the default errors map
        (e.g. to raise :class:`.exc.GeocoderQuotaExceeded` instead of
        :class:`.exc.GeocoderQueryError` for HTTP 402 error)
    """

    api_path = '/geocode/v1/json'

    def __init__(
            self,
            api_key,
            *,
            domain='api.opencagedata.com',
            scheme=None,
            timeout=DEFAULT_SENTINEL,
            proxies=DEFAULT_SENTINEL,
            user_agent=None,
            ssl_context=DEFAULT_SENTINEL,
            adapter_factory=None
    ):
        """
        :param str api_key: The API key required by OpenCageData
            to perform geocoding requests. You can get your key here:
            https://opencagedata.com/

        :param str domain: Currently it is ``'api.opencagedata.com'``, can
            be changed for testing purposes.

        :param str scheme:
            See :attr:`geopy.geocoders.options.default_scheme`.

        :param int timeout:
            See :attr:`geopy.geocoders.options.default_timeout`.

        :param dict proxies:
            See :attr:`geopy.geocoders.options.default_proxies`.

        :param str user_agent:
            See :attr:`geopy.geocoders.options.default_user_agent`.

        :type ssl_context: :class:`ssl.SSLContext`
        :param ssl_context:
            See :attr:`geopy.geocoders.options.default_ssl_context`.

        :param callable adapter_factory:
            See :attr:`geopy.geocoders.options.default_adapter_factory`.

            .. versionadded:: 2.0
        """
        super().__init__(
            scheme=scheme,
            timeout=timeout,
            proxies=proxies,
            user_agent=user_agent,
            ssl_context=ssl_context,
            adapter_factory=adapter_factory,
        )
        self.api_key = api_key
        # Normalize away slashes so path concatenation stays stable.
        self.domain = domain.strip('/')
        self.api = '%s://%s%s' % (self.scheme, self.domain, self.api_path)

    def geocode(
            self,
            query,
            *,
            bounds=None,
            country=None,
            language=None,
            annotations=True,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL
    ):
        """
        Return a location point by address.

        :param str query: The address or query you wish to geocode.

        :type bounds: list or tuple of 2 items of :class:`geopy.point.Point` or
            ``(latitude, longitude)`` or ``"%(latitude)s, %(longitude)s"``.
        :param bounds: A hint to the geocoder for the region the query
            resides in: two corner points of a bounding box, e.g.
            ``[Point(22, 180), Point(-22, -180)]``. Results are biased
            towards, but not restricted to, this region.

        :param country: Restricts results to the specified country or
            countries, as ISO 3166-1 Alpha 2 codes (e.g. ``fr``). Might be
            a Python list of strings.
        :type country: str or list

        :param str language: an IETF format language code (such as `es`
            for Spanish or pt-BR for Brazilian Portuguese); if omitted,
            the remote service assumes `en` (English).

        :param bool annotations: Enable
            `annotations <https://opencagedata.com/api#annotations>`_
            data, which can be accessed via :attr:`.Location.raw`.
            Set to False if you don't need it to gain a little performance
            win.

            .. versionadded:: 2.2

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        params = {
            'key': self.api_key,
            'q': query,
        }
        if not annotations:
            params['no_annotations'] = 1
        if bounds:
            params['bounds'] = self._format_bounding_box(
                bounds, "%(lon1)s,%(lat1)s,%(lon2)s,%(lat2)s")
        if language:
            params['language'] = language
        # Accept a single country code or a sequence of codes.
        countries = [country] if isinstance(country, str) else (country or [])
        if countries:
            params['countrycode'] = ",".join(countries)
        url = "?".join((self.api, urlencode(params)))
        logger.debug("%s.geocode: %s", self.__class__.__name__, url)
        return self._call_geocoder(
            url,
            partial(self._parse_json, exactly_one=exactly_one),
            timeout=timeout,
        )

    def reverse(
            self,
            query,
            *,
            language=None,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL
    ):
        """
        Return an address by location point.

        :param query: The coordinates for which you wish to obtain the
            closest human-readable addresses.
        :type query: :class:`geopy.point.Point`, list or tuple of ``(latitude,
            longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.

        :param str language: The language in which to return results.

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        params = {
            'key': self.api_key,
            'q': self._coerce_point_to_string(query),
        }
        if language:
            params['language'] = language
        url = "?".join((self.api, urlencode(params)))
        logger.debug("%s.reverse: %s", self.__class__.__name__, url)
        return self._call_geocoder(
            url,
            partial(self._parse_json, exactly_one=exactly_one),
            timeout=timeout,
        )

    def _parse_json(self, page, exactly_one=True):
        """Extract Location object(s) from a decoded JSON response."""
        results = page.get('results', [])
        if not results:
            # No hits: surface an API error if one was reported.
            self._check_status(page.get('status'))
            return None

        def as_location(result):
            geometry = result['geometry']
            return Location(
                result.get('formatted'),
                (geometry['lat'], geometry['lng']),
                result,
            )

        if exactly_one:
            return as_location(results[0])
        return [as_location(result) for result in results]

    def _check_status(self, status):
        """Raise the mapped geopy exception for a non-200 API status."""
        if status['code'] == 200:
            return
        # https://opencagedata.com/api#codes
        exc_cls = ERROR_CODE_MAP.get(status['code'], GeocoderServiceError)
        raise exc_cls(status['message'])
| {
"repo_name": "geopy/geopy",
"path": "geopy/geocoders/opencage.py",
"copies": "1",
"size": "8081",
"license": "mit",
"hash": 6150901608332537000,
"line_mean": 33.8318965517,
"line_max": 80,
"alpha_frac": 0.5776512808,
"autogenerated": false,
"ratio": 4.2220480668756535,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5299699347675653,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib.parse import urlencode
from geopy.exc import (
GeocoderAuthenticationFailure,
GeocoderInsufficientPrivileges,
GeocoderQueryError,
GeocoderQuotaExceeded,
GeocoderServiceError,
)
from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder
from geopy.location import Location
from geopy.timezone import (
ensure_pytz_is_installed,
from_fixed_gmt_offset,
from_timezone_name,
)
from geopy.util import logger
__all__ = ("GeoNames", )
class GeoNames(Geocoder):
    """GeoNames geocoder.

    Documentation at:
        http://www.geonames.org/export/geonames-search.html

    Reverse geocoding documentation at:
        http://www.geonames.org/export/web-services.html#findNearbyPlaceName
    """

    # Endpoint paths for each GeoNames web service used by this class.
    geocode_path = '/searchJSON'
    reverse_path = '/findNearbyPlaceNameJSON'
    reverse_nearby_path = '/findNearbyJSON'
    timezone_path = '/timezoneJSON'

    def __init__(
            self,
            username,
            *,
            timeout=DEFAULT_SENTINEL,
            proxies=DEFAULT_SENTINEL,
            user_agent=None,
            ssl_context=DEFAULT_SENTINEL,
            adapter_factory=None,
            scheme='http'
    ):
        """
        :param str username: GeoNames username, required. Sign up here:
            http://www.geonames.org/login

        :param int timeout:
            See :attr:`geopy.geocoders.options.default_timeout`.

        :param dict proxies:
            See :attr:`geopy.geocoders.options.default_proxies`.

        :param str user_agent:
            See :attr:`geopy.geocoders.options.default_user_agent`.

        :type ssl_context: :class:`ssl.SSLContext`
        :param ssl_context:
            See :attr:`geopy.geocoders.options.default_ssl_context`.

        :param callable adapter_factory:
            See :attr:`geopy.geocoders.options.default_adapter_factory`.

            .. versionadded:: 2.0

        :param str scheme:
            See :attr:`geopy.geocoders.options.default_scheme`. Note that
            at the time of writing GeoNames doesn't support `https`, so
            the default scheme is `http`. The value of
            :attr:`geopy.geocoders.options.default_scheme` is not respected.
            This parameter is present to make it possible to switch to
            `https` once GeoNames adds support for it.
        """
        super().__init__(
            scheme=scheme,
            timeout=timeout,
            proxies=proxies,
            user_agent=user_agent,
            ssl_context=ssl_context,
            adapter_factory=adapter_factory,
        )
        self.username = username
        domain = 'api.geonames.org'
        # Pre-build one full endpoint URL per supported web service.
        self.api = (
            "%s://%s%s" % (self.scheme, domain, self.geocode_path)
        )
        self.api_reverse = (
            "%s://%s%s" % (self.scheme, domain, self.reverse_path)
        )
        self.api_reverse_nearby = (
            "%s://%s%s" % (self.scheme, domain, self.reverse_nearby_path)
        )
        self.api_timezone = (
            "%s://%s%s" % (self.scheme, domain, self.timezone_path)
        )

    def geocode(
            self,
            query,
            *,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL,
            country=None,
            country_bias=None
    ):
        """
        Return a location point by address.

        :param str query: The address or query you wish to geocode.

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

        :param country: Limit records to the specified countries.
            Two letter country code ISO-3166 (e.g. ``FR``). Might be
            a single string or a list of strings.
        :type country: str or list

        :param str country_bias: Records from the country_bias are listed first.
            Two letter country code ISO-3166.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        # A list of tuples (not a dict) because the 'country' key may repeat.
        params = [
            ('q', query),
            ('username', self.username),
        ]
        if country_bias:
            params.append(('countryBias', country_bias))
        if not country:
            country = []
        if isinstance(country, str):
            country = [country]
        for country_item in country:
            params.append(('country', country_item))
        if exactly_one:
            params.append(('maxRows', 1))
        url = "?".join((self.api, urlencode(params)))
        logger.debug("%s.geocode: %s", self.__class__.__name__, url)
        callback = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, callback, timeout=timeout)

    def reverse(
            self,
            query,
            *,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL,
            feature_code=None,
            lang=None,
            find_nearby_type='findNearbyPlaceName'
    ):
        """
        Return an address by location point.

        :param query: The coordinates for which you wish to obtain the
            closest human-readable addresses.
        :type query: :class:`geopy.point.Point`, list or tuple of ``(latitude,
            longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

        :param str feature_code: A GeoNames feature code (only supported by
            the ``findNearby`` endpoint).

        :param str lang: language of the returned ``name`` element (the pseudo
            language code 'local' will return it in local language)
            Full list of supported languages can be found here:
            https://www.geonames.org/countries/
            (only supported by the ``findNearbyPlaceName`` endpoint).

        :param str find_nearby_type: A flag to switch between different
            GeoNames API endpoints. The default value is ``findNearbyPlaceName``
            which returns the closest populated place. Another currently
            implemented option is ``findNearby`` which returns
            the closest toponym for the lat/lng query.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        try:
            lat, lng = self._coerce_point_to_string(query).split(',')
        except ValueError:
            raise ValueError("Must be a coordinate pair or Point")
        # Each endpoint accepts a disjoint set of optional params; reject
        # combinations the chosen endpoint cannot honor instead of silently
        # dropping them.
        if find_nearby_type == 'findNearbyPlaceName':  # default
            if feature_code:
                raise ValueError(
                    "find_nearby_type=findNearbyPlaceName doesn't support "
                    "the `feature_code` param"
                )
            params = self._reverse_find_nearby_place_name_params(
                lat=lat,
                lng=lng,
                lang=lang,
            )
            url = "?".join((self.api_reverse, urlencode(params)))
        elif find_nearby_type == 'findNearby':
            if lang:
                raise ValueError(
                    "find_nearby_type=findNearby doesn't support the `lang` param"
                )
            params = self._reverse_find_nearby_params(
                lat=lat,
                lng=lng,
                feature_code=feature_code,
            )
            url = "?".join((self.api_reverse_nearby, urlencode(params)))
        else:
            raise GeocoderQueryError(
                '`%s` find_nearby_type is not supported by geopy' % find_nearby_type
            )
        logger.debug("%s.reverse: %s", self.__class__.__name__, url)
        callback = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, callback, timeout=timeout)

    def _reverse_find_nearby_params(self, lat, lng, feature_code):
        # Query params for the /findNearbyJSON endpoint.
        params = {
            'lat': lat,
            'lng': lng,
            'username': self.username,
        }
        if feature_code:
            params['featureCode'] = feature_code
        return params

    def _reverse_find_nearby_place_name_params(self, lat, lng, lang):
        # Query params for the /findNearbyPlaceNameJSON endpoint.
        params = {
            'lat': lat,
            'lng': lng,
            'username': self.username,
        }
        if lang:
            params['lang'] = lang
        return params

    def reverse_timezone(self, query, *, timeout=DEFAULT_SENTINEL):
        """
        Find the timezone for a point in `query`.

        GeoNames always returns a timezone: if the point being queried
        doesn't have an assigned Olson timezone id, a ``pytz.FixedOffset``
        timezone is used to produce the :class:`geopy.timezone.Timezone`.

        :param query: The coordinates for which you want a timezone.
        :type query: :class:`geopy.point.Point`, list or tuple of (latitude,
            longitude), or string as "%(latitude)s, %(longitude)s"

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

        :rtype: :class:`geopy.timezone.Timezone`.
        """
        ensure_pytz_is_installed()
        try:
            lat, lng = self._coerce_point_to_string(query).split(',')
        except ValueError:
            raise ValueError("Must be a coordinate pair or Point")
        params = {
            "lat": lat,
            "lng": lng,
            "username": self.username,
        }
        url = "?".join((self.api_timezone, urlencode(params)))
        logger.debug("%s.reverse_timezone: %s", self.__class__.__name__, url)
        return self._call_geocoder(url, self._parse_json_timezone, timeout=timeout)

    def _raise_for_error(self, body):
        # Map GeoNames error payloads to geopy exceptions; no-op when the
        # response carries no 'status' key.
        err = body.get('status')
        if err:
            code = err['value']
            message = err['message']
            # http://www.geonames.org/export/webservice-exception.html
            if message.startswith("user account not enabled to use"):
                raise GeocoderInsufficientPrivileges(message)
            if code == 10:
                # Code 10: authorization exception.
                raise GeocoderAuthenticationFailure(message)
            if code in (18, 19, 20):
                # Codes 18-20: daily/hourly/weekly credit limits exceeded.
                raise GeocoderQuotaExceeded(message)
            raise GeocoderServiceError(message)

    def _parse_json_timezone(self, response):
        # Convert a /timezoneJSON payload into a geopy Timezone.
        self._raise_for_error(response)
        timezone_id = response.get("timezoneId")
        if timezone_id is None:
            # Sometimes (e.g. for Antarctica) GeoNames doesn't return
            # a `timezoneId` value, but it returns GMT offsets.
            # Apparently GeoNames always returns these offsets -- for
            # every single point on the globe.
            raw_offset = response["rawOffset"]
            return from_fixed_gmt_offset(raw_offset, raw=response)
        else:
            return from_timezone_name(timezone_id, raw=response)

    def _parse_json(self, doc, exactly_one):
        """
        Parse JSON response body.
        """
        places = doc.get('geonames', [])
        self._raise_for_error(doc)
        if not len(places):
            return None

        def parse_code(place):
            """
            Parse each record.
            """
            latitude = place.get('lat', None)
            longitude = place.get('lng', None)
            if latitude and longitude:
                latitude = float(latitude)
                longitude = float(longitude)
            else:
                # A record without coordinates is unusable.
                return None
            placename = place.get('name')
            state = place.get('adminName1', None)
            country = place.get('countryName', None)
            # Join only the components that are actually present.
            location = ', '.join(
                [x for x in [placename, state, country] if x]
            )
            return Location(location, (latitude, longitude), place)

        if exactly_one:
            return parse_code(places[0])
        else:
            return [parse_code(place) for place in places]
| {
"repo_name": "geopy/geopy",
"path": "geopy/geocoders/geonames.py",
"copies": "1",
"size": "12560",
"license": "mit",
"hash": 2711181348003604500,
"line_mean": 33.9860724234,
"line_max": 84,
"alpha_frac": 0.5707802548,
"autogenerated": false,
"ratio": 4.226110363391656,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5296890618191655,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib.parse import urlencode
from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder
from geopy.location import Location
from geopy.util import logger
__all__ = ("BANFrance", )
class BANFrance(Geocoder):
    """Geocoder using the Base Adresse Nationale France API.

    Documentation at:
        https://adresse.data.gouv.fr/api
    """

    geocode_path = '/search'
    reverse_path = '/reverse'

    def __init__(
            self,
            *,
            domain='api-adresse.data.gouv.fr',
            scheme=None,
            timeout=DEFAULT_SENTINEL,
            proxies=DEFAULT_SENTINEL,
            user_agent=None,
            ssl_context=DEFAULT_SENTINEL,
            adapter_factory=None
    ):
        """
        :param str domain: Currently it is ``'api-adresse.data.gouv.fr'``, can
            be changed for testing purposes.

        :param str scheme:
            See :attr:`geopy.geocoders.options.default_scheme`.

        :param int timeout:
            See :attr:`geopy.geocoders.options.default_timeout`.

        :param dict proxies:
            See :attr:`geopy.geocoders.options.default_proxies`.

        :param str user_agent:
            See :attr:`geopy.geocoders.options.default_user_agent`.

        :type ssl_context: :class:`ssl.SSLContext`
        :param ssl_context:
            See :attr:`geopy.geocoders.options.default_ssl_context`.

        :param callable adapter_factory:
            See :attr:`geopy.geocoders.options.default_adapter_factory`.

            .. versionadded:: 2.0
        """
        super().__init__(
            scheme=scheme,
            timeout=timeout,
            proxies=proxies,
            user_agent=user_agent,
            ssl_context=ssl_context,
            adapter_factory=adapter_factory,
        )
        self.domain = domain.strip('/')
        self.geocode_api = (
            '%s://%s%s' % (self.scheme, self.domain, self.geocode_path)
        )
        self.reverse_api = (
            '%s://%s%s' % (self.scheme, self.domain, self.reverse_path)
        )

    def geocode(
            self,
            query,
            *,
            limit=None,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL
    ):
        """
        Return a location point by address.

        :param str query: The address or query you wish to geocode.

        :param int limit: Defines the maximum number of items in the
            response structure. If not provided and there are multiple
            results the BAN API will return 5 results by default.
            This will be reset to one if ``exactly_one`` is True.

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        params = {
            'q': query,
        }
        if limit is not None:
            params['limit'] = limit
        url = "?".join((self.geocode_api, urlencode(params)))
        logger.debug("%s.geocode: %s", self.__class__.__name__, url)
        callback = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, callback, timeout=timeout)

    def reverse(
            self,
            query,
            *,
            exactly_one=True,
            timeout=DEFAULT_SENTINEL
    ):
        """
        Return an address by location point.

        :param query: The coordinates for which you wish to obtain the
            closest human-readable addresses.
        :type query: :class:`geopy.point.Point`, list or tuple of ``(latitude,
            longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.

        :param bool exactly_one: Return one result or a list of results, if
            available.

        :param int timeout: Time, in seconds, to wait for the geocoding service
            to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
            exception. Set this only if you wish to override, on this call
            only, the value set during the geocoder's initialization.

        :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
            ``exactly_one=False``.
        """
        try:
            lat, lng = self._coerce_point_to_string(query).split(',')
        except ValueError:
            raise ValueError("Must be a coordinate pair or Point")
        # Bug fix: the BAN /reverse endpoint expects the longitude query
        # parameter to be named ``lon`` (it was previously sent as ``lng``,
        # which the service does not recognize).
        params = {
            'lat': lat,
            'lon': lng,
        }
        url = "?".join((self.reverse_api, urlencode(params)))
        logger.debug("%s.reverse: %s", self.__class__.__name__, url)
        callback = partial(self._parse_json, exactly_one=exactly_one)
        return self._call_geocoder(url, callback, timeout=timeout)

    def _parse_feature(self, feature):
        # Parse each resource; GeoJSON coordinates are (lon, lat) and are
        # swapped to geopy's (lat, lon) ordering.
        latitude = feature.get('geometry', {}).get('coordinates', [])[1]
        longitude = feature.get('geometry', {}).get('coordinates', [])[0]
        placename = feature.get('properties', {}).get('label')
        return Location(placename, (latitude, longitude), feature)

    def _parse_json(self, response, exactly_one):
        # Turn the GeoJSON FeatureCollection into Location object(s).
        if response is None or 'features' not in response:
            return None
        features = response['features']
        if not len(features):
            return None
        if exactly_one:
            return self._parse_feature(features[0])
        else:
            return [self._parse_feature(feature) for feature in features]
| {
"repo_name": "geopy/geopy",
"path": "geopy/geocoders/banfrance.py",
"copies": "1",
"size": "5842",
"license": "mit",
"hash": 288396662898451100,
"line_mean": 31.4555555556,
"line_max": 80,
"alpha_frac": 0.5749743239,
"autogenerated": false,
"ratio": 4.155049786628734,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5230024110528734,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urllib.parse import urljoin
def extend(*ds):
    """Merge any number of dicts into a new dict; later keys win.

    Handy for building on a dict returned by a super class in one
    expression:

    .. code-block:: python

        class Child(Parent):
            def get_json(self):
                return extend(super().get_json(), {
                    'bar': 3,
                    'quux': 4,
                })
    """
    merged = {}
    for mapping in ds:
        merged.update(mapping)
    return merged
def simple_type(type, value):
    """Return a JSON-LD node with the given ``@type`` and ``@value``."""
    node = {'@type': type}
    node['@value'] = value
    return node


# Shorthand for schema.org Distance values, e.g. Distance('200 px').
Distance = partial(simple_type, 'Distance')
def image_ld(image, thumbnail_filter="max-200x200", base_url=None):
    """Build a schema.org ImageObject JSON-LD dict for a Wagtail image.

    :param image: A Wagtail-style image (must provide ``get_rendition``,
        ``file``, ``title``, ``width`` and ``height``), or any object
        providing its own ``ld_entity()`` serialization.
    :param str thumbnail_filter: Rendition filter spec used for the
        ``thumbnail`` entry.
    :param base_url: Optional base URL joined onto the image URLs.
    """
    # Support custom image models that serialize themselves via the
    # library's ld_entity() protocol.
    # Bug fix: the original checked ``hasattr(image, 'to_json_ld')`` but
    # then called ``image.ld_entity()``, so the custom-model path always
    # raised AttributeError. Check and call now agree.
    if hasattr(image, 'ld_entity'):
        return image.ld_entity()
    thumbnail = image.get_rendition(thumbnail_filter)
    url = urljoin(base_url, image.file.url)
    return {
        '@context': 'http://schema.org',
        '@type': 'ImageObject',
        '@id': url,
        'name': image.title,
        'url': url,
        'contentUrl': url,
        'contentSize': str(image.file.size),
        'width': Distance('{} px'.format(image.width)),
        'height': Distance('{} px'.format(image.height)),
        'thumbnail': urljoin(base_url, thumbnail.url),
    }
| {
"repo_name": "takeflight/wagtail-schema.org",
"path": "wagtailschemaorg/utils.py",
"copies": "1",
"size": "1481",
"license": "bsd-2-clause",
"hash": -4097771069604968000,
"line_mean": 24.5344827586,
"line_max": 73,
"alpha_frac": 0.528021607,
"autogenerated": false,
"ratio": 3.9388297872340425,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9966851394234042,
"avg_score": 0,
"num_lines": 58
} |
from functools import partial
from urllib.parse import urljoin, urlencode
import urllib
import os
import posixpath as pp
import os.path as op
import pandas as pd
import requests
import socket
import base64
import glob
import pkg_resources
from .schemas import SCHEMAS
from .fileops import (
read_table,
read_chromsizes,
read_gapfile,
read_ucsc_mrnafile,
extract_centromeres,
)
__all__ = [
"fetch_chromsizes",
"fetch_centromeres",
"UCSCClient",
"EncodeClient",
"FDNClient",
]
def _check_connectivity(reference="http://www.google.com"):
    """Return True if *reference* is reachable over HTTP, False otherwise.

    Parameters
    ----------
    reference : str
        URL probed (with a 5 second timeout) to test internet connectivity.
    """
    # Bug fix: ``import urllib`` at module level does not load the
    # ``urllib.request`` submodule; the original only worked because another
    # dependency happened to import it as a side effect. Import it
    # explicitly here, and catch URLError from its canonical location.
    import urllib.request
    from urllib.error import URLError
    try:
        urllib.request.urlopen(reference, timeout=5)
        return True
    except URLError:
        return False
    except socket.timeout:
        return False
def fetch_chromsizes(
    db,
    provider=None,
    filter_chroms=True,
    chrom_patterns=(r"^chr[0-9]+$", r"^chr[XY]$", r"^chrM$"),
    natsort=True,
    as_bed=False,
    **kwargs,
):
    """
    Fetch chromsizes from the UCSC database or local storage.

    Parameters
    ----------
    db : str
        Assembly name (e.g. ``hg38``).
    provider : str, optional
        ``'local'`` to read packaged data; ``'ucsc'`` or ``None`` to query
        the UCSC download server.
    filter_chroms : bool, optional
        Filter for chromosome names given in ``chrom_patterns``.
    chrom_patterns : sequence, optional
        Sequence of regular expressions to capture desired sequence names.
    natsort : bool, optional
        Sort each captured group of names in natural order. Default is True.
    as_bed : bool, optional
        If True, return chromsizes as an interval dataframe (chrom, start, end).
    **kwargs :
        Passed to :func:`pandas.read_csv`

    Returns
    -------
    Series of integer bp lengths indexed by sequence name or an interval
    dataframe.
    """
    if provider == "local":
        fpath = f"data/{db}.chrom.sizes"
        # Only data files bundled with the package are consulted.
        if not pkg_resources.resource_exists("bioframe.io", fpath):
            raise LookupError(f"Assembly '{db}' not found in local storage")
        return read_chromsizes(
            pkg_resources.resource_filename("bioframe.io", fpath)
        )
    if provider not in ("ucsc", None):
        raise ValueError("Unknown provider '{}'".format(provider))
    return UCSCClient(db).fetch_chromsizes(
        filter_chroms=filter_chroms,
        chrom_patterns=chrom_patterns,
        natsort=natsort,
        as_bed=as_bed,
        **kwargs,
    )
def fetch_centromeres(db, provider=None, merge=True, verbose=False):
    """
    Fetch centromere positions from the UCSC database or local storage.

    Parameters
    ----------
    db : str
        Assembly name (e.g. ``'hg38'``).
    provider : str, optional
        ``'ucsc'`` (default when None) or ``'local'`` for data bundled
        with the package.
    merge : bool, optional
        Passed to :func:`extract_centromeres`; merge centromere intervals
        per chromosome. Default is True.
    verbose : bool, optional
        Currently unused; kept for backward compatibility.

    Returns
    -------
    DataFrame of centromere positions.

    Notes
    -----
    For the UCSC provider, sources are tried in decreasing priority:
    ``centromeres.txt``, ``cytoBand``, ``cytoBandIdeo``, ``gap.txt``.
    """
    if provider == "local":
        fpath = f"data/{db}.centromeres"
        if pkg_resources.resource_exists("bioframe.io", fpath):
            return read_chromsizes(
                pkg_resources.resource_filename("bioframe.io", fpath)
            )
        else:
            raise LookupError(f"Centromeres for '{db}' not found in local storage")
    if provider == "ucsc" or provider is None:
        client = UCSCClient(db)
        # Each candidate source carries its own table schema, so keep the
        # (schema, fetcher) pair together.
        fetchers = [
            ("centromeres", client.fetch_centromeres),
            ("cytoband", client.fetch_cytoband),
            ("cytoband", partial(client.fetch_cytoband, ideo=True)),
            ("gap", client.fetch_gaps),
        ]
        for schema, fetcher in fetchers:
            try:
                df = fetcher()
                break
            except urllib.error.HTTPError:
                # Source not available for this assembly; try the next one.
                pass
        else:
            raise ValueError("No source for centromere data found.")
        return extract_centromeres(df, schema=schema, merge=merge)
    # Previously an unrecognized provider fell through and silently returned
    # None; raise instead, consistent with fetch_chromsizes.
    raise ValueError("Unknown provider '{}'".format(provider))
class UCSCClient:
    """Minimal HTTP client for files hosted under UCSC's goldenPath tree."""

    BASE_URL = "http://hgdownload.cse.ucsc.edu/"

    def __init__(self, db):
        """Bind the client to a UCSC genome assembly name (e.g. ``'hg38'``)."""
        self._db = db
        self._db_url = urljoin(self.BASE_URL, "goldenPath/{}/database/".format(db))

    def fetch_chromsizes(
        self,
        filter_chroms=True,
        chrom_patterns=(r"^chr[0-9]+$", r"^chr[XY]$", r"^chrM$"),
        natsort=True,
        as_bed=False,
        **kwargs,
    ):
        """
        Download chromosome sizes for this assembly from UCSC.

        Parameters
        ----------
        filter_chroms : bool, optional
            Keep only chromosome names matching ``chrom_patterns``.
        chrom_patterns : sequence, optional
            Regular expressions capturing the desired sequence names.
        natsort : bool, optional
            Natural-sort each captured group of names. Default is True.
        as_bed : bool, optional
            If True, return an interval dataframe (chrom, start, end)
            instead of a Series.
        **kwargs :
            Passed through to :func:`pandas.read_csv`.

        Returns
        -------
        Series of integer bp lengths indexed by sequence name, or an
        interval dataframe when ``as_bed=True``.
        """
        chromsizes_url = urljoin(self._db_url, "chromInfo.txt.gz")
        return read_chromsizes(
            chromsizes_url,
            filter_chroms=filter_chroms,
            chrom_patterns=chrom_patterns,
            natsort=natsort,
            as_bed=as_bed,
            **kwargs,
        )

    def fetch_centromeres(self, **kwargs):
        """Download the assembly's centromeres table."""
        return read_table(
            urljoin(self._db_url, "centromeres.txt.gz"), schema="centromeres"
        )

    def fetch_gaps(self, **kwargs):
        """Download the assembly's gap table."""
        return read_gapfile(urljoin(self._db_url, "gap.txt.gz"), **kwargs)

    def fetch_cytoband(self, ideo=False, **kwargs):
        """Download the cytoband table (ideogram variant when ``ideo=True``)."""
        table_name = "cytoBandIdeo.txt.gz" if ideo else "cytoBand.txt.gz"
        return read_table(urljoin(self._db_url, table_name), schema="cytoband")

    def fetch_mrna(self, **kwargs):
        """Download the assembly's all_mrna alignment table."""
        return read_ucsc_mrnafile(urljoin(self._db_url, "all_mrna.txt.gz"), **kwargs)
class EncodeClient:
    """Client for downloading ENCODE data files and metadata into a local cache.

    Data files are cached under ``<cachedir>/<assembly>/`` and the metadata
    table under ``<cachedir>/metadata.tsv``.
    """

    BASE_URL = "http://www.encodeproject.org/"
    # 2020-05-15 compatible with ENCODE Metadata at:
    METADATA_URL = "https://www.encodeproject.org/metadata/type=Experiment&status=released/metadata.tsv"
    # Assemblies present in the pinned ENCODE metadata snapshot above; used
    # both to validate the `assembly` argument and to sanity-check a
    # downloaded metadata table.
    KNOWN_ASSEMBLIES = [
        "GRCh38",
        "GRCh38-minimal",
        "ce10",
        "ce11",
        "dm3",
        "dm6",
        "hg19",
        "mm10",
        "mm10-minimal",
        "mm9",
    ]

    def __init__(self, cachedir, assembly, metadata=None):
        """Set up the cache directory and load (or download) the metadata table.

        :param cachedir: root cache directory; a per-assembly subdirectory is created.
        :param assembly: one of ``KNOWN_ASSEMBLIES``.
        :param metadata: optional pre-loaded metadata DataFrame; skips download.
        """
        if assembly not in self.KNOWN_ASSEMBLIES:
            # NOTE(review): passing two args to ValueError produces a
            # tuple-style message rather than a formatted string.
            raise ValueError("assembly must be in:", self.KNOWN_ASSEMBLIES)
        self.cachedir = op.join(cachedir, assembly)
        if not op.isdir(self.cachedir):
            os.makedirs(self.cachedir, exist_ok=True)
        if metadata is None:
            # The metadata table is shared across assemblies, so it lives in
            # the root cachedir, not the per-assembly subdirectory.
            metadata_path = op.join(cachedir, "metadata.tsv")
            if not op.exists(metadata_path):
                print(
                    "getting metadata from ENCODE, please wait while (~240Mb) file downloads"
                )
                # Streamed download to avoid holding the whole file in memory.
                with requests.get(self.METADATA_URL, stream=True) as r:
                    r.raise_for_status()
                    with open(metadata_path, "wb") as f:
                        for chunk in r.iter_content(chunk_size=8192):
                            f.write(chunk)
            self._meta = pd.read_table(metadata_path, low_memory=False)
            # Guard against a cached/downloaded table from a different
            # metadata version than the pinned snapshot.
            table_assemblies = sorted(
                self._meta["File assembly"].dropna().unique().tolist()
            )
            if table_assemblies != self.KNOWN_ASSEMBLIES:
                raise ValueError(
                    "Table assemblies do not match known assemblies, "
                    "check ENCODE metadata version"
                )
            self._meta = self._meta[self._meta["File assembly"] == assembly].copy()
            self._meta = self._meta.set_index("File accession")
        else:
            self._meta = metadata

    def _batch_download(self, args):
        # Query the ENCODE batch_download endpoint with url-encoded args.
        params = urlencode(args)
        url = pp.join("batch_download", params)
        url = urljoin(self.BASE_URL, url)
        r = requests.get(url)
        r.raise_for_status()
        return r

    def _metadata(self, args):
        # Query the ENCODE metadata endpoint with url-encoded args.
        params = urlencode(args)
        url = pp.join("metadata", params, "metadata.tsv")
        url = urljoin(self.BASE_URL, url)
        r = requests.get(url)
        r.raise_for_status()
        return r

    @property
    def meta(self):
        """A defensive copy of the metadata table for this assembly."""
        return self._meta.copy()

    def info(self, accession, width=850, height=450):
        """Return an IPython HTML iframe embedding the ENCODE experiment page.

        NOTE(review): requires IPython; imported lazily so the client works
        without it as long as info() is never called.
        """
        from IPython.display import HTML
        url = urljoin(self.BASE_URL, pp.join("experiments", accession))
        return HTML(
            '<iframe width="{}px" height="{}px" src={}></iframe>'.format(
                width, height, url
            )
        )

    def fetch(self, accession):
        """Download the file for `accession` into the cache (if absent).

        :return: local path of the cached file.
        """
        url = self.meta.loc[accession, "File download URL"]
        # sig = self.meta.loc[accession, 'md5sum']
        filename = op.split(url)[1]
        path = op.join(self.cachedir, filename)
        if op.exists(path):
            # Already cached; nothing to do.
            pass
            # print('File "{}" available'.format(filename))
        else:
            print('Downloading "{}"'.format(filename))
            r = requests.get(url)
            r.raise_for_status()
            with open(path, "wb") as f:
                f.write(r.content)
        return path

    def fetch_all(self, accessions):
        """Fetch every accession in `accessions`; return the local paths."""
        return list(map(self.fetch, accessions))
class FDNClient:
    """Client for downloading files from the 4D Nucleome data portal.

    Unlike :class:`EncodeClient`, the metadata table must already be present
    in the cache directory (``metadata*.tsv``); it is never downloaded here.
    """

    BASE_URL = "https://data.4dnucleome.org/"

    def __init__(self, cachedir, assembly, metadata=None, key_id=None, key_secret=None):
        """Set up the cache and load the newest local metadata table.

        :param cachedir: root cache directory (must already exist with the
            per-assembly subdirectory).
        :param assembly: assembly name used as the cache subdirectory.
        :param metadata: optional pre-loaded metadata DataFrame.
        :param key_id: optional 4DN API key id for authenticated downloads.
        :param key_secret: API key secret; required when `key_id` is given.
        """
        self.cachedir = op.join(cachedir, assembly)
        if not op.isdir(self.cachedir):
            raise OSError("Directory doesn't exist: '{}'".format(cachedir))
        if metadata is None:
            # Pick the lexicographically last metadata*.tsv, i.e. presumably
            # the most recent export — TODO confirm naming convention.
            metadata_paths = sorted(glob.glob(op.join(cachedir, "metadata*.tsv")))
            metadata_path = metadata_paths[-1]
            self._meta = pd.read_table(metadata_path, low_memory=False, comment="#")
            if assembly == "GRCh38":
                self._meta = self._meta[self._meta["Organism"] == "human"].copy()
            self._meta = self._meta.set_index("File Accession")
        else:
            self._meta = metadata
        if key_id is not None:
            # HTTP Basic credentials, pre-encoded for the Authorization header.
            credential = (key_id + ":" + key_secret).encode("utf-8")
            self._token = base64.b64encode(credential)
        else:
            self._token = None

    @property
    def meta(self):
        """A defensive copy of the metadata table."""
        return self._meta.copy()

    def info(self, accession, width=850, height=450):
        """Return an IPython HTML iframe embedding the 4DN experiment page.

        NOTE(review): requires IPython; imported lazily so the client works
        without it as long as info() is never called.
        """
        from IPython.display import HTML
        url = urljoin(self.BASE_URL, pp.join("experiments", accession))
        return HTML(
            '<iframe width="{}px" height="{}px" src={}></iframe>'.format(
                width, height, url
            )
        )

    def fetch(self, accession):
        """Download the file for `accession` into the cache (if absent).

        :return: local path of the cached file.
        """
        url = self.meta.loc[accession, "File Download URL"]
        # sig = self.meta.loc[accession, 'md5sum']
        filename = op.split(url)[1]
        path = op.join(self.cachedir, filename)
        if op.exists(path):
            # Already cached; nothing to do.
            pass
            # print('File "{}" available'.format(filename))
        else:
            print('Downloading "{}"'.format(filename))
            if self._token:
                headers = {"Authorization": b"Basic " + self._token}
            else:
                headers = None
            r = requests.get(url, headers=headers)
            r.raise_for_status()
            with open(path, "wb") as f:
                f.write(r.content)
        return path

    def fetch_all(self, accessions):
        """Fetch every accession in `accessions`; return the local paths."""
        return list(map(self.fetch, accessions))
| {
"repo_name": "open2c/bioframe",
"path": "bioframe/io/resources.py",
"copies": "1",
"size": "11832",
"license": "mit",
"hash": -8183569424939973000,
"line_mean": 30.2189973615,
"line_max": 104,
"alpha_frac": 0.5687964841,
"autogenerated": false,
"ratio": 3.8679306963059825,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4936727180405982,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urlparse import urlparse
import logging
import json
from types import GeneratorType
from rdflib import URIRef, Graph, RDF
from oldman.exception import OMUnauthorizedTypeChangeError, OMInternalError, OMUserError
from oldman.exception import OMAttributeAccessError, OMUniquenessError, OMWrongResourceError, OMEditError
from oldman.common import OBJECT_PROPERTY
class Resource(object):
    """A :class:`~oldman.resource.resource.Resource` object is a subject-centric representation of a Web resource.
    A set of :class:`~oldman.resource.resource.Resource` objects is equivalent to a RDF graph.

    In RDF, a resource is identified by an IRI (globally) or a blank node (locally).
    Because blank node support is complex and limited (:class:`rdflib.plugins.stores.sparqlstore.SPARQLStore`
    stores do not support them), **every** :class:`~oldman.resource.Resource` **object has an IRI**.

    This IRI is either given or generated by a :class:`~oldman.iri.IriGenerator` object.
    Some generators generate recognizable `skolem IRIs
    <http://www.w3.org/TR/2014/REC-rdf11-concepts-20140225/#section-skolemization>`_
    that are treated as blank nodes when the resource is serialized into JSON, JSON-LD
    or another RDF format (for external consumption).

    A resource is usually instance of some RDFS classes. These classes are grouped in its attribute `types`.
    :class:`~oldman.model.Model` objects are found from these classes, by calling the method
    :func:`oldman.resource.manager.ResourceManager.find_models_and_types`.
    Models give access to Python methods and to :class:`~oldman.attribute.OMAttribute` objects.
    Their ordering determines inheritance priorities.
    The main model is the first one of this list.

    Values of :class:`~oldman.attribute.OMAttribute` objects are accessible and modifiable
    like ordinary Python attribute values.
    However, these values are checked so some :class:`~oldman.exception.OMAccessError`
    or :class:`~oldman.exception.OMEditError` errors may be raised.

    This abstract class accepts two concrete classes: :class:`~oldman.resource.resource.StoreResource` and
    :class:`~oldman.resource.resource.ClientResource`. The former is serializable and can be saved directly
    by the datastore while the latter has to be converted into a :class:`~oldman.resource.resource.StoreResource`
    so as to be saved.

    Example::

        >>> alice = StoreResource(model_manager, data_store, types=["http://schema.org/Person"], name=u"Alice")
        >>> alice.id
        u'http://localhost/persons/1'
        >>> alice.name
        u'Alice'
        >>> alice.save()
        >>> alice.name = "Alice A."
        >>> print alice.to_jsonld()
        {
            "@context": "http://localhost/person.jsonld",
            "id": "http://localhost/persons/1",
            "types": [
                "http://schema.org/Person"
            ],
            "name": "Alice A."
        }
        >>> alice.name = 5
        oldman.exception.OMAttributeTypeCheckError: 5 is not a (<type 'str'>, <type 'unicode'>)

    .. admonition:: Resource creation

        :class:`~oldman.resource.resource.Resource` objects are normally created by a
        :class:`~oldman.model.model.Model` or a
        :class:`~oldman.resource.manager.ResourceManager` object. Please use the
        methods :func:`oldman.model.model.Model.create`, :func:`oldman.model.Model.new`,
        :func:`oldman.resource.manager.ResourceManager.create` or
        :func:`oldman.resource.manager.ResourceManager.new` for creating new
        :class:`~oldman.resource.Resource` objects.

    :param model_manager: :class:`~oldman.model.manager.ModelManager` object. Gives access to its models.
    :param data_store: :class:`~oldman.store.datastore.DataStore` object. Datastore that has authority
                       on this resource.
    :param id: IRI of the resource. If not given, this IRI is generated by the main model. Defaults to `None`.
    :param types: IRI list or set of the RDFS classes the resource is instance of. Defaults to `set()`.
    :param hashless_iri: Hash-less IRI that is given to the main model for generating a new IRI if no `id` is given.
                         The IRI generator may ignore it. Defaults to `None`. Must be `None` if `collection_iri` is given.
    :param collection_iri: IRI of the controller to which this resource belongs. This information
                           is used to generate a new IRI if no `id` is given. The IRI generator may ignore it.
                           Defaults to `None`. Must be `None` if `hashless_iri` is given.
    :param is_new: When is `True` and `id` given, checks that the IRI is not already existing in the
                   `data_store`. Defaults to `True`.
    :param former_types: IRI list or set of the RDFS classes the resource was instance of. Defaults to `set()`.
    :param kwargs: values indexed by their attribute names.
    """
    # NOTE(review): this class targets Python 2 (dict.iteritems, `unicode`,
    # print statement in the doctest above).
    # Python attributes handled directly by __setattr__ (stored in __dict__,
    # bypassing the OMAttribute machinery).
    _special_attribute_names = ["_models", "_id", "_types", "_is_blank_node", "_model_manager",
                                "_store", "_former_types", "_logger", "_resource_manager", "_is_new"]
    # Attributes included in the pickled state (see StoreResource.__getstate__).
    _pickle_attribute_names = ["_id", '_types', '_is_new']

    def __init__(self, model_manager, data_store, id=None, types=None, hashless_iri=None, collection_iri=None,
                 is_new=True, former_types=None, **kwargs):
        """Inits but does not save it (in the `data_graph`)."""
        self._models, self._types = model_manager.find_models_and_types(types)
        if former_types is not None:
            self._former_types = set(former_types)
        else:
            self._former_types = set(self._types) if not is_new else set()
        main_model = self._models[0]
        self._model_manager = model_manager
        self._store = data_store
        self._is_new = is_new
        if hashless_iri is not None and collection_iri is not None:
            raise OMUserError(u"Hashless_iri (%s) and collection_iri (%s) cannot be given in the same time."
                              % (hashless_iri, collection_iri))
        if id is not None:
            # Anticipated because used in __hash__
            self._id = id
            if is_new and self._store.exists(id):
                raise OMUniquenessError("Object %s already exist" % self._id)
        else:
            self._id = main_model.generate_iri(hashless_iri=hashless_iri,
                                               collection_iri=collection_iri)
        self._init_non_persistent_attributes(self._id)
        # Remaining kwargs are OMAttribute values; setattr routes them through
        # __setattr__, which validates them against the models' attributes.
        for k, v in kwargs.iteritems():
            if k in self._special_attribute_names:
                raise AttributeError(u"Special attribute %s should not appear in **kwargs" % k)
            setattr(self, k, v)

    def _init_non_persistent_attributes(self, id):
        """Used at init and unpickling times."""
        self._logger = logging.getLogger(__name__)
        # is_blank_node() here is a module-level helper (defined elsewhere in
        # this module), not the method below.
        self._is_blank_node = is_blank_node(id)

    @property
    def types(self):
        """IRI list of the RDFS classes the resource is instance of."""
        return list(self._types)

    @property
    def models(self):
        """TODO: describe"""
        return list(self._models)

    @property
    def id(self):
        """IRI that identifies the resource."""
        return self._id

    @property
    def hashless_iri(self):
        """Hash-less IRI of the `id` attribute.

        Is obtained by removing the fragment from the IRI.
        """
        return self._id.split('#')[0]

    @property
    def context(self):
        """ An IRI, a `list` or a `dict` that describes the JSON-LD context.

        Derived from :attr:`oldman.model.Model.context` attributes.
        """
        if len(self._models) > 1:
            raise NotImplementedError(u"TODO: merge contexts when a Resource has multiple models")
        return list(self._models)[0].context

    @property
    def local_context(self):
        """Context that is locally accessible but that may not be advertised in the JSON-LD serialization."""
        if len(self._models) > 1:
            raise NotImplementedError(u"TODO: merge local contexts when a Resource has multiple models")
        return list(self._models)[0].local_context

    @property
    def model_manager(self):
        """:class:`~oldman.model.manager.ModelManager` object. Gives access to the
        :class:`~oldman.model.model.Model` objects. """
        return self._model_manager

    @property
    def store(self):
        """:class:`~oldman.store.datastore.DataStore` object."""
        return self._store

    @property
    def is_new(self):
        """True if the resource has never been saved."""
        return self._is_new

    @property
    def former_types(self):
        """Not for end-users"""
        return list(self._former_types)

    @property
    def non_model_types(self):
        """RDFS classes that are not associated to a `Model`."""
        return set(self._types).difference({m.class_iri for m in self._models})

    @property
    def former_non_model_types(self):
        """RDFS classes that were not associated to a `Model`."""
        # NOTE(review): the empty cases return a dict ({}) while the final
        # return yields a set; callers should rely only on truthiness or
        # iteration — confirm intended type.
        if len(self._former_types) == 0:
            return {}
        possible_non_model_types = set(self._former_types).difference({m.class_iri
                                                                       for m in self._models})
        if len(possible_non_model_types) == 0:
            return {}
        corresponding_models, _ = self._model_manager.find_models_and_types(possible_non_model_types)
        return possible_non_model_types.difference({m.class_iri for m in corresponding_models})

    def is_valid(self):
        """Tests if the resource is valid.

        :return: `False` if the resource is invalid, `True` otherwise.
        """
        for model in self._models:
            for attr in model.om_attributes.values():
                if not attr.is_valid(self):
                    return False
        return True

    def is_blank_node(self):
        """Tests if `id` is a skolem IRI and should thus be considered as a blank node.

        See :func:`~oldman.resource.is_blank_node` for further details.

        :return: `True` if `id` is a locally skolemized IRI.
        """
        return self._is_blank_node

    def is_instance_of(self, model):
        """ Tests if the resource is instance of the RDFS class of the model.

        :param model: :class:`~oldman.model.Model` object.
        :return: `True` if the resource is instance of the RDFS class.
        """
        return model.class_iri in self._types

    def in_same_document(self, other_resource):
        """Tests if two resources have the same hash-less IRI.

        :return: `True` if these resources are in the same document.
        """
        return self.hashless_iri == other_resource.hashless_iri

    def get_operation(self, http_method):
        """TODO: describe """
        # First model declaring the HTTP method wins (model priority order).
        for model in self._models:
            operation = model.get_operation(http_method)
            if operation is not None:
                return operation
        return None

    def get_lightly(self, attribute_name):
        """If the attribute corresponds to an `owl:ObjectProperty`, returns a IRI or None.
        Otherwise (if is a datatype), returns the value.
        """
        return self.get_attribute(attribute_name).get_lightly(self)

    def get_attribute(self, attribute_name):
        """Not for the end-user!"""
        for model in self._models:
            if attribute_name in model.om_attributes:
                return model.access_attribute(attribute_name)
        raise AttributeError("%s has no regular attribute %s" % (self, attribute_name))

    def __getattr__(self, name):
        """Gets:
          * A declared Python method ;
          * A declared operation ;
          * Or the value of a given :class:`~oldman.attribute.OMAttribute` object.

        Note that attributes stored in the `__dict__` attribute are not concerned
        by this method.

        :class:`~oldman.attribute.OMAttribute` objects are made accessible
        by :class:`~oldman.model.Model` objects.

        The first method or :class:`~oldman.attribute.OMAttribute` object matching the requested
        `name` is returned. This is why the ordering of models is so important.

        :param name: attribute name.
        :return: Its value.
        """
        for model in self._models:
            if name in model.om_attributes:
                return model.access_attribute(name).get(self)
            method = model.methods.get(name)
            if method is not None:
                # Make this function be a method (taking self as first parameter)
                return partial(method, self)
            operation = model.get_operation_by_name(name)
            if operation is not None:
                return partial(operation, self)
        raise AttributeError("%s has no attribute %s" % (self, name))

    def __setattr__(self, name, value):
        """Sets the value of one or multiple :class:`~oldman.attribute.OMAttribute` objects.

        If multiple :class:`~oldman.attribute.OMAttribute` objects have the same
        name, they will all receive the same value.

        :param name: attribute name.
        :param value: value to assign.
        """
        if name in self._special_attribute_names:
            # Regular Python attribute: store directly, bypassing OMAttributes.
            self.__dict__[name] = value
            return
        found = False
        for model in self._models:
            if name in model.om_attributes:
                model.access_attribute(name).set(self, value)
                found = True
        if not found:
            raise AttributeError("%s has not attribute %s" % (self, name))

    def add_type(self, additional_type):
        """Declares that the resource is instance of another RDFS class.

        Note that it may introduce a new model to the list
        and change its ordering.

        :param additional_type: IRI or JSON-LD term identifying a RDFS class.
        """
        if additional_type not in self._types:
            new_types = set(self._types)
            new_types.add(additional_type)
            self._change_types(new_types)

    def check_validity(self):
        """Checks its validity.

        Raises an :class:`oldman.exception.OMEditError` exception if invalid.
        """
        for model in self._models:
            for attr in model.om_attributes.values():
                attr.check_validity(self)

    def receive_id(self, id):
        """Receives the permanent ID assigned by the store.
        Useful when the permanent ID is given by an external server.

        Replaces the temporary ID of the resource.
        """
        # TODO: make sure the previous id was a temporary one
        self._id = id
        self._is_new = False

    def save(self, is_end_user=True):
        """Saves it into the `data_store` and its `resource_cache`.

        Raises an :class:`oldman.exception.OMEditError` exception if invalid.

        :param is_end_user: `False` when an authorized user (not a regular end-user)
                            wants to force some rights. Defaults to `True`.
                            See :func:`~oldman.attribute.OMAttribute.check_validity` for further details.
        :return: The :class:`~oldman.resource.resource.Resource` object itself.
        """
        raise NotImplementedError("Have to be implemented by sub-classes")

    def delete(self):
        """Removes the resource from the `data_store` and its `resource_cache`.

        Cascade deletion is done for related resources satisfying the test
        :func:`~oldman.resource.resource.should_delete_resource`.
        """
        raise NotImplementedError("Have to be implemented by sub-classes")

    def _extract_attribute_list(self):
        """:return: An ordered list of list of :class:`~oldman.attribute.OMAttribute` objects."""
        attributes = []
        for model in self._models:
            attributes += model.om_attributes.values()
        return attributes

    def to_dict(self, remove_none_values=True, include_different_contexts=False,
                ignored_iris=None):
        """Serializes the resource into a JSON-like `dict`.

        :param remove_none_values: If `True`, `None` values are not inserted into the dict.
                                   Defaults to `True`.
        :param include_different_contexts: If `True` local contexts are given to sub-resources.
                                           Defaults to `False`.
        :param ignored_iris: List of IRI of resources that should not be included in the `dict`.
                             Defaults to `set()`.
        :return: A `dict` describing the resource.
        """
        if ignored_iris is None:
            ignored_iris = set()
        # Guard against infinite recursion through cyclic references.
        ignored_iris.add(self._id)
        dct = {attr.name: self._convert_value(getattr(self, attr.name), ignored_iris, remove_none_values,
                                              include_different_contexts)
               for attr in self._extract_attribute_list()
               if not attr.is_write_only}
        # filter None values
        if remove_none_values:
            dct = {k: v for k, v in dct.iteritems() if v is not None}
        if not self.is_blank_node():
            dct["id"] = self._id
        if self._types and len(self._types) > 0:
            dct["types"] = list(self._types)
        return dct

    def to_json(self, remove_none_values=True, ignored_iris=None):
        """Serializes the resource into pure JSON (not JSON-LD).

        :param remove_none_values: If `True`, `None` values are not inserted into the dict.
                                   Defaults to `True`.
        :param ignored_iris: List of IRI of resources that should not be included in the `dict`.
                             Defaults to `set()`.
        :return: A JSON-encoded string.
        """
        return json.dumps(self.to_dict(remove_none_values=remove_none_values,
                                       include_different_contexts=False,
                                       ignored_iris=ignored_iris), sort_keys=True, indent=2)

    def to_jsonld(self, remove_none_values=True, include_different_contexts=False,
                  ignored_iris=None):
        """Serializes the resource into JSON-LD.

        :param remove_none_values: If `True`, `None` values are not inserted into the dict.
                                   Defaults to `True`.
        :param include_different_contexts: If `True` local contexts are given to sub-resources.
                                           Defaults to `False`.
        :param ignored_iris: List of IRI of resources that should not be included in the `dict`.
                             Defaults to `set()`.
        :return: A JSON-LD encoded string.
        """
        dct = self.to_dict(remove_none_values=remove_none_values,
                           include_different_contexts=include_different_contexts,
                           ignored_iris=ignored_iris)
        dct['@context'] = self.context
        return json.dumps(dct, sort_keys=True, indent=2)

    def to_rdf(self, rdf_format="turtle"):
        """Serializes the resource into RDF.

        :param rdf_format: content-type or keyword supported by RDFlib.
                           Defaults to `"turtle"`.
        :return: A string in the chosen RDF format.
        """
        # Round-trips through JSON-LD so that the local context is applied.
        g = Graph()
        g.parse(data=self.to_json(), context=self.local_context, format="json-ld")
        return g.serialize(format=rdf_format)

    def __str__(self):
        return self._id

    def __repr__(self):
        return u"%s(<%s>)" % (self.__class__.__name__, self._id)

    def _convert_value(self, value, ignored_iris, remove_none_values, include_different_contexts=False):
        """Recursive method. Internals of :func:`~oldman.resource.Resource.to_dict`.

        :return: JSON-compatible value or list of JSON-compatible values.
        """
        # Containers
        if isinstance(value, (list, set, GeneratorType)):
            return [self._convert_value(v, ignored_iris, remove_none_values, include_different_contexts)
                    for v in value]
        # Object
        if isinstance(value, Resource):
            # If non-blank or in the same document
            if value.id not in ignored_iris and \
                    (value.is_blank_node() or self.in_same_document(value)):
                value_dict = dict(value.to_dict(remove_none_values, include_different_contexts, ignored_iris))
                # TODO: should we improve this test?
                # NOTE(review): `_context` is not defined by this class;
                # presumably provided by concrete subclasses — confirm.
                if include_different_contexts and value._context != self._context:
                    value_dict["@context"] = value._context
                return value_dict
            else:
                # URI
                return value.id
        # Literal
        return value
def update(self, full_dict, is_end_user=True, allow_new_type=False, allow_type_removal=False, save=True):
"""Updates the resource from a flat `dict`.
By flat, we mean that sub-resources are only represented by their IRIs:
there is no nested sub-object structure.
This dict is supposed to be exhaustive, so absent value is removed.
Some sub-resources may thus be deleted like if there were a cascade
deletion.
:param full_dict: Flat `dict` containing the attribute values to update.
:param is_end_user: `False` when an authorized user (not a regular end-user)
wants to force some rights. Defaults to `True`.
See :func:`~oldman.attribute.OMAttribute.check_validity` for further details.
:param allow_new_type: If `True`, new types can be added.
Please keep in mind that type change can:
- Modify the behavior of the resource by changing its model list.
- Interfere with the SPARQL requests using instance tests.
If enabled, this may represent a major **security concern**.
Defaults to `False`.
:param allow_type_removal: If `True`, new types can be removed. Same security concerns than above.
Defaults to `False`.
:param save: If `True` calls :func:`~oldman.resource.Resource.save` after updating. Defaults to `True`.
:return: The :class:`~oldman.resource.Resource` object itself.
"""
#if not self.is_blank_node() and "id" not in full_dict:
if "id" not in full_dict:
raise OMWrongResourceError(u"Cannot update an object without IRI")
elif full_dict["id"] != self._id:
raise OMWrongResourceError(u"Wrong IRI %s (%s was expected)" % (full_dict["id"], self._id))
attributes = self._extract_attribute_list()
attr_names = [a.name for a in attributes]
for key in full_dict:
if key not in attr_names and key not in ["@context", "id", "types"]:
raise OMAttributeAccessError(u"%s is not an attribute of %s" % (key, self._id))
# Type change resource
if "types" in full_dict:
try:
new_types = set(full_dict["types"])
except TypeError:
raise OMEditError(u"'types' attribute is not a list, a set or a string but is %s " % new_types)
self._check_and_update_types(new_types, allow_new_type, allow_type_removal)
for attr in attributes:
value = full_dict.get(attr.name)
# set is not a JSON structure (but a JSON-LD one)
if value is not None and attr.container == "@set":
value = set(value)
attr.set(self, value)
if save:
self.save(is_end_user)
return self
    def update_from_graph(self, subgraph, initial=False, is_end_user=True, allow_new_type=False,
                          allow_type_removal=False, save=True):
        """Similar to :func:`~oldman.resource.Resource.full_update` but with
        a RDF graph instead of a Python `dict`.

        :param subgraph: :class:`rdflib.Graph` object containing the full description of the resource.
        :param initial: `True` when the subgraph comes from the `data_graph` and is thus used
                        to load :class:`~oldman.resource.Resource` object from the triple store.
                        Defaults to `False`.
        :param is_end_user: `False` when an authorized user (not a regular end-user)
                            wants to force some rights. Defaults to `True`.
                            See :func:`~oldman.attribute.OMAttribute.check_validity` for further details.
        :param allow_new_type: If `True`, new types can be added. Defaults to `False`. See
                               :func:`~oldman.resource.Resource.full_update` for explanations about the
                               security concerns.
        :param allow_type_removal: If `True`, new types can be removed. Same security concerns than above.
                                   Defaults to `False`.
        :param save: If `True` calls :func:`~oldman.resource.Resource.save` after updating. Defaults to `True`.

        :return: The :class:`~oldman.resource.Resource` object itself.
        """
        for attr in self._extract_attribute_list():
            attr.update_from_graph(self, subgraph, initial=initial)
        #Types
        if not initial:
            # Python 2 `unicode` builtin: type IRIs are the rdf:type objects.
            new_types = {unicode(t) for t in subgraph.objects(URIRef(self._id), RDF.type)}
            self._check_and_update_types(new_types, allow_new_type, allow_type_removal)
        if save:
            self.save(is_end_user)
        return self

    def get_related_resource(self, id):
        """ Not for end-users!
        Must be implemented by concrete classes.

        If cannot get the resource, return its IRI.
        """
        raise NotImplementedError("To be implemented by a concrete sub-class")

    def _check_and_update_types(self, new_types, allow_new_type, allow_type_removal):
        # Compares the requested type set against the current one and applies
        # the change only when permitted by the allow_* flags.
        current_types = set(self._types)
        if new_types == current_types:
            return
        change = False
        # Appending new types
        additional_types = new_types.difference(current_types)
        if len(additional_types) > 0:
            if not allow_new_type:
                raise OMUnauthorizedTypeChangeError(u"Adding %s to %s has not been allowed"
                                                    % (additional_types, self._id))
            change = True
        # Removal
        missing_types = current_types.difference(new_types)
        if len(missing_types) > 0:
            # Ancestor classes implied by the models are not considered
            # explicit removals.
            implicit_types = {t for m in self._models for t in m.ancestry_iris}.difference(
                {m.class_iri for m in self._models})
            removed_types = missing_types.difference(implicit_types)
            if len(removed_types) > 0:
                if not allow_type_removal:
                    raise OMUnauthorizedTypeChangeError(u"Removing %s to %s has not been allowed"
                                                        % (removed_types, self._id))
                change = True
        if change:
            # Re-resolve the model list because the type set drives it.
            self._models, types = self._model_manager.find_models_and_types(new_types)
            self._change_types(types)

    def _change_types(self, new_types):
        # Hook point: subclasses/updates funnel all type mutation through here.
        self._types = new_types

    def _get_om_attribute(self, name):
        # Like get_attribute() but logs the model/type context before failing.
        for model in self._models:
            if name in model.om_attributes:
                return model.access_attribute(name)
        self._logger.debug(u"Models: %s, types: %s" % ([m.name for m in self._models], self._types))
        #self._logger.debug(u"%s" % self._manager._registry.model_names)
        raise AttributeError(u"%s has not attribute %s" % (self, name))

    def _filter_objects_to_delete(self, ids):
        raise NotImplementedError("Implemented by a sub-class")
class StoreResource(Resource):
"""StoreResource: resource manipulated by the data store.
End-users should not manipulate it.
Is serializable (pickable).
"""
    @classmethod
    def load_from_graph(cls, model_manager, data_store, id, subgraph, is_new=True, collection_iri=None):
        """Loads a new :class:`~oldman.resource.StoreResource` object from a sub-graph.

        TODO: update the comments.

        :param manager: :class:`~oldman.resource.manager.ResourceManager` object.
        :param id: IRI of the resource.
        :param subgraph: :class:`rdflib.Graph` object containing triples about the resource.
        :param is_new: When is `True` and `id` given, checks that the IRI is not already existing in the
                       `union_graph`. Defaults to `True`.
        :return: The :class:`~oldman.resource.Resource` object created.
        """
        # Python 2 `unicode` builtin: the types are the rdf:type objects of `id`.
        types = list({unicode(t) for t in subgraph.objects(URIRef(id), RDF.type)})
        instance = cls(model_manager, data_store, id=id, types=types, is_new=is_new, collection_iri=collection_iri)
        # Populate attribute values from the graph without triggering a save.
        instance.update_from_graph(subgraph, is_end_user=True, save=False, initial=True)
        return instance
def __getstate__(self):
"""Pickles this resource."""
state = {name: getattr(self, name) for name in self._pickle_attribute_names}
state["store_name"] = self._store.name
# Reversed order so that important models can overwrite values
reversed_models = self._models
reversed_models.reverse()
for model in reversed_models:
for name, attr in model.om_attributes.iteritems():
value = attr.get_lightly(self)
if isinstance(value, GeneratorType):
if attr.container == "@list":
value = list(value)
else:
value = set(value)
if value is not None:
state[name] = value
return state
    def __setstate__(self, state):
        """Unpickles this resource from its serialized `state`."""
        required_fields = self._pickle_attribute_names + ["store_name"]
        for name in required_fields:
            if name not in state:
                #TODO: find a better exception (due to the cache)
                raise OMInternalError(u"Required field %s is missing in the cached state" % name)
        self._id = state["_id"]
        self._is_new = state["_is_new"]
        self._init_non_persistent_attributes(self._id)
        # Store
        # Deferred import: avoids a circular dependency at module load time.
        from oldman.store.datastore import DataStore
        self._store = DataStore.get_store(state["store_name"])
        self._model_manager = self._store.model_manager
        # Models and types
        self._models, self._types = self._model_manager.find_models_and_types(state["_types"])
        self._former_types = set(self._types)
        # Attributes (Python attributes or OMAttributes)
        for name, value in state.iteritems():
            if name in ["store_name", "_id", "_types", "_is_new"]:
                # Identity fields already handled above.
                continue
            elif name in self._special_attribute_names:
                setattr(self, name, value)
            # OMAttributes
            else:
                attribute = self._get_om_attribute(name)
                attribute.set(self, value)
                # Clears former values (allows modification)
                attribute.receive_storage_ack(self)
def get_related_resource(self, id):
""" Gets a related `StoreResource` by calling the datastore directly. """
resource = self.store.get(id=id)
if resource is None:
return id
return resource
def save(self, is_end_user=True):
"""Saves it into the `data_store` and its `resource_cache`.
Raises an :class:`oldman.exception.OMEditError` exception if invalid.
:param is_end_user: `False` when an authorized user (not a regular end-user)
wants to force some rights. Defaults to `True`.
See :func:`~oldman.attribute.OMAttribute.check_validity` for further details.
:return: The :class:`~oldman.resource.resource.Resource` object itself."""
# Checks
attributes = self._extract_attribute_list()
for attr in attributes:
attr.check_validity(self, is_end_user)
# Find objects to delete
objects_to_delete = []
for attr in attributes:
if not attr.has_changed(self):
continue
# Some former objects may be deleted
if attr.om_property.type == OBJECT_PROPERTY:
former_value, value = attr.diff(self)
if isinstance(former_value, dict):
raise NotImplementedError("Object dicts are not yet supported.")
former_value = former_value if isinstance(former_value, (set, list)) else [former_value]
# Cache invalidation (because of possible reverse properties)
resources_to_invalidate = set(value) if isinstance(value, (set, list)) else {value}
resources_to_invalidate.update(former_value)
for r in resources_to_invalidate:
if r is not None:
self._store.resource_cache.remove_resource_from_id(r)
objects_to_delete += self._filter_objects_to_delete(former_value)
# Update literal values
self.store.save(self, attributes, self._former_types)
# Delete the objects
for obj in objects_to_delete:
obj.delete()
# Clears former values
self._former_types = self._types
for attr in attributes:
attr.receive_storage_ack(self)
return self
def delete(self):
"""Removes the resource from the `data_store` and its `resource_cache`.
Cascade deletion is done for related resources satisfying the test
:func:`~oldman.resource.resource.should_delete_resource`.
"""
attributes = self._extract_attribute_list()
for attr in attributes:
# Delete blank nodes recursively
if attr.om_property.type == OBJECT_PROPERTY:
value = getattr(self, attr.name)
if value is not None:
objs = value if isinstance(value, (list, set, GeneratorType)) else [value]
for obj in objs:
if should_delete_resource(obj):
self._logger.debug(u"%s deleted with %s" % (obj.id, self._id))
obj.delete()
else:
self._logger.debug(u"%s not deleted with %s" % (obj.id, self._id))
# Cache invalidation (because of possible reverse properties)
self._store.resource_cache.remove_resource(obj)
setattr(self, attr.name, None)
#Types
self._change_types(set())
self._store.delete(self, attributes, self._former_types)
# Clears former values
for attr in attributes:
attr.receive_storage_ack(self)
self._is_new = False
def _filter_objects_to_delete(self, ids):
return [self.store.get(id=id) for id in ids
if id is not None and is_blank_node(id)]
class ClientResource(Resource):
    """ClientResource: resource manipulated by the end-user.

    Has access to the `resource_manager`.
    Is not serializable.
    """

    def __init__(self, resource_manager, model_manager, store, **kwargs):
        Resource.__init__(self, model_manager, store, **kwargs)
        self._resource_manager = resource_manager

    @classmethod
    def load_from_graph(cls, resource_manager, model_manager, data_store, id, subgraph, is_new=True,
                        collection_iri=None):
        """Loads a new :class:`~oldman.resource.ClientResource` object from a sub-graph.

        TODO: update the comments.

        :param resource_manager: :class:`~oldman.resource.manager.ResourceManager` object.
        :param model_manager: client-side model manager.
        :param data_store: data store responsible for this resource.
        :param id: IRI of the resource.
        :param subgraph: :class:`rdflib.Graph` object containing triples about the resource.
        :param is_new: When is `True` and `id` given, checks that the IRI is not already existing in the
                       `union_graph`. Defaults to `True`.
        :param collection_iri: optional IRI of the collection the resource belongs to.
        :return: The :class:`~oldman.resource.Resource` object created.
        """
        types = list({unicode(t) for t in subgraph.objects(URIRef(id), RDF.type)})
        instance = cls(resource_manager, model_manager, data_store, id=id, types=types, is_new=is_new,
                       collection_iri=collection_iri)
        instance.update_from_graph(subgraph, is_end_user=True, save=False, initial=True)
        return instance

    def get_related_resource(self, id):
        """Gets a related `ClientResource` through the resource manager.

        Returns the `id` itself when no resource is found.
        """
        resource = self._resource_manager.get(id=id)
        if resource is None:
            return id
        return resource

    def save(self, is_end_user=True):
        """Saves it into the `data_store` and its `resource_cache`.

        The actual persistence is delegated to the converted StoreResource.
        Raises an :class:`oldman.exception.OMEditError` exception if invalid.

        :param is_end_user: `False` when an authorized user (not a regular end-user)
                            wants to force some rights. Defaults to `True`.
                            See :func:`~oldman.attribute.OMAttribute.check_validity` for further details.
        :return: The :class:`~oldman.resource.resource.Resource` object itself.
        """
        # Checks
        attributes = self._extract_attribute_list()
        for attr in attributes:
            attr.check_validity(self, is_end_user)

        store_resource = self.model_manager.convert_client_resource(self)
        store_resource.save(is_end_user)

        # Clears former values
        # BUG FIX: copy into a new set instead of aliasing self._types
        # (later type changes would otherwise leak into _former_types).
        self._former_types = set(self._types)
        for attr in attributes:
            attr.receive_storage_ack(self)
        self._is_new = False

        # The ID may be updated (if was a temporary IRI before)
        self._id = store_resource.id
        return self

    def delete(self):
        """Removes the resource from the `data_store` and its `resource_cache`.

        Cascade deletion is done for related resources satisfying the test
        :func:`~oldman.resource.resource.should_delete_resource`.
        """
        store_resource = self.model_manager.convert_client_resource(self)
        store_resource.delete()

        # Clears former types (copy, to avoid aliasing self._types)
        self._former_types = set(self._types)
        # Clears values
        for attr in self._extract_attribute_list():
            setattr(self, attr.name, None)
            attr.receive_storage_ack(self)
        self._is_new = False

    def __getstate__(self):
        """Cannot be pickled."""
        # TypeError is the conventional signal for unpicklable objects;
        # callers catching the former generic Exception still catch it.
        raise TypeError("A ClientResource is not serializable.")

    def __setstate__(self, state):
        """Cannot be unpickled."""
        raise TypeError("A ClientResource is not serializable.")

    def _filter_objects_to_delete(self, ids):
        """TODO: consider other cases than blank nodes."""
        return [self._resource_manager.get(id=id) for id in ids
                if id is not None and is_blank_node(id)]
def is_blank_node(iri):
    """Tests if `id` is a locally skolemized IRI.

    External skolemized blank nodes are not considered as blank nodes.

    :param iri: IRI of the resource.
    :return: `True` if is a blank node.
    """
    parsed = urlparse(iri)
    is_local = parsed.hostname == u"localhost"
    return is_local and (u"/.well-known/genid/" in parsed.path)
def should_delete_resource(resource):
    """Tests if a resource should be deleted.

    :param resource: :class:`~oldman.resource.Resource` object to evaluate.
    :return: `True` if it should be deleted.
    """
    #TODO: make sure these blank nodes are not referenced somewhere else
    if resource is None:
        return False
    return resource.is_blank_node()
| {
"repo_name": "oldm/OldMan",
"path": "oldman/resource/resource.py",
"copies": "1",
"size": "40239",
"license": "bsd-3-clause",
"hash": 3162740791258766000,
"line_mean": 42.881134133,
"line_max": 118,
"alpha_frac": 0.6035438256,
"autogenerated": false,
"ratio": 4.217924528301887,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5321468353901887,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from urlparse import urlparse
from django import http
from django.conf import settings
from django.db import IntegrityError
from django.shortcuts import get_object_or_404, redirect
from django.contrib import auth
from django.template import Context, loader
from django.utils.datastructures import SortedDict
from django.views.decorators.cache import never_cache
from django.utils.decorators import method_decorator
from django.utils.encoding import smart_str
from django.utils.http import base36_to_int
from django.contrib.auth.tokens import default_token_generator
import commonware.log
import jingo
from radagast.wizard import Wizard
from ratelimit.decorators import ratelimit
from tower import ugettext as _, ugettext_lazy as _lazy
from session_csrf import anonymous_csrf, anonymous_csrf_exempt
from mobility.decorators import mobile_template
import waffle
from access.middleware import ACLMiddleware
import amo
from amo import messages
from amo.decorators import (json_view, login_required, permission_required,
write, post_required)
from amo.forms import AbuseForm
from amo.urlresolvers import reverse
from amo.utils import send_mail
from abuse.models import send_abuse_report
from addons.models import Addon
from addons.views import BaseFilter
from addons.decorators import addon_view_factory, can_be_purchased
from access import acl
from bandwagon.models import Collection
from stats.models import Contribution
from users.models import UserNotification
import users.notifications as notifications
from .models import UserProfile
from .signals import logged_out
from . import forms
from .utils import EmailResetCode, UnsubscribeCode
import tasks
# Module-level logger and an addon-resolving view decorator restricted to
# valid (non-disabled) add-ons.
log = commonware.log.getLogger('z.users')
addon_view = addon_view_factory(qs=Addon.objects.valid)
@login_required(redirect=False)
@json_view
def ajax(request):
    """Query for a user matching a given email (JSON endpoint)."""
    email = request.GET.get('q', '').strip()
    user = get_object_or_404(UserProfile, email=email)
    return {'id': user.id, 'name': user.name}
def confirm(request, user_id, token):
    """Activate an account when `token` matches the stored confirmation code.

    Always redirects to the login page afterwards.
    """
    user = get_object_or_404(UserProfile, id=user_id)
    if user.confirmationcode:
        if user.confirmationcode != token:
            log.info(u"Account confirmation failed for user (%s)", user)
            messages.error(request, _('Invalid confirmation code!'))
        else:
            user.confirmationcode = ''
            user.save()
            messages.success(request, _('Successfully verified!'))
            log.info(u"Account confirmed for user (%s)", user)
    return http.HttpResponseRedirect(reverse('users.login'))
def confirm_resend(request, user_id):
    """Re-send the account activation email, then redirect to login."""
    user = get_object_or_404(UserProfile, id=user_id)
    if user.confirmationcode:
        # Potential for flood here if someone requests a confirmationcode and
        # then re-requests confirmations. We may need to track requests in
        # the future.
        log.info(u"Account confirm re-requested for user (%s)", user)
        user.email_confirmation_code()
        msg = _(u'An email has been sent to your address {0} to confirm '
                'your account. Before you can log in, you have to activate '
                'your account by clicking on the link provided in this '
                'email.').format(user.email)
        messages.info(request, _('Confirmation Email Sent'), msg)
    return http.HttpResponseRedirect(reverse('users.login'))
@login_required
def delete(request):
    """Let the logged-in user delete (anonymize) their own profile."""
    amouser = request.amo_user
    if request.method != 'POST':
        form = forms.UserDeleteForm()
    else:
        form = forms.UserDeleteForm(request.POST, request=request)
        if form.is_valid():
            messages.success(request, _('Profile Deleted'))
            amouser.anonymize()
            logout(request)
            return http.HttpResponseRedirect(reverse('users.login'))
    return jingo.render(request, 'users/delete.html',
                        {'form': form, 'amouser': amouser})
@login_required
def delete_photo(request):
    """Remove the current user's profile photo (POST), or show the
    confirmation page (GET)."""
    user = request.amo_user
    if request.method != 'POST':
        return jingo.render(request, 'users/delete_photo.html',
                            dict(user=user))
    user.picture_type = ''
    user.save()
    log.debug(u"User (%s) deleted photo" % user)
    # The file removal itself happens asynchronously.
    tasks.delete_photo.delay(user.picture_path)
    messages.success(request, _('Photo Deleted'))
    return http.HttpResponseRedirect(reverse('users.edit') + '#user-profile')
@write
@login_required
def edit(request):
    """Let the logged-in user edit their own profile.

    Email changes are deferred: a confirmation link is mailed to the new
    address and the old address stays active until the user clicks it
    (see `emailchange`).
    """
    # Don't use request.amo_user since it has too much caching.
    amouser = UserProfile.objects.get(pk=request.user.id)
    if request.method == 'POST':
        # ModelForm alters the instance you pass in. We need to keep a copy
        # around in case we need to use it below (to email the user)
        original_email = amouser.email
        form = forms.UserEditForm(request.POST, request.FILES, request=request,
                                  instance=amouser)
        if form.is_valid():
            messages.success(request, _('Profile Updated'))
            if amouser.email != original_email:
                l = {'user': amouser,
                     'mail1': original_email,
                     'mail2': amouser.email}
                # BUG FIX: the implicitly-concatenated literals were missing
                # a separating space ("...change from(%(mail1)s)...").
                log.info(u"User (%(user)s) has requested email change from "
                         "(%(mail1)s) to (%(mail2)s)" % l)
                messages.info(request, _('Email Confirmation Sent'),
                    _(u'An email has been sent to {0} to confirm your new '
                      'email address. For the change to take effect, you '
                      'need to click on the link provided in this email. '
                      'Until then, you can keep logging in with your '
                      'current email address.').format(amouser.email))
                domain = settings.DOMAIN
                token, hash = EmailResetCode.create(amouser.id, amouser.email)
                url = "%s%s" % (settings.SITE_URL,
                                reverse('users.emailchange', args=[amouser.id,
                                                                   token,
                                                                   hash]))
                t = loader.get_template('users/email/emailchange.ltxt')
                c = {'domain': domain, 'url': url, }
                send_mail(_(("Please confirm your email address "
                             "change at %s") % domain),
                          t.render(Context(c)), None, [amouser.email],
                          use_blacklist=False)
                # Reset the original email back. We aren't changing their
                # address until they confirm the new one
                amouser.email = original_email
            form.save()
            return http.HttpResponseRedirect(reverse('users.edit'))
        else:
            messages.error(request, _('Errors Found'),
                           _('There were errors in the changes '
                             'you made. Please correct them and '
                             'resubmit.'))
    else:
        form = forms.UserEditForm(instance=amouser)
    return jingo.render(request, 'users/edit.html',
                        {'form': form, 'amouser': amouser})
@write
@login_required
@permission_required('Admin', 'EditAnyUser')
def admin_edit(request, user_id):
    """Admin-only view to edit any user's profile."""
    amouser = get_object_or_404(UserProfile, pk=user_id)
    if request.method != 'POST':
        form = forms.AdminUserEditForm(instance=amouser)
    else:
        form = forms.AdminUserEditForm(request.POST, request.FILES,
                                       request=request, instance=amouser)
        if form.is_valid():
            form.save()
            messages.success(request, _('Profile Updated'))
            return http.HttpResponseRedirect(reverse('zadmin.index'))
    return jingo.render(request, 'users/edit.html',
                        {'form': form, 'amouser': amouser})
def emailchange(request, user_id, token, hash):
    """Apply a pending email-address change from a signed token/hash pair."""
    user = get_object_or_404(UserProfile, id=user_id)
    try:
        _uid, newemail = EmailResetCode.parse(token, hash)
    except ValueError:
        return http.HttpResponse(status=400)
    if _uid != user.id:
        # I'm calling this a warning because invalid hashes up to this point
        # could be any number of things, but this is a targeted attack from
        # one user account to another
        log.warning((u"[Tampering] Valid email reset code for UID (%s) "
                     "attempted to change email address for user (%s)")
                    % (_uid, user))
        return http.HttpResponse(status=400)
    user.email = newemail
    user.save()
    log.info(u"User (%(user)s) confirmed new email address (%(newemail)s)"
             % {'user': user, 'newemail': newemail})
    messages.success(
        request, _('Your email address was changed successfully'),
        _(u'From now on, please use {0} to log in.').format(newemail))
    return http.HttpResponseRedirect(reverse('users.edit'))
def _clean_next_url(request):
    """Sanitize the 'to' redirect parameter on `request.GET` in place.

    Prevents open redirects: off-site URLs are replaced by '/', relative
    remora-style paths get a leading '/', and whitelisted cross-domain
    targets (settings.VALID_LOGIN_REDIRECTS) are expanded.
    """
    gets = request.GET.copy()
    # BUG FIX: use .get() -- _login()'s 'domain' branch can call this on a
    # querystring that has no 'to' key, which raised KeyError here.
    url = gets.get('to')
    if not url:
        url = settings.LOGIN_REDIRECT_URL
    # We want to not redirect outside of AMO via login/logout (also see
    # "domain" below)
    if '://' in url:
        url = '/'
    # TODO(davedash): This is a remora-ism, let's remove this after remora and
    # since all zamboni 'to' parameters will begin with '/'.
    if url and not url.startswith('/'):
        url = '/' + url
    gets['to'] = url
    domain = gets.get('domain', None)
    if domain in settings.VALID_LOGIN_REDIRECTS:
        gets['to'] = "%s%s" % (settings.VALID_LOGIN_REDIRECTS[domain], url)
    request.GET = gets
    return request
@anonymous_csrf
@post_required
@ratelimit(block=True, rate=settings.LOGIN_RATELIMIT_ALL_USERS)
def browserid_login(request):
    """Log a user in from a BrowserID assertion (behind a waffle switch)."""
    if not waffle.switch_is_active('browserid-login'):
        return http.HttpResponse(status=401)
    logout(request)
    user = auth.authenticate(assertion=request.POST['assertion'],
                             host=request.POST['audience'])
    if user is None:
        return http.HttpResponse(status=401)
    profile = UserProfile.objects.get(user=user)
    if profile.needs_tougher_password:
        return http.HttpResponse("", status=400)
    auth.login(request, user)
    return http.HttpResponse(status=200)
@addon_view
@can_be_purchased
@anonymous_csrf
@ratelimit(block=True, rate=settings.LOGIN_RATELIMIT_ALL_USERS)
def paypal_start(request, addon=None):
    """First step of the PayPal purchase flow; forces a login if needed."""
    download = urlparse(request.GET.get('realurl', '')).path
    context = {'addon': addon, 'is_ajax': request.is_ajax(),
               'download': download}
    if not request.user.is_authenticated():
        return _login(request, data=context,
                      template='addons/paypal_start.html', dont_redirect=True)
    return jingo.render(request, 'addons/paypal_start.html', context)
@anonymous_csrf
@mobile_template('users/{mobile/}login_modal.html')
@ratelimit(block=True, rate=settings.LOGIN_RATELIMIT_ALL_USERS)
def login_modal(request, template=None):
    """Login form rendered inside a modal; delegates to `_login`."""
    return _login(request, template=template)
@anonymous_csrf
@mobile_template('users/{mobile/}login.html')
@ratelimit(block=True, rate=settings.LOGIN_RATELIMIT_ALL_USERS)
def login(request, template=None):
    """Full-page login view; delegates to `_login`."""
    return _login(request, template=template)
def _login(request, template=None, data=None, dont_redirect=False):
    """Shared login implementation behind `login`, `login_modal` and
    `paypal_start`.

    Wraps Django's ``auth.views.login`` with rate-limit/recaptcha handling,
    deleted/unconfirmed-account checks, 'remember me' session expiry and
    AMO's whitelisted cross-domain 'to' redirects.
    """
    data = data or {}
    # In case we need it later. See below.
    get_copy = request.GET.copy()
    logout(request)
    if 'to' in request.GET:
        request = _clean_next_url(request)
    # Show a recaptcha when the ratelimit middleware flagged the request or
    # when the previous POST already displayed one.
    limited = getattr(request, 'limited', 'recaptcha_shown' in request.POST)
    user = None
    if 'username' in request.POST:
        try:
            # We are doing all this before we try and validate the form.
            user = UserProfile.objects.get(email=request.POST['username'])
            limited = ((user.failed_login_attempts >=
                        settings.LOGIN_RATELIMIT_USER) or limited)
            # Record the attempt as failed for now; flipped to success below.
            user.log_login_attempt(request, False)
        except UserProfile.DoesNotExist:
            pass
    partial_form = partial(forms.AuthenticationForm, use_recaptcha=limited)
    r = auth.views.login(request, template_name=template,
                         redirect_field_name='to',
                         authentication_form=partial_form,
                         extra_context=data)
    if isinstance(r, http.HttpResponseRedirect):
        # Django's auth.views.login has security checks to prevent someone
        # from redirecting to another domain. Since we want to allow this in
        # certain cases, we have to make a new response object here to
        # replace the above.
        if 'domain' in request.GET:
            request.GET = get_copy
            request = _clean_next_url(request)
            r = http.HttpResponseRedirect(request.GET['to'])
        # Successful log in according to django. Now we do our checks. I do
        # the checks here instead of the form's clean() because I want to use
        # the messages framework and it's not available in the request there.
        user = request.user.get_profile()
        if user.deleted:
            # Deleted accounts must never authenticate.
            logout(request)
            log.warning(u'Attempt to log in with deleted account (%s)' % user)
            messages.error(request, _('Wrong email address or password!'))
            data.update({'form': partial_form()})
            return jingo.render(request, template, data)
        if user.confirmationcode:
            # Credentials are right but the email was never confirmed.
            logout(request)
            log.info(u'Attempt to log in with unconfirmed account (%s)'
                     % user)
            msg1 = _(u'A link to activate your user account was sent by '
                     'email to your address {0}. You have to click it before '
                     'you can log in.').format(user.email)
            url = "%s%s" % (settings.SITE_URL,
                            reverse('users.confirm.resend', args=[user.id]))
            msg2 = _('If you did not receive the confirmation email, make '
                     'sure your email service did not mark it as "junk '
                     'mail" or "spam". If you need to, you can have us '
                     '<a href="%s">resend the confirmation message</a> '
                     'to your email address mentioned above.') % url
            messages.error(request, _('Activation Email Sent'), msg1)
            messages.info(request, _('Having Trouble?'), msg2,
                          title_safe=True)
            data.update({'form': partial_form()})
            return jingo.render(request, template, data)
        rememberme = request.POST.get('rememberme', None)
        if rememberme:
            request.session.set_expiry(settings.SESSION_COOKIE_AGE)
            log.debug((u'User (%s) logged in successfully with '
                       '"remember me" set') % user)
        else:
            # NOTE(review): the success is only recorded when "remember me"
            # is off -- looks intentional but worth confirming.
            user.log_login_attempt(request, True)
    if dont_redirect:
        # We're recalling the middleware to re-initialize amo_user
        ACLMiddleware().process_request(request)
        r = jingo.render(request, template, data)
    return r
def logout(request):
    """Log the user out and redirect, firing the `logged_out` signal."""
    # Not using get_profile() because user could be anonymous.
    user = request.user
    if not user.is_anonymous():
        log.debug(u"User (%s) logged out" % user)
    auth.logout(request)
    if 'to' in request.GET:
        request = _clean_next_url(request)
    destination = request.GET.get('to') or settings.LOGOUT_REDIRECT_URL
    response = http.HttpResponseRedirect(destination)
    # Fire logged out signal so we can be decoupled from cake.
    logged_out.send(None, request=request, response=response)
    return response
def profile(request, user_id):
    """Public profile page for a user."""
    user = get_object_or_404(UserProfile, id=user_id)

    # Collections are only shown if the user opted in.
    own_coll, fav_coll = [], []
    if user.display_collections:
        own_coll = (Collection.objects.listed().filter(author=user)
                    .order_by('-created'))[:10]
    if user.display_collections_fav:
        fav_coll = (Collection.objects.listed().filter(following__user=user)
                    .order_by('-following__created'))[:10]

    edit_any_user = acl.action_allowed(request, 'Admin', 'EditAnyUser')
    own_profile = (request.user.is_authenticated() and
                   request.amo_user.id == user.id)

    addons = []
    if user.is_developer:
        addons = amo.utils.paginate(
            request, user.addons_listed.order_by('-weekly_downloads'))

    def attach_addons(reviews):
        # Bulk-fetch the add-ons for the visible reviews in one query.
        if not reviews:
            return
        qs = Addon.objects.filter(id__in=set(r.addon_id for r in reviews))
        by_id = dict((addon.id, addon) for addon in qs)
        for review in reviews:
            review.addon = by_id.get(review.addon_id)

    reviews = user.reviews.transform(attach_addons)

    data = {'profile': user, 'own_coll': own_coll, 'reviews': reviews,
            'fav_coll': fav_coll, 'edit_any_user': edit_any_user,
            'addons': addons, 'own_profile': own_profile}
    if not own_profile:
        data['abuse_form'] = AbuseForm(request=request)
    return jingo.render(request, 'users/profile.html', data)
@anonymous_csrf
def register(request):
    """Create a new user account and send the activation email.

    Already-authenticated users just get an informational message.
    """
    if request.user.is_authenticated():
        messages.info(request, _("You are already logged in to an account."))
        form = None
    elif request.method == 'POST':
        form = forms.UserRegisterForm(request.POST)
        if form.is_valid():
            try:
                u = form.save(commit=False)
                u.set_password(form.cleaned_data['password'])
                u.generate_confirmationcode()
                u.save()
                u.create_django_user()
                log.info(u"Registered new account for user (%s)", u)
                u.email_confirmation_code()
                msg = _('Congratulations! Your user account was successfully '
                        'created.')
                messages.success(request, msg)
                msg = _(u'An email has been sent to your address {0} to '
                        'confirm your account. Before you can log in, you '
                        'have to activate your account by clicking on the '
                        'link provided in this email.').format(u.email)
                messages.info(request, _('Confirmation Email Sent'), msg)
            except IntegrityError, e:
                # I was unable to reproduce this, but I suspect it happens
                # when they POST twice quickly and the slaves don't have the
                # new info yet (total guess). Anyway, I'm assuming the
                # first one worked properly, so this is still a success
                # case to the end user so we just log it...
                log.error("Failed to register new user (%s): %s" % (u, e))
            # Redirect to login even on IntegrityError (see comment above).
            return http.HttpResponseRedirect(reverse('users.login'))
        else:
            messages.error(request, _('There are errors in this form'),
                           _('Please correct them and resubmit.'))
    else:
        form = forms.UserRegisterForm()
    return jingo.render(request, 'users/register.html', {'form': form, })
@anonymous_csrf_exempt
def report_abuse(request, user_id):
    """Report a user profile for abuse."""
    user = get_object_or_404(UserProfile, pk=user_id)
    form = AbuseForm(request.POST or None, request=request)
    # Short-circuits exactly like the original: the form is only validated
    # on POST.
    if request.method != "POST" or not form.is_valid():
        return jingo.render(request, 'users/report_abuse_full.html',
                            {'profile': user, 'abuse_form': form, })
    send_abuse_report(request, user, form.cleaned_data['text'])
    messages.success(request, _('User reported.'))
    return redirect(reverse('users.profile', args=[user.pk]))
@never_cache
def password_reset_confirm(request, uidb36=None, token=None):
    """
    Pulled from django contrib so that we can add user into the form
    so then we can show relevant messages about the user.
    """
    assert uidb36 is not None and token is not None
    user = None
    try:
        user = UserProfile.objects.get(id=base36_to_int(uidb36))
    except (ValueError, UserProfile.DoesNotExist):
        pass
    validlink = (user is not None and
                 default_token_generator.check_token(user, token))
    form = None
    if validlink:
        if request.method == 'POST':
            form = forms.SetPasswordForm(user, request.POST)
            if form.is_valid():
                form.save()
                return redirect(reverse('django.contrib.auth.'
                                        'views.password_reset_complete'))
        else:
            form = forms.SetPasswordForm(user)
    return jingo.render(request, 'users/pwreset_confirm.html',
                        {'form': form, 'validlink': validlink})
@never_cache
def unsubscribe(request, hash=None, token=None, perm_setting=None):
    """
    Pulled from django contrib so that we can add user into the form
    so then we can show relevant messages about the user.
    """
    assert hash is not None and token is not None
    user = None
    try:
        email = UnsubscribeCode.parse(token, hash)
        user = UserProfile.objects.get(email=email)
    except (ValueError, UserProfile.DoesNotExist):
        pass
    unsubscribed = user is not None
    perm_settings = []
    if user is not None:
        if perm_setting:
            perm_setting = notifications.NOTIFICATIONS_BY_SHORT[perm_setting]
            UserNotification.update_or_create(
                update={'enabled': False},
                user=user, notification_id=perm_setting.id)
            perm_settings = [perm_setting]
        else:
            # TODO: make this work. nothing currently links to it, though.
            perm_settings = [l for l in notifications.NOTIFICATIONS
                             if not l.mandatory]
    return jingo.render(request, 'users/unsubscribe.html',
                        {'unsubscribed': unsubscribed,
                         'perm_settings': perm_settings})
class ContributionsFilter(BaseFilter):
    """Sort options for the marketplace purchases page."""
    opts = (('date', _lazy(u'Purchase Date')),
            ('price', _lazy(u'Price')),
            ('name', _lazy(u'Name')))

    def filter(self, field):
        """Order the base queryset by the chosen sort field.

        Returns None (like the original implicit fall-through) for an
        unknown field.
        """
        ordering = {'date': '-created',
                    'price': 'amount',
                    'name': 'addon__name'}.get(field)
        if ordering is None:
            return None
        return self.base_queryset.order_by(ordering)
@login_required
def purchases(request, addon_id=None):
    """A list of purchases that a user has made through the marketplace."""
    if not waffle.switch_is_active('marketplace'):
        raise http.Http404
    # TODO(ashort): this is where we'll need to get cunning about refunds.
    contributions = Contribution.objects.filter(user=request.amo_user,
                                                type=amo.CONTRIB_PURCHASE)
    if addon_id:
        contributions = contributions.filter(addon=addon_id)
    sort_filter = ContributionsFilter(request, contributions, key='sort',
                                      default='date')
    paginated = amo.utils.paginate(request, sort_filter.qs)
    if addon_id and not paginated.object_list:
        # User has requested a receipt for an addon they don't have.
        raise http.Http404
    return jingo.render(request, 'users/purchases.html',
                        {'purchases': paginated, 'filter': sort_filter,
                         'url_base': reverse('users.purchases'),
                         'single': bool(addon_id)})
# Start of the Support wizard all of these are accessed through the
# SupportWizard below.
def plain(request, contribution, wizard):
    """Render the template that matches the current wizard step."""
    template = wizard.tpl('%s.html' % wizard.step)
    context = {'addon': contribution.addon, 'contribution': contribution}
    return wizard.render(request, template, context)
def support_author(request, contribution, wizard):
    """Wizard step: email a support request to the add-on's author.

    :param contribution: the purchase this support request concerns.
    :param wizard: the enclosing :class:`SupportWizard` instance.
    """
    addon = contribution.addon
    # BUG FIX: bind the form only on POST ("request.POST or None"), matching
    # refund_request/refund_reason; binding an empty QueryDict on GET made
    # the form show validation errors before the user submitted anything.
    form = forms.ContactForm(request.POST or None)
    if request.method == 'POST':
        if form.is_valid():
            template = jingo.render_to_string(request,
                wizard.tpl('emails/support-request.txt'),
                context={'contribution': contribution,
                         'addon': addon, 'form': form,
                         'user': request.amo_user})
            log.info('Support request to dev. by user: %s for addon: %s' %
                     (request.amo_user.pk, addon.pk))
            # L10n: %s is the addon name.
            send_mail(_(u'New Support Request for %s' % addon.name),
                      template, request.amo_user.email,
                      [smart_str(addon.support_email)])
            return redirect(reverse('users.support',
                                    args=[contribution.pk, 'author-sent']))
    return wizard.render(request, wizard.tpl('author.html'),
                         {'addon': addon, 'form': form})
def support_mozilla(request, contribution, wizard):
    """Wizard step: email a support request to Mozilla (FLIGTAR).

    :param contribution: the purchase this support request concerns.
    :param wizard: the enclosing :class:`SupportWizard` instance.
    """
    addon = contribution.addon
    # BUG FIX: bind the form only on POST ("request.POST or None"), matching
    # refund_request/refund_reason; binding an empty QueryDict on GET made
    # the form show validation errors before the user submitted anything.
    form = forms.ContactForm(request.POST or None)
    if request.method == 'POST':
        if form.is_valid():
            template = jingo.render_to_string(request,
                wizard.tpl('emails/support-request.txt'),
                context={'addon': addon, 'form': form,
                         'contribution': contribution,
                         'user': request.amo_user})
            log.info('Support request to mozilla by user: %s for addon: %s' %
                     (request.amo_user.pk, addon.pk))
            # L10n: %s is the addon name.
            send_mail(_(u'New Support Request for %s' % addon.name),
                      template, request.amo_user.email, [settings.FLIGTAR])
            return redirect(reverse('users.support',
                                    args=[contribution.pk, 'mozilla-sent']))
    return wizard.render(request, wizard.tpl('mozilla.html'),
                         {'addon': addon, 'form': form})
def refund_request(request, contribution, wizard):
    """Wizard step: confirm the user really wants a refund."""
    addon = contribution.addon
    form = forms.RemoveForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        return redirect(reverse('users.support',
                                args=[contribution.pk, 'reason']))
    return wizard.render(request, wizard.tpl('request.html'),
                         {'addon': addon, 'form': form,
                          'contribution': contribution})
def refund_reason(request, contribution, wizard):
    """Wizard step: collect the reason for a refund and email it.

    Redirects back to the 'request' step if it wasn't completed first.
    """
    addon = contribution.addon
    if 'request' not in wizard.get_progress():
        return redirect(reverse('users.support',
                                args=[contribution.pk, 'request']))
    form = forms.ContactForm(request.POST or None)
    if request.method == 'POST':
        if form.is_valid():
            # if under 30 minutes, refund
            # TODO(ashort): add in the logic for under 30 minutes.
            template = jingo.render_to_string(request,
                wizard.tpl('emails/refund-request.txt'),
                context={'addon': addon, 'form': form,
                         'user': request.amo_user,
                         'contribution': contribution})
            log.info('Refund request sent by user: %s for addon: %s' %
                     (request.amo_user.pk, addon.pk))
            # L10n: %s is the addon name.
            send_mail(_(u'New Refund Request for %s' % addon.name),
                      template, request.amo_user.email,
                      [smart_str(addon.support_email)])
            return redirect(reverse('users.support',
                                    args=[contribution.pk, 'refund-sent']))
    # BUG FIX: the context key was misspelled 'contribut' (and carried the
    # addon); pass 'addon' and 'contribution' like the other refund steps.
    return wizard.render(request, wizard.tpl('refund.html'),
                         {'addon': addon, 'form': form,
                          'contribution': contribution})
class SupportWizard(Wizard):
    """Multi-step support/refund wizard for a marketplace purchase.

    Each step name maps to a view function that takes
    (request, contribution, wizard).
    """
    title = _lazy('Support')
    steps = SortedDict((('start', plain),
                        ('site', plain),
                        ('resources', plain),
                        ('mozilla', support_mozilla),
                        ('mozilla-sent', plain),
                        ('author', support_author),
                        ('author-sent', plain),
                        ('request', refund_request),
                        ('reason', refund_reason),
                        ('refund-sent', plain)))

    def tpl(self, x):
        """Prefix a template name with the wizard's template directory."""
        return 'users/support/%s' % x

    @property
    def wrapper(self):
        """Template that wraps every step's content."""
        return self.tpl('wrapper.html')

    @method_decorator(login_required)
    def dispatch(self, request, contribution_id, step='', *args, **kw):
        """Resolve the contribution, 404 unless it belongs to the logged-in
        user, then delegate to the step views."""
        contribution = get_object_or_404(Contribution, pk=contribution_id)
        if contribution.user.pk != request.amo_user.pk:
            raise http.Http404
        # Step views receive the contribution as their second argument.
        args = [contribution] + list(args)
        return super(SupportWizard, self).dispatch(request, step, *args, **kw)
| {
"repo_name": "jbalogh/zamboni",
"path": "apps/users/views.py",
"copies": "1",
"size": "29285",
"license": "bsd-3-clause",
"hash": -8285977884062196000,
"line_mean": 38.2560321716,
"line_max": 79,
"alpha_frac": 0.5957316032,
"autogenerated": false,
"ratio": 4.1433220147142045,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5239053617914204,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from UserDict import DictMixin
from django.db import models
from django.contrib.contenttypes.models import ContentType
from philo.utils.lazycompat import SimpleLazyObject
### AttributeMappers
class AttributeMapper(object, DictMixin):
    """
    Given an :class:`~philo.models.base.Entity` subclass instance, this class allows dictionary-style access to the :class:`~philo.models.base.Entity`'s :class:`~philo.models.base.Attribute`\ s. In order to prevent unnecessary queries, the :class:`AttributeMapper` will cache all :class:`~philo.models.base.Attribute`\ s and the associated python values when it is first accessed.

    :param entity: The :class:`~philo.models.base.Entity` subclass instance whose :class:`~philo.models.base.Attribute`\ s will be made accessible.

    """
    def __init__(self, entity):
        self.entity = entity
        # Initializes the (empty) caches as a side effect.
        self.clear_cache()

    def __getitem__(self, key):
        """Returns the ultimate python value of the :class:`~philo.models.base.Attribute` with the given ``key`` from the cache, populating the cache if necessary."""
        if not self._cache_filled:
            self._fill_cache()
        return self._cache[key]

    def __setitem__(self, key, value):
        """Given a python value, sets the value of the :class:`~philo.models.base.Attribute` with the given ``key`` to that value."""
        # Prevent circular import.
        from philo.models.base import JSONValue, ForeignKeyValue, ManyToManyValue, Attribute
        old_attr = self.get_attribute(key)
        # Reuse the existing attribute only if it belongs directly to this
        # entity (not, e.g., one inherited from an ancestor by a subclass
        # mapper); otherwise create and validate a fresh Attribute.
        if old_attr and old_attr.entity_content_type == ContentType.objects.get_for_model(self.entity) and old_attr.entity_object_id == self.entity.pk:
            attribute = old_attr
        else:
            attribute = Attribute(key=key)
            attribute.entity = self.entity
            attribute.full_clean()
        # Choose the storage class from the python type being assigned.
        if isinstance(value, models.query.QuerySet):
            value_class = ManyToManyValue
        elif isinstance(value, models.Model):
            value_class = ForeignKeyValue
        else:
            value_class = JSONValue
        attribute.set_value(value=value, value_class=value_class)
        # Keep both caches coherent with the new value.
        self._cache[key] = attribute.value.value
        self._attributes_cache[key] = attribute

    def get_attributes(self):
        """Returns an iterable of all of the :class:`~philo.models.base.Entity`'s :class:`~philo.models.base.Attribute`\ s."""
        return self.entity.attribute_set.all()

    def get_attribute(self, key, default=None):
        """Returns the :class:`~philo.models.base.Attribute` instance with the given ``key`` from the cache, populating the cache if necessary, or ``default`` if no such attribute is found."""
        if not self._cache_filled:
            self._fill_cache()
        return self._attributes_cache.get(key, default)

    def keys(self):
        """Returns the keys from the cache, first populating the cache if necessary."""
        if not self._cache_filled:
            self._fill_cache()
        return self._cache.keys()

    def items(self):
        """Returns the items from the cache, first populating the cache if necessary."""
        if not self._cache_filled:
            self._fill_cache()
        return self._cache.items()

    def values(self):
        """Returns the values from the cache, first populating the cache if necessary."""
        if not self._cache_filled:
            self._fill_cache()
        return self._cache.values()

    def _fill_cache(self):
        # Populate both caches with a bounded number of queries: one for the
        # attributes plus (lazily, on first access) one ``in_bulk`` per
        # distinct value content type.
        if self._cache_filled:
            return

        attributes = self.get_attributes()
        # value content type pk -> list of value object pks to fetch in bulk.
        value_lookups = {}
        for a in attributes:
            value_lookups.setdefault(a.value_content_type_id, []).append(a.value_object_id)
            self._attributes_cache[a.key] = a

        # value content type pk -> lazy {value pk: value instance} mapping.
        values_bulk = dict(((ct_pk, SimpleLazyObject(partial(ContentType.objects.get_for_id(ct_pk).model_class().objects.in_bulk, pks))) for ct_pk, pks in value_lookups.items()))

        cache = {}
        for a in attributes:
            # Each cached python value resolves lazily from the bulk lookup.
            cache[a.key] = SimpleLazyObject(partial(self._lazy_value_from_bulk, values_bulk, a))
            a._value_cache = cache[a.key]

        self._cache.update(cache)
        self._cache_filled = True

    def _lazy_value_from_bulk(self, bulk, attribute):
        # Look the value row up in the (lazily-fetched) bulk mapping; missing
        # rows resolve to None via the getattr default.
        v = bulk[attribute.value_content_type_id].get(attribute.value_object_id)
        return getattr(v, 'value', None)

    def clear_cache(self):
        """Clears the cache."""
        self._cache = {}
        self._attributes_cache = {}
        self._cache_filled = False
class LazyAttributeMapperMixin(object):
    """In some cases, it may be that only one attribute value needs to be fetched. In this case, it is more efficient to avoid populating the cache whenever possible. This mixin overrides the :meth:`__getitem__` and :meth:`get_attribute` methods to prevent their populating the cache. If the cache has been populated (i.e. through :meth:`keys`, :meth:`values`, etc.), then the value or attribute will simply be returned from the cache."""
    def __getitem__(self, key):
        # Fetch just this key unless the full cache already exists.
        if key not in self._cache and not self._cache_filled:
            self._add_to_cache(key)
        return self._cache[key]

    def get_attribute(self, key, default=None):
        if key not in self._attributes_cache and not self._cache_filled:
            self._add_to_cache(key)
        return self._attributes_cache.get(key, default)

    def _raw_get_attribute(self, key):
        # Single-row lookup; raises Attribute.DoesNotExist if absent.
        return self.get_attributes().get(key=key)

    def _add_to_cache(self, key):
        # Imported here to prevent a circular import.
        from philo.models.base import Attribute
        try:
            attr = self._raw_get_attribute(key)
        except Attribute.DoesNotExist:
            # Carry the missing key so dict-style callers see a standard,
            # informative KeyError (previously a bare ``raise KeyError``).
            raise KeyError(key)
        else:
            val = getattr(attr.value, 'value', None)
            self._cache[key] = val
            self._attributes_cache[key] = attr
class LazyAttributeMapper(LazyAttributeMapperMixin, AttributeMapper):
    """Lazy variant of :class:`AttributeMapper`.

    Keys already fetched into the per-key cache are excluded from the
    queryset so a later full cache fill never re-fetches them.
    """
    def get_attributes(self):
        already_cached = self._cache.keys()
        base_qs = super(LazyAttributeMapper, self).get_attributes()
        return base_qs.exclude(key__in=already_cached)
class TreeAttributeMapper(AttributeMapper):
    """The :class:`~philo.models.base.TreeEntity` class allows the inheritance of :class:`~philo.models.base.Attribute`\ s down the tree. This mapper will return the most recently declared :class:`~philo.models.base.Attribute` among the :class:`~philo.models.base.TreeEntity`'s ancestors or set an attribute on the :class:`~philo.models.base.Entity` it is attached to."""
    def get_attributes(self):
        """Returns a list of :class:`~philo.models.base.Attribute`\ s sorted by increasing parent level. When used to populate the cache, this will cause :class:`~philo.models.base.Attribute`\ s on the root to be overwritten by those on its children, etc."""
        from philo.models import Attribute
        # entity pk -> tree level, for this entity and every ancestor.
        ancestors = dict(self.entity.get_ancestors(include_self=True).values_list('pk', 'level'))
        ct = ContentType.objects.get_for_model(self.entity)
        attrs = Attribute.objects.filter(entity_content_type=ct, entity_object_id__in=ancestors.keys())
        # Shallower ancestors sort first, so deeper entities override them
        # when the cache is filled.
        return sorted(attrs, key=lambda x: ancestors[x.entity_object_id])
class LazyTreeAttributeMapper(LazyAttributeMapperMixin, TreeAttributeMapper):
    """Lazy variant of :class:`TreeAttributeMapper`: single keys are fetched on demand; a full cache fill skips keys already fetched."""
    def get_attributes(self):
        from philo.models import Attribute
        # entity pk -> tree level, for this entity and every ancestor.
        ancestors = dict(self.entity.get_ancestors(include_self=True).values_list('pk', 'level'))
        ct = ContentType.objects.get_for_model(self.entity)
        # Exclude keys already fetched lazily so the fill never re-queries.
        attrs = Attribute.objects.filter(entity_content_type=ct, entity_object_id__in=ancestors.keys()).exclude(key__in=self._cache.keys())
        return sorted(attrs, key=lambda x: ancestors[x.entity_object_id])

    def _raw_get_attribute(self, key):
        from philo.models import Attribute
        ancestors = dict(self.entity.get_ancestors(include_self=True).values_list('pk', 'level'))
        ct = ContentType.objects.get_for_model(self.entity)
        try:
            attrs = Attribute.objects.filter(entity_content_type=ct, entity_object_id__in=ancestors.keys(), key=key)
            # reverse=True puts the deepest ancestor first; its attribute
            # shadows any declared higher up the tree.
            sorted_attrs = sorted(attrs, key=lambda x: ancestors[x.entity_object_id], reverse=True)
            return sorted_attrs[0]
        except IndexError:
            # No ancestor defines this key; translate into the expected
            # DoesNotExist for the mixin's _add_to_cache.
            raise Attribute.DoesNotExist
class PassthroughAttributeMapper(AttributeMapper):
    """
    Given an iterable of :class:`Entities <philo.models.base.Entity>`, this mapper will fetch an :class:`AttributeMapper` for each one. Lookups will return the value from the first :class:`AttributeMapper` which has an entry for a given key. Assignments will be made to the first :class:`.Entity` in the iterable.

    :param entities: An iterable of :class:`.Entity` subclass instances.

    """
    def __init__(self, entities):
        self._attributes = [e.attributes for e in entities]
        # The base class (and thus assignments) targets the first entity.
        super(PassthroughAttributeMapper, self).__init__(self._attributes[0].entity)

    def _fill_cache(self):
        if self._cache_filled:
            return
        # Fill from last to first so earlier mappers overwrite later ones,
        # giving the first mapper priority on lookups.
        for a in reversed(self._attributes):
            a._fill_cache()
            self._attributes_cache.update(a._attributes_cache)
            self._cache.update(a._cache)
        self._cache_filled = True

    def get_attributes(self):
        # Not meaningful here: the wrapped mappers own the underlying data.
        raise NotImplementedError

    def clear_cache(self):
        super(PassthroughAttributeMapper, self).clear_cache()
        # Also reset every wrapped mapper, not just our merged view.
        for a in self._attributes:
            a.clear_cache()
class LazyPassthroughAttributeMapper(LazyAttributeMapperMixin, PassthroughAttributeMapper):
    """The :class:`LazyPassthroughAttributeMapper` is lazy in that it tries to avoid accessing the :class:`AttributeMapper`\ s that it uses for lookups. However, those :class:`AttributeMapper`\ s may or may not be lazy themselves."""
    def _raw_get_attribute(self, key):
        from philo.models import Attribute
        # Return the first wrapped mapper's hit for this key, in order.
        candidates = (mapper.get_attribute(key) for mapper in self._attributes)
        for attr in candidates:
            if attr is not None:
                return attr
        raise Attribute.DoesNotExist
"repo_name": "ithinksw/philo",
"path": "philo/utils/entities.py",
"copies": "1",
"size": "9092",
"license": "isc",
"hash": 5306341947505296000,
"line_mean": 41.0972222222,
"line_max": 435,
"alpha_frac": 0.7315222173,
"autogenerated": false,
"ratio": 3.460982108869433,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4692504326169433,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from .util import transitive_get as walk
from .variable import Var, var, isvar
import itertools as it
from .dispatch import dispatch
from collections import Iterator
from toolz.compatibility import iteritems, map
from toolz import assoc
################
# Reificiation #
################
@dispatch(Iterator, dict)
def _reify(t, s):
    # Lazily reify each element; the result is itself an iterator.
    return map(partial(reify, s=s), t)
    # return (reify(arg, s) for arg in t)


@dispatch(tuple, dict)
def _reify(t, s):
    # Reify elementwise via the Iterator implementation, rebuild a tuple.
    return tuple(reify(iter(t), s))


@dispatch(list, dict)
def _reify(t, s):
    # Reify elementwise via the Iterator implementation, rebuild a list.
    return list(reify(iter(t), s))


@dispatch(dict, dict)
def _reify(d, s):
    # Keys are left untouched; only the values are reified.
    return dict((k, reify(v, s)) for k, v in d.items())


@dispatch(object, dict)
def _reify(o, s):
    return o  # catch all, just return the object
def reify(e, s):
    """ Replace variables of expression with substitution

    >>> from logpy.unification import reify, var
    >>> x, y = var(), var()
    >>> e = (1, x, (3, y))
    >>> s = {x: 2, y: 4}
    >>> reify(e, s)
    (1, 2, (3, 4))

    >>> e = {1: x, 3: (y, 5)}
    >>> reify(e, s)
    {1: 2, 3: (4, 5)}
    """
    if not isvar(e):
        # Non-variables are handled by the type-dispatched reifier.
        return _reify(e, s)
    # Chase substitutions transitively; unbound variables stay as-is.
    return reify(s[e], s) if e in s else e
###############
# Unification #
###############
# Sequence types that unify elementwise, in order.
seq = tuple, list, Iterator


@dispatch(seq, seq, dict)
def _unify(u, v, s):
    # assert isinstance(u, tuple) and isinstance(v, tuple)
    # NOTE(review): len() assumes sized sequences; a bare Iterator argument
    # would raise TypeError here — confirm callers coerce first.
    if len(u) != len(v):
        return False
    for uu, vv in zip(u, v):  # avoiding recursion
        # Thread the (possibly extended) substitution through each pair.
        s = unify(uu, vv, s)
        if s is False:
            return False
    return s


@dispatch(dict, dict, dict)
def _unify(u, v, s):
    # assert isinstance(u, dict) and isinstance(v, dict)
    if len(u) != len(v):
        return False
    for key, uval in iteritems(u):
        # Keys must match exactly; only values are unified.
        if key not in v:
            return False
        s = unify(uval, v[key], s)
        if s is False:
            return False
    return s


@dispatch(object, object, dict)
def _unify(u, v, s):
    return False  # catch all
def unify(u, v, s):  # no check at the moment
    """ Find substitution so that u == v while satisfying s

    >>> from logpy.unification import unify, var
    >>> x = var('x')
    >>> unify((1, x), (1, 2), {})
    {~x: 2}
    """
    u = walk(u, s)
    v = walk(v, s)
    if u == v:
        return s
    # Bind whichever side is an unbound variable to the other side,
    # preferring u (checked first) as in the original formulation.
    for lhs, rhs in ((u, v), (v, u)):
        if isvar(lhs):
            return assoc(s, lhs, rhs)
    # Neither side is a variable: fall through to structural unification.
    return _unify(u, v, s)
| {
"repo_name": "cpcloud/logpy",
"path": "logpy/unification.py",
"copies": "1",
"size": "2420",
"license": "bsd-3-clause",
"hash": 1744162968998807800,
"line_mean": 21.6168224299,
"line_max": 59,
"alpha_frac": 0.5512396694,
"autogenerated": false,
"ratio": 2.958435207823961,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8996184625771498,
"avg_score": 0.002698050290492387,
"num_lines": 107
} |
from functools import partial
from uuid import uuid4
import transaction
def get_key_and_value(obj, value, getter=None):
    """Normalise *value* into a ``(key, value)`` pair.

    A string is treated as an attribute name: the pair is the name plus the
    value fetched from ``obj`` (via ``getter``, defaulting to ``getattr``).
    Anything else is assumed to already be a pair and is returned unchanged.
    """
    if not isinstance(value, str):
        return value
    fetch = getter or getattr
    return value, fetch(obj, value)
class CRUDBasicTest:
    """Reusable browser-level CRUD test harness for a listing page.

    Drives a splinter/selenium-style ``navigator`` through the create,
    delete and edit flows of a CRUD listing, generating model instances
    with ``obj_factory``.
    """
    def __init__(self, obj_factory, listing_page, navigator, dbsession, admin_user):
        # obj_factory: factory for the model under test (factory_boy style).
        # listing_page: page object exposing add/edit sub-pages and forms.
        # navigator: callable returning a logged-in browser driver.
        self.navigator = navigator
        self.listing_page = listing_page
        self.obj_factory = obj_factory
        self.dbsession = dbsession
        self.admin_user = admin_user

    def create(self, fields, submit_kw, user=None):
        """Build an unsaved object and submit its field values through the
        add form; returns the built object for further assertions."""
        user = user if user else self.admin_user
        navigator = self.navigator(user=user)
        # build() creates the instance without persisting it; the form
        # submission is what should create the DB row.
        obj = self.obj_factory.build()
        navigator.submit(
            self.listing_page.add_page.add_form,
            data=dict(map(partial(get_key_and_value, obj), fields)),
            **submit_kw)
        return obj

    def delete(self, user=None):
        """Persist one object, then delete it through the UI and verify the
        listing ends up empty."""
        user = user if user else self.admin_user
        with transaction.manager:
            self.obj_factory()
        navigator = self.navigator(user=user)
        navigator.navigate(self.listing_page)
        assert navigator.browser.is_text_present("Total 1 item")
        navigator.browser.find_by_css('.btn-crud-listing-delete').click()
        assert navigator.browser.is_text_present("Confirm delete")
        navigator.browser.find_by_css('#btn-delete-yes').click()
        assert navigator.browser.is_text_present("Deleted")
        navigator.navigate(self.listing_page)
        assert navigator.browser.is_text_present("No items")

    def edit(self, submit_kw, fields, user=None):
        """Persist one object, then submit fresh random values for ``fields``
        through the edit form."""
        user = user if user else self.admin_user
        with transaction.manager:
            obj = self.obj_factory()
        # Detach all instances so the form round-trip is what we test.
        self.dbsession.expunge_all()
        navigator = self.navigator(user=user)
        # Replace each field's value with a random hex token.
        data = dict(map(partial(get_key_and_value, obj, getter=lambda x, y: uuid4().hex), fields))
        navigator.submit(
            self.listing_page.edit_page.edit_form,
            obj=obj,
            data=data,
            **submit_kw)
| {
"repo_name": "enkidulan/enkiblog",
"path": "src/enkiblog/tests/utils.py",
"copies": "1",
"size": "2120",
"license": "apache-2.0",
"hash": -6928005943854535000,
"line_mean": 30.6417910448,
"line_max": 98,
"alpha_frac": 0.6297169811,
"autogenerated": false,
"ratio": 3.7789661319073082,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9907051343157803,
"avg_score": 0.00032635396990094096,
"num_lines": 67
} |
from functools import partial
from warnings import warn
import numpy as np
from menpo.transform import Translation
from .base import Image, _convert_patches_list_to_single_array
from .patches import set_patches
def pwa_point_in_pointcloud(pcloud, indices, batch_size=None):
    """
    Make sure that the decision of whether a point is inside or outside
    the PointCloud is exactly the same as how PWA calculates triangle
    containment. Then, we use the trick of setting the mask to all the
    points that were NOT outside the triangulation. Otherwise, all points
    were inside and we just return those as ``True``. In general, points
    on the boundary are counted as inside the polygon.

    Parameters
    ----------
    pcloud : :map:`PointCloud`
        The pointcloud to use for the containment test.
    indices : (d, n_dims) `ndarray`
        The list of pixel indices to test.
    batch_size : `int` or ``None``, optional
        See constrain_to_pointcloud for more information about the batch_size
        parameter.

    Returns
    -------
    mask : (d,) `bool ndarray`
        Whether each pixel index was in inside the convex hull of the
        pointcloud or not.
    """
    from menpo.transform.piecewiseaffine import PiecewiseAffine
    from menpo.transform.piecewiseaffine import TriangleContainmentError

    try:
        pwa = PiecewiseAffine(pcloud, pcloud)
        pwa.apply(indices, batch_size=batch_size)
        # No exception: every point was inside some triangle.
        # Use the builtin ``bool`` dtype: the ``np.bool`` alias was
        # deprecated in NumPy 1.20 and removed in 1.24.
        return np.ones(indices.shape[0], dtype=bool)
    except TriangleContainmentError as e:
        # The error records which points fell outside; inside = not outside.
        return ~e.points_outside_source_domain
def convex_hull_point_in_pointcloud(pcloud, indices):
    """
    Uses the matplotlib ``contains_points`` method, which in turn uses:

        "Crossings Multiply algorithm of InsideTest"
        By Eric Haines, 3D/Eye Inc, erich@eye.com
        http://erich.realtimerendering.com/ptinpoly/

    This algorithm uses a per-pixel test and thus tends to produce smoother
    edges. We also guarantee that all points inside PointCloud will be
    included by calculating the **convex hull** of the pointcloud before
    doing the point inside test.

    Points on the boundary are counted as **outside** the polygon.

    Parameters
    ----------
    pcloud : :map:`PointCloud`
        The pointcloud to use for the containment test.
    indices : (d, n_dims) `ndarray`
        The list of pixel indices to test.

    Returns
    -------
    mask : (d,) `bool ndarray`
        Whether each pixel index was in inside the convex hull of the
        pointcloud or not.
    """
    from scipy.spatial import ConvexHull
    from matplotlib.path import Path

    # Reduce to the hull's vertices (in hull order) and test against the
    # resulting polygon.
    hull = ConvexHull(pcloud.points)
    hull_polygon = pcloud.points[hull.vertices, :]
    return Path(hull_polygon).contains_points(indices)
class BooleanImage(Image):
r"""
A mask image made from binary pixels. The region of the image that is
left exposed by the mask is referred to as the 'masked region'. The
set of 'masked' pixels is those pixels corresponding to a ``True`` value in
the mask.
Parameters
----------
mask_data : ``(M, N, ..., L)`` `ndarray`
The binary mask data. Note that there is no channel axis - a 2D Mask
Image is built from just a 2D numpy array of mask_data.
Automatically coerced in to boolean values.
copy: `bool`, optional
If ``False``, the image_data will not be copied on assignment. Note that
if the array you provide is not boolean, there **will still be copy**.
In general this should only be used if you know what you are doing.
"""
def __init__(self, mask_data, copy=True):
# Add a channel dimension. We do this little reshape trick to add
# the axis because this maintains C-contiguous'ness
mask_data = mask_data.reshape((1,) + mask_data.shape)
# If we are trying not to copy, but the data we have isn't boolean,
# then unfortunately, we forced to copy anyway!
if mask_data.dtype != np.bool:
mask_data = np.array(mask_data, dtype=np.bool, copy=True,
order='C')
if not copy:
warn('The copy flag was NOT honoured. A copy HAS been made. '
'Please ensure the data you pass is C-contiguous.')
super(BooleanImage, self).__init__(mask_data, copy=copy)
@classmethod
def init_blank(cls, shape, fill=True, round='ceil', **kwargs):
r"""
Returns a blank :map:`BooleanImage` of the requested shape
Parameters
----------
shape : `tuple` or `list`
The shape of the image. Any floating point values are rounded
according to the ``round`` kwarg.
fill : `bool`, optional
The mask value to be set everywhere.
round: ``{ceil, floor, round}``, optional
Rounding function to be applied to floating point shapes.
Returns
-------
blank_image : :map:`BooleanImage`
A blank mask of the requested size
"""
from .base import round_image_shape
shape = round_image_shape(shape, round)
if fill:
mask = np.ones(shape, dtype=np.bool)
else:
mask = np.zeros(shape, dtype=np.bool)
return cls(mask, copy=False)
    @classmethod
    def init_from_channels_at_back(cls, pixels):
        r"""
        This method is not required for ``BooleanImage`` types as boolean images
        do not expect a channel axis for construction.

        Parameters
        ----------
        pixels : ``(M, N ..., Q)`` `ndarray`
            Array representing the image pixels, with NO channel axis.

        Returns
        -------
        image : :map:`BooleanImage`
            A new image from the given boolean pixels.
        """
        # No channel reordering needed; just build the mask directly.
        return cls(pixels)
    @classmethod
    def init_from_pointcloud(cls, pointcloud, group=None, boundary=0,
                             constrain=True, fill=True):
        r"""
        Create an Image that is big enough to contain the given pointcloud.
        The pointcloud will be translated to the origin and then translated
        according to its bounds in order to fit inside the new image.
        An optional boundary can be provided in order to increase the space
        around the boundary of the pointcloud. The boundary will be added
        to *all sides of the image* and so a boundary of 5 provides 10 pixels
        of boundary total for each dimension.

        By default, the mask will be constrained to the convex hull of the
        provided pointcloud.

        Parameters
        ----------
        pointcloud : :map:`PointCloud`
            Pointcloud to place inside the newly created image.
        group : `str`, optional
            If ``None``, the pointcloud will only be used to create the image.
            If a `str` then the pointcloud will be attached as a landmark
            group to the image, with the given string as key.
        boundary : `float`
            A optional padding distance that is added to the pointcloud bounds.
            Default is ``0``, meaning the max/min of tightest possible
            containing image is returned.
        constrain : `bool`, optional
            If ``True``, the ``True`` values of the mask will be constrained
            to the convex hull of the provided pointcloud. If ``False``,
            every pixel of the mask will be the value of ``fill``.
        fill : `int`, optional
            The value to fill all pixels with.

        Returns
        -------
        image : :map:`BooleanImage`
            A new mask with the same size as the given pointcloud, optionally
            with the pointcloud attached as landmarks and the mask constrained
            to the convex hull of the pointcloud.
        """
        # Translate pointcloud to the origin
        minimum = pointcloud.bounds(boundary=boundary)[0]
        origin_pc = Translation(-minimum).apply(pointcloud)
        image_shape = origin_pc.range(boundary=boundary)
        new_image = cls.init_blank(image_shape, fill=fill)
        if constrain:
            new_image = new_image.constrain_to_pointcloud(origin_pc)
        if group is not None:
            # Attach the *translated* pointcloud, so landmarks line up with
            # the new image's coordinate frame.
            new_image.landmarks[group] = origin_pc
        return new_image
    def as_masked(self, mask=None, copy=True):
        r"""
        Impossible for a :map:`BooleanImage` to be transformed to a
        :map:`MaskedImage`.

        Raises
        ------
        NotImplementedError
            Always; a mask has no underlying image data to mask.
        """
        raise NotImplementedError("as_masked cannot be invoked on a "
                                  "BooleanImage.")
    @property
    def mask(self):
        r"""
        Returns the pixels of the mask with no channel axis. This is what
        should be used to mask any k-dimensional image.

        :type: ``(M, N, ..., L)``, `bool ndarray`
        """
        # Drop the leading (length-1) channel axis added at construction.
        return self.pixels[0, ...]
    def n_true(self):
        r"""
        The number of ``True`` values in the mask.

        :type: `int`
        """
        # Booleans sum as 0/1.
        return np.sum(self.pixels)

    def n_false(self):
        r"""
        The number of ``False`` values in the mask.

        :type: `int`
        """
        return self.n_pixels - self.n_true()

    def all_true(self):
        r"""
        ``True`` iff every element of the mask is ``True``.

        :type: `bool`
        """
        return np.all(self.pixels)

    def proportion_true(self):
        r"""
        The proportion of the mask which is ``True``.

        :type: `float`
        """
        # * 1.0 forces true division (Python 2 safe).
        return (self.n_true() * 1.0) / self.n_pixels

    def proportion_false(self):
        r"""
        The proportion of the mask which is ``False``

        :type: `float`
        """
        return (self.n_false() * 1.0) / self.n_pixels
    def true_indices(self):
        r"""
        The indices of pixels that are ``True``.

        :type: ``(n_true, n_dims)`` `ndarray`
        """
        if self.all_true():
            # Everything is True: every index qualifies.
            return self.indices()
        else:
            # Ignore the channel axis
            # nonzero gives one array per dim; stack and transpose to get
            # one row per True pixel.
            return np.vstack(np.nonzero(self.pixels[0])).T
    def false_indices(self):
        r"""
        The indices of pixels that are ``False``.

        :type: ``(n_false, n_dims)`` `ndarray`
        """
        # Ignore the channel axis
        return np.vstack(np.nonzero(~self.pixels[0])).T
def __str__(self):
return ('{} {}D mask, {:.1%} '
'of which is True'.format(self._str_shape(), self.n_dims,
self.proportion_true()))
    def from_vector(self, vector, copy=True):
        r"""
        Takes a flattened vector and returns a new :map:`BooleanImage` formed
        by reshaping the vector to the correct dimensions. Note that this is
        rebuilding a boolean image **itself** from boolean values. The mask
        is in no way interpreted in performing the operation, in contrast to
        :map:`MaskedImage`, where only the masked region is used in
        :meth:`from_vector` and :meth`as_vector`. Any image landmarks are
        transferred in the process.

        Parameters
        ----------
        vector : ``(n_pixels,)`` `bool ndarray`
            A flattened vector of all the pixels of a :map:`BooleanImage`.
        copy : `bool`, optional
            If ``False``, no copy of the vector will be taken.

        Returns
        -------
        image : :map:`BooleanImage`
            New BooleanImage of same shape as this image

        Raises
        ------
        Warning
            If ``copy=False`` cannot be honored.
        """
        mask = BooleanImage(vector.reshape(self.shape), copy=copy)
        # Carry over landmarks and (if present) the source file path.
        if self.has_landmarks:
            mask.landmarks = self.landmarks
        if hasattr(self, 'path'):
            mask.path = self.path
        return mask
def invert(self):
r"""
Returns a copy of this boolean image, which is inverted.
Returns
-------
inverted : :map:`BooleanImage`
A copy of this boolean mask, where all ``True`` values are ``False``
and all ``False`` values are ``True``.
"""
inverse = self.copy()
inverse.pixels = ~self.pixels
return inverse
    def bounds_true(self, boundary=0, constrain_to_bounds=True):
        r"""
        Returns the minimum to maximum indices along all dimensions that the
        mask includes which fully surround the ``True`` mask values. In the case
        of a 2D Image for instance, the min and max define two corners of a
        rectangle bounding the True pixel values.

        Parameters
        ----------
        boundary : `int`, optional
            A number of pixels that should be added to the extent. A
            negative value can be used to shrink the bounds in.
        constrain_to_bounds: `bool`, optional
            If ``True``, the bounding extent is snapped to not go beyond
            the edge of the image. If ``False``, the bounds are left unchanged.

        Returns
        -------
        min_b : ``(D,)`` `ndarray`
            The minimum extent of the ``True`` mask region with the boundary
            along each dimension. If ``constrain_to_bounds=True``,
            is clipped to legal image bounds.
        max_b : ``(D,)`` `ndarray`
            The maximum extent of the ``True`` mask region with the boundary
            along each dimension. If ``constrain_to_bounds=True``,
            is clipped to legal image bounds.
        """
        # Bounds are the per-dimension min/max over all True pixel indices,
        # padded (or shrunk, if negative) by ``boundary``.
        mpi = self.true_indices()
        maxes = np.max(mpi, axis=0) + boundary
        mins = np.min(mpi, axis=0) - boundary
        if constrain_to_bounds:
            maxes = self.constrain_points_to_bounds(maxes)
            mins = self.constrain_points_to_bounds(mins)
        return mins, maxes
    def bounds_false(self, boundary=0, constrain_to_bounds=True):
        r"""
        Returns the minimum to maximum indices along all dimensions that the
        mask includes which fully surround the False mask values. In the case
        of a 2D Image for instance, the min and max define two corners of a
        rectangle bounding the False pixel values.

        Parameters
        ----------
        boundary : `int` >= 0, optional
            A number of pixels that should be added to the extent. A
            negative value can be used to shrink the bounds in.
        constrain_to_bounds: `bool`, optional
            If ``True``, the bounding extent is snapped to not go beyond
            the edge of the image. If ``False``, the bounds are left unchanged.

        Returns
        -------
        min_b : ``(D,)`` `ndarray`
            The minimum extent of the ``False`` mask region with the boundary
            along each dimension. If ``constrain_to_bounds=True``,
            is clipped to legal image bounds.
        max_b : ``(D,)`` `ndarray`
            The maximum extent of the ``False`` mask region with the boundary
            along each dimension. If ``constrain_to_bounds=True``,
            is clipped to legal image bounds.
        """
        # Invert and reuse bounds_true: False pixels become True pixels.
        return self.invert().bounds_true(
            boundary=boundary, constrain_to_bounds=constrain_to_bounds)
    # noinspection PyMethodOverriding
    def sample(self, points_to_sample, mode='constant', cval=False, **kwargs):
        r"""
        Sample this image at the given sub-pixel accurate points. The input
        PointCloud should have the same number of dimensions as the image e.g.
        a 2D PointCloud for a 2D multi-channel image. A numpy array will be
        returned the has the values for every given point across each channel
        of the image.

        Parameters
        ----------
        points_to_sample : :map:`PointCloud`
            Array of points to sample from the image. Should be
            `(n_points, n_dims)`
        mode : ``{constant, nearest, reflect, wrap}``, optional
            Points outside the boundaries of the input are filled according
            to the given mode.
        cval : `float`, optional
            Used in conjunction with mode ``constant``, the value outside
            the image boundaries.

        Returns
        -------
        sampled_pixels : (`n_points`, `n_channels`) `bool ndarray`
            The interpolated values taken across every channel of the image.
        """
        # enforce the order as 0, as this is boolean data, then call super
        # NOTE: any extra **kwargs are intentionally dropped here.
        return Image.sample(self, points_to_sample, order=0, mode=mode,
                            cval=cval)
    # noinspection PyMethodOverriding
    def warp_to_mask(self, template_mask, transform, warp_landmarks=True,
                     mode='constant', cval=False, batch_size=None,
                     return_transform=False):
        r"""
        Return a copy of this :map:`BooleanImage` warped into a different
        reference space.

        Note that warping into a mask is slower than warping into a full image.
        If you don't need a non-linear mask, consider warp_to_shape instead.

        Parameters
        ----------
        template_mask : :map:`BooleanImage`
            Defines the shape of the result, and what pixels should be
            sampled.
        transform : :map:`Transform`
            Transform **from the template space back to this image**.
            Defines, for each pixel location on the template, which pixel
            location should be sampled from on this image.
        warp_landmarks : `bool`, optional
            If ``True``, result will have the same landmark dictionary
            as self, but with each landmark updated to the warped position.
        mode : ``{constant, nearest, reflect or wrap}``, optional
            Points outside the boundaries of the input are filled according
            to the given mode.
        cval : `float`, optional
            Used in conjunction with mode ``constant``, the value outside
            the image boundaries.
        batch_size : `int` or ``None``, optional
            This should only be considered for large images. Setting this
            value can cause warping to become much slower, particular for
            cached warps such as Piecewise Affine. This size indicates
            how many points in the image should be warped at a time, which
            keeps memory usage low. If ``None``, no batching is used and all
            points are warped at once.
        return_transform : `bool`, optional
            This argument is for internal use only. If ``True``, then the
            :map:`Transform` object is also returned.

        Returns
        -------
        warped_image : :map:`BooleanImage`
            A copy of this image, warped.
        transform : :map:`Transform`
            The transform that was used. It only applies if
            `return_transform` is ``True``.
        """
        # enforce the order as 0, as this is boolean data, then call super
        return Image.warp_to_mask(
            self, template_mask, transform, warp_landmarks=warp_landmarks,
            order=0, mode=mode, cval=cval, batch_size=batch_size,
            return_transform=return_transform)
    # noinspection PyMethodOverriding
    def warp_to_shape(self, template_shape, transform, warp_landmarks=True,
                      mode='constant', cval=False, order=None,
                      batch_size=None, return_transform=False):
        """
        Return a copy of this :map:`BooleanImage` warped into a different
        reference space.

        Note that the order keyword argument is in fact ignored, as any order
        other than 0 makes no sense on a binary image. The keyword argument is
        present only for compatibility with the :map:`Image` warp_to_shape API.

        Parameters
        ----------
        template_shape : ``(n_dims, )`` `tuple` or `ndarray`
            Defines the shape of the result, and what pixel indices should be
            sampled (all of them).
        transform : :map:`Transform`
            Transform **from the template_shape space back to this image**.
            Defines, for each index on template_shape, which pixel location
            should be sampled from on this image.
        warp_landmarks : `bool`, optional
            If ``True``, result will have the same landmark dictionary
            as self, but with each landmark updated to the warped position.
        mode : ``{constant, nearest, reflect or wrap}``, optional
            Points outside the boundaries of the input are filled according
            to the given mode.
        cval : `float`, optional
            Used in conjunction with mode ``constant``, the value outside
            the image boundaries.
        order : `int`, optional
            Ignored; interpolation order is forced to 0 for boolean data.
        batch_size : `int` or ``None``, optional
            This should only be considered for large images. Setting this
            value can cause warping to become much slower, particular for
            cached warps such as Piecewise Affine. This size indicates
            how many points in the image should be warped at a time, which
            keeps memory usage low. If ``None``, no batching is used and all
            points are warped at once.
        return_transform : `bool`, optional
            This argument is for internal use only. If ``True``, then the
            :map:`Transform` object is also returned.

        Returns
        -------
        warped_image : :map:`BooleanImage`
            A copy of this image, warped.
        transform : :map:`Transform`
            The transform that was used. It only applies if
            `return_transform` is ``True``.
        """
        # call the super variant and get ourselves an Image back
        # note that we force the use of order=0 for BooleanImages.
        warped = Image.warp_to_shape(self, template_shape, transform,
                                     warp_landmarks=warp_landmarks, order=0,
                                     mode=mode, cval=cval,
                                     batch_size=batch_size)
        # unfortunately we can't escape copying here, let BooleanImage
        # convert us to bool
        boolean_image = BooleanImage(warped.pixels.reshape(template_shape))
        if warped.has_landmarks:
            boolean_image.landmarks = warped.landmarks
        if hasattr(warped, 'path'):
            boolean_image.path = warped.path
        # optionally return the transform
        if return_transform:
            return boolean_image, transform
        else:
            return boolean_image
def _build_warp_to_mask(self, template_mask, sampled_pixel_values,
**kwargs):
r"""
Builds the warped image from the template mask and sampled pixel values.
"""
# start from a copy of the template_mask
warped_img = template_mask.copy()
if warped_img.all_true():
# great, just reshape the sampled_pixel_values
warped_img.pixels = sampled_pixel_values.reshape(
(1,) + warped_img.shape)
else:
# we have to fill out mask with the sampled mask..
warped_img.pixels[:, warped_img.mask] = sampled_pixel_values
return warped_img
def constrain_to_landmarks(self, group=None, batch_size=None):
r"""
Returns a copy of this image whereby the ``True`` values in the image
are restricted to be equal to the convex hull around the landmarks
chosen. This is not a per-pixel convex hull, but instead relies on a
triangulated approximation. If the landmarks in question are an instance
of :map:`TriMesh`, the triangulation of the landmarks will be used in
the convex hull calculation. If the landmarks are an instance of
:map:`PointCloud`, Delaunay triangulation will be used to create a
triangulation.
Parameters
----------
group : `str`, optional
The key of the landmark set that should be used. If ``None``,
and if there is only one set of landmarks, this set will be used.
batch_size : `int` or ``None``, optional
This should only be considered for large images. Setting this value
will cause constraining to become much slower. This size indicates
how many points in the image should be checked at a time, which
keeps memory usage low. If ``None``, no batching is used and all
points are checked at once.
Returns
-------
constrained : :map:`BooleanImage`
The new boolean image, constrained by the given landmark group.
"""
return self.constrain_to_pointcloud(self.landmarks[group].lms,
batch_size=batch_size)
def constrain_to_pointcloud(self, pointcloud, batch_size=None,
point_in_pointcloud='pwa'):
r"""
Returns a copy of this image whereby the ``True`` values in the image
are restricted to be equal to the convex hull around a pointcloud. The
choice of whether a pixel is inside or outside of the pointcloud is
determined by the ``point_in_pointcloud`` parameter. By default a
Piecewise Affine transform is used to test for containment, which is
useful when aligning images by their landmarks. Triangluation will be
decided by Delauny - if you wish to customise it, a :map:`TriMesh`
instance can be passed for the ``pointcloud`` argument. In this case,
the triangulation of the Trimesh will be used to define the retained
region.
For large images, a faster and pixel-accurate method can be used (
'convex_hull'). Here, there is no specialization for
:map:`TriMesh` instances. Alternatively, a callable can be provided to
override the test. By default, the provided implementations are only
valid for 2D images.
Parameters
----------
pointcloud : :map:`PointCloud` or :map:`TriMesh`
The pointcloud of points that should be constrained to. See
`point_in_pointcloud` for how in some cases a :map:`TriMesh` may be
used to control triangulation.
batch_size : `int` or ``None``, optional
This should only be considered for large images. Setting this value
will cause constraining to become much slower. This size indicates
how many points in the image should be checked at a time, which
keeps memory usage low. If ``None``, no batching is used and all
points are checked at once. By default, this is only used for
the 'pwa' point_in_pointcloud choice.
point_in_pointcloud : {'pwa', 'convex_hull'} or `callable`
The method used to check if pixels in the image fall inside the
``pointcloud`` or not. If 'pwa', Menpo's :map:`PiecewiseAffine`
transform will be used to test for containment. In this case
``pointcloud`` should be a :map:`TriMesh`. If it isn't, Delauny
triangulation will be used to first triangulate ``pointcloud`` into
a :map:`TriMesh` before testing for containment.
If a callable is passed, it should take two parameters,
the :map:`PointCloud` to constrain with and the pixel locations
((d, n_dims) ndarray) to test and should return a (d, 1) boolean
ndarray of whether the pixels were inside (True) or outside (False)
of the :map:`PointCloud`.
Returns
-------
constrained : :map:`BooleanImage`
The new boolean image, constrained by the given pointcloud.
Raises
------
ValueError
If the image is not 2D and a default implementation is chosen.
ValueError
If the chosen ``point_in_pointcloud`` is unknown.
"""
copy = self.copy()
if point_in_pointcloud in {'pwa', 'convex_hull'} and self.n_dims != 2:
raise ValueError('Can only constrain mask on 2D images with the '
'default point_in_pointcloud implementations.'
'Please provide a custom callable for calculating '
'the new mask in this '
'{}D image'.format(self.n_dims))
if point_in_pointcloud == 'pwa':
point_in_pointcloud = partial(pwa_point_in_pointcloud,
batch_size=batch_size)
elif point_in_pointcloud == 'convex_hull':
point_in_pointcloud = convex_hull_point_in_pointcloud
elif not callable(point_in_pointcloud):
# Not a function, or a string, so we have an error!
raise ValueError('point_in_pointcloud must be a callable that '
'take two arguments: the Menpo PointCloud as a '
'boundary and the ndarray of pixel indices '
'to test. {} is an unknown option.'.format(
point_in_pointcloud))
# Only consider indices inside the bounding box of the PointCloud
bounds = pointcloud.bounds()
# Convert to integer to try and reduce boundary fp rounding errors.
bounds = [b.astype(np.int) for b in bounds]
indices = copy.indices()
# This loop is to ensure the code is multi-dimensional
for k in range(self.n_dims):
indices = indices[indices[:, k] >= bounds[0][k], :]
indices = indices[indices[:, k] <= bounds[1][k], :]
# Due to only testing bounding box indices, make sure the mask starts
# off as all False
copy.pixels[:] = False
# slice(0, 1) because we know we only have 1 channel
# Slice all the channels, only inside the bounding box (for setting
# the new mask values).
all_channels = [slice(0, 1)]
slices = all_channels + [slice(bounds[0][k], bounds[1][k] + 1)
for k in range(self.n_dims)]
copy.pixels[slices].flat = point_in_pointcloud(pointcloud, indices)
return copy
def set_patches(self, patches, patch_centers, offset=None,
offset_index=None):
r"""
Set the values of a group of patches into the correct regions in a copy
of this image. Given an array of patches and a set of patch centers,
the patches' values are copied in the regions of the image that are
centred on the coordinates of the given centers.
The patches argument can have any of the two formats that are returned
from the `extract_patches()` and `extract_patches_around_landmarks()`
methods. Specifically it can be:
1. ``(n_center, n_offset, self.n_channels, patch_shape)`` `ndarray`
2. `list` of ``n_center * n_offset`` :map:`Image` objects
Currently only 2D images are supported.
Parameters
----------
patches : `ndarray` or `list`
The values of the patches. It can have any of the two formats that
are returned from the `extract_patches()` and
`extract_patches_around_landmarks()` methods. Specifically, it can
either be an ``(n_center, n_offset, self.n_channels, patch_shape)``
`ndarray` or a `list` of ``n_center * n_offset`` :map:`Image`
objects.
patch_centers : :map:`PointCloud`
The centers to set the patches around.
offset : `list` or `tuple` or ``(1, 2)`` `ndarray` or ``None``, optional
The offset to apply on the patch centers within the image.
If ``None``, then ``(0, 0)`` is used.
offset_index : `int` or ``None``, optional
The offset index within the provided `patches` argument, thus the
index of the second dimension from which to sample. If ``None``,
then ``0`` is used.
Raises
------
ValueError
If image is not 2D
ValueError
If offset does not have shape (1, 2)
Returns
-------
new_image : :map:`BooleanImage`
A new boolean image where the provided patch locations have been
set to the provided values.
"""
# parse arguments
if self.n_dims != 2:
raise ValueError('Only two dimensional patch insertion is '
'currently supported.')
if offset is None:
offset = np.zeros([1, 2], dtype=np.intp)
elif isinstance(offset, tuple) or isinstance(offset, list):
offset = np.asarray([offset])
offset = np.require(offset, dtype=np.intp)
if not offset.shape == (1, 2):
raise ValueError('The offset must be a tuple, a list or a '
'numpy.array with shape (1, 2).')
if offset_index is None:
offset_index = 0
# if patches is a list, convert it to array
if isinstance(patches, list):
patches = _convert_patches_list_to_single_array(
patches, patch_centers.n_points)
copy = self.copy()
# convert pixels to uint8 so that they get recognized by cython
tmp_pixels = copy.pixels.astype(np.uint8)
# convert patches to uint8 as well and set them to pixels
set_patches(patches.astype(np.uint8), tmp_pixels, patch_centers.points,
offset, offset_index)
# convert pixels back to bool
copy.pixels = tmp_pixels.astype(np.bool)
return copy
| {
"repo_name": "yuxiang-zhou/menpo",
"path": "menpo/image/boolean.py",
"copies": "1",
"size": "33587",
"license": "bsd-3-clause",
"hash": 7245314587886942000,
"line_mean": 41.4614412137,
"line_max": 80,
"alpha_frac": 0.5974633043,
"autogenerated": false,
"ratio": 4.470517769200053,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5567981073500053,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from warnings import warn
from twisted.python import log
from twisted.python.failure import Failure
from txosc import async
from txosc import osc
from txosc import dispatch
from bl.player import Player, snd
def fallback(message, address):
    # Log any OSC message that no registered handler claimed.
    text = '[fallback] %s %s' % (message, address)
    log.msg(text)
class MessageSender(object):
"""
Example Usage:
client = txosc.async.DatagramClientProtocol()
clientPort = reactor.listenUDP(0, client)
sender = MessageSender(client, '127.0.0.1', 17779)
sender.send('/foo', 10000)
"""
def __init__(self, client, host='127.0.0.1', port=17779):
self.client = client
self.host = host
self.port = port
if isinstance(client, async.ClientFactory):
self._send = self._tcp_send
if isinstance(client, async.DatagramClientProtocol):
self._send = self._udp_send
def send(self, address, *args):
message = osc.Message(address, *args)
self._send(message)
def _udp_send(self, element):
self.client.send(element, (self.host, self.port))
def _tcp_send(self, element):
self.client.send(element)
class Play(object):
    """Plays one of a fixed set of notes on an instrument via a bl Player.

    `callbacks()` yields one callable per note, each of which selects that
    note and triggers the player when invoked with a truthy on/off flag.
    """

    def __init__(self, instr, notes, velocity, stop=lambda: None, clock=None):
        self.notes = notes
        # Initialise the selection so the note generator is valid even if it
        # is advanced before play() has ever been called (previously this
        # raised AttributeError).
        self.index = 0
        self.player = Player(instr, snd(self._iternotes()), velocity=velocity,
                             stop=stop, clock=clock)

    def _iternotes(self):
        # Endlessly yield the currently selected note.
        while 1:
            yield self.notes[self.index]

    def play(self, index, on_off):
        """Select note `index` and trigger the player on a press event."""
        if on_off:
            self.index = index
            self.player.play()

    def callbacks(self):
        """Return one play callback per note, with its index pre-bound."""
        cbs = []
        for index in range(len(self.notes)):
            cbs.append(partial(self.play, index))
        return cbs
###########################################
# NOTE
# Everything under this note is deprecated.
class AbstractDispatcher(object):
    """Deprecated base class that fans a handled OSC message out to a list
    of listener callables, applying an optional value transform."""

    # Default OSC address; subclasses override this.
    address = None

    def __init__(self, address=None, transform=lambda v: v):
        warn('AbstractDispatcher is deprecated for rilz')
        self._listeners = []
        self.address = address or self.address
        self._transform = transform

    def listen(self, handler):
        """Register a callable to receive dispatched values."""
        self._listeners.append(handler)

    def unlisten(self, handler):
        """Remove every registration of `handler`."""
        while handler in self._listeners:
            self._listeners.remove(handler)

    def dispatch(self, *a, **k):
        """Call every listener; log and continue on listener errors."""
        for handler in self._listeners:
            try:
                handler(*a, **k)
            # 'except ... as e' works on Python 2.6+ and is required on
            # Python 3; the old 'except Exception, e' form is a SyntaxError
            # on Python 3.
            except Exception as e:
                f = Failure(e)
                f.printTraceback()
                log.err(e)

    def __call__(self):
        return self
class TouchDispatcher(AbstractDispatcher):
    """Dispatch two-argument touch messages as transformed (x, y) floats."""

    address = "touch"

    def handle(self, message, address):
        log.msg('[TouchDispatcher.handle] %s, %s, %s' % (
            message, message.arguments, address))
        try:
            x, y = message.arguments
            self.dispatch(self._transform(float(x)), self._transform(float(y)))
        # Python 3 compatible exception syntax (also valid on 2.6+).
        except Exception as e:
            log.msg('[TouchDispatcher.handle] error', e)
class BoolDispatcher(AbstractDispatcher):
    """Dispatch the raw value of a single-argument on/off style message."""

    def handle(self, message, address):
        value = message.arguments[0].value
        self.dispatch(value)
class FloatDispatcher(AbstractDispatcher):
    """Dispatch a single-argument message as one transformed float."""

    def handle(self, message, address):
        try:
            (v,) = message.arguments
            self.dispatch(self._transform(float(v)))
        # Python 3 compatible exception syntax (also valid on 2.6+).
        except Exception as e:
            f = Failure(e)
            f.printTraceback()
            log.msg('[FloatDispatcher.handle] error', e)
class Float2Dispatcher(AbstractDispatcher):
    """Dispatch a two-argument message as two transformed floats."""

    def handle(self, message, address):
        try:
            (v1, v2) = message.arguments
            self.dispatch(self._transform(float(v1)),
                          self._transform(float(v2)))
        # Python 3 compatible exception syntax (also valid on 2.6+).
        except Exception as e:
            log.msg('[Float2Dispatcher.handle] error', e)
class Float3Dispatcher(AbstractDispatcher):
    """Dispatch a three-argument message as three transformed floats."""

    def handle(self, message, address):
        try:
            (v1, v2, v3) = message.arguments
            self.dispatch(
                self._transform(float(v1)),
                self._transform(float(v2)),
                self._transform(float(v3)))
        # Python 3 compatible exception syntax (also valid on 2.6+).
        except Exception as e:
            log.msg('[Float3Dispatcher.handle] error', e)
class DispatcherHub(object):
    """Owns a txosc Receiver and wires dispatchers onto their addresses."""

    def __init__(self, *dispatchers, **kw):
        self.receiver = kw.get('receiver', None) or dispatch.Receiver()
        self.receiver.fallback = self.fallback
        self._addresses = {}
        for d in dispatchers:
            self.addDispatcher(d)

    def addDispatcher(self, dispatcher):
        """Register `dispatcher`; its address must not already be taken."""
        if dispatcher.address in self._addresses:
            raise ValueError('Dispatcher with address %s already added' %
                             dispatcher.address)
        self._addresses[dispatcher.address] = dispatcher
        # Receiver callbacks must be rooted at '/'; prepend it if missing.
        callback_address = dispatcher.address
        if callback_address[0] != '/':
            callback_address = '/' + callback_address
        self.receiver.addCallback(callback_address, dispatcher.handle)

    def fallback(self, message, address):
        log.msg('[fallback] %s %s' % (message, address))

    def __getitem__(self, address):
        return self._addresses[address]
# Some generic device representations
class Device(object):
    # Marker base class for the input-device state holders defined below.
    pass
class Accelerometer(Device):
    """Holds the most recently received pitch/roll/yaw readings."""

    pitch = 0
    roll = 0
    yaw = 0

    def on_pitch(self, value):
        # Record the latest pitch reading.
        self.pitch = value

    def on_roll(self, value):
        # Record the latest roll reading.
        self.roll = value

    def on_yaw(self, value):
        # Record the latest yaw reading.
        self.yaw = value
class TouchPad(Device):
    """Holds the most recent touch position on a fixed-size pad."""

    # Pad extent in device units.
    max_x = 640
    max_y = 480
    x = 0
    y = 0

    def on_x(self, value):
        # Record the latest horizontal position.
        self.x = value

    def on_y(self, value):
        # Record the latest vertical position.
        self.y = value
| {
"repo_name": "djfroofy/beatlounge",
"path": "bl/osc/base.py",
"copies": "1",
"size": "5700",
"license": "mit",
"hash": -9178537381184382000,
"line_mean": 24.7918552036,
"line_max": 79,
"alpha_frac": 0.5798245614,
"autogenerated": false,
"ratio": 4.0311173974540315,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5110941958854032,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from warnings import warn
import networkx as nx
import numpy as np
import scipy.sparse as sp
from scipy.sparse.sputils import isdense
from sklearn.metrics import make_scorer
from sklearn.exceptions import UndefinedMetricWarning
from sklearn.utils.sparsefuncs import count_nonzero
def hierarchical_f_measure(tr, y_true, y_pred):
    """
    Calculate hierarchical f-measure: precision and recall computed over the
    union of each sample's labels with all of their thesaurus ancestors
    (the labels themselves included, the root excluded), averaged over
    samples.

    Parameters
    ----------
    tr: ThesaursReader
        The thesaurus.
    y_true: {sparse matrix, array-like}
        The true labels
    y_pred: {sparse matrix, array-like}
        The predicted labels

    Returns
    -------
    float
        The hierarchical f_measure
    """
    graph = tr.nx_graph
    root = tr.nx_root
    if not sp.issparse(y_true):
        y_true = sp.coo_matrix(y_true)
        y_pred = sp.coo_matrix(y_pred)

    def ancestor_closure(row):
        # The row's labels plus every ancestor in the hierarchy, minus root.
        closure = set.union(
            set(row.indices),
            *[nx.ancestors(graph, index) for index in row.indices])
        closure.discard(root)
        return closure

    label_scores = []
    for i in range(y_true.shape[0]):
        true_ancestors = ancestor_closure(y_true.getrow(i))
        pred_ancestors = ancestor_closure(y_pred.getrow(i))
        intersection = len(pred_ancestors & true_ancestors)
        try:
            p = intersection / len(pred_ancestors)
            r = intersection / len(true_ancestors)
            label_scores.append(2 * p * r / (p + r))
        except ZeroDivisionError:
            warn('F_score is ill-defined and being set to 0.0 on samples with no predicted labels',
                 UndefinedMetricWarning, stacklevel=2)
            label_scores.append(0)
    return np.mean(label_scores)
def hierarchical_f_measure_scorer(graph):
    """Return an sklearn scorer wrapping hierarchical_f_measure for `graph`."""
    bound_measure = partial(hierarchical_f_measure, graph)
    return make_scorer(bound_measure)
def f1_per_sample(y_true, y_pred):
    """Return an array with one F1 score per sample (row)."""
    # Work on CSR matrices so per-row nonzero counting is cheap.
    if isdense(y_true) or isdense(y_pred):
        y_true = sp.csr_matrix(y_true)
        y_pred = sp.csr_matrix(y_pred)
    axis = 1
    agreement = y_true.multiply(y_pred)
    tp_sum = count_nonzero(agreement, axis=axis)
    pred_sum = count_nonzero(y_pred, axis=axis)
    true_sum = count_nonzero(y_true, axis=axis)
    with np.errstate(divide='ignore', invalid='ignore'):
        precision = _prf_divide(tp_sum, pred_sum)
        recall = _prf_divide(tp_sum, true_sum)
        f_score = (2 * precision * recall / (1 * precision + recall))
        # Rows with no true positives get a zero score by definition.
        f_score[tp_sum == 0] = 0.0
    return f_score
def _prf_divide(numerator, denominator):
result = numerator / denominator
mask = denominator == 0.0
if not np.any(mask):
return result
# remove infs
result[mask] = 0.0
return result
| {
"repo_name": "quadflor/Quadflor",
"path": "Code/lucid_ml/utils/metrics.py",
"copies": "1",
"size": "2971",
"license": "bsd-3-clause",
"hash": -5809228757760281000,
"line_mean": 32.3820224719,
"line_max": 118,
"alpha_frac": 0.6469202289,
"autogenerated": false,
"ratio": 3.562350119904077,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47092703488040766,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from warnings import warn
import numpy as np
from pandas import Series, DataFrame
from pandas.core.indexing import _NDFrameIndexer
from pandas.util.decorators import cache_readonly
import pyproj
from shapely.geometry import box, shape, Polygon, Point
from shapely.geometry.collection import GeometryCollection
from shapely.geometry.base import BaseGeometry
from shapely.ops import transform
from geopandas.plotting import plot_series
from geopandas.base import GeoPandasBase
# True when running against old pandas (< 0.13), where Series subclassed
# ndarray; several code paths below shim around that difference.
OLD_PANDAS = issubclass(Series, np.ndarray)
def _is_empty(x):
try:
return x.is_empty
except:
return False
def _convert_array_args(args):
    # A single bare geometry is wrapped in a list so the Series constructor
    # treats it as data rather than trying to iterate it.
    if len(args) == 1 and isinstance(args[0], BaseGeometry):
        return ([args[0]],)
    return args
class _CoordinateIndexer(_NDFrameIndexer):
    """ Indexing by coordinate slices """

    def _getitem_tuple(self, tup):
        """Return the geometries intersecting the (x, y) coordinate window."""
        obj = self.obj
        xs, ys = tup
        # handle numeric values as x and/or y coordinate index
        if type(xs) is not slice:
            xs = slice(xs, xs)
        if type(ys) is not slice:
            ys = slice(ys, ys)
        # don't know how to handle step; should this raise?
        if xs.step is not None or ys.step is not None:
            warn("Ignoring step - full interval is used.")
        xmin, ymin, xmax, ymax = obj.total_bounds
        # Compare against None explicitly: a bound of 0 is a legitimate
        # coordinate. The previous ``xs.start or xmin`` idiom silently
        # replaced 0 with the total-bounds value.
        bbox = box(xs.start if xs.start is not None else xmin,
                   ys.start if ys.start is not None else ymin,
                   xs.stop if xs.stop is not None else xmax,
                   ys.stop if ys.stop is not None else ymax)
        idx = obj.intersects(bbox)
        return obj[idx]
class GeoSeries(GeoPandasBase, Series):
    """A Series object designed to store shapely geometry objects."""

    # Attribute names propagated between instances by __finalize__.
    _metadata = ['name', 'crs']

    def __new__(cls, *args, **kwargs):
        # ``crs`` is consumed by __init__; it must not reach Series.__new__.
        kwargs.pop('crs', None)
        if OLD_PANDAS:
            # Old pandas: Series is an ndarray subclass, so construction
            # happens in __new__ and the args need geometry wrapping here.
            args = _convert_array_args(args)
            arr = Series.__new__(cls, *args, **kwargs)
        else:
            arr = Series.__new__(cls)
        if type(arr) is GeoSeries:
            return arr
        else:
            return arr.view(GeoSeries)

    def __init__(self, *args, **kwargs):
        if not OLD_PANDAS:
            args = _convert_array_args(args)
        crs = kwargs.pop('crs', None)
        super(GeoSeries, self).__init__(*args, **kwargs)
        self.crs = crs
        # Any cached spatial index is stale for a freshly built series.
        self._invalidate_sindex()

    def append(self, *args, **kwargs):
        return self._wrapped_pandas_method('append', *args, **kwargs)

    @property
    def geometry(self):
        # A GeoSeries is its own geometry column.
        return self

    @classmethod
    def from_file(cls, filename, **kwargs):
        """
        Alternate constructor to create a GeoSeries from a file

        Parameters
        ----------
        filename : str
            File path or file handle to read from. Depending on which kwargs
            are included, the content of filename may vary, see:
            http://toblerity.github.io/fiona/README.html#usage
            for usage details.
        kwargs : key-word arguments
            These arguments are passed to fiona.open, and can be used to
            access multi-layer data, data stored within archives (zip files),
            etc.
        """
        # Imported lazily so fiona is only required when reading files.
        import fiona
        geoms = []
        with fiona.open(filename, **kwargs) as f:
            crs = f.crs
            for rec in f:
                geoms.append(shape(rec['geometry']))
        g = GeoSeries(geoms)
        g.crs = crs
        return g

    @property
    def __geo_interface__(self):
        """Returns a GeoSeries as a python feature collection
        """
        from geopandas import GeoDataFrame
        return GeoDataFrame({'geometry': self}).__geo_interface__

    def to_file(self, filename, driver="ESRI Shapefile", **kwargs):
        # Write via a single-column GeoDataFrame so all drivers are reusable.
        from geopandas import GeoDataFrame
        data = GeoDataFrame({"geometry": self,
                             "id": self.index.values},
                            index=self.index)
        data.crs = self.crs
        data.to_file(filename, driver, **kwargs)

    #
    # Implement pandas methods
    #

    @property
    def _constructor(self):
        # pandas uses this to rebuild instances after slicing/operations.
        return GeoSeries

    def _wrapped_pandas_method(self, mtd, *args, **kwargs):
        """Wrap a generic pandas method to ensure it returns a GeoSeries"""
        val = getattr(super(GeoSeries, self), mtd)(*args, **kwargs)
        if type(val) == Series:
            # Re-brand the plain Series result and carry the CRS across.
            val.__class__ = GeoSeries
            val.crs = self.crs
            val._invalidate_sindex()
        return val

    def __getitem__(self, key):
        return self._wrapped_pandas_method('__getitem__', key)

    def sort_index(self, *args, **kwargs):
        return self._wrapped_pandas_method('sort_index', *args, **kwargs)

    def take(self, *args, **kwargs):
        return self._wrapped_pandas_method('take', *args, **kwargs)

    def select(self, *args, **kwargs):
        return self._wrapped_pandas_method('select', *args, **kwargs)

    @property
    def _can_hold_na(self):
        # Missing geometries are modelled as empty geometries, not NaN.
        return False

    def __finalize__(self, other, method=None, **kwargs):
        """ propagate metadata from other to self """
        # NOTE: backported from pandas master (upcoming v0.13)
        for name in self._metadata:
            object.__setattr__(self, name, getattr(other, name, None))
        return self

    def copy(self, order='C'):
        """
        Make a copy of this GeoSeries object

        Parameters
        ----------
        deep : boolean, default True
            Make a deep copy, i.e. also copy data

        Returns
        -------
        copy : GeoSeries
        """
        # FIXME: this will likely be unnecessary in pandas >= 0.13
        return GeoSeries(self.values.copy(order), index=self.index,
                         name=self.name).__finalize__(self)

    def isnull(self):
        """Null values in a GeoSeries are represented by empty geometric objects"""
        non_geo_null = super(GeoSeries, self).isnull()
        val = self.apply(_is_empty)
        return np.logical_or(non_geo_null, val)

    def fillna(self, value=None, method=None, inplace=False,
               **kwargs):
        """Fill NA/NaN values with a geometry (empty polygon by default).

        "method" is currently not implemented for pandas <= 0.12.
        """
        if value is None:
            # An empty Point is the geometric equivalent of "missing".
            value = Point()
        if not OLD_PANDAS:
            return super(GeoSeries, self).fillna(value=value, method=method,
                                                 inplace=inplace, **kwargs)
        else:
            # FIXME: this is an ugly way to support pandas <= 0.12
            if method is not None:
                raise NotImplementedError('Fill method is currently not implemented for GeoSeries')
            if isinstance(value, BaseGeometry):
                result = self.copy() if not inplace else self
                mask = self.isnull()
                result[mask] = value
                if not inplace:
                    return GeoSeries(result)
            else:
                raise ValueError('Non-geometric fill values not allowed for GeoSeries')

    def align(self, other, join='outer', level=None, copy=True,
              fill_value=None, **kwargs):
        # Align like pandas, but fill holes with empty geometries, not NaN.
        if fill_value is None:
            fill_value = Point()
        left, right = super(GeoSeries, self).align(other, join=join,
                                                   level=level, copy=copy,
                                                   fill_value=fill_value,
                                                   **kwargs)
        if isinstance(other, GeoSeries):
            return GeoSeries(left), GeoSeries(right)
        else:  # It is probably a Series, let's keep it that way
            return GeoSeries(left), right

    def __contains__(self, other):
        """Allow tests of the form "geom in s"

        Tests whether a GeoSeries contains a geometry.

        Note: This is not the same as the geometric method "contains".
        """
        if isinstance(other, BaseGeometry):
            return np.any(self.geom_equals(other))
        else:
            return False

    def plot(self, *args, **kwargs):
        return plot_series(self, *args, **kwargs)

    #
    # Additional methods
    #

    def to_crs(self, crs=None, epsg=None):
        """Transform geometries to a new coordinate reference system

        This method will transform all points in all objects. It has
        no notion or projecting entire geometries. All segments
        joining points are assumed to be lines in the current
        projection, not geodesics. Objects crossing the dateline (or
        other projection boundary) will have undesirable behavior.
        """
        from fiona.crs import from_epsg
        if self.crs is None:
            raise ValueError('Cannot transform naive geometries. '
                             'Please set a crs on the object first.')
        if crs is None:
            try:
                crs = from_epsg(epsg)
            except TypeError:
                raise TypeError('Must set either crs or epsg for output.')
        proj_in = pyproj.Proj(preserve_units=True, **self.crs)
        proj_out = pyproj.Proj(preserve_units=True, **crs)
        # Reproject every vertex of every geometry.
        project = partial(pyproj.transform, proj_in, proj_out)
        result = self.apply(lambda geom: transform(project, geom))
        result.__class__ = GeoSeries
        result.crs = crs
        result._invalidate_sindex()
        return result

    #
    # Implement standard operators for GeoSeries
    #

    def __xor__(self, other):
        """Implement ^ operator as for builtin set type"""
        return self.symmetric_difference(other)

    def __or__(self, other):
        """Implement | operator as for builtin set type"""
        return self.union(other)

    def __and__(self, other):
        """Implement & operator as for builtin set type"""
        return self.intersection(other)

    def __sub__(self, other):
        """Implement - operator as for builtin set type"""
        return self.difference(other)
# Register the `.cx` coordinate-slice indexer on GeoSeries.
GeoSeries._create_indexer('cx', _CoordinateIndexer)
| {
"repo_name": "koldunovn/geopandas",
"path": "geopandas/geoseries.py",
"copies": "8",
"size": "10052",
"license": "bsd-3-clause",
"hash": 1533804864181520400,
"line_mean": 32.9594594595,
"line_max": 99,
"alpha_frac": 0.5748109829,
"autogenerated": false,
"ratio": 4.310463121783877,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8885274104683878,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from webhelpers import paginate
from sapyens.crud.helpers import get_fields_from_model, Query
class Grid(object):
    """Renders a paginated, filterable table for a SQLAlchemy model."""

    template_helpers = 'sapyens.crud:templates/grid.mako'

    def __init__(self, model, fields_to_display):
        # ``object.__init__`` takes no extra arguments: the previous
        # ``super(Grid, self).__init__(model, fields_to_display)`` call
        # raised TypeError, ``self._model`` was never assigned, and the
        # ``fields_to_display`` parameter was silently ignored.
        self._model = model
        self.mapping = get_fields_from_model(self._model)
        if fields_to_display:
            self.fields_to_display = fields_to_display
        else:
            # Preserve a class-level override if a subclass defines one;
            # otherwise fall back to all mapped fields.
            self.fields_to_display = getattr(
                self, 'fields_to_display', list(self.mapping.keys()))

    def get_template(self):
        """Return the mako template asset spec for rendering the grid."""
        assert self.template_helpers
        return self.template_helpers

    def render_field(self, name, obj):
        """Render one cell: the attribute value, or 'nan' for unknown fields."""
        field = self.mapping.get(name)
        if field is not None:
            return getattr(obj, name)  # TODO: more fun there
        else:
            return 'nan'

    def __call__(self, request):
        """Build the template context (items page, grid, params) for a request."""
        params = Query(request.GET)
        true_query = params.get_alchemy_query(self._model.query, self.mapping)
        query = self.as_page(true_query, request, params=params,
                             pagenum=params.page)
        return {
            'items': query,
            'grid': self,
            'params': params,
        }

    # Helpers to paginator.
    def page_generator(self, request, params, page):
        """ This is for lambda """
        url = params.get_page(request, page)
        return url

    def as_page(self, query, request, pagenum, params):
        """Wrap `query` in a webhelpers Page with request-aware page URLs."""
        url_generator = partial(self.page_generator, request=request,
                                params=params)
        page = paginate.Page(query, page=pagenum, url=url_generator)
        return page
| {
"repo_name": "fillest/sapyens",
"path": "src/sapyens/crud/grid.py",
"copies": "1",
"size": "1589",
"license": "apache-2.0",
"hash": 5344109330694784000,
"line_mean": 34.3111111111,
"line_max": 97,
"alpha_frac": 0.6060415356,
"autogenerated": false,
"ratio": 3.7388235294117647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4844865065011764,
"avg_score": null,
"num_lines": null
} |
from _functools import partial
from wpilib.timer import Timer
class Toggle:
    """Utility class for button toggle

    Usage:
        foo = Toggle(joystick, 3)
        if foo:
            toggleFunction()
        if foo.on:
            onToggle()
        if foo.off:
            offToggle()
    """

    class _SteadyDebounce:
        '''
        Similar to ButtonDebouncer, but the output stays steady for
        the given periodic_filter. E.g, if you set the period to 2
        and press the button, the value will return true for 2 seconds.

        Steady debounce will return true for the given period, allowing it to be
        used with Toggle
        '''

        def __init__(self, joystick, button, period=0.5):
            '''
            :param joystick: Joystick object
            :type  joystick: :class:`wpilib.Joystick`
            :param button: Number of button to retrieve
            :type  button: int
            :param period: Period of time (in seconds) to wait before allowing new button
                           presses. Defaults to 0.5 seconds.
            :type  period: float
            '''
            self.joystick = joystick
            self.button = button
            self.debounce_period = float(period)
            self.latest = - self.debounce_period  # Negative latest prevents get from returning true until joystick is presed for the first time
            # Holds the Timer *class* itself; getFPGATimestamp is invoked on
            # it directly below - presumably a static method, TODO confirm
            # against the wpilib API.
            self.timer = Timer
            self.enabled = False

        def set_debounce_period(self, period):
            '''Set number of seconds to hold return value'''
            self.debounce_period = float(period)

        def get(self):
            '''Returns the value of the joystick button. Once the button is pressed,
            the return value will be True until the time expires
            '''
            now = self.timer.getFPGATimestamp()
            # Still inside the hold window from the last press: stay True.
            if now - self.latest < self.debounce_period:
                return True
            if self.joystick.getRawButton(self.button):
                # Fresh press: restart the hold window.
                self.latest = now
                return True
            else:
                return False

    def __init__(self, joystick, button, debouncePeriod=None):
        """
        :param joystick: wpilib.Joystick that contains the button to toggle
        :param button: Value of button that will act as toggle. Same value used in getRawButton()
        """
        if debouncePeriod is not None:
            # Wrap the raw button in the steady debouncer defined above.
            self.joystick = Toggle._SteadyDebounce(joystick, button, debouncePeriod)
        else:
            self.joystick = joystick
            # Bind the button number so self.joystick.get() needs no args.
            self.joystick.get = partial(self.joystick.getRawButton, button)
        self.released = False
        self.toggle = False
        self.state = False

    def get(self):
        """
        :return: State of toggle
        :rtype: bool
        """
        current_state = self.joystick.get()
        # Flip only on the rising edge (press), then wait for release.
        if current_state and not self.released:
            self.released = True
            self.toggle = not self.toggle
            self.state = not self.state  # Toggles between 1 and 0.
        elif not current_state and self.released:
            self.released = False
        return self.toggle

    @property
    def on(self):
        # Refresh edge detection before reporting.
        self.get()
        return self.state

    @property
    def off(self):
        # Refresh edge detection before reporting.
        self.get()
        return not self.state

    # Truth-testing a Toggle is the same as calling get().
    __bool__ = get
| {
"repo_name": "Twinters007/robotpy-wpilib-utilities",
"path": "robotpy_ext/control/toggle.py",
"copies": "1",
"size": "3373",
"license": "bsd-3-clause",
"hash": -1337077795903283700,
"line_mean": 29.3873873874,
"line_max": 143,
"alpha_frac": 0.5627038245,
"autogenerated": false,
"ratio": 4.397653194263364,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007663526979727219,
"num_lines": 111
} |
from functools import partial
from wsgiref.handlers import format_date_time
import trio
import h11
class Answer:
    """Minimal trio/h11 HTTP application: a router plus a serve loop."""

    def __init__(self, routes={}):
        self.router = Router(routes)

    async def run(self, host='127.0.0.1', port=5000):
        """Serve connections on host:port until cancelled."""
        handler = partial(handle_connection, answer=self)
        server = partial(trio.serve_tcp, handler, port=port, host=host)
        async with trio.open_nursery() as nursery:
            await nursery.start(server)
class Router:
    """Maps request target paths to handler callables."""

    def __init__(self, routes):
        # Use a per-instance dict. The previous class-level ``routes = {}``
        # was shared mutable state: every Router instance (and the class
        # itself) saw routes added by any other instance.
        self.routes = dict(routes)

    def add_route(self, path, handler):
        """Register `handler` for requests targeting `path`."""
        self.routes[path] = handler

    def route(self, connection, request):
        """Return the handler for `request.target` (None if unrouted)."""
        print(f'Routing request: {request}')
        return self.routes.get(request.target)
class Request:
    """Plain-data view of an HTTP request (decoded strings)."""

    def __init__(self, method=None, target=None, headers=None, body=None):
        self.method = method
        self.target = target
        self.headers = headers
        self.body = body

    def __str__(self):
        return f'<Request method={self.method!r} target={self.target!r} headers={self.headers!r}>'

    async def read_request(self, connection, request):
        """Populate this object from an h11.Request event plus the h11.Data
        events that follow it on `connection`; returns self."""
        self.method = request.method.decode()
        self.target = request.target.decode()
        self.headers = dict([(name.decode(), value.decode())
                             for (name, value) in request.headers])
        body = ''
        while True:
            event = await connection.next_event()
            if type(event) is h11.EndOfMessage:
                break
            assert type(event) is h11.Data
            body += event.data.decode()
        # Previously the accumulated body was discarded and self.body stayed
        # None; store it so handlers can read the request payload.
        self.body = body
        return self
class Response:
    """Plain-data description of an HTTP response."""

    def __init__(self, body, status_code=200, content_type='text/plain',
                 headers=None):
        self.body = body
        self.status_code = status_code
        self.content_type = content_type
        # Fresh dict per instance: the previous mutable default
        # ``headers={}`` was shared by every Response that mutated it.
        self.headers = {} if headers is None else headers
async def send_response(connection, response):
    """Serialize a Response over the h11 connection as a single message."""
    # Start from the server's standard headers, then layer on the
    # response-specific ones (response.headers wins on conflicts).
    headers = dict(connection.basic_headers())
    headers[b'Content-Type'] = response.content_type
    headers[b'Content-Length'] = str(len(response.body))
    headers.update(response.headers)
    head = h11.Response(status_code=response.status_code,
                        headers=headers.items())
    await connection.send(head)
    await connection.send(h11.Data(data=response.body))
    await connection.send(h11.EndOfMessage())
async def bar(request):
    """Demo handler: always replies with a cheerful plain-text body."""
    return Response(b'YAAY!\n')
async def handle_connection(stream, answer):
    """Per-connection loop: read one request per cycle, dispatch it through
    answer.router, send the response, and either reset for keep-alive or
    shut the connection down."""
    connection = Connection(stream)
    while True:
        # Each request/response cycle must start from fresh IDLE/IDLE h11 state.
        assert connection.conn.states == {h11.CLIENT: h11.IDLE, h11.SERVER: h11.IDLE}
        try:
            print('main loop waiting for request')
            event = await connection.next_event()
            print(f'main loop got event: {event}')
            if type(event) is h11.Request:
                request = await Request().read_request(connection, event)
                handler = answer.router.route(connection, request)
                # NOTE(review): route() returns None for unknown targets, which
                # would raise TypeError here instead of producing a 404 — confirm.
                response = await handler(request)
                await send_response(connection, response)
        except Exception as exc:
            print("Error during response handler:", exc)
            raise
        if connection.conn.our_state is h11.MUST_CLOSE:
            await connection.shutdown_and_clean_up()
            return
        else:
            try:
                # Reset h11 state so the next request on this keep-alive
                # connection can be processed.
                connection.conn.start_next_cycle()
            except h11.ProtocolError:
                states = connection.conn.states
                print(f'Unexpected state {states} -- bailing out')
                await connection.shutdown_and_clean_up()
                return
class Connection:
    """Pairs a trio stream with an h11 server-side protocol state machine,
    providing send/receive helpers and graceful shutdown."""

    def __init__(self, stream):
        self.stream = stream
        self.conn = h11.Connection(our_role=h11.SERVER)
        # Server identification string, used in the Server response header.
        self.ident = ' '.join(['answers/0.0.0', h11.PRODUCT_ID]).encode('ascii')

    async def send(self, event):
        """Serialize an h11 event and write the bytes to the stream."""
        assert type(event) is not h11.ConnectionClosed
        data = self.conn.send(event)
        await self.stream.send_all(data)

    async def next_event(self):
        """Return the next h11 event, reading more data from the peer whenever
        the state machine reports NEED_DATA."""
        while True:
            event = self.conn.next_event()
            if event is h11.NEED_DATA:
                await self._read_from_peer()
                continue
            return event

    async def shutdown_and_clean_up(self):
        """Half-close our side, drain any remaining peer data, then close."""
        await self.stream.send_eof()
        try:
            while True:
                got = await self.stream.receive_some(4096)
                if not got:
                    break
        finally:
            await self.stream.aclose()

    def basic_headers(self):
        """Headers included on every response: Date (now) and Server."""
        return [(b'Date', format_date_time(None).encode()),
                (b'Server', self.ident)]

    async def _read_from_peer(self):
        """Feed more bytes from the stream into the h11 state machine, first
        sending a 100 Continue if the client is waiting for one."""
        if self.conn.they_are_waiting_for_100_continue:
            go_ahead = h11.InformationalResponse(status_code=100, headers=self.basic_headers())
            await self.send(go_ahead)
        data = await self.stream.receive_some(4096)
        self.conn.receive_data(data)
| {
"repo_name": "yabok/answer",
"path": "answer/__init__.py",
"copies": "1",
"size": "4989",
"license": "isc",
"hash": -1423037893339253500,
"line_mean": 32.0397350993,
"line_max": 99,
"alpha_frac": 0.6019242333,
"autogenerated": false,
"ratio": 3.965818759936407,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005186376932763599,
"num_lines": 151
} |
from functools import partial
from zerver.lib.actions import check_send_stream_message
from zerver.lib.response import json_success
from zerver.decorator import api_key_only_webhook_view, REQ, has_request_variables
from zerver.lib.webhooks.git import get_push_commits_event_message, EMPTY_SHA,\
get_remove_branch_event_message, get_pull_request_event_message,\
get_issue_event_message, SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE,\
get_commits_comment_action_message, get_push_tag_event_message
from zerver.models import UserProfile
from django.http import HttpRequest, HttpResponse
from typing import Dict, Any, Iterable, Optional, Text
class UnknownEventType(Exception):
    """Raised when a GitLab webhook event name has no registered handler."""
def get_push_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Dispatch a push payload: branch deletion vs. an ordinary push."""
    branch_was_deleted = payload.get('after') == EMPTY_SHA
    if branch_was_deleted:
        return get_remove_branch_event_body(payload)
    return get_normal_push_event_body(payload)
def get_normal_push_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for an ordinary branch push."""
    compare_url = u'{}/compare/{}...{}'.format(
        get_repository_homepage(payload), payload['before'], payload['after'])
    commits = []
    for commit in payload['commits']:
        commits.append({
            'name': commit.get('author').get('name'),
            'sha': commit.get('id'),
            'message': commit.get('message'),
            'url': commit.get('url'),
        })
    return get_push_commits_event_message(
        get_user_name(payload), compare_url, get_branch_name(payload), commits)
def get_remove_branch_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message announcing a deleted branch."""
    user = get_user_name(payload)
    branch = get_branch_name(payload)
    return get_remove_branch_event_message(user, branch)
def get_tag_push_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for a pushed or removed tag."""
    if payload.get('checkout_sha'):
        tag_action = "pushed"
    else:
        tag_action = "removed"
    return get_push_tag_event_message(
        get_user_name(payload), get_tag_name(payload), action=tag_action)
def get_issue_created_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for a newly created issue."""
    attrs = payload['object_attributes']
    return get_issue_event_message(
        get_issue_user_name(payload),
        'created',
        get_object_url(payload),
        attrs.get('iid'),
        attrs.get('description'),
        get_objects_assignee(payload))
def get_issue_event_body(payload, action):
    # type: (Dict[str, Any], Text) -> Text
    """Build the chat message for an issue state change (close/reopen/update)."""
    attrs = payload['object_attributes']
    return get_issue_event_message(
        get_issue_user_name(payload), action, get_object_url(payload),
        attrs.get('iid'))
def get_merge_request_updated_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """An MR 'update' carrying oldrev means new commits; otherwise it is a
    field edit, rendered with the detailed 'updated' body."""
    has_new_commits = bool(payload['object_attributes'].get('oldrev'))
    if has_new_commits:
        return get_merge_request_event_body(payload, "added commit(s) to")
    return get_merge_request_open_or_updated_body(payload, "updated")
def get_merge_request_event_body(payload, action):
    # type: (Dict[str, Any], Text) -> Text
    """Short MR message (no branches/description) for merge/close/reopen."""
    merge_request = payload['object_attributes']
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        merge_request.get('url'),
        merge_request.get('iid'),
        type='MR')
def get_merge_request_open_or_updated_body(payload, action):
    # type: (Dict[str, Any], Text) -> Text
    """Detailed MR message (branches, description, assignee) for open/update."""
    merge_request = payload['object_attributes']
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        merge_request.get('url'),
        merge_request.get('iid'),
        merge_request.get('source_branch'),
        merge_request.get('target_branch'),
        merge_request.get('description'),
        get_objects_assignee(payload),
        type='MR')
def get_objects_assignee(payload):
    # type: (Dict[str, Any]) -> Optional[Text]
    """Return the assignee's name, or None when nobody is assigned."""
    assignee = payload.get('assignee')
    return assignee.get('name') if assignee else None
def get_commented_commit_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for a comment on a commit."""
    comment = payload['object_attributes']
    commit = payload['commit']
    action = u'[commented]({})'.format(comment['url'])
    return get_commits_comment_action_message(
        get_issue_user_name(payload),
        action,
        commit.get('url'),
        commit.get('id'),
        comment['note'])
def get_commented_merge_request_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for a comment on a merge request."""
    comment = payload['object_attributes']
    mr_iid = payload['merge_request'].get('iid')
    action = u'[commented]({}) on'.format(comment['url'])
    url = u'{}/merge_requests/{}'.format(
        payload['project'].get('web_url'), mr_iid)
    return get_pull_request_event_message(
        get_issue_user_name(payload), action, url, mr_iid,
        message=comment['note'], type='MR')
def get_commented_issue_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for a comment on an issue."""
    comment = payload['object_attributes']
    issue_iid = payload['issue'].get('iid')
    action = u'[commented]({}) on'.format(comment['url'])
    url = u'{}/issues/{}'.format(
        payload['project'].get('web_url'), issue_iid)
    return get_pull_request_event_message(
        get_issue_user_name(payload), action, url, issue_iid,
        message=comment['note'], type='Issue')
def get_commented_snippet_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for a comment on a snippet."""
    comment = payload['object_attributes']
    snippet_id = payload['snippet'].get('id')
    action = u'[commented]({}) on'.format(comment['url'])
    url = u'{}/snippets/{}'.format(
        payload['project'].get('web_url'), snippet_id)
    return get_pull_request_event_message(
        get_issue_user_name(payload), action, url, snippet_id,
        message=comment['note'], type='Snippet')
def get_wiki_page_event_body(payload, action):
    # type: (Dict[str, Any], Text) -> Text
    """Build the chat message for a wiki page create/update."""
    attrs = payload['object_attributes']
    return u"{} {} [Wiki Page \"{}\"]({}).".format(
        get_issue_user_name(payload),
        action,
        attrs.get('title'),
        attrs.get('url'),
    )
def get_build_hook_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for a CI build status change."""
    status = payload.get('build_status')
    special_actions = {'created': 'was created', 'running': 'started'}
    action = special_actions.get(status, 'changed status to {}'.format(status))
    return u"Build {} from {} stage {}.".format(
        payload.get('build_name'), payload.get('build_stage'), action)
def get_pipeline_event_body(payload):
    # type: (Dict[str, Any]) -> Text
    """Build the chat message for a pipeline status change, listing builds."""
    status = payload['object_attributes'].get('status')
    if status == 'pending':
        action = 'was created'
    elif status == 'running':
        action = 'started'
    else:
        action = 'changed status to {}'.format(status)
    build_lines = [
        u"* {} - {}".format(build.get('name'), build.get('status'))
        for build in payload['builds']
    ]
    return u"Pipeline {} with build(s):\n{}.".format(action, u"\n".join(build_lines))
def get_repo_name(payload):
    # type: (Dict[str, Any]) -> Text
    """Project name from the webhook payload."""
    project = payload['project']
    return project['name']
def get_user_name(payload):
    # type: (Dict[str, Any]) -> Text
    """Top-level user_name field (push/tag payloads)."""
    return payload['user_name']
def get_issue_user_name(payload):
    # type: (Dict[str, Any]) -> Text
    """Nested user.name field (issue/MR/note payloads)."""
    user = payload['user']
    return user['name']
def get_repository_homepage(payload):
    # type: (Dict[str, Any]) -> Text
    """Repository homepage URL from the webhook payload."""
    repository = payload['repository']
    return repository['homepage']
def get_branch_name(payload):
    # type: (Dict[str, Any]) -> Text
    """Branch name derived from the ref, stripping the refs/heads/ prefix."""
    ref = payload['ref']
    return ref.replace('refs/heads/', '')
def get_tag_name(payload):
    # type: (Dict[str, Any]) -> Text
    """Tag name derived from the ref, stripping the refs/tags/ prefix."""
    ref = payload['ref']
    return ref.replace('refs/tags/', '')
def get_object_iid(payload):
    # type: (Dict[str, Any]) -> Text
    """Internal id (iid) of the issue/MR in object_attributes."""
    attrs = payload['object_attributes']
    return attrs['iid']
def get_object_url(payload):
    # type: (Dict[str, Any]) -> Text
    """Web URL of the issue/MR in object_attributes."""
    attrs = payload['object_attributes']
    return attrs['url']
# Maps the (possibly action-qualified) event key produced by get_event() to
# the function that renders the message body for that event.
EVENT_FUNCTION_MAPPER = {
    'Push Hook': get_push_event_body,
    'Tag Push Hook': get_tag_push_event_body,
    'Issue Hook open': get_issue_created_event_body,
    'Issue Hook close': partial(get_issue_event_body, action='closed'),
    'Issue Hook reopen': partial(get_issue_event_body, action='reopened'),
    'Issue Hook update': partial(get_issue_event_body, action='updated'),
    'Note Hook Commit': get_commented_commit_event_body,
    'Note Hook MergeRequest': get_commented_merge_request_event_body,
    'Note Hook Issue': get_commented_issue_event_body,
    'Note Hook Snippet': get_commented_snippet_event_body,
    'Merge Request Hook open': partial(get_merge_request_open_or_updated_body, action='created'),
    'Merge Request Hook update': get_merge_request_updated_event_body,
    'Merge Request Hook merge': partial(get_merge_request_event_body, action='merged'),
    'Merge Request Hook close': partial(get_merge_request_event_body, action='closed'),
    'Merge Request Hook reopen': partial(get_merge_request_event_body, action='reopened'),
    'Wiki Page Hook create': partial(get_wiki_page_event_body, action='created'),
    'Wiki Page Hook update': partial(get_wiki_page_event_body, action='updated'),
    'Build Hook': get_build_hook_event_body,
    'Pipeline Hook': get_pipeline_event_body,
}
@api_key_only_webhook_view("Gitlab")
@has_request_variables
def api_gitlab_webhook(request, user_profile,
                       stream=REQ(default='gitlab'),
                       payload=REQ(argument_type='body'),
                       branches=REQ(default=None)):
    # type: (HttpRequest, UserProfile, Text, Dict[str, Any], Optional[Text]) -> HttpResponse
    """Webhook endpoint for GitLab: classify the event, render its body and
    subject, and post the message to the configured stream.

    A push to a branch not listed in `branches` produces no message
    (get_event returns None) but still yields an HTTP success response.
    """
    event = get_event(request, payload, branches)
    if event is not None:
        body = get_body_based_on_event(event)(payload)
        subject = get_subject_based_on_event(event, payload)
        check_send_stream_message(user_profile, request.client, stream, subject, body)
    return json_success()
def get_body_based_on_event(event):
    # type: (str) -> Any
    """Look up the body-rendering function for `event` (KeyError if unknown)."""
    handler = EVENT_FUNCTION_MAPPER[event]
    return handler
def get_subject_based_on_event(event, payload):
    # type: (str, Dict[str, Any]) -> Text
    """Choose the message topic (subject) for `event`.

    Push/Build/Pipeline events use "repo / branch"; issue, MR and note
    events use the shared PR-or-issue template; anything else falls back
    to the repository name.
    """
    if event == 'Push Hook':
        return u"{} / {}".format(get_repo_name(payload), get_branch_name(payload))
    elif event == 'Build Hook':
        # Build payloads have no 'project' key, so read repository.name.
        return u"{} / {}".format(payload['repository'].get('name'), get_branch_name(payload))
    elif event == 'Pipeline Hook':
        return u"{} / {}".format(
            get_repo_name(payload),
            payload['object_attributes'].get('ref').replace('refs/heads/', ''))
    elif event.startswith('Merge Request Hook'):
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='MR',
            id=payload['object_attributes'].get('iid'),
            title=payload['object_attributes'].get('title')
        )
    elif event.startswith('Issue Hook'):
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Issue',
            id=payload['object_attributes'].get('iid'),
            title=payload['object_attributes'].get('title')
        )
    elif event == 'Note Hook Issue':
        # Note payloads keep the commented object in its own key, not in
        # object_attributes (which holds the comment itself).
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Issue',
            id=payload['issue'].get('iid'),
            title=payload['issue'].get('title')
        )
    elif event == 'Note Hook MergeRequest':
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='MR',
            id=payload['merge_request'].get('iid'),
            title=payload['merge_request'].get('title')
        )
    elif event == 'Note Hook Snippet':
        return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type='Snippet',
            id=payload['snippet'].get('id'),
            title=payload['snippet'].get('title')
        )
    return get_repo_name(payload)
def get_event(request, payload, branches):
    # type: (HttpRequest, Dict[str, Any], Optional[Text]) -> Optional[str]
    """Derive the internal event key from the X-Gitlab-Event header.

    Issue/Merge Request/Wiki Page hooks get qualified with their 'action'
    and Note hooks with their 'noteable_type', matching the keys of
    EVENT_FUNCTION_MAPPER. Pushes to branches not mentioned in `branches`
    (when given) return None; unknown events raise UnknownEventType.
    """
    event = request.META['HTTP_X_GITLAB_EVENT']
    if event in ('Issue Hook', 'Merge Request Hook', 'Wiki Page Hook'):
        # These hooks carry the qualifying verb in object_attributes.action;
        # previously this was three identical copy-pasted branches.
        event = "{} {}".format(event, payload['object_attributes'].get('action'))
    elif event == 'Note Hook':
        event = "{} {}".format(event, payload['object_attributes'].get('noteable_type'))
    elif event == 'Push Hook':
        # `branches` is a substring filter (as before, via str containment).
        if branches is not None and get_branch_name(payload) not in branches:
            return None
    if event in EVENT_FUNCTION_MAPPER:
        return event
    raise UnknownEventType(u'Event {} is unknown and cannot be handled'.format(event))
| {
"repo_name": "Galexrt/zulip",
"path": "zerver/webhooks/gitlab/view.py",
"copies": "2",
"size": "13294",
"license": "apache-2.0",
"hash": -3620951634102455300,
"line_mean": 35.027100271,
"line_max": 97,
"alpha_frac": 0.6132841884,
"autogenerated": false,
"ratio": 3.6213565785889403,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5234640766988939,
"avg_score": null,
"num_lines": null
} |
from functools import partial
import argparse
import asyncio
import logging
from dsmr_parser.clients import create_dsmr_reader, create_tcp_dsmr_reader
def console():
    """Output DSMR data to console."""
    parser = argparse.ArgumentParser(description=console.__doc__)
    parser.add_argument('--device', default='/dev/ttyUSB0',
                        help='port to read DSMR data from')
    parser.add_argument('--host', default=None,
                        help='alternatively connect using TCP host.')
    parser.add_argument('--port', default=None,
                        help='TCP port to use for connection')
    parser.add_argument('--version', default='2.2', choices=['2.2', '4', '5', '5B', '5L'],
                        help='DSMR version (2.2, 4, 5, 5B, 5L)')
    parser.add_argument('--verbose', '-v', action='count')
    args = parser.parse_args()

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.ERROR)
    loop = asyncio.get_event_loop()

    def print_callback(telegram):
        """Callback that prints telegram values."""
        for obiref, obj in telegram.items():
            if obj:
                print(obj.value, obj.unit)
        print()

    # create tcp or serial connection factory depending on args
    if args.host and args.port:
        create_connection = partial(create_tcp_dsmr_reader,
                                    args.host, args.port, args.version,
                                    print_callback, loop=loop)
    else:
        create_connection = partial(create_dsmr_reader,
                                    args.device, args.version,
                                    print_callback, loop=loop)

    # BUG FIX: transport used to be unbound when Ctrl-C arrived before the
    # first connection succeeded, raising NameError in the except block.
    transport = None
    try:
        # connect and keep connected until interrupted by ctrl-c
        while True:
            conn = create_connection()
            transport, protocol = loop.run_until_complete(conn)
            # wait until the connection is closed, then retry after 5 seconds
            loop.run_until_complete(protocol.wait_closed())
            loop.run_until_complete(asyncio.sleep(5))
    except KeyboardInterrupt:
        # cleanup connection after user initiated shutdown
        if transport is not None:
            transport.close()
        loop.run_until_complete(asyncio.sleep(0))
    finally:
        loop.close()
| {
"repo_name": "ndokter/dsmr_parser",
"path": "dsmr_parser/__main__.py",
"copies": "1",
"size": "2384",
"license": "mit",
"hash": -2764696539898552000,
"line_mean": 35.6769230769,
"line_max": 90,
"alpha_frac": 0.5847315436,
"autogenerated": false,
"ratio": 4.406654343807763,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000169061707523246,
"num_lines": 65
} |
from functools import partial
import colander
import deform
from babel.dates import format_date
from websauna.system.admin import views as adminviews
from websauna.system.core.viewconfig import view_overrides
from websauna.system.crud.formgenerator import SQLAlchemyFormGenerator
from websauna.system.form.sqlalchemy import UUIDModelSet
from websauna.system.crud import listing
from websauna.utils.slug import uuid_to_slug
from enkiblog.core.deform.widgets import CKEditorWidget
from enkiblog.admins import PostAdmin
from enkiblog.models import Post, Tag
from enkiblog.core.utils import slugify
@colander.deferred
def deferred_tags_widget(_, kw):
    """Select2 multi-select listing every Tag; values are slugified UUIDs."""
    dbsession = kw['request'].dbsession
    tag_rows = dbsession.query(Tag.uuid, Tag.title).all()
    vocab = [(uuid_to_slug(tag_uuid), title) for tag_uuid, title in tag_rows]
    return deform.widget.Select2Widget(
        values=vocab, multiple=True, css_class='tags-select2w')
@colander.deferred
def deferred_ckeditor_widget(*_):
    """CKEditor widget with default options for rich-text body editing."""
    return CKEditorWidget(options={})
@colander.deferred
def deferred_state_choices_widget(_, kw):
    """Select widget listing every workflow state as (name, title) choices."""
    request = kw['request']
    workflow = request.workflow
    # NOTE(review): the result of this first state_info() call is discarded —
    # it looks like leftover or a warm-up call; confirm whether it can go.
    workflow.state_info(None, request)
    context = None  # XXX: should be a resource model
    choices = [(w['name'], w['title']) for w in workflow.state_info(context, kw['request'])]
    return deform.widget.SelectWidget(values=choices)
@colander.deferred
def deferred_state_default(_, kw):
    """Default workflow state for new posts: the workflow's initial state."""
    workflow = kw['request'].workflow
    return workflow.initial_state
# Fields a post author may edit directly in the admin add/edit forms.
POST_EDITABLE_FIELDS = [
    "title",
    colander.SchemaNode(
        colander.String(),
        name="description",
        required=True),
    colander.SchemaNode(
        UUIDModelSet(model=Tag, match_column="uuid"),
        name='tags',
        widget=deferred_tags_widget,
        missing=None),
    colander.SchemaNode(
        colander.String(),
        name="body",
        required=True,
        widget=deferred_ckeditor_widget),
    colander.SchemaNode(
        colander.String(),
        name="state",
        required=True,
        default=deferred_state_default,
        widget=deferred_state_choices_widget),
]
# Read-only views additionally show the system-managed metadata columns.
POST_VIEWABLE_FIELDS = POST_EDITABLE_FIELDS + [
    "uuid",
    "created_at",
    "published_at",
    "updated_at",
    "slug",
    "author",
]
@view_overrides(context=PostAdmin)
class PostAdd(adminviews.Add):
    """Admin 'add post' view: sets author and a unique slug on creation."""
    form_generator = SQLAlchemyFormGenerator(includes=POST_EDITABLE_FIELDS)

    def add_object(self, obj):
        """Attach the current user as author, derive a unique slug from the
        title, then persist the post."""
        dbsession = self.context.get_dbsession()
        obj.author = self.request.user
        # no_autoflush: slugify queries Post.slug while obj is still pending,
        # so an autoflush here would try to INSERT before the slug is set.
        with dbsession.no_autoflush:
            obj.slug = slugify(obj.title, Post.slug, dbsession)
        dbsession.add(obj)
        dbsession.flush()
@view_overrides(context=PostAdmin.Resource)
class PostEdit(adminviews.Edit):
    """Admin 'edit post' view, exposing the full (viewable) field set."""
    form_generator = SQLAlchemyFormGenerator(includes=POST_VIEWABLE_FIELDS)
    # TODO: on publishing publish all related content
@view_overrides(context=PostAdmin.Resource)
class PostShow(adminviews.Show):
    """Admin read-only 'show post' view."""
    form_generator = SQLAlchemyFormGenerator(includes=POST_VIEWABLE_FIELDS)
def get_human_readable_date(field_name, _view, _column, obj):
    """Listing-column getter: format obj.<field_name> with babel, or '' if unset."""
    value = getattr(obj, field_name)
    if not value:
        return ''
    return format_date(value)
def post_navigate_url_getter(request, resource):
    """Listing-column link target: the public 'post' route for this resource."""
    slug = resource.obj.slug
    return request.route_url('post', slug=slug)
@view_overrides(context=PostAdmin)
class PostsListing(adminviews.Listing):
    """Admin listing of posts: title (linked to the public page), state,
    and human-readable created/published dates."""
    table = listing.Table(
        columns=[
            listing.Column("title", "Title", navigate_url_getter=post_navigate_url_getter),
            listing.Column("state", "State"),
            listing.Column(
                "created_at",
                "Created",
                getter=partial(get_human_readable_date, 'created_at')),
            listing.Column(
                "published_at",
                "Published",
                getter=partial(get_human_readable_date, 'published_at')),
            listing.ControlsColumn(),
        ]
    )

    def order_query(self, query):
        """Sort the query."""
        return query.order_by('created_at')
| {
"repo_name": "enkidulan/enkiblog",
"path": "src/enkiblog/adminviews/post.py",
"copies": "1",
"size": "4077",
"license": "apache-2.0",
"hash": -705511923553568400,
"line_mean": 28.5434782609,
"line_max": 92,
"alpha_frac": 0.675006132,
"autogenerated": false,
"ratio": 3.726691042047532,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4901697174047532,
"avg_score": null,
"num_lines": null
} |
from functools import partial
import contextlib
import inspect
import operator
import warnings
import numpy as np
import pandas as pd
from . import npcompat
from .pycompat import PY3, dask_array_type
from .nputils import nanfirst, nanlast, array_eq, array_ne
# Optional accelerators: fall back gracefully when they are not installed.
try:
    import bottleneck as bn
    has_bottleneck = True
except ImportError:
    # use numpy methods instead
    bn = np
    has_bottleneck = False

try:
    import dask.array as da
    has_dask = True
except ImportError:
    has_dask = False
# Operator-name groups used to generate special methods on xarray classes.
UNARY_OPS = ['neg', 'pos', 'abs', 'invert']
CMP_BINARY_OPS = ['lt', 'le', 'ge', 'gt']
NUM_BINARY_OPS = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'mod',
                  'pow', 'and', 'xor', 'or']
if not PY3:
    # Python 2 additionally has the classic division operator.
    NUM_BINARY_OPS.append('div')

# methods which pass on the numpy return value unchanged
# be careful not to list methods that we would want to wrap later
NUMPY_SAME_METHODS = ['item', 'searchsorted']
# methods which don't modify the data shape, so the result should still be
# wrapped in an Variable/DataArray
NUMPY_UNARY_METHODS = ['astype', 'argsort', 'clip', 'conj', 'conjugate']
PANDAS_UNARY_FUNCTIONS = ['isnull', 'notnull']
# methods which remove an axis
REDUCE_METHODS = ['all', 'any']
NAN_REDUCE_METHODS = ['argmax', 'argmin', 'max', 'min', 'mean', 'prod', 'sum',
                      'std', 'var', 'median']
# Maps each bottleneck move_* function to the rolling method it implements.
BOTTLENECK_ROLLING_METHODS = {'move_sum': 'sum', 'move_mean': 'mean',
                              'move_std': 'std', 'move_min': 'min',
                              'move_max': 'max'}
# TODO: wrap cumprod/cumsum, take, dot, sort
def _dask_or_eager_func(name, eager_module=np, list_of_args=False,
                        n_array_args=1):
    """Return a function that dispatches `name` to dask.array when any of the
    first n_array_args arguments is a dask array, else to eager_module."""
    if not has_dask:
        def f(data, *args, **kwargs):
            return getattr(eager_module, name)(data, *args, **kwargs)
        return f

    def f(*args, **kwargs):
        candidates = args[0] if list_of_args else args
        uses_dask = any(isinstance(a, da.Array)
                        for a in candidates[:n_array_args])
        module = da if uses_dask else eager_module
        return getattr(module, name)(*args, **kwargs)
    return f
def _fail_on_dask_array_input(values, msg=None, func_name=None):
    """Raise NotImplementedError when `values` is a dask array; no-op otherwise."""
    if not isinstance(values, dask_array_type):
        return
    if msg is None:
        msg = '%r is not a valid method on dask arrays'
    if func_name is None:
        # Default to the name of the calling function.
        func_name = inspect.stack()[1][3]
    raise NotImplementedError(msg % func_name)
# Dispatching wrappers: each picks dask.array or the eager module at call time.
around = _dask_or_eager_func('around')
isclose = _dask_or_eager_func('isclose')
# isnull/notnull come from pandas rather than numpy.
isnull = _dask_or_eager_func('isnull', pd)
notnull = _dask_or_eager_func('notnull', pd)
transpose = _dask_or_eager_func('transpose')
where = _dask_or_eager_func('where', n_array_args=3)
insert = _dask_or_eager_func('insert')
take = _dask_or_eager_func('take')
broadcast_to = _dask_or_eager_func('broadcast_to', npcompat)
concatenate = _dask_or_eager_func('concatenate', list_of_args=True)
stack = _dask_or_eager_func('stack', npcompat, list_of_args=True)
array_all = _dask_or_eager_func('all')
array_any = _dask_or_eager_func('any')
tensordot = _dask_or_eager_func('tensordot', n_array_args=2)
def asarray(data):
    """Coerce to ndarray, but pass dask arrays through unchanged."""
    if isinstance(data, dask_array_type):
        return data
    return np.asarray(data)
def as_like_arrays(*data):
    """Return inputs unchanged when all are dask arrays; otherwise coerce
    every argument to an ndarray."""
    if any(not isinstance(d, dask_array_type) for d in data):
        return tuple(np.asarray(d) for d in data)
    return data
def allclose_or_equiv(arr1, arr2, rtol=1e-5, atol=1e-8):
    """True when shapes match and all elements are close within the given
    tolerances, treating NaN positions as equal (like np.allclose + NaN)."""
    arr1, arr2 = as_like_arrays(arr1, arr2)
    if arr1.shape != arr2.shape:
        return False
    closeness = isclose(arr1, arr2, rtol=rtol, atol=atol, equal_nan=True)
    return bool(closeness.all())
def array_equiv(arr1, arr2):
    """Like np.array_equal, but NaNs at matching positions compare equal."""
    arr1, arr2 = as_like_arrays(arr1, arr2)
    if arr1.shape != arr2.shape:
        return False
    same = (arr1 == arr2)
    # GH837, GH861: pandas isnull raises TypeError on numpy structured
    # arrays; in that case skip the NaN-equality handling entirely.
    try:
        same |= (isnull(arr1) & isnull(arr2))
    except TypeError:
        pass
    return bool(same.all())
def _call_possibly_missing_method(arg, name, args, kwargs):
    """Call arg.<name>(*args, **kwargs); when the attribute is missing,
    report dask arrays as unsupported before re-raising."""
    try:
        method = getattr(arg, name)
    except AttributeError:
        _fail_on_dask_array_input(arg, func_name=name)
        if hasattr(arg, 'data'):
            _fail_on_dask_array_input(arg.data, func_name=name)
        raise
    return method(*args, **kwargs)
def _values_method_wrapper(name):
def func(self, *args, **kwargs):
return _call_possibly_missing_method(self.data, name, args, kwargs)
func.__name__ = name
func.__doc__ = getattr(np.ndarray, name).__doc__
return func
def _method_wrapper(name):
def func(self, *args, **kwargs):
return _call_possibly_missing_method(self, name, args, kwargs)
func.__name__ = name
func.__doc__ = getattr(np.ndarray, name).__doc__
return func
def _func_slash_method_wrapper(f, name=None):
# try to wrap a method, but if not found use the function
# this is useful when patching in a function as both a DataArray and
# Dataset method
if name is None:
name = f.__name__
def func(self, *args, **kwargs):
try:
return getattr(self, name)(*args, **kwargs)
except AttributeError:
return f(self, *args, **kwargs)
func.__name__ = name
func.__doc__ = f.__doc__
return func
_REDUCE_DOCSTRING_TEMPLATE = \
"""Reduce this {cls}'s data by applying `{name}` along some
dimension(s).
Parameters
----------
{extra_args}
skipna : bool, optional
If True, skip missing values (as marked by NaN). By default, only
skips missing values for float dtypes; other dtypes either do not
have a sentinel missing value (int) or skipna=True has not been
implemented (object, datetime64 or timedelta64).
keep_attrs : bool, optional
If True, the attributes (`attrs`) will be copied from the original
object to the new one. If False (default), the new object will be
returned without attributes.
**kwargs : dict
Additional keyword arguments passed on to `{name}`.
Returns
-------
reduced : {cls}
New {cls} object with `{name}` applied to its data and the
indicated dimension(s) removed.
"""
_ROLLING_REDUCE_DOCSTRING_TEMPLATE = \
"""Reduce this DataArrayRolling's data windows by applying `{name}`
along its dimension.
Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to `{name}`.
Returns
-------
reduced : DataArray
New DataArray object with `{name}` applied along its rolling dimnension.
"""
def count(data, axis=None):
    """Count the number of non-NA values along the given axis or axes.

    Note: `sum` here resolves to this module's NaN-aware aggregation."""
    valid = ~isnull(data)
    return sum(valid, axis=axis)
def fillna(data, other):
    """Replace missing (NA) entries of `data` with values from `other`,
    following normal broadcasting and alignment rules."""
    missing = isnull(data)
    return where(missing, other, data)
def where_method(data, cond, other=np.nan):
    """Keep `data` where cond is True; substitute `other` (default NaN)
    everywhere else, with normal broadcasting and alignment rules."""
    return where(cond, data, other)
@contextlib.contextmanager
def _ignore_warnings_if(condition):
if condition:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
yield
else:
yield
def _create_nan_agg_method(name, numeric_only=False, coerce_strings=False):
    """Build an aggregation function `name` that dispatches between numpy,
    bottleneck and dask implementations, with optional NaN skipping."""
    def f(values, axis=None, skipna=None, **kwargs):
        # ignore keyword args inserted by np.mean and other numpy aggregators
        # automatically:
        kwargs.pop('dtype', None)
        kwargs.pop('out', None)
        values = asarray(values)
        # Fixed-width string arrays go through object dtype so comparisons work.
        if coerce_strings and values.dtype.kind in 'SU':
            values = values.astype(object)
        # skipna defaults to True for float/complex dtypes only.
        if skipna or (skipna is None and values.dtype.kind in 'cf'):
            if values.dtype.kind not in ['i', 'f', 'c']:
                raise NotImplementedError(
                    'skipna=True not yet implemented for %s with dtype %s'
                    % (name, values.dtype))
            nanname = 'nan' + name
            if isinstance(axis, tuple) or not values.dtype.isnative:
                # bottleneck can't handle multiple axis arguments or non-native
                # endianness
                eager_module = np
            else:
                eager_module = bn
            func = _dask_or_eager_func(nanname, eager_module)
            using_numpy_nan_func = eager_module is np
        else:
            func = _dask_or_eager_func(name)
            using_numpy_nan_func = False
        # Old numpy nan-funcs warn on all-NaN slices; silence that here.
        with _ignore_warnings_if(using_numpy_nan_func):
            try:
                return func(values, axis=axis, **kwargs)
            except AttributeError:
                if isinstance(values, dask_array_type):
                    msg = '%s is not yet implemented on dask arrays' % name
                else:
                    assert using_numpy_nan_func
                    msg = ('%s is not available with skipna=False with the '
                           'installed version of numpy; upgrade to numpy 1.9 '
                           'or newer to use skipna=True or skipna=None' % name)
                raise NotImplementedError(msg)
    # numeric_only is consumed by inject_reduce_methods.
    f.numeric_only = numeric_only
    return f
# Generated NaN-aware aggregations. coerce_strings lets argmax/argmin/max/min
# operate on fixed-width string arrays by going through object dtype.
argmax = _create_nan_agg_method('argmax', coerce_strings=True)
argmin = _create_nan_agg_method('argmin', coerce_strings=True)
max = _create_nan_agg_method('max', coerce_strings=True)
min = _create_nan_agg_method('min', coerce_strings=True)
sum = _create_nan_agg_method('sum', numeric_only=True)
mean = _create_nan_agg_method('mean', numeric_only=True)
std = _create_nan_agg_method('std', numeric_only=True)
var = _create_nan_agg_method('var', numeric_only=True)
median = _create_nan_agg_method('median', numeric_only=True)
# Specialized failure message for skipna on dask arrays.
_fail_on_dask_array_input_skipna = partial(
    _fail_on_dask_array_input,
    msg='%r with skipna=True is not yet implemented on dask arrays')
_prod = _dask_or_eager_func('prod')
def prod(values, axis=None, skipna=None, **kwargs):
    """Product reduction with optional NaN skipping (on by default for
    float dtypes); falls back to the plain product otherwise."""
    skip = skipna or (skipna is None and values.dtype.kind == 'f')
    if skip:
        if values.dtype.kind not in ['i', 'f']:
            raise NotImplementedError(
                'skipna=True not yet implemented for prod with dtype %s'
                % values.dtype)
        _fail_on_dask_array_input_skipna(values)
        return npcompat.nanprod(values, axis=axis, **kwargs)
    return _prod(values, axis=axis, **kwargs)
prod.numeric_only = True
def first(values, axis, skipna=None):
    """Return the first non-NA elements in this array along the given axis.
    Dtypes that cannot hold NaN (int/bytes/str) take the plain first element."""
    can_hold_na = values.dtype.kind not in 'iSU'
    if (skipna or skipna is None) and can_hold_na:
        _fail_on_dask_array_input_skipna(values)
        return nanfirst(values, axis)
    return take(values, 0, axis=axis)
def last(values, axis, skipna=None):
    """Return the last non-NA elements in this array along the given axis.
    Dtypes that cannot hold NaN (int/bytes/str) take the plain last element."""
    can_hold_na = values.dtype.kind not in 'iSU'
    if (skipna or skipna is None) and can_hold_na:
        _fail_on_dask_array_input_skipna(values)
        return nanlast(values, axis)
    return take(values, -1, axis=axis)
def inject_reduce_methods(cls):
    """Attach reduce methods (all/any, the NaN-aware aggregations, and count)
    to cls via its _reduce_method factory, with generated docstrings."""
    methods = ([(name, globals()['array_%s' % name], False) for name
                in REDUCE_METHODS] +
               [(name, globals()[name], True) for name in NAN_REDUCE_METHODS] +
               [('count', count, False)])
    for name, f, include_skipna in methods:
        # numeric_only is set on the generated nan-agg functions only.
        numeric_only = getattr(f, 'numeric_only', False)
        func = cls._reduce_method(f, include_skipna, numeric_only)
        func.__name__ = name
        func.__doc__ = _REDUCE_DOCSTRING_TEMPLATE.format(
            name=name, cls=cls.__name__,
            extra_args=cls._reduce_extra_args_docstring)
        setattr(cls, name, func)
def op_str(name):
    """Dunder form of an operator name, e.g. 'add' -> '__add__'."""
    return '__{}__'.format(name)
def get_op(name):
    """The `operator` module function for `name`, e.g. 'add' -> operator.add."""
    return getattr(operator, '__%s__' % name)
# Maps each in-place operator function (iadd, ...) to its out-of-place twin.
NON_INPLACE_OP = dict((get_op('i' + name), get_op(name))
                      for name in NUM_BINARY_OPS)
def inplace_to_noninplace_op(f):
    """Return the out-of-place operator corresponding to in-place operator `f`."""
    return NON_INPLACE_OP[f]
def inject_binary_ops(cls, inplace=False):
    """Attach comparison/arithmetic special methods plus _fillna/_where to
    cls via its _binary_op factory (and _inplace_binary_op when inplace)."""
    for name in CMP_BINARY_OPS + NUM_BINARY_OPS:
        setattr(cls, op_str(name), cls._binary_op(get_op(name)))
    # eq/ne use NaN-aware array comparisons instead of operator.eq/ne.
    for name, f in [('eq', array_eq), ('ne', array_ne)]:
        setattr(cls, op_str(name), cls._binary_op(f))
    # patch in fillna
    f = _func_slash_method_wrapper(fillna)
    method = cls._binary_op(f, join='left', fillna=True)
    setattr(cls, '_fillna', method)
    # patch in where
    f = _func_slash_method_wrapper(where_method, 'where')
    setattr(cls, '_where', cls._binary_op(f))
    for name in NUM_BINARY_OPS:
        # only numeric operations have in-place and reflexive variants
        setattr(cls, op_str('r' + name),
                cls._binary_op(get_op(name), reflexive=True))
        if inplace:
            setattr(cls, op_str('i' + name),
                    cls._inplace_binary_op(get_op('i' + name)))
def inject_all_ops_and_reduce_methods(cls, priority=50, array_only=True):
# prioritize our operations over those of numpy.ndarray (priority=1)
# and numpy.matrix (priority=10)
cls.__array_priority__ = priority
# patch in standard special operations
for name in UNARY_OPS:
setattr(cls, op_str(name), cls._unary_op(get_op(name)))
inject_binary_ops(cls, inplace=True)
# patch in numpy/pandas methods
for name in NUMPY_UNARY_METHODS:
setattr(cls, name, cls._unary_op(_method_wrapper(name)))
for name in PANDAS_UNARY_FUNCTIONS:
f = _func_slash_method_wrapper(getattr(pd, name))
setattr(cls, name, cls._unary_op(f))
f = _func_slash_method_wrapper(around, name='round')
setattr(cls, 'round', cls._unary_op(f))
if array_only:
# these methods don't return arrays of the same shape as the input, so
# don't try to patch these in for Dataset objects
for name in NUMPY_SAME_METHODS:
setattr(cls, name, _values_method_wrapper(name))
inject_reduce_methods(cls)
def inject_bottleneck_rolling_methods(cls):
    # Attach rolling-window reductions to ``cls``, preferring bottleneck's
    # C implementations when the library is available.
    # standard numpy reduce methods
    methods = [(name, globals()[name]) for name in NAN_REDUCE_METHODS]
    for name, f in methods:
        func = cls._reduce_method(f)
        func.__name__ = name
        func.__doc__ = _ROLLING_REDUCE_DOCSTRING_TEMPLATE.format(name=func.__name__)
        setattr(cls, name, func)
    # bottleneck rolling methods
    if has_bottleneck:
        for bn_name, method_name in BOTTLENECK_ROLLING_METHODS.items():
            f = getattr(bn, bn_name)
            func = cls._bottleneck_reduce(f)
            func.__name__ = method_name
            func.__doc__ = _ROLLING_REDUCE_DOCSTRING_TEMPLATE.format(name=func.__name__)
            setattr(cls, method_name, func)
        # bottleneck rolling methods without min_count
        # (move_median has no min_count parameter, so it gets its own wrapper)
        f = getattr(bn, 'move_median')
        func = cls._bottleneck_reduce_without_min_count(f)
        func.__name__ = 'median'
        func.__doc__ = _ROLLING_REDUCE_DOCSTRING_TEMPLATE.format(name=func.__name__)
        setattr(cls, 'median', func)
| {
"repo_name": "NicWayand/xray",
"path": "xarray/core/ops.py",
"copies": "1",
"size": "15733",
"license": "apache-2.0",
"hash": 5575218922577292000,
"line_mean": 32.8344086022,
"line_max": 88,
"alpha_frac": 0.6131697705,
"autogenerated": false,
"ratio": 3.5700022691173134,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.968198235727755,
"avg_score": 0.00023793646795255318,
"num_lines": 465
} |
from functools import partial
import contextlib
import inspect
import operator
import warnings
import numpy as np
import pandas as pd
from . import npcompat
from .pycompat import PY3, range, dask_array_type
from .nputils import (
nanfirst, nanlast, interleaved_concat as _interleaved_concat_numpy,
array_eq, array_ne, _validate_axis, _calc_concat_shape
)
# bottleneck provides fast NaN-aware reductions; fall back to plain numpy
try:
    import bottleneck as bn
except ImportError:
    # use numpy methods instead
    bn = np
# dask is optional; ``has_dask`` gates all lazy-array code paths below
try:
    import dask.array as da
    has_dask = True
except ImportError:
    has_dask = False
# operator names patched onto Variable/DataArray/Dataset classes
UNARY_OPS = ['neg', 'pos', 'abs', 'invert']
CMP_BINARY_OPS = ['lt', 'le', 'ge', 'gt']
NUM_BINARY_OPS = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'mod',
                  'pow', 'and', 'xor', 'or']
if not PY3:
    # Python 2 also has the classic (non-true) division operator
    NUM_BINARY_OPS.append('div')
# methods which pass on the numpy return value unchanged
# be careful not to list methods that we would want to wrap later
NUMPY_SAME_METHODS = ['item', 'searchsorted']
# methods which don't modify the data shape, so the result should still be
# wrapped in an Variable/DataArray
NUMPY_UNARY_METHODS = ['astype', 'argsort', 'clip', 'conj', 'conjugate']
PANDAS_UNARY_FUNCTIONS = ['isnull', 'notnull']
# methods which remove an axis
NUMPY_REDUCE_METHODS = ['all', 'any']
NAN_REDUCE_METHODS = ['argmax', 'argmin', 'max', 'min', 'mean', 'prod', 'sum',
                      'std', 'var', 'median']
# TODO: wrap cumprod/cumsum, take, dot, sort
def _dask_or_eager_func(name, eager_module=np, dispatch_elemwise=False):
    """Create a function that dispatches between dask and *eager_module*
    based on the runtime type of its first argument."""
    if not has_dask:
        def f(data, *args, **kwargs):
            # dask not installed: always resolve from the eager module
            return getattr(eager_module, name)(data, *args, **kwargs)
        return f

    def f(data, *args, **kwargs):
        # for elemwise-style wrappers (concatenate/stack) ``data`` is a
        # list of arrays, so its first element decides the backend
        if dispatch_elemwise:
            target = data[0]
        else:
            target = data
        if isinstance(target, da.Array):
            module = da
        else:
            module = eager_module
        return getattr(module, name)(data, *args, **kwargs)
    return f
def _fail_on_dask_array_input(values, msg=None, func_name=None):
    # Raise NotImplementedError if *values* is a dask array; the message
    # names the offending function, defaulting to the caller's name
    # recovered from the call stack.
    if isinstance(values, dask_array_type):
        if msg is None:
            msg = '%r is not a valid method on dask arrays'
        if func_name is None:
            func_name = inspect.stack()[1][3]
        raise NotImplementedError(msg % func_name)
# numpy/pandas functions wrapped so they transparently dispatch to dask
around = _dask_or_eager_func('around')
isclose = _dask_or_eager_func('isclose')
isnull = _dask_or_eager_func('isnull', pd)
notnull = _dask_or_eager_func('notnull', pd)
transpose = _dask_or_eager_func('transpose')
where = _dask_or_eager_func('where')
insert = _dask_or_eager_func('insert')
take = _dask_or_eager_func('take')
broadcast_to = _dask_or_eager_func('broadcast_to', npcompat)
# these receive a *list* of arrays, so dispatch on its first element
concatenate = _dask_or_eager_func('concatenate', dispatch_elemwise=True)
stack = _dask_or_eager_func('stack', npcompat, dispatch_elemwise=True)
def _interleaved_indices_required(indices):
"""With dask, we care about data locality and would rather avoid splitting
splitting up each arrays into single elements. This routine checks to see
if we really need the "interleaved" part of interleaved_concat.
We don't use for the pure numpy version of interleaved_concat, because it's
just as fast or faster to directly do the interleaved concatenate rather
than check if we could simply it.
"""
next_expected = 0
for ind in indices:
if isinstance(ind, slice):
if ((ind.start or 0) != next_expected
or ind.step not in (1, None)):
return True
next_expected = ind.stop
else:
ind = np.asarray(ind)
expected = np.arange(next_expected, next_expected + ind.size)
if (ind != expected).any():
return True
next_expected = ind[-1] + 1
return False
def _interleaved_concat_slow(arrays, indices, axis=0):
    """A slow version of interleaved_concat that also works on dask arrays
    """
    axis = _validate_axis(arrays[0], axis)
    result_shape = _calc_concat_shape(arrays, axis=axis)
    length = result_shape[axis]
    # invert ``indices``: for each output position record which source array
    # (array_lookup) and which element within it (element_lookup) lands there
    array_lookup = np.empty(length, dtype=int)
    element_lookup = np.empty(length, dtype=int)
    for n, ind in enumerate(indices):
        if isinstance(ind, slice):
            ind = np.arange(*ind.indices(length))
        for m, i in enumerate(ind):
            array_lookup[i] = n
            element_lookup[i] = m
    # concatenate single-element slices in output order (hence "slow")
    split_arrays = [arrays[n][(slice(None),) * axis + (slice(m, m + 1),)]
                    for (n, m) in zip(array_lookup, element_lookup)]
    return concatenate(split_arrays, axis)
def interleaved_concat(arrays, indices, axis=0):
    """Concatenate each array along the given axis, but also assign each array
    element into the location given by indices. This operation is used for
    groupby.transform.
    """
    if has_dask and isinstance(arrays[0], da.Array):
        # use a plain concatenate when indices are already in order
        if not _interleaved_indices_required(indices):
            return da.concatenate(arrays, axis)
        else:
            return _interleaved_concat_slow(arrays, indices, axis)
    else:
        return _interleaved_concat_numpy(arrays, indices, axis)
def asarray(data):
    """Pass dask arrays through unchanged; coerce anything else to numpy."""
    if isinstance(data, dask_array_type):
        return data
    return np.asarray(data)
def as_like_arrays(*data):
    """Return *data* untouched when every element is a dask array;
    otherwise coerce each element to a numpy array."""
    every_element_is_dask = all(
        isinstance(d, dask_array_type) for d in data)
    if every_element_is_dask:
        return data
    return tuple(np.asarray(d) for d in data)
def allclose_or_equiv(arr1, arr2, rtol=1e-5, atol=1e-8):
    """Like np.allclose, but also allows values to be NaN in both arrays
    """
    arr1, arr2 = as_like_arrays(arr1, arr2)
    if arr1.shape != arr2.shape:
        return False
    # equal_nan=True treats NaN == NaN as a match
    return bool(isclose(arr1, arr2, rtol=rtol, atol=atol, equal_nan=True).all())
def array_equiv(arr1, arr2):
    """Like np.array_equal, but also allows values to be NaN in both arrays
    """
    arr1, arr2 = as_like_arrays(arr1, arr2)
    if arr1.shape != arr2.shape:
        return False
    # elements match if equal, or if both sides are null
    return bool(((arr1 == arr2) | (isnull(arr1) & isnull(arr2))).all())
def _call_possibly_missing_method(arg, name, args, kwargs):
    # Call ``arg.name(*args, **kwargs)``; when the attribute is missing,
    # surface a clearer NotImplementedError for dask-backed data before
    # re-raising the original AttributeError.
    try:
        method = getattr(arg, name)
    except AttributeError:
        _fail_on_dask_array_input(arg, func_name=name)
        if hasattr(arg, 'data'):
            _fail_on_dask_array_input(arg.data, func_name=name)
        raise
    else:
        return method(*args, **kwargs)
def _values_method_wrapper(name):
    # Wrap method *name* so it is invoked on the object's ``.data`` values.
    def func(self, *args, **kwargs):
        return _call_possibly_missing_method(self.data, name, args, kwargs)
    func.__name__ = name
    func.__doc__ = getattr(np.ndarray, name).__doc__
    return func
def _method_wrapper(name):
    # Wrap method *name* so it is invoked on the object itself.
    def func(self, *args, **kwargs):
        return _call_possibly_missing_method(self, name, args, kwargs)
    func.__name__ = name
    func.__doc__ = getattr(np.ndarray, name).__doc__
    return func
def _func_slash_method_wrapper(f, name=None):
# try to wrap a method, but if not found use the function
# this is useful when patching in a function as both a DataArray and
# Dataset method
if name is None:
name = f.__name__
def func(self, *args, **kwargs):
try:
return getattr(self, name)(*args, **kwargs)
except AttributeError:
return f(self, *args, **kwargs)
func.__name__ = name
func.__doc__ = f.__doc__
return func
_REDUCE_DOCSTRING_TEMPLATE = \
"""Reduce this {cls}'s data by applying `{name}` along some
dimension(s).
Parameters
----------
{extra_args}
skipna : bool, optional
If True, skip missing values (as marked by NaN). By default, only
skips missing values for float dtypes; other dtypes either do not
have a sentinel missing value (int) or skipna=True has not been
implemented (object, datetime64 or timedelta64).
keep_attrs : bool, optional
If True, the attributes (`attrs`) will be copied from the original
object to the new one. If False (default), the new object will be
returned without attributes.
**kwargs : dict
Additional keyword arguments passed on to `{name}`.
Returns
-------
reduced : {cls}
New {cls} object with `{name}` applied to its data and the
indicated dimension(s) removed.
"""
def count(values, axis=None):
    """Count the number of non-NA in this array along the given axis or axes
    """
    # NOTE: ``sum`` here resolves to this module's nan-aware aggregation
    # defined below, not the builtin.
    return sum(~isnull(values), axis=axis)
def fillna(values, other):
    """Fill missing values in this object with values from the other object.
    Follows normal broadcasting and alignment rules.
    """
    # keep ``values`` wherever they are non-null, otherwise take ``other``
    return where(isnull(values), other, values)
@contextlib.contextmanager
def _ignore_warnings_if(condition):
if condition:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
yield
else:
yield
def _create_nan_agg_method(name, numeric_only=False, coerce_strings=False):
    # Build an aggregation function that picks between the plain reduction
    # and its NaN-skipping variant based on ``skipna`` and the input dtype.
    def f(values, axis=None, skipna=None, **kwargs):
        # ignore keyword args inserted by np.mean and other numpy aggregators
        # automatically:
        kwargs.pop('dtype', None)
        kwargs.pop('out', None)
        values = asarray(values)
        if coerce_strings and values.dtype.kind in 'SU':
            values = values.astype(object)
        if skipna or (skipna is None and values.dtype.kind == 'f'):
            # NaN-skipping path: only int/float dtypes are supported
            if values.dtype.kind not in ['i', 'f']:
                raise NotImplementedError(
                    'skipna=True not yet implemented for %s with dtype %s'
                    % (name, values.dtype))
            nanname = 'nan' + name
            # tuple axis goes through numpy -- presumably because bottleneck
            # cannot reduce over multiple axes; confirm before changing
            eager_module = np if isinstance(axis, tuple) else bn
            func = _dask_or_eager_func(nanname, eager_module)
            using_numpy_nan_func = eager_module is np
        else:
            func = _dask_or_eager_func(name)
            using_numpy_nan_func = False
        # old numpy nan-funcs warn on all-NaN slices; silence that here
        with _ignore_warnings_if(using_numpy_nan_func):
            try:
                return func(values, axis=axis, **kwargs)
            except AttributeError:
                if isinstance(values, dask_array_type):
                    msg = '%s is not yet implemented on dask arrays' % name
                else:
                    assert using_numpy_nan_func
                    msg = ('%s is not available with skipna=False with the '
                           'installed version of numpy; upgrade to numpy 1.9 '
                           'or newer to use skipna=True or skipna=None' % name)
                raise NotImplementedError(msg)
    # flag consumed by inject_reduce_methods / cls._reduce_method
    f.numeric_only = numeric_only
    return f
# NaN-aware aggregations injected as Variable/DataArray/Dataset methods
argmax = _create_nan_agg_method('argmax', coerce_strings=True)
argmin = _create_nan_agg_method('argmin', coerce_strings=True)
max = _create_nan_agg_method('max', coerce_strings=True)
min = _create_nan_agg_method('min', coerce_strings=True)
sum = _create_nan_agg_method('sum', numeric_only=True)
mean = _create_nan_agg_method('mean', numeric_only=True)
std = _create_nan_agg_method('std', numeric_only=True)
var = _create_nan_agg_method('var', numeric_only=True)
median = _create_nan_agg_method('median', numeric_only=True)
_fail_on_dask_array_input_skipna = partial(
    _fail_on_dask_array_input,
    msg='%r with skipna=True is not yet implemented on dask arrays')
_prod = _dask_or_eager_func('prod')
def prod(values, axis=None, skipna=None, **kwargs):
    # prod is hand-written (rather than built via _create_nan_agg_method)
    # because nanprod comes from npcompat, not numpy/bottleneck
    if skipna or (skipna is None and values.dtype.kind == 'f'):
        if values.dtype.kind not in ['i', 'f']:
            raise NotImplementedError(
                'skipna=True not yet implemented for prod with dtype %s'
                % values.dtype)
        _fail_on_dask_array_input_skipna(values)
        return npcompat.nanprod(values, axis=axis, **kwargs)
    return _prod(values, axis=axis, **kwargs)
# flag consumed by inject_reduce_methods / cls._reduce_method
prod.numeric_only = True
def first(values, axis, skipna=None):
    """Return the first non-NA elements in this array along the given axis
    """
    if (skipna or skipna is None) and values.dtype.kind not in 'iSU':
        # only bother for dtypes that can hold NaN
        _fail_on_dask_array_input_skipna(values)
        return nanfirst(values, axis)
    # positionally-first element along ``axis``
    return take(values, 0, axis=axis)
def last(values, axis, skipna=None):
    """Return the last non-NA elements in this array along the given axis
    """
    if (skipna or skipna is None) and values.dtype.kind not in 'iSU':
        # only bother for dtypes that can hold NaN
        _fail_on_dask_array_input_skipna(values)
        return nanlast(values, axis)
    # positionally-last element along ``axis``
    return take(values, -1, axis=axis)
def inject_reduce_methods(cls):
    # Assemble (name, implementation, include_skipna) triples: plain numpy
    # reductions taken straight from np, NaN-aware ones from this module,
    # plus ``count`` (which has no skipna argument).
    methods = ([(name, getattr(np, name), False) for name
                in NUMPY_REDUCE_METHODS]
               + [(name, globals()[name], True) for name
                  in NAN_REDUCE_METHODS]
               + [('count', count, False)])
    for name, f, include_skipna in methods:
        numeric_only = getattr(f, 'numeric_only', False)
        func = cls._reduce_method(f, include_skipna, numeric_only)
        func.__name__ = name
        # stamp the shared reduce docstring onto the injected method
        func.__doc__ = _REDUCE_DOCSTRING_TEMPLATE.format(
            name=name, cls=cls.__name__,
            extra_args=cls._reduce_extra_args_docstring)
        setattr(cls, name, func)
def op_str(name):
    # '__add__'-style dunder spelling of an operator name
    return '__%s__' % name
def get_op(name):
    # resolve the matching function from the ``operator`` module
    return getattr(operator, op_str(name))
# map in-place operator functions to their out-of-place counterparts
NON_INPLACE_OP = dict((get_op('i' + name), get_op(name))
                      for name in NUM_BINARY_OPS)
def inplace_to_noninplace_op(f):
    return NON_INPLACE_OP[f]
def inject_binary_ops(cls, inplace=False):
    # Attach comparison/arithmetic dunder methods, NaN-aware eq/ne, and the
    # private _fillna helper to ``cls``.
    for name in CMP_BINARY_OPS + NUM_BINARY_OPS:
        setattr(cls, op_str(name), cls._binary_op(get_op(name)))
    for name, f in [('eq', array_eq), ('ne', array_ne)]:
        setattr(cls, op_str(name), cls._binary_op(f))
    # patch in fillna
    f = _func_slash_method_wrapper(fillna)
    method = cls._binary_op(f, join='left', drop_na_vars=False)
    setattr(cls, '_fillna', method)
    for name in NUM_BINARY_OPS:
        # only numeric operations have in-place and reflexive variants
        setattr(cls, op_str('r' + name),
                cls._binary_op(get_op(name), reflexive=True))
        if inplace:
            setattr(cls, op_str('i' + name),
                    cls._inplace_binary_op(get_op('i' + name)))
def inject_all_ops_and_reduce_methods(cls, priority=50, array_only=True):
    # prioritize our operations over those of numpy.ndarray (priority=1)
    # and numpy.matrix (priority=10)
    cls.__array_priority__ = priority
    # patch in standard special operations
    for name in UNARY_OPS:
        setattr(cls, op_str(name), cls._unary_op(get_op(name)))
    inject_binary_ops(cls, inplace=True)
    # patch in numpy/pandas methods
    for name in NUMPY_UNARY_METHODS:
        setattr(cls, name, cls._unary_op(_method_wrapper(name)))
    for name in PANDAS_UNARY_FUNCTIONS:
        f = _func_slash_method_wrapper(getattr(pd, name))
        setattr(cls, name, cls._unary_op(f))
    # ``round`` is patched explicitly since numpy spells it ``around``
    f = _func_slash_method_wrapper(around, name='round')
    setattr(cls, 'round', cls._unary_op(f))
    if array_only:
        # these methods don't return arrays of the same shape as the input, so
        # don't try to patch these in for Dataset objects
        for name in NUMPY_SAME_METHODS:
            setattr(cls, name, _values_method_wrapper(name))
    inject_reduce_methods(cls)
| {
"repo_name": "hetland/xray",
"path": "xray/core/ops.py",
"copies": "1",
"size": "15304",
"license": "apache-2.0",
"hash": -2334349465531393500,
"line_mean": 33.7029478458,
"line_max": 80,
"alpha_frac": 0.6225823314,
"autogenerated": false,
"ratio": 3.578209025017536,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9699329831411037,
"avg_score": 0.0002923050012997103,
"num_lines": 441
} |
from functools import partial
import copy
import datetime
import numbers
import pytz
import json
import types
from django import template
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from django.utils.html import escape
from casexml.apps.case.models import CommCareCase
from casexml.apps.stock.utils import get_current_ledger_transactions
from corehq.apps.products.models import SQLProduct
from couchdbkit import ResourceNotFound
# template-tag registry for this module
register = template.Library()
# number of columns used when laying out dynamic case properties
DYNAMIC_CASE_PROPERTIES_COLUMNS = 4
def wrapped_case(case):
    """Re-wrap *case* in the CommCareCase subclass appropriate for its
    JSON representation."""
    # serialize once and reuse (the original called to_json() twice);
    # also avoid shadowing the imported ``json`` module
    case_json = case.to_json()
    case_class = CommCareCase.get_wrap_class(case_json)
    return case_class.wrap(case_json)
def normalize_date(val):
    """Promote a plain ``date`` to a ``datetime`` at midnight; leave any
    other value (including ``datetime``) untouched."""
    # Deliberately an exact type check: ``datetime`` is a subclass of
    # ``date`` and must pass through unchanged.
    if type(val) is not datetime.date:
        return val
    return datetime.datetime.combine(val, datetime.time.min)
@register.simple_tag
def render_case(case, options):
    """
    Render the single-case detail partial for *case*.

    Uses options since Django 1.3 doesn't seem to support templatetag kwargs.
    Change to kwargs when we're on a version of Django that does.
    """
    from corehq.apps.hqwebapp.templatetags.proptable_tags import get_tables_as_rows, get_default_definition
    case = wrapped_case(case)
    timezone = options.get('timezone', pytz.utc)
    # re-anchor to the concrete tzinfo in effect right now (handles DST)
    timezone = timezone.localize(datetime.datetime.utcnow()).tzinfo
    _get_tables_as_rows = partial(get_tables_as_rows, timezone=timezone)
    display = options.get('display') or case.get_display_config()
    show_transaction_export = options.get('show_transaction_export') or False
    get_case_url = options['get_case_url']
    data = copy.deepcopy(case.to_full_dict())
    default_properties = _get_tables_as_rows(data, display)
    # pop seen properties off of remaining case properties
    dynamic_data = dict(case.dynamic_case_properties())
    # hack - as of commcare 2.0, external id is basically a dynamic property
    # so also check and add it here
    if case.external_id:
        dynamic_data['external_id'] = case.external_id
    for section in display:
        for row in section['layout']:
            for item in row:
                dynamic_data.pop(item.get("expr"), None)
    if dynamic_data:
        dynamic_keys = sorted(dynamic_data.keys())
        definition = get_default_definition(
            dynamic_keys, num_columns=DYNAMIC_CASE_PROPERTIES_COLUMNS)
        dynamic_properties = _get_tables_as_rows(dynamic_data, definition)
    else:
        dynamic_properties = None
    # case actions, newest first
    actions = case.to_json()['actions']
    actions.reverse()
    the_time_is_now = datetime.datetime.utcnow()
    tz_offset_ms = int(timezone.utcoffset(the_time_is_now).total_seconds()) * 1000
    tz_abbrev = timezone.localize(the_time_is_now).tzname()
    # ledgers
    def _product_name(product_id):
        # resolve a product id to its display name, tolerating missing rows
        try:
            return SQLProduct.objects.get(product_id=product_id).name
        except SQLProduct.DoesNotExist:
            return (_('Unknown Product ("{}")').format(product_id))
    ledgers = get_current_ledger_transactions(case._id)
    for section, product_map in ledgers.items():
        # sort each ledger section's entries by product display name
        product_tuples = sorted(
            (_product_name(product_id), product_map[product_id]) for product_id in product_map
        )
        ledgers[section] = product_tuples
    return render_to_string("case/partials/single_case.html", {
        "default_properties": default_properties,
        "default_properties_options": {
            "style": "table"
        },
        "dynamic_properties": dynamic_properties,
        "dynamic_properties_options": {
            "style": "table"
        },
        "case": case,
        "case_actions": mark_safe(json.dumps(actions)),
        "timezone": timezone,
        "tz_abbrev": tz_abbrev,
        "case_hierarchy_options": {
            "show_view_buttons": True,
            "get_case_url": get_case_url,
            "timezone": timezone
        },
        "ledgers": ledgers,
        "timezone_offset": tz_offset_ms,
        "show_transaction_export": show_transaction_export,
        "xform_api_url": reverse('single_case_forms', args=[case.domain, case._id]),
    })
def get_inverse(val):
    # Produce a value that sorts in the reverse order of ``val``; used by
    # sortkey() to implement 'desc' sort directions.
    if isinstance(val, (datetime.datetime, datetime.date)):
        return datetime.datetime.max - val
    elif isinstance(val, numbers.Number):
        # NOTE(review): ``val`` is ignored here, so all numbers compare
        # equal under 'desc'; presumably this was meant to be
        # ``10 ** 20 - val`` -- confirm before changing.
        return 10 ** 20
    elif isinstance(val, (types.NoneType, bool)):
        return not val
    else:
        raise Exception("%r has uninversable type: %s" % (val, type(val)))
def sortkey(child, type_info=None):
    """Return sortkey based on sort order defined in type_info, or use default
    based on open/closed and opened_on/closed_on dates.
    """
    type_info = type_info or {}
    case = child['case']
    if case.closed:
        # closed cases sort after open ones (leading 1 vs 0)
        key = [1]
        try:
            for attr, direction in type_info[case.type]['closed_sortkeys']:
                val = normalize_date(getattr(case, attr))
                if direction.lower() == 'desc':
                    val = get_inverse(val)
                key.append(val)
        except KeyError:
            # no per-type config: most recently closed first
            key.append(datetime.datetime.max - case.closed_on)
    else:
        key = [0]
        try:
            for attr, direction in type_info[case.type]['open_sortkeys']:
                val = normalize_date(getattr(case, attr))
                if direction.lower() == 'desc':
                    val = get_inverse(val)
                key.append(val)
        except KeyError:
            # no per-type config: oldest opened first
            key.append(case.opened_on or datetime.datetime.min)
    return key
def get_session_data(case, current_case, type_info):
    # this logic should ideally be implemented in subclasses of
    # CommCareCase
    has_custom_attr = bool(type_info) and case.type in type_info
    if not has_custom_attr:
        return {'case_id': case._id}
    attr = type_info[case.type]['case_id_attr']
    return {
        attr: case._id,
        'case_id': current_case._id,
    }
# pixels of indentation per depth level in the treetable display
TREETABLE_INDENT_PX = 19
def process_case_hierarchy(case_output, get_case_url, type_info):
    # Annotate every case in the hierarchy with the data the treetable
    # template needs for editing and for adding child cases.
    current_case = case_output['case']
    submit_url_root = reverse('receiver_post', args=[current_case.domain])
    form_url_root = reverse('cloudcare_main', args=[current_case.domain, ''])
    def process_output(case_output, depth=0):
        # depth-first: children are annotated before their parent
        for c in case_output['child_cases']:
            process_output(c, depth=depth + 1)
        case = case_output['case']
        common_data = {
            'indent_px': depth * TREETABLE_INDENT_PX,
            'submit_url_root': submit_url_root,
            'form_url_root': form_url_root,
            'view_url': get_case_url(case.case_id),
            'session_data': get_session_data(case, current_case, type_info)
        }
        data = type_info.get(case.type, {})
        if 'description_property' in data:
            data['description'] = getattr(
                case, data['description_property'], None)
        if 'edit_session_data' in data:
            # NOTE(review): this runs before common_data is merged in, so it
            # assumes the type_info entry itself supplies 'session_data' --
            # confirm against callers.
            data['session_data'].update(data['edit_session_data'])
        data.update(common_data)
        case.edit_data = data
        # open cases with a configured child type also get "add child" data
        if 'child_type' in data and not case.closed:
            child_type = data['child_type']
            child_data = type_info.get(child_type, {})
            child_data.update(common_data)
            child_data.update({
                "link_text": _("Add %(case_type)s") % {
                    'case_type': child_data.get('type_name', child_type)
                },
                "parent_node_id": case.case_id,
            })
            if 'create_session_data' in child_data:
                child_data['session_data'].update(child_data['create_session_data'])
            case.add_child_data = child_data
    process_output(case_output)
def get_case_hierarchy(case, type_info):
    # Recursively collect *case* and its child cases (via reverse indices)
    # into a nested structure; the shared ``seen`` set guards against cycles.
    def get_children(case, referenced_type=None, seen=None):
        seen = seen or set()
        # skip relationship types configured to be ignored for this case type
        ignore_types = type_info.get(case.type, {}).get("ignore_relationship_types", [])
        if referenced_type and referenced_type in ignore_types:
            return None
        seen.add(case._id)
        children = [
            get_children(i.referenced_case, i.referenced_type, seen) for i in case.reverse_indices
            if i.referenced_id not in seen
        ]
        children = [c for c in children if c is not None]
        # non-first-level descendants
        descendant_types = []
        for c in children:
            descendant_types.extend(c['descendant_types'])
        descendant_types = list(set(descendant_types))
        children = sorted(children, key=partial(sortkey, type_info=type_info))
        # set parent_case_id used by flat display
        for c in children:
            if not hasattr(c['case'], 'treetable_parent_node_id'):
                c['case'].treetable_parent_node_id = case.case_id
        child_cases = []
        for c in children:
            child_cases.extend(c['case_list'])
        return {
            'case': case,
            'child_cases': children,
            'descendant_types': list(set(descendant_types + [c['case'].type for c in children])),
            'case_list': [case] + child_cases
        }
    return get_children(case)
def get_flat_descendant_case_list(case, get_case_url, type_info=None):
    # Flatten the hierarchy into the annotated case list used by templates.
    type_info = type_info or {}
    hierarchy = get_case_hierarchy(case, type_info)
    process_case_hierarchy(hierarchy, get_case_url, type_info)
    return hierarchy['case_list']
@register.simple_tag
def render_case_hierarchy(case, options):
    # Render *case*, its ancestors, and all descendants as a treetable.
    # todo: what are these doing here?
    from corehq.apps.hqwebapp.templatetags.proptable_tags import get_display_data
    case = wrapped_case(case)
    get_case_url = options.get('get_case_url')
    timezone = options.get('timezone', pytz.utc)
    columns = options.get('columns') or case.related_cases_columns
    show_view_buttons = options.get('show_view_buttons', True)
    type_info = options.get('related_type_info', case.related_type_info)
    case_list = get_flat_descendant_case_list(
        case, get_case_url, type_info=type_info)
    if case.indices:
        # has parent case(s)
        # todo: handle duplicates in ancestor path (bubbling up of parent-child
        # relationships)
        parent_cases = []
        for idx in case.indices:
            try:
                parent_cases.append(idx.referenced_case)
            except ResourceNotFound:
                # referenced parent is missing; keep a placeholder so the
                # remaining parents still render
                parent_cases.append(None)
        for parent_case in parent_cases:
            if parent_case:
                parent_case.edit_data = {
                    'view_url': get_case_url(parent_case.case_id)
                }
                last_parent_id = parent_case.case_id
            else:
                last_parent_id = None
        for c in case_list:
            # attach rows without a parent node to the last known parent
            if not getattr(c, 'treetable_parent_node_id', None) and last_parent_id:
                c.treetable_parent_node_id = last_parent_id
        case_list = parent_cases + case_list
    for c in case_list:
        # skip the None placeholders inserted for missing parents
        if not c:
            continue
        c.columns = []
        case_dict = c.to_full_dict()
        for column in columns:
            c.columns.append(get_display_data(
                case_dict, column, timezone=timezone))
    return render_to_string("case/partials/case_hierarchy.html", {
        'current_case': case,
        'domain': case.domain,
        'case_list': case_list,
        'columns': columns,
        'num_columns': len(columns) + 1,
        'show_view_buttons': show_view_buttons,
    })
@register.simple_tag
def case_inline_display(case):
    """
    Given a case id, make a best effort at displaying it.
    """
    if case:
        if case.opened_on:
            ret = "%s (%s: %s)" % (case.name, _("Opened"), case.opened_on.date())
        else:
            ret = case.name
    else:
        ret = _("Empty Case")
    return escape(ret)
| {
"repo_name": "puttarajubr/commcare-hq",
"path": "corehq/ex-submodules/casexml/apps/case/templatetags/case_tags.py",
"copies": "1",
"size": "11929",
"license": "bsd-3-clause",
"hash": 6163070767706054000,
"line_mean": 33.0828571429,
"line_max": 107,
"alpha_frac": 0.6070081314,
"autogenerated": false,
"ratio": 3.794211195928753,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9891797061995335,
"avg_score": 0.0018844530666834757,
"num_lines": 350
} |
from functools import partial
import copy
import datetime
import numbers
import pytz
import simplejson
import types
from django import template
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from django.utils.html import escape
from casexml.apps.case.models import CommCareCase
# template-tag registry for this module
register = template.Library()
# number of columns used when laying out dynamic case properties
DYNAMIC_CASE_PROPERTIES_COLUMNS = 4
def wrapped_case(case):
    """Re-wrap *case* in the CommCareCase subclass appropriate for its
    JSON representation."""
    # serialize once and reuse (the original called to_json() twice)
    case_json = case.to_json()
    case_class = CommCareCase.get_wrap_class(case_json)
    return case_class.wrap(case_json)
@register.simple_tag
def render_case(case, options):
    """
    Render the single-case detail partial for *case*.

    Uses options since Django 1.3 doesn't seem to support templatetag kwargs.
    Change to kwargs when we're on a version of Django that does.
    """
    # todo: what are these doing here?
    from corehq.apps.hqwebapp.templatetags.proptable_tags import get_tables_as_rows, get_definition
    case = wrapped_case(case)
    timezone = options.get('timezone', pytz.utc)
    _get_tables_as_rows = partial(get_tables_as_rows, timezone=timezone)
    display = options.get('display') or case.get_display_config()
    get_case_url = options['get_case_url']
    data = copy.deepcopy(case.to_full_dict())
    default_properties = _get_tables_as_rows(data, display)
    # pop seen properties off of remaining case properties
    dynamic_data = dict(case.dynamic_case_properties())
    # hack - as of commcare 2.0, external id is basically a dynamic property
    # so also check and add it here
    if case.external_id:
        dynamic_data['external_id'] = case.external_id
    for section in display:
        for row in section['layout']:
            for item in row:
                dynamic_data.pop(item.get("expr"), None)
    if dynamic_data:
        dynamic_keys = sorted(dynamic_data.keys())
        definition = get_definition(
            dynamic_keys, num_columns=DYNAMIC_CASE_PROPERTIES_COLUMNS)
        dynamic_properties = _get_tables_as_rows(dynamic_data, definition)
    else:
        dynamic_properties = None
    # case actions, newest first
    actions = case.to_json()['actions']
    actions.reverse()
    tz_abbrev = timezone.localize(datetime.datetime.now()).tzname()
    return render_to_string("case/partials/single_case.html", {
        "default_properties": default_properties,
        "default_properties_options": {
            "style": "table"
        },
        "dynamic_properties": dynamic_properties,
        "dynamic_properties_options": {
            "style": "table"
        },
        "case": case,
        "case_actions": mark_safe(simplejson.dumps(actions)),
        "timezone": timezone,
        "tz_abbrev": tz_abbrev,
        "case_hierarchy_options": {
            "show_view_buttons": True,
            "get_case_url": get_case_url,
            "timezone": timezone
        }
    })
def get_inverse(val):
    # Produce a value that sorts in the reverse order of ``val``; used by
    # sortkey() to implement 'desc' sort directions.
    if isinstance(val, (datetime.datetime, datetime.date)):
        return datetime.datetime.max - val
    elif isinstance(val, numbers.Number):
        # NOTE(review): ``val`` is ignored here, so all numbers compare
        # equal under 'desc' -- confirm intended behavior before changing.
        return 10 ** 20
    elif isinstance(val, (types.NoneType, bool)):
        return not val
    else:
        raise Exception("%r has uninversable type: %s" % (val, type(val)))
def sortkey(child, type_info=None):
    """Return sortkey based on sort order defined in type_info, or use default
    based on open/closed and opened_on/closed_on dates.
    """
    type_info = type_info or {}
    case = child['case']
    if case.closed:
        # closed cases sort after open ones (leading 1 vs 0)
        key = [1]
        try:
            for attr, direction in type_info[case.type]['closed_sortkeys']:
                val = getattr(case, attr)
                if direction.lower() == 'desc':
                    val = get_inverse(val)
                key.append(val)
        except KeyError:
            # no per-type config: most recently closed first
            key.append(datetime.datetime.max - case.closed_on)
    else:
        key = [0]
        try:
            for attr, direction in type_info[case.type]['open_sortkeys']:
                val = getattr(case, attr)
                if direction.lower() == 'desc':
                    val = get_inverse(val)
                key.append(val)
        except KeyError:
            # no per-type config: oldest opened first
            key.append(case.opened_on or datetime.datetime.min)
    return key
def get_session_data(case, current_case, type_info):
    # this logic should ideally be implemented in subclasses of
    # CommCareCase
    if not (type_info and case.type in type_info):
        # no per-type configuration: just identify the case itself
        return {'case_id': case._id}
    id_attr = type_info[case.type]['case_id_attr']
    return {id_attr: case._id, 'case_id': current_case._id}
# pixels of indentation per depth level in the treetable display
TREETABLE_INDENT_PX = 19
def process_case_hierarchy(case_output, get_case_url, type_info):
    # Annotate every case in the hierarchy with the data the treetable
    # template needs for editing and for adding child cases.
    current_case = case_output['case']
    submit_url_root = reverse('receiver_post', args=[current_case.domain])
    form_url_root = reverse('cloudcare_main', args=[current_case.domain, ''])
    def process_output(case_output, depth=0):
        # depth-first: children are annotated before their parent
        for c in case_output['child_cases']:
            process_output(c, depth=depth + 1)
        case = case_output['case']
        common_data = {
            'indent_px': depth * TREETABLE_INDENT_PX,
            'submit_url_root': submit_url_root,
            'form_url_root': form_url_root,
            'view_url': get_case_url(case.case_id),
            'session_data': get_session_data(case, current_case, type_info)
        }
        data = type_info.get(case.type, {})
        if 'description_property' in data:
            data['description'] = getattr(
                case, data['description_property'], None)
        if 'edit_session_data' in data:
            # NOTE(review): this runs before common_data is merged in, so it
            # assumes the type_info entry itself supplies 'session_data' --
            # confirm against callers.
            data['session_data'].update(data['edit_session_data'])
        data.update(common_data)
        case.edit_data = data
        # open cases with a configured child type also get "add child" data
        if 'child_type' in data and not case.closed:
            child_type = data['child_type']
            child_data = type_info.get(child_type, {})
            child_data.update(common_data)
            child_data.update({
                "link_text": _("Add %(case_type)s") % {
                    'case_type': child_data.get('type_name', child_type)
                },
                "parent_node_id": case.case_id,
            })
            if 'create_session_data' in child_data:
                child_data['session_data'].update(child_data['create_session_data'])
            case.add_child_data = child_data
    process_output(case_output)
def get_case_hierarchy(case, type_info):
    """Recursively collect *case* and its child cases (via reverse indices)
    into a nested structure; the shared ``seen`` set guards against cycles.
    """
    def get_children(case, referenced_type=None, seen=None):
        seen = seen or set()
        # Check the ignored-relationship configuration *before* recursing,
        # so an ignored subtree is never walked and then discarded (the
        # original performed the full recursion first and only then
        # returned None for ignored relationship types).
        ignore_types = type_info.get(case.type, {}).get("ignore_relationship_types", [])
        if referenced_type and referenced_type in ignore_types:
            return None
        seen.add(case._id)
        children = [
            get_children(i.referenced_case, i.referenced_type, seen) for i in case.reverse_indices
            if i.referenced_id not in seen
        ]
        children = [c for c in children if c is not None]
        # non-first-level descendants
        descendant_types = []
        for c in children:
            descendant_types.extend(c['descendant_types'])
        descendant_types = list(set(descendant_types))
        children = sorted(children, key=partial(sortkey, type_info=type_info))
        # set parent_case_id used by flat display
        for c in children:
            if not hasattr(c['case'], 'treetable_parent_node_id'):
                c['case'].treetable_parent_node_id = case.case_id
        child_cases = []
        for c in children:
            child_cases.extend(c['case_list'])
        return {
            'case': case,
            'child_cases': children,
            'descendant_types': list(set(descendant_types + [c['case'].type for c in children])),
            'case_list': [case] + child_cases
        }
    return get_children(case)
def get_flat_descendant_case_list(case, get_case_url, type_info=None):
    """Return ``case`` plus all of its descendants as a single flat list.

    Builds the full hierarchy, decorates it for display via
    ``process_case_hierarchy`` and returns the flattened 'case_list'.
    """
    info = type_info or {}
    tree = get_case_hierarchy(case, info)
    process_case_hierarchy(tree, get_case_url, info)
    return tree['case_list']
@register.simple_tag
def render_case_hierarchy(case, options):
    """Render the case-hierarchy table template for ``case``.

    ``options`` may supply: 'get_case_url' (required URL builder),
    'timezone', 'columns', 'show_view_buttons' and 'related_type_info'.
    """
    # todo: what are these doing here?
    from corehq.apps.hqwebapp.templatetags.proptable_tags import get_display_data
    case = wrapped_case(case)
    url_builder = options.get('get_case_url')
    tz = options.get('timezone', pytz.utc)
    display_columns = options.get('columns') or case.related_cases_columns
    show_view_buttons = options.get('show_view_buttons', True)
    related_info = options.get('related_type_info', case.related_type_info)
    descendants = get_flat_descendant_case_list(
        case, url_builder, type_info=related_info)
    if case.indices:
        # The case has parent(s): prepend them and hang orphan rows off the
        # last parent so the tree display stays connected.
        # todo: handle duplicates in ancestor path (bubbling up of
        # parent-child relationships)
        ancestors = [idx.referenced_case for idx in case.indices]
        for ancestor in ancestors:
            ancestor.edit_data = {
                'view_url': url_builder(ancestor.case_id)
            }
        anchor_id = ancestors[-1].case_id
        for row in descendants:
            if not getattr(row, 'treetable_parent_node_id', None):
                row.treetable_parent_node_id = anchor_id
        descendants = ancestors + descendants
    for row in descendants:
        full_dict = row.to_full_dict()
        row.columns = [
            get_display_data(full_dict, col, timezone=tz)
            for col in display_columns
        ]
    return render_to_string("case/partials/case_hierarchy.html", {
        'current_case': case,
        'domain': case.domain,
        'case_list': descendants,
        'columns': display_columns,
        'num_columns': len(display_columns) + 1,
        'show_view_buttons': show_view_buttons,
    })
@register.simple_tag
def case_inline_display(case):
    """
    Given a case id, make a best effort at displaying it.
    """
    if not case:
        return escape(_("Empty Case"))
    if not case.opened_on:
        return escape(case.name)
    return escape("%s (%s: %s)" % (case.name, _("Opened"), case.opened_on.date()))
| {
"repo_name": "SEL-Columbia/commcare-hq",
"path": "corehq/ex-submodules/casexml/apps/case/templatetags/case_tags.py",
"copies": "1",
"size": "10253",
"license": "bsd-3-clause",
"hash": 5261661731328669000,
"line_mean": 32.6163934426,
"line_max": 99,
"alpha_frac": 0.6024578172,
"autogenerated": false,
"ratio": 3.7598093142647597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.486226713146476,
"avg_score": null,
"num_lines": null
} |
from functools import partial
import copy
import time
import simplejson as json
from tornado.web import RequestHandler
from tornado.gen import coroutine
from gemstone.core.structs import JsonRpcResponse, JsonRpcRequest, JsonRpcResponseBatch, \
GenericResponse, JsonRpcInvalidRequestError
# Public API of this module.
__all__ = [
    'TornadoJsonRpcHandler',
    'GemstoneCustomHandler'
]
# noinspection PyAbstractClass
class GemstoneCustomHandler(RequestHandler):
    """
    Base class for custom Tornado handlers that
    can be added to the microservice.
    Offers a reference to the microservice through the ``self.microservice`` attribute.
    """
    def __init__(self, *args, **kwargs):
        #: reference to the microservice that uses the request handler
        # NOTE: set BEFORE super().__init__; Tornado's RequestHandler
        # constructor calls initialize(), which assigns the real value —
        # assigning None afterwards would clobber it.
        self.microservice = None
        super(GemstoneCustomHandler, self).__init__(*args, **kwargs)
    # noinspection PyMethodOverriding
    def initialize(self, microservice):
        # Called by Tornado with the kwargs the handler was registered with.
        self.microservice = microservice
# noinspection PyAbstractClass
class TornadoJsonRpcHandler(RequestHandler):
    """
    Tornado handler that speaks JSON RPC 2.0 over HTTP POST.

    Supports single calls, batch calls and notifications. Notifications are
    acknowledged immediately and executed without the client waiting for
    their results. Methods exposed as private require a successfully
    authenticated request (see :meth:`get_current_user`).
    """

    def __init__(self, *args, **kwargs):
        #: guards against writing more than one response per HTTP request
        self.response_is_sent = False
        self.methods = None
        self.executor = None
        self.validation_strategies = None
        self.api_token_handlers = None
        self.logger = None
        self.microservice = None
        super(TornadoJsonRpcHandler, self).__init__(*args, **kwargs)

    # noinspection PyMethodOverriding
    def initialize(self, microservice):
        """Bind this handler to its owning microservice (called by Tornado)."""
        self.logger = microservice.logger
        self.methods = microservice.methods
        self.executor = microservice.get_executor()
        self.response_is_sent = False
        self.microservice = microservice

    def get_current_user(self):
        """Delegate request authentication to the microservice."""
        return self.microservice.authenticate_request(self)

    @coroutine
    def post(self):
        """Entry point for JSON RPC calls: parse, dispatch, respond."""
        # The Content-type header may be missing entirely; default to "" so
        # a header-less request is rejected instead of crashing with
        # ``AttributeError: 'NoneType' object has no attribute 'split'``.
        content_type = self.request.headers.get("Content-type", "")
        if content_type.split(";")[0] != "application/json":
            self.write_single_response(GenericResponse.INVALID_REQUEST)
            return
        req_body_raw = self.request.body.decode()
        try:
            req_object = json.loads(req_body_raw)
        except json.JSONDecodeError:
            self.write_single_response(GenericResponse.PARSE_ERROR)
            return
        # handle the actual call
        if isinstance(req_object, dict):
            # single call
            try:
                req_object = JsonRpcRequest.from_dict(req_object)
            except JsonRpcInvalidRequestError:
                self.write_single_response(GenericResponse.INVALID_REQUEST)
                return
            if req_object.is_notification():
                # acknowledge immediately; the call still executes below but
                # write_single_response() suppresses any second response
                self.write_single_response(GenericResponse.NOTIFICATION_RESPONSE)
            result = yield self.handle_single_request(req_object)
            self.write_single_response(result)
        elif isinstance(req_object, list):
            if len(req_object) == 0:
                self.write_single_response(GenericResponse.INVALID_REQUEST)
                return
            # batch call
            invalid_requests = []
            requests_futures = []
            notification_futures = []
            for item in req_object:
                try:
                    if not isinstance(item, dict):
                        raise JsonRpcInvalidRequestError()
                    current_rpc_call = JsonRpcRequest.from_dict(item)
                    # handle notifications
                    if current_rpc_call.is_notification():
                        # we trigger their execution, but we don't yield for
                        # their results
                        notification_futures.append(
                            self.handle_single_request(current_rpc_call))
                    else:
                        requests_futures.append(
                            self.handle_single_request(current_rpc_call))
                except JsonRpcInvalidRequestError:
                    invalid_requests.append(GenericResponse.INVALID_REQUEST)
            finished_rpc_calls = yield requests_futures
            self.write_batch_response(
                JsonRpcResponseBatch(invalid_requests + finished_rpc_calls))
        else:
            self.write_single_response(GenericResponse.INVALID_REQUEST)

    @coroutine
    def handle_single_request(self, request_object):
        """
        Handles a single request object and returns the correct result as follows:
        - A valid response object if it is a regular request (with ID)
        - ``None`` if it was a notification (if None is returned, a response object with
          "received" body was already sent to the client.
        :param request_object: A :py:class:`gemstone.core.structs.JsonRpcRequest` object
            representing a Request object
        :return: A :py:class:`gemstone.core.structs.JsonRpcResponse` object representing a
            Response object or None if no response is expected (it was a notification)
        """
        # don't handle responses?
        if isinstance(request_object, JsonRpcResponse):
            return request_object
        error = None
        result = None
        id_ = request_object.id
        # validate method name
        if request_object.method not in self.methods:
            resp = GenericResponse.METHOD_NOT_FOUND
            resp.id = id_
            return resp
        method = self.methods[request_object.method]
        # Notify plugins of the call. The original code branched on the
        # params container type here, but both branches were identical.
        self.call_method_from_all_plugins("on_method_call", request_object)
        # check for private access
        if self._method_is_private(method):
            if not self.get_current_user():
                resp = GenericResponse.ACCESS_DENIED
                resp.id = id_
                return resp
        method = self.prepare_method_call(method, request_object.params)
        # before request hook
        _method_duration = time.time()
        try:
            result = yield self.call_method(method)
        except Exception as e:
            # catch all exceptions generated by method
            # and handle in a special manner only the TypeError
            if isinstance(e, TypeError):
                # TODO: find a proper way to check that the function got the wrong
                # parameters (with **kwargs)
                if "got an unexpected keyword argument" in e.args[0]:
                    resp = GenericResponse.INVALID_PARAMS
                    resp.id = id_
                    return resp
                # TODO: find a proper way to check that the function got the wrong
                # parameters (with *args)
                elif "takes" in e.args[0] and "positional argument" in e.args[0] and "were given" in \
                        e.args[0]:
                    resp = GenericResponse.INVALID_PARAMS
                    resp.id = id_
                    return resp
                elif "missing" in e.args[0] and "required positional argument" in e.args[0]:
                    resp = GenericResponse.INVALID_PARAMS
                    resp.id = id_
                    return resp
            # generic handling for any exception (even TypeError) that
            # is not generated because of bad parameters
            self.call_method_from_all_plugins("on_internal_error", e)
            err = GenericResponse.INTERNAL_ERROR
            err.id = id_
            err.error["data"] = {
                "class": type(e).__name__,
                "info": str(e)
            }
            return err
        to_return_resp = JsonRpcResponse(result=result, error=error, id=id_)
        return to_return_resp

    def write_single_response(self, response_obj):
        """
        Writes a json rpc response ``{"result": result, "error": error, "id": id}``.
        If the ``id`` is ``None``, the response will not contain an ``id`` field.
        The response is sent to the client as an ``application/json`` response. Only one call per
        response is allowed
        :param response_obj: A Json rpc response object
        :return:
        """
        if not isinstance(response_obj, JsonRpcResponse):
            raise ValueError(
                "Expected JsonRpcResponse, but got {} instead".format(type(response_obj).__name__))
        if not self.response_is_sent:
            self.set_status(200)
            self.set_header("Content-Type", "application/json")
            self.finish(response_obj.to_string())
            self.response_is_sent = True

    def write_batch_response(self, batch_response):
        """Serialize and write a batch of JSON RPC responses."""
        self.set_header("Content-Type", "application/json")
        self.write(batch_response.to_string())

    def write_error(self, status_code, **kwargs):
        """Translate Tornado-level errors into JSON RPC error responses."""
        if status_code == 405:
            self.set_status(405)
            self.write_single_response(
                JsonRpcResponse(error={"code": 405, "message": "Method not allowed"}))
            return
        exc_info = kwargs["exc_info"]
        err = GenericResponse.INTERNAL_ERROR
        err.error["data"] = {
            "class": str(exc_info[0].__name__),
            "info": str(exc_info[1])
        }
        # deliberate 200: JSON RPC carries the error in the body
        self.set_status(200)
        self.write_single_response(err)

    def prepare_method_call(self, method, args):
        """
        Wraps a method so that method() will call ``method(*args)`` or ``method(**args)``,
        depending of args type
        :param method: a callable object (method)
        :param args: dict or list with the parameters for the function
        :return: a 'patched' callable
        """
        if self._method_requires_handler_ref(method):
            if isinstance(args, list):
                args = [self] + args
            elif isinstance(args, dict):
                args["handler"] = self
        if isinstance(args, list):
            to_call = partial(method, *args)
        elif isinstance(args, dict):
            to_call = partial(method, **args)
        else:
            raise TypeError(
                "args must be list or dict but got {} instead".format(type(args).__name__))
        return to_call

    @coroutine
    def call_method(self, method):
        """
        Calls a blocking method in an executor, in order to preserve the non-blocking behaviour
        If ``method`` is a coroutine, yields from it and returns, no need to execute in
        in an executor.
        :param method: The method or coroutine to be called (with no arguments).
        :return: the result of the method call
        """
        if self._method_is_async_generator(method):
            result = yield method()
        else:
            result = yield self.executor.submit(method)
        return result

    @coroutine
    def handle_batch_request(self, batch_req_obj):
        """Resolve every request in a batch concurrently."""
        responses = yield [self.handle_single_request(single_req) for single_req in
                           batch_req_obj.iter_items()]
        return responses

    def _method_requires_handler_ref(self, method):
        return getattr(method, "_req_h_ref", False)

    def _method_is_async_generator(self, method):
        """
        Given a simple callable or a callable wrapped in funtools.partial, determines
        if it was wrapped with the :py:func:`gemstone.async_method` decorator.
        :param method:
        :return:
        """
        if hasattr(method, "func"):
            func = method.func
        else:
            func = method
        return getattr(func, "_is_coroutine", False)

    @staticmethod
    def _method_is_private(method):
        return getattr(method, "_exposed_private", False)

    def call_method_from_all_plugins(self, method, *args, **kwargs):
        """Invoke hook ``method`` on every plugin that defines it.

        Plugins that lack the hook (or expose it as a falsy value) are
        skipped. The original code checked ``if not method`` — the hook
        *name*, which is always a truthy string — and used ``getattr``
        without a default, so a plugin missing the hook raised
        AttributeError instead of being skipped.
        """
        for plugin in self.microservice.plugins:
            method_callable = getattr(plugin, method, None)
            if not method_callable:
                continue
            method_callable(*args, **kwargs)
| {
"repo_name": "vladcalin/gemstone",
"path": "gemstone/core/handlers.py",
"copies": "2",
"size": "11846",
"license": "mit",
"hash": 6248606083332428000,
"line_mean": 36.01875,
"line_max": 102,
"alpha_frac": 0.5930271822,
"autogenerated": false,
"ratio": 4.514481707317073,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6107508889517074,
"avg_score": null,
"num_lines": null
} |
from functools import partial
import copy
from six import (
with_metaclass,
)
from elasticsearch_dsl import DocType
from elasticsearch_dsl.utils import AttrList, AttrDict
from elasticsearch_dsl.field import InnerObjectWrapper
from elasticsearch import helpers
from nefertari.json_httpexceptions import (
JHTTPBadRequest,
JHTTPNotFound,
)
from nefertari.utils import (
process_fields,
process_limit,
dictset,
split_strip,
)
from .meta import DocTypeMeta
from .fields import (
ReferenceField, IdField, DictField, ListField,
IntegerField,
)
class SyncRelatedMixin(object):
    """Mixin that keeps both ends of document relationships in sync.

    When a relationship field changes, hooks are registered that update the
    back-referenced documents; the hooks run only after a successful save.
    """
    # Tuple (not list) of zero-arg callables to run after save; a tuple keeps
    # the class-level default immutable across instances.
    _backref_hooks = ()
    _created = False

    def __init__(self, *args, **kwargs):
        # 'meta' in kwargs indicates the instance was hydrated from ES query
        # results; its absence means user code is creating a new document.
        _created = 'meta' not in kwargs
        super(SyncRelatedMixin, self).__init__(*args, **kwargs)
        if not _created:
            return
        for field_name in self._relationships():
            if field_name not in kwargs:
                continue
            new_value = kwargs[field_name]
            if new_value not in ([], {}, None):
                # Reset to the field's empty value first so __setattr__ sees
                # an empty old value and registers the correct hooks.
                field_obj = self._doc_type.mapping[field_name]
                self._d_[field_name] = field_obj.empty()
                setattr(self, field_name, new_value)
        self._created = _created

    def __setattr__(self, name, value):
        # Relationship assignments trigger back-reference bookkeeping.
        if name in self._relationships():
            self._load_related(name)
            self._sync_related(
                new_value=value,
                old_value=self._d_.get(name),
                field_name=name)
        super(SyncRelatedMixin, self).__setattr__(name, value)

    def _sync_related(self, new_value, old_value, field_name):
        """Register addition/deletion hooks for the diff between old and new."""
        field = self._doc_type.mapping[field_name]
        if not field._back_populates:
            return
        # Normalize both sides to lists so set arithmetic works uniformly.
        if not isinstance(new_value, (list, AttrList)):
            new_value = [new_value] if new_value else []
        if not isinstance(old_value, (list, AttrList)):
            old_value = [old_value] if old_value else []
        added_values = set(new_value) - set(old_value)
        deleted_values = set(old_value) - set(new_value)
        if added_values:
            for val in added_values:
                self._register_addition_hook(val, field._back_populates)
        if deleted_values:
            for val in deleted_values:
                self._register_deletion_hook(val, field._back_populates)

    @staticmethod
    def _addition_hook(_item, _add_item, _field_name):
        """Add ``_add_item`` to ``_item.<_field_name>`` and save if changed."""
        field = _item._doc_type.mapping[_field_name]
        curr_val = getattr(_item, _field_name, None)
        if field._multi:
            new_val = list(curr_val or [])
            if _add_item not in new_val:
                new_val.append(_add_item)
        else:
            new_val = (_add_item if _add_item != curr_val
                       else curr_val)
        # Only persist when the value actually changed.
        value_changed = (
            (field._multi and set(curr_val or []) != set(new_val)) or
            (not field._multi and curr_val != new_val))
        if value_changed:
            _item.update({_field_name: new_val})

    def _register_addition_hook(self, item, field_name):
        """ Register hook to add `self` to `item` field `field_name`. """
        _hook = partial(
            self.__class__._addition_hook,
            _item=item,
            _add_item=self,
            _field_name=field_name)
        self._backref_hooks += (_hook,)

    @staticmethod
    def _deletion_hook(_item, _del_item, _field_name):
        """Remove ``_del_item`` from ``_item.<_field_name>`` and save if changed."""
        curr_val = getattr(_item, _field_name, None)
        if not curr_val:
            return
        field = _item._doc_type.mapping[_field_name]
        if field._multi:
            new_val = list(curr_val or [])
            if _del_item in new_val:
                new_val.remove(_del_item)
        else:
            new_val = (None if _del_item == curr_val
                       else curr_val)
        value_changed = (
            (field._multi and set(curr_val or []) != set(new_val)) or
            (not field._multi and curr_val != new_val))
        if value_changed:
            _item.update({_field_name: new_val})

    def _register_deletion_hook(self, item, field_name):
        """ Register hook to delete `self` from `item` field
        `field_name`.
        """
        _hook = partial(
            self.__class__._deletion_hook,
            _item=item,
            _del_item=self,
            _field_name=field_name)
        self._backref_hooks += (_hook,)

    def save(self, *args, **kwargs):
        """Save the document, then run and clear the back-reference hooks.

        Hooks run only when the save succeeds — if ``save`` raises, the
        exception propagates and hooks stay registered. (The previous
        ``try/except: raise/else`` wrapper had exactly these semantics and
        was removed as a no-op.)
        """
        obj = super(SyncRelatedMixin, self).save(*args, **kwargs)
        for hook in self._backref_hooks:
            hook()
        self._backref_hooks = ()
        return obj
class VersionedMixin(object):
    """ Mixin that adds "version" field. """
    version = IntegerField()

    def _bump_version(self):
        # Bump only on real modifications so no-op saves keep the version.
        if self._is_modified():
            self.version = (self.version or 0) + 1

    def save(self, *args, **kwargs):
        """Bump the version (when modified) and delegate to the parent save."""
        self._bump_version()
        return super(VersionedMixin, self).save(*args, **kwargs)

    @classmethod
    def get_null_values(cls):
        """Null values for fields, with 'version' excluded so it is preserved."""
        null_values = super(VersionedMixin, cls).get_null_values()
        null_values.pop('version', None)
        return null_values

    def __repr__(self):
        """Parent repr with ', v=<version>' appended before the closing '>'."""
        name = super(VersionedMixin, self).__repr__()
        if hasattr(self, 'version'):
            # str.replace returns a NEW string; the original code discarded
            # the result, so the version never actually appeared in the repr.
            name = name.replace('>', ', v=%s>' % self.version)
        return name
class BaseDocument(with_metaclass(
        DocTypeMeta,
        VersionedMixin, SyncRelatedMixin, DocType)):
    """Base Elasticsearch document for nefertari models.

    Combines elasticsearch_dsl persistence (DocType) with relationship
    syncing and versioning, and adds the query/serialization API that
    nefertari expects (``get_collection``, ``to_dict``, pk helpers, ...).
    """
    # Field-name collections consumed by nefertari's ACL-aware serializers.
    _public_fields = None
    _auth_fields = None
    _hidden_fields = None
    # Relationship fields to render nested (instead of as pk references).
    _nested_relationships = ()
    _nesting_depth = 1
    _request = None
    def __init__(self, *args, **kwargs):
        super(BaseDocument, self).__init__(*args, **kwargs)
        self._sync_id_field()
    def __eq__(self, other):
        # Same class and both have a non-None pk with equal values.
        if isinstance(other, self.__class__):
            pk_field = self.pk_field()
            self_pk = getattr(self, pk_field, None)
            other_pk = getattr(other, pk_field, None)
            return (self_pk is not None and other_pk is not None
                    and self_pk == other_pk)
        return super(BaseDocument, self).__eq__(other)
    def __ne__(self, other):
        return not self.__eq__(other)
    @property
    def __hash__(self):
        # NOTE(review): unconventional — ``__hash__`` as a property returning
        # a zero-arg callable. This appears to rely on special-method lookup
        # invoking the property getter and then calling its return value;
        # when the pk is missing, ``None`` is returned and hashing fails.
        # Confirm this behaves as intended on the targeted interpreters.
        pk_field = self.pk_field()
        pk = getattr(self, pk_field, None)
        if pk is None:
            self._sync_id_field()
            pk = getattr(self, pk_field, None)
        if pk is None:
            return None
        def _hasher():
            cls_name = self.__class__.__name__
            return hash(cls_name + str(pk))
        return _hasher
    def _sync_id_field(self):
        """ Copy meta["_id"] to IdField. """
        if self.pk_field_type() is IdField:
            pk_field = self.pk_field()
            if not getattr(self, pk_field, None) and self._id is not None:
                self._d_[pk_field] = str(self._id)
    def __setattr__(self, name, value):
        # IdField-backed pk mirrors ES meta "_id" and must not be set directly.
        if name == self.pk_field() and self.pk_field_type() == IdField:
            raise AttributeError('{} is read-only'.format(self.pk_field()))
        super(BaseDocument, self).__setattr__(name, value)
    def __getattr__(self, name):
        if name == '_id' and 'id' not in self.meta:
            return None
        # Relationship access lazily replaces stored pks with documents.
        if name in self._relationships():
            self._load_related(name)
        return super(BaseDocument, self).__getattr__(name)
    def __repr__(self):
        parts = ['%s:' % self.__class__.__name__]
        pk_field = self.pk_field()
        parts.append('{}={}'.format(pk_field, getattr(self, pk_field)))
        return '<%s>' % ', '.join(parts)
    def _getattr_raw(self, name):
        # Raw storage access, bypassing the relationship-loading __getattr__.
        return self._d_[name]
    def _unload_related(self, field_name):
        """Replace loaded related documents with their pk values in storage."""
        value = field_name in self._d_ and self._d_[field_name]
        if not value:
            return
        field = self._doc_type.mapping[field_name]
        doc_cls = field._doc_class
        if not isinstance(value, (list, AttrList)):
            value = [value]
        if isinstance(value[0], doc_cls):
            pk_field = doc_cls.pk_field()
            items = [getattr(item, pk_field, None) for item in value]
            items = [item for item in items if item is not None]
            if items:
                self._d_[field_name] = items if field._multi else items[0]
    def _load_related(self, field_name):
        """Replace stored pk values with fetched related documents."""
        value = field_name in self._d_ and self._d_[field_name]
        if not value:
            return
        field = self._doc_type.mapping[field_name]
        doc_cls = field._doc_class
        if not isinstance(value, (list, AttrList)):
            value = [value]
        if not isinstance(value[0], doc_cls):
            pk_field = doc_cls.pk_field()
            items = doc_cls.get_collection(**{pk_field: value})
            if items:
                self._d_[field_name] = items if field._multi else items[0]
    def save(self, request=None, refresh=True, **kwargs):
        # ``refresh=True`` makes the save immediately visible to searches.
        super(BaseDocument, self).save(refresh=refresh, **kwargs)
        self._sync_id_field()
        return self
    def update(self, params, **kw):
        """Update fields from ``params`` (pk excluded) and save."""
        process_bools(params)
        _validate_fields(self.__class__, params.keys())
        pk_field = self.pk_field()
        iter_types = (DictField, ListField)
        iter_fields = [
            field for field in self._doc_type.mapping
            if isinstance(self._doc_type.mapping[field], iter_types)]
        for key, value in params.items():
            if key == pk_field:
                continue
            if key in iter_fields:
                # dict/list fields support incremental +/- update semantics
                self.update_iterables(value, key, unique=True, save=False)
            else:
                setattr(self, key, value)
        return self.save(**kw)
    def delete(self, request=None):
        # ``request`` is accepted for interface parity but unused here.
        super(BaseDocument, self).delete()
    def to_dict(self, include_meta=False, _keys=None, request=None,
                _depth=None):
        """
        DocType and nefertari both expect a to_dict method, but
        they expect it to act differently. DocType uses to_dict for
        serialize for saving to es. nefertari uses it to serialize
        for serving JSON to the client. For now we differentiate by
        looking for a request argument. If it's present we assume
        that we're serving JSON to the client, otherwise we assume
        that we're saving to es
        """
        if _depth is None:
            _depth = self._nesting_depth
        depth_reached = _depth is not None and _depth <= 0
        if request is None:
            request = self._request
        for name in self._relationships():
            include = (request is not None and
                       name in self._nested_relationships and
                       not depth_reached)
            if not include:
                self._unload_related(name)
                continue
            # Related document is implicitly loaded on __getattr__
            value = getattr(self, name)
            if value:
                if not isinstance(value, (list, AttrList)):
                    value = [value]
                for val in value:
                    try:
                        # propagate serialization context to nested documents
                        val._nesting_depth = _depth - 1
                        val._request = request
                    except AttributeError:
                        continue
        data = super(BaseDocument, self).to_dict(include_meta=include_meta)
        data = {key: val for key, val in data.items()
                if not key.startswith('__')}
        if request is not None and '_type' not in data:
            data['_type'] = self.__class__.__name__
        if request is not None:
            data['_pk'] = str(getattr(self, self.pk_field()))
        return data
    @classmethod
    def _flatten_relationships(cls, params):
        """Replace related document instances in ``params`` with their pks."""
        for name in cls._relationships():
            if name not in params:
                continue
            inst = params[name]
            field_obj = cls._doc_type.mapping[name]
            pk_field = field_obj._doc_class.pk_field()
            if isinstance(inst, (list, AttrList)):
                params[name] = [getattr(i, pk_field, i) for i in inst]
            else:
                params[name] = getattr(inst, pk_field, inst)
        return params
    @classmethod
    def _relationships(cls):
        """Names of all ReferenceField fields on this document type."""
        return [
            name for name in cls._doc_type.mapping
            if isinstance(cls._doc_type.mapping[name], ReferenceField)]
    @classmethod
    def pk_field(cls):
        """Name of the primary-key field; raises when none is declared."""
        for name in cls._doc_type.mapping:
            field = cls._doc_type.mapping[name]
            if getattr(field, '_primary_key', False):
                return name
        else:
            # for/else: runs only when the loop found no primary key
            raise AttributeError('No primary key field')
    @classmethod
    def pk_field_type(cls):
        pk_field = cls.pk_field()
        return cls._doc_type.mapping[pk_field].__class__
    @classmethod
    def get_item(cls, **kw):
        """ Get single item and raise exception if not found.
        Exception raising when item is not found can be disabled
        by passing ``_raise_on_empty=False`` in params.
        :returns: Single collection item as an instance of ``cls``.
        """
        kw.setdefault('_raise_on_empty', True)
        result = cls.get_collection(_limit=1, _item_request=True, **kw)
        return result[0]
    @classmethod
    def _update_many(cls, items, params, request=None):
        """Bulk-update ``items`` with ``params``; returns number of actions."""
        params = cls._flatten_relationships(params)
        if not items:
            return
        actions = [item.to_dict(include_meta=True) for item in items]
        actions_count = len(actions)
        for action in actions:
            # bulk 'update' op takes a partial 'doc', not the full '_source'
            action.pop('_source')
            action['doc'] = params
        client = items[0].connection
        operation = partial(
            _bulk,
            client=client, op_type='update', request=request)
        _perform_in_chunks(actions, operation)
        return actions_count
    @classmethod
    def _delete_many(cls, items, request=None):
        """Bulk-delete ``items``; returns number of actions."""
        if not items:
            return
        actions = [item.to_dict(include_meta=True) for item in items]
        actions_count = len(actions)
        client = items[0].connection
        operation = partial(
            _bulk,
            client=client, op_type='delete', request=request)
        _perform_in_chunks(actions, operation)
        return actions_count
    @classmethod
    def get_collection(cls, _count=False, _strict=True, _sort=None,
                       _fields=None, _limit=None, _page=None, _start=None,
                       _query_set=None, _item_request=False, _explain=None,
                       _search_fields=None, q=None, _raise_on_empty=False,
                       **params):
        """ Query collection and return results.
        Notes:
        *   Before validating that only model fields are present in params,
            reserved params, query params and all params starting with
            double underscore are dropped.
        *   Params which have value "_all" are dropped.
        *   When ``_count`` param is used, objects count is returned
            before applying offset and limit.
        :param bool _strict: If True ``params`` are validated to contain
            only fields defined on model, exception is raised if invalid
            fields are present. When False - invalid fields are dropped.
            Defaults to ``True``.
        :param list _sort: Field names to sort results by. If field name
            is prefixed with "-" it is used for "descending" sorting.
            Otherwise "ascending" sorting is performed by that field.
            Defaults to an empty list in which case sorting is not
            performed.
        :param list _fields: Names of fields which should be included
            or excluded from results. Fields to excluded should be
            prefixed with "-". Defaults to an empty list in which
            case all fields are returned.
        :param int _limit: Number of results per page. Defaults
            to None in which case all results are returned.
        :param int _page: Number of page. In conjunction with
            ``_limit`` is used to calculate results offset. Defaults to
            None in which case it is ignored. Params ``_page`` and
            ``_start` are mutually exclusive.
        :param int _start: Results offset. If provided ``_limit`` and
            ``_page`` params are ignored when calculating offset. Defaults
            to None. Params ``_page`` and ``_start`` are mutually
            exclusive. If not offset-related params are provided, offset
            equals to 0.
        :param Query _query_set: Existing queryset. If provided, all queries
            are applied to it instead of creating new queryset. Defaults
            to None.
        :param bool _item_request: Indicates whether it is a single item
            request or not. When True and DataError happens on DB request,
            JHTTPNotFound is raised. JHTTPBadRequest is raised when False.
            Defaults to ``False``.
        :param _count: When provided, only results number is returned as
            integer.
        :param _explain: When provided, query performed(SQL) is returned
            as a string instead of query results.
        :param bool _raise_on_empty: When True JHTTPNotFound is raised
            if query returned no results. Defaults to False in which case
            error is just logged and empty query results are returned.
        :param q: Query string to perform full-text search with.
        :param _search_fields: Coma-separated list of field names to use
            with full-text search(q param) to limit fields which are
            searched.
        :returns: Query results. May be sorted, offset, limited.
        :returns: Dict of {'field_name': fieldval}, when ``_fields`` param
            is provided.
        :returns: Number of query results as an int when ``_count`` param
            is provided.
        :raises JHTTPNotFound: When ``_raise_on_empty=True`` and no
            results found.
        :raises JHTTPNotFound: When ``_item_request=True`` and
            ``sqlalchemy.exc.DataError`` exception is raised during DB
            query. Latter exception is raised when querying DB with
            an identifier of a wrong type. E.g. when querying Int field
            with a string.
        :raises JHTTPBadRequest: When ``_item_request=False`` and
            ``sqlalchemy.exc.DataError`` exception is raised during DB
            query.
        :raises JHTTPBadRequest: When ``sqlalchemy.exc.InvalidRequestError``
            or ``sqlalchemy.exc.IntegrityError`` errors happen during DB
            query.
        """
        search_obj = cls.search()
        if _limit is not None:
            _start, limit = process_limit(_start, _page, _limit)
            search_obj = search_obj.extra(from_=_start, size=limit)
        if _fields:
            include, exclude = process_fields(_fields)
            if _strict:
                _validate_fields(cls, include + exclude)
            # XXX partial fields support isn't yet released. for now
            # we just use fields, later we'll add support for excluded fields
            search_obj = search_obj.fields(include)
        if params:
            params = _cleaned_query_params(cls, params, _strict)
            params = _restructure_params(cls, params)
            if params:
                search_obj = search_obj.filter('terms', **params)
        if q is not None:
            query_kw = {'query': q}
            if _search_fields is not None:
                query_kw['fields'] = _search_fields.split(',')
            search_obj = search_obj.query('query_string', **query_kw)
        if _count:
            return search_obj.count()
        if _explain:
            return search_obj.to_dict()
        if _sort:
            sort_fields = split_strip(_sort)
            if _strict:
                _validate_fields(
                    cls,
                    [f[1:] if f.startswith('-') else f for f in sort_fields])
            search_obj = search_obj.sort(*sort_fields)
        hits = search_obj.execute().hits
        if not hits and _raise_on_empty:
            msg = "'%s(%s)' resource not found" % (cls.__name__, params)
            raise JHTTPNotFound(msg)
        # pagination metadata consumed by nefertari
        hits._nefertari_meta = dict(
            total=hits.total,
            start=_start,
            fields=_fields)
        return hits
    @classmethod
    def get_by_ids(cls, ids, **params):
        params[cls.pk_field()] = ids
        return cls.get_collection(**params)
    @classmethod
    def get_field_params(cls, field_name):
        """Kwargs the field was declared with, or None for unknown fields."""
        if field_name in cls._doc_type.mapping:
            field = cls._doc_type.mapping[field_name]
            return getattr(field, '_init_kwargs', None)
    @classmethod
    def fields_to_query(cls):
        return set(cls._doc_type.mapping).union({'_id'})
    @classmethod
    def count(cls, query_set):
        try:
            return query_set.count()
        except AttributeError:
            return len(query_set)
    @classmethod
    def has_field(cls, field):
        return field in cls._doc_type.mapping
    @classmethod
    def get_or_create(cls, **params):
        """Return (item, created) matching ``params``, creating when absent."""
        defaults = params.pop('defaults', {})
        items = cls.get_collection(_raise_on_empty=False, **params)
        if not items:
            defaults.update(params)
            return cls(**defaults).save(), True
        elif len(items) > 1:
            # ambiguous match: params must identify a single document
            raise JHTTPBadRequest('Bad or Insufficient Params')
        else:
            return items[0], False
    @classmethod
    def get_null_values(cls):
        """ Get null values of :cls: fields. """
        skip_fields = {'_acl'}
        null_values = {}
        for name in cls._doc_type.mapping:
            if name in skip_fields:
                continue
            field = cls._doc_type.mapping[name]
            null_values[name] = field.empty()
        null_values.pop('id', None)
        return null_values
    def update_iterables(self, params, attr, unique=False,
                         value_type=None, save=True,
                         request=None):
        """Incrementally update dict/list field ``attr``.

        Keys/values prefixed with "-" are removed; others are added/set.
        Keys starting with "__" are ignored. An empty ``params`` clears
        the current value.
        """
        field = self._doc_type.mapping[attr]
        is_dict = isinstance(field, DictField)
        is_list = isinstance(field, ListField)
        def split_keys(keys):
            # Partition into (to-add, to-remove); "__"-prefixed keys skipped.
            neg_keys = []
            pos_keys = []
            for key in keys:
                if key.startswith('__'):
                    continue
                if key.startswith('-'):
                    neg_keys.append(key[1:])
                else:
                    pos_keys.append(key.strip())
            return pos_keys, neg_keys
        def update_dict(update_params):
            final_value = getattr(self, attr, {}) or {}
            if isinstance(final_value, (InnerObjectWrapper, AttrDict)):
                final_value = final_value.to_dict()
            else:
                final_value = final_value.copy()
            if isinstance(update_params, (InnerObjectWrapper, AttrDict)):
                update_params = update_params.to_dict()
            if update_params in (None, '', {}):
                if not final_value:
                    return
                # empty input means: remove every existing key
                update_params = {
                    '-' + key: val for key, val in final_value.items()}
            positive, negative = split_keys(list(update_params.keys()))
            # Pop negative keys
            for key in negative:
                final_value.pop(key, None)
            # Set positive keys
            for key in positive:
                final_value[str(key)] = update_params[key]
            setattr(self, attr, final_value)
            if save:
                self.save(request)
        def update_list(update_params):
            final_value = getattr(self, attr, []) or []
            final_value = list(final_value)
            final_value = copy.deepcopy(final_value)
            if update_params in (None, '', []):
                if not final_value:
                    return
                # empty input means: remove every existing value
                update_params = ['-' + val for val in final_value]
            if isinstance(update_params, dict):
                keys = list(update_params.keys())
            else:
                keys = update_params
            positive, negative = split_keys(keys)
            if not (positive + negative):
                raise JHTTPBadRequest('Missing params')
            if positive:
                if unique:
                    positive = [v for v in positive if v not in final_value]
                final_value += positive
            if negative:
                final_value = list(set(final_value) - set(negative))
            setattr(self, attr, final_value)
            if save:
                self.save(request)
        if is_dict:
            update_dict(params)
        elif is_list:
            update_list(params)
    def _is_modified(self):
        """ Determine if instance is modified.
        TODO: Rework to make the check more sane.
        """
        return not self._is_created()
    def _is_created(self):
        return self._created
def _cleaned_query_params(cls, params, strict):
    """Drop reserved/ignored query params and validate the remainder.

    Removes keys starting with "__" and values equal to "_all", converts
    ``<field>__bool`` params, then either validates field names (strict)
    or silently drops unknown ones.
    """
    cleaned = {
        name: value for name, value in params.items()
        if not name.startswith('__') and value != '_all'
    }
    # XXX support field__bool and field__in/field__all queries?
    # process_lists(cleaned)
    process_bools(cleaned)
    if strict:
        _validate_fields(cls, cleaned.keys())
    else:
        known = frozenset(cls.fields_to_query())
        for name in frozenset(cleaned) - known:
            del cleaned[name]
    return cleaned
def _restructure_params(cls, params):
pk_field = cls.pk_field()
if pk_field in params:
field_obj = cls._doc_type.mapping[pk_field]
if isinstance(field_obj, IdField):
params['_id'] = params.pop(pk_field)
for field, param in params.items():
if not isinstance(param, list):
params[field] = [param]
return params
def _validate_fields(cls, field_names):
valid_names = frozenset(cls.fields_to_query())
names = frozenset(field_names)
invalid_names = names.difference(valid_names)
if invalid_names:
raise JHTTPBadRequest(
"'%s' object does not have fields: %s" % (
cls.__name__, ', '.join(invalid_names)))
def _perform_in_chunks(actions, operation, chunk_size=None):
if chunk_size is None:
from nefertari_es import Settings
chunk_size = Settings.asint('chunk_size', 500)
start = end = 0
count = len(actions)
while count:
if count < chunk_size:
chunk_size = count
end += chunk_size
operation(actions=actions[start:end])
start += chunk_size
count -= chunk_size
def _bulk(actions, client, op_type='index', request=None):
    """Execute ``actions`` through elasticsearch ``helpers.bulk``.

    Tags every action with ``op_type``, optionally forwards a
    ``_refresh_index`` query param (when enabled in settings), and
    raises on any reported bulk errors.

    :returns: number of successfully executed actions.
    :raises Exception: when the bulk call reports errors.
    """
    from nefertari_es import Settings
    for action in actions:
        action['_op_type'] = op_type
    kwargs = {
        'client': client,
        'actions': actions,
    }
    query_params = {} if request is None else request.params.mixed()
    query_params = dictset(query_params)
    refresh_enabled = Settings.asbool('enable_refresh_query', False)
    if '_refresh_index' in query_params and refresh_enabled:
        kwargs['refresh'] = query_params.asbool('_refresh_index')
    executed_num, errors = helpers.bulk(**kwargs)
    if errors:
        # helpers.bulk reports each error as a dict, not a string;
        # joining them directly would raise TypeError, so stringify.
        raise Exception('Errors happened when executing Elasticsearch '
                        'actions: {}'.format('; '.join(str(e) for e in errors)))
    return executed_num
def process_bools(_dict):
    """Convert ``<field>__bool`` keys of ``_dict`` into ``<field>`` keys
    holding boolean values, in place. Returns ``_dict`` for chaining.

    Requires ``_dict`` to provide ``pop_bool_param`` (nefertari dictset).
    """
    # Iterate over a snapshot of the keys: the body both pops the
    # '<field>__bool' key and inserts '<field>', and mutating a dict's
    # key set while iterating it raises RuntimeError in Python 3.
    for key in list(_dict):
        field, _, suffix = key.partition('__')
        if suffix == 'bool':
            _dict[field] = _dict.pop_bool_param(key)
    return _dict
| {
"repo_name": "ramses-tech/nefertari-es",
"path": "nefertari_es/documents.py",
"copies": "2",
"size": "27919",
"license": "apache-2.0",
"hash": -3813799381601325000,
"line_mean": 33.8551810237,
"line_max": 77,
"alpha_frac": 0.5582936352,
"autogenerated": false,
"ratio": 4.180116783949693,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5738410419149693,
"avg_score": null,
"num_lines": null
} |
from functools import partial
import cv2
def bilateral(d=11, c=75, s=75):
    """ creates a bilateral blur filter function
    bilateralFilter can reduce unwanted noise very well while keeping edges
    fairly sharp. it's, however, much slower than other methods
    d: int
        diameter of each pixel neighborhood that is used during filtering
        if it is non-positive, it is computed from sigmaSpace
    c: int
        filter sigma in the color space
        a larger value of the parameter means that farther colors within
        the pixel neighborhood will be mixed together, resulting in larger
        areas of semi-equal color
    s: int
        filter sigma in the coordinate space
        a larger value of the parameter means that farther pixels will
        influence each other as long as their colors are close enough
    returns: function
        a bilateral filter function with specialized arguments
    """
    return partial(cv2.bilateralFilter, d=d, sigmaColor=c, sigmaSpace=s)
def canny(hysthres=15, ratio=3):
    """ creates a canny edge detection filter function
    hysthres: int
        first (lower) threshold for the hysteresis procedure
        value in the interval [0, 255]
    ratio: int
        multiplier used to derive the second (upper) hysteresis
        threshold: threshold2 = hysthres * ratio
        the common rule of thumb is a ratio of 3
    returns: function
        a canny edge detection function with specialized arguments
    """
    lower = hysthres
    upper = hysthres * ratio
    return partial(cv2.Canny, threshold1=lower, threshold2=upper)
def grayscale(code=cv2.COLOR_BGR2GRAY):
    """ creates a color-space conversion filter (BGR to grayscale by default)
    code: enum
        an OpenCV color conversion code flag
    returns: function
        a single-argument color conversion function
    """
    convert = partial(cv2.cvtColor, code=code)
    return convert
| {
"repo_name": "dominiktomicevic/pedestrian",
"path": "processing/filters.py",
"copies": "1",
"size": "1744",
"license": "mit",
"hash": 1207821817144484000,
"line_mean": 31.2962962963,
"line_max": 79,
"alpha_frac": 0.6599770642,
"autogenerated": false,
"ratio": 4.426395939086294,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5586373003286295,
"avg_score": null,
"num_lines": null
} |
from functools import partial
import datacube
from .clean_mask import landsat_clean_mask_invalid, landsat_qa_clean_mask
import numpy as np
import xarray as xr
import pandas as pd
def build_cloud_coverage_table_landsat(product,
                                       platform,
                                       collection,
                                       level,
                                       latitude,
                                       longitude,
                                       time=None,
                                       dc=None,
                                       extra_band='green',
                                       extra_load_params=None):
    """Load a Landsat dataset and tabulate per-scene clean-pixel stats.

    Loads ``extra_band`` and ``pixel_qa`` for the given extent, masks
    clouds/invalid/no-data pixels, and builds a per-scene table of
    clean-pixel percentage and count.

    :returns: tuple of (masked dataset, DataFrame with columns
        times/clean_percentage/clean_count, clean_mask, data_mask,
        clean_data_mask).
    """
    # Use None instead of a mutable {} default: a dict default object is
    # shared across calls (classic Python pitfall).
    if extra_load_params is None:
        extra_load_params = {}
    dc = dc if dc is not None else datacube.Datacube(app="")
    load_params = dict(product=product,
                       latitude=latitude,
                       longitude=longitude,
                       measurements=[extra_band, 'pixel_qa'],
                       **extra_load_params)
    if time is not None:
        load_params["time"] = time
    landsat_dataset = dc.load(**load_params).persist()
    clean_mask = landsat_qa_clean_mask(landsat_dataset, platform=platform,
                                       collection=collection, level=level) & \
                 landsat_clean_mask_invalid(landsat_dataset, platform, collection, level)
    data_mask = xr.full_like(clean_mask, True)
    band_no_data_values = dc.list_measurements().loc[product, 'nodata']
    if band_no_data_values is not None:
        # NOTE(review): assumes every data variable carries a 'nodata'
        # attr; a band without it would raise KeyError — confirm upstream.
        for data_var in landsat_dataset.values():
            band_data_mask = data_var != data_var.attrs['nodata']
            data_mask = data_mask & band_data_mask
    clean_data_mask = clean_mask & data_mask
    landsat_dataset = landsat_dataset.where(clean_data_mask)
    times = list(landsat_dataset.time.values)
    clean_data_mask_list = [clean_data_mask.sel(time=str(t)).values
                            for t in clean_data_mask.time.values]
    # Percentage of pixels per scene that are both cloud-free and valid.
    percentage_list = [mask.mean() * 100 for mask in clean_data_mask_list]
    clean_pixel_count_list = list(map(np.sum, clean_data_mask_list))
    data = {"times": times,
            "clean_percentage": percentage_list,
            "clean_count": clean_pixel_count_list}
    return (landsat_dataset,
            pd.DataFrame(data=data, columns=["times", "clean_percentage", "clean_count"]),
            clean_mask, data_mask, clean_data_mask)
| {
"repo_name": "ceos-seo/data_cube_utilities",
"path": "data_cube_utilities/build_cloud_coverage_table_landsat.py",
"copies": "1",
"size": "2502",
"license": "apache-2.0",
"hash": -9083289557651250000,
"line_mean": 41.406779661,
"line_max": 94,
"alpha_frac": 0.548361311,
"autogenerated": false,
"ratio": 4.226351351351352,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007413143898424899,
"num_lines": 59
} |
from functools import partial
import datetime
from insalata.Timer import Timer
import json
import threading
import os
# Seconds between flushes of the in-memory buffer to disk.
WRITE_INTERVAL = 1
# Default log path; appears unused here — Exporter builds its own path
# from the outputDirectory constructor argument. TODO confirm and remove.
OUTFILE = "/etc/insalata/testEnv/data/changeLog.txt"
# File open mode used by writeFile; "w" rewrites the whole buffer each flush.
TYPE="w"
class Exporter:
    """Continuously exports new/changed/deleted events as JSON lines.

    Event handlers append JSON-encoded records to an in-memory buffer; a
    recurring Timer rewrites that buffer to ``jsonChangeLog.txt`` every
    WRITE_INTERVAL seconds. Note the buffer is never cleared, so each
    flush rewrites the complete log (mode TYPE == "w").
    """

    def __init__(self, onNewEvent, onDeletedEvent, onChangedEvent, logger, outputDirectory):
        """Register the event handlers and start the periodic writer.

        :param onNewEvent: event object exposing ``add(handler)`` for new objects.
        :param onDeletedEvent: event object for deleted objects.
        :param onChangedEvent: event object for changed objects.
        :param logger: logger used for debug/error reporting.
        :param outputDirectory: directory that receives jsonChangeLog.txt.
        """
        onNewEvent.add(partial(self.onNewHandler))
        onChangedEvent.add(partial(self.onChangedHandler))
        onDeletedEvent.add(partial(self.onDeletedHandler))
        self.buffer = list()
        self.outFile = os.path.join(outputDirectory, "jsonChangeLog.txt")
        self.logger = logger
        self.writer = Timer(WRITE_INTERVAL, partial(self.writeFile))
        self.writer.start()
        self.__stopEvent = threading.Event()

    def stop(self):
        """Cancel the pending write timer and prevent it from rescheduling."""
        self.writer.cancel()
        self.__stopEvent.set()

    def onNewHandler(self, sender, args):
        """Buffer a 'new object' record built from ``args``."""
        self.logger.debug("Received onNewEvent writing to file: {0}".format(self.outFile))
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d@%H:%M:%S")
        message = {"time" : timestamp,
                   "type" : "new",
                   "objectType" : args["objectType"],
                   "initialValues" : args["values"]}
        self.buffer.append(json.dumps(message))

    def onChangedHandler(self, sender, args):
        """Buffer a 'changed object' record; log malformed events."""
        self.logger.debug("Received onChangedEvent writing to file: {0}".format(self.outFile))
        try:
            timestamp = datetime.datetime.now().strftime("%Y-%m-%d@%H:%M:%S")
            message = {"time" : timestamp,
                       "type" : "change_" + args["type"],
                       "objectType" : args["objectType"],
                       "object" : args["object"],
                       "value" : args["value"]}
            if "member" in args:
                message["member"] = args["member"]
            self.buffer.append(json.dumps(message))
        except KeyError as e:
            # Was a bare print(); malformed events now go through the logger.
            self.logger.error("Malformed change event (missing key {0}): {1}".format(e, args))

    def onDeletedHandler(self, sender, args):
        """Buffer a 'deleted object' record built from ``args``."""
        self.logger.debug("Received onDeletedEvent writing to file: {0}".format(self.outFile))
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d@%H:%M:%S")
        message = {"time" : timestamp,
                   "type" : "delete",
                   "objectType" : args["objectType"],
                   "object" : args["object"]}
        self.buffer.append(json.dumps(message))

    def writeFile(self):
        """Flush the buffer to disk, then reschedule unless stopped."""
        if len(self.buffer) > 0:
            try:
                with open(self.outFile, TYPE) as fileHandler:
                    for entry in self.buffer:
                        print(entry, file=fileHandler)
            except Exception:
                # Narrowed from a bare ``except:``, which also swallowed
                # KeyboardInterrupt/SystemExit.
                self.logger.error("Cannot print JSON change log to file {0}.".format(self.outFile))
        # is_set() replaces the deprecated isSet() alias.
        if not self.__stopEvent.is_set():
            self.writer = Timer(WRITE_INTERVAL, partial(self.writeFile))
            self.writer.start()
| {
"repo_name": "tumi8/INSALATA",
"path": "src/insalata/export/continuous/JsonOutput.py",
"copies": "1",
"size": "2874",
"license": "apache-2.0",
"hash": 9134189402542933000,
"line_mean": 38.3698630137,
"line_max": 99,
"alpha_frac": 0.5831593598,
"autogenerated": false,
"ratio": 3.9806094182825484,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.010893891415706637,
"num_lines": 73
} |
from functools import partial
import errno
import sys
from datetime import datetime
import socket
import ssl
import greenlet
import logging
from gunicorn import http, util
from gunicorn.http import wsgi
from gunicorn.http.wsgi import sendfile as o_sendfile
from gunicorn.workers import base
import guv
import guv.wsgi
from guv import hubs, greenthread, greenpool, StopServe, trampoline, gyield
from guv.greenio import socket as gsocket
from guv.support import get_errno, reraise
from guv.const import WRITE
from guv.exceptions import BROKEN_SOCK
# Sentinel a WSGI app may return to indicate the response was produced
# elsewhere; handle_request() returns early without writing a response.
ALREADY_HANDLED = object()

# Module-level logger for the guv worker.
log = logging.getLogger('guv')
class AsyncWorker(base.Worker):
    """
    This class is a copy of the AsyncWorker included in gunicorn, with a few minor modifications:

    - Removed python 2 support
    - Improved request latency for keep-alive connections by yielding after each request
    - Graceful quit on ctrl-c by overriding handle_quit
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Max number of simultaneous client connections (gunicorn setting).
        self.worker_connections = self.cfg.worker_connections

    def handle_quit(self, sig, frame):
        """
        We override this because sys.exit() shouldn't be called. Instead, we should let the
        worker gracefully quit on its own.
        """
        # sys.stderr.write('handle_quit() frame: {0}, '
        # '{0.f_code.co_filename}:{0.f_code.co_name}:{0.f_lineno}\n'
        # .format(frame))
        sys.stderr.flush()
        # Clearing the flag lets the run() loop drain and exit on its own.
        self.alive = False

        # worker_int callback
        self.cfg.worker_int(self)
        # sys.exit(0)

    def timeout_ctx(self):
        """Return a context manager bounding a keep-alive read; subclasses
        must provide it (see GuvWorker.timeout_ctx)."""
        raise NotImplementedError()

    def handle(self, server_sock, client_sock, addr):
        """Handle client connection

        The client may send one or more requests (keep-alive).
        """
        req = None
        try:
            parser = http.RequestParser(self.cfg, client_sock)
            try:
                server_name = server_sock.getsockname()
                if not self.cfg.keepalive:
                    # Single-request mode: parse once and respond.
                    req = next(parser)
                    self.handle_request(server_name, req, client_sock, addr)
                else:
                    # keepalive loop
                    while True:
                        req = None
                        with self.timeout_ctx():
                            req = next(parser)
                        if not req:
                            break
                        self.handle_request(server_name, req, client_sock, addr)
                        # Yield to other green threads between requests to
                        # keep latency low on busy keep-alive connections.
                        gyield()
            except http.errors.NoMoreData as e:
                self.log.debug("Ignored premature client disconnection. %s", e)
            except StopIteration as e:
                # Raised by handle_request to force-close the connection.
                self.log.debug("Closing connection. %s", e)
            except ssl.SSLError:
                exc_info = sys.exc_info()
                # pass to next try-except level
                reraise(exc_info[0], exc_info[1], exc_info[2])
            except socket.error:
                exc_info = sys.exc_info()
                # pass to next try-except level
                reraise(exc_info[0], exc_info[1], exc_info[2])
            except Exception as e:
                self.handle_error(req, client_sock, addr, e)
        except ssl.SSLError as e:
            if get_errno(e) == ssl.SSL_ERROR_EOF:
                self.log.debug("ssl connection closed")
                client_sock.close()
            else:
                self.log.debug("Error processing SSL request.")
                self.handle_error(req, client_sock, addr, e)
        except socket.error as e:
            # BROKEN_SOCK errnos (reset/EPIPE) are routine client drops.
            if get_errno(e) not in BROKEN_SOCK:
                self.log.exception("Socket error processing request.")
            else:
                if get_errno(e) == errno.ECONNRESET:
                    self.log.debug("Ignoring connection reset")
                else:
                    self.log.debug("Ignoring EPIPE")
        except Exception as e:
            self.handle_error(req, client_sock, addr, e)
        finally:
            # Always release the client socket, whatever happened above.
            util.close(client_sock)

    def handle_request(self, listener_name, req, sock, addr):
        """Serve one parsed request through the WSGI app.

        Returns True on completion, False when the app signalled
        ALREADY_HANDLED; raises StopIteration when the connection must
        be closed (caught by handle()).
        """
        request_start = datetime.now()
        environ = {}
        resp = None
        try:
            self.cfg.pre_request(self, req)
            resp, environ = wsgi.create(req, sock, addr,
                                        listener_name, self.cfg)
            environ["wsgi.multithread"] = True
            # Request counter drives max_requests auto-restart.
            self.nr += 1
            if self.alive and self.nr >= self.max_requests:
                self.log.info("Autorestarting worker after current request.")
                resp.force_close()
                self.alive = False
            if not self.cfg.keepalive:
                resp.force_close()
            respiter = self.wsgi(environ, resp.start_response)
            if respiter == ALREADY_HANDLED:
                # The app took over the socket; nothing more to write.
                return False
            try:
                if isinstance(respiter, environ['wsgi.file_wrapper']):
                    resp.write_file(respiter)
                else:
                    for item in respiter:
                        resp.write(item)
                resp.close()
                request_time = datetime.now() - request_start
                self.log.access(resp, req, environ, request_time)
            except socket.error as e:
                # BROKEN_SOCK not interesting here
                if not get_errno(e) in BROKEN_SOCK:
                    raise
            finally:
                # Per WSGI spec, close the iterable if it supports it.
                if hasattr(respiter, "close"):
                    respiter.close()
            if resp.should_close():
                raise StopIteration()
        except StopIteration:
            raise
        except Exception:
            if resp and resp.headers_sent:
                # If the requests have already been sent, we should close the
                # connection to indicate the error.
                self.log.exception("Error handling request")
                try:
                    sock.shutdown(socket.SHUT_RDWR)
                    sock.close()
                except socket.error:
                    pass
                raise StopIteration()
            raise
        finally:
            try:
                self.cfg.post_request(self, req, environ, resp)
            except Exception:
                self.log.exception("Exception in post_request hook")
        return True
def _guv_sendfile(fdout, fdin, offset, nbytes):
    """sendfile wrapper that cooperatively retries on EAGAIN.

    Parks the current green thread (via trampoline) until ``fdout`` is
    writable, then retries the underlying sendfile; any other OSError
    propagates.
    """
    while True:
        try:
            return o_sendfile(fdout, fdin, offset, nbytes)
        except OSError as exc:
            if get_errno(exc) != errno.EAGAIN:
                raise
            out_fd = fdout if isinstance(fdout, int) else fdout.fileno()
            trampoline(out_fd, WRITE)
def _guv_serve(sock, handle, concurrency):
    """Accept connections on ``sock`` forever, handling each one on a
    green thread from a pool bounded by ``concurrency``.

    A StopServe raised into this greenlet ends the loop after draining
    the pool.
    """
    pool = greenpool.GreenPool(concurrency)
    server_gt = greenlet.getcurrent()
    while True:
        try:
            # StopServe can surface from accept() or from a blocking
            # pool.spawn(), so the whole body stays inside the try.
            client_sock, client_addr = sock.accept()
            handler_gt = pool.spawn(handle, client_sock, client_addr)
            handler_gt.link(_guv_stop, server_gt, client_sock)
            # Drop references so the connection can be collected promptly.
            client_sock = client_addr = handler_gt = None
        except StopServe:
            pool.waitall()
            return
def _guv_stop(client, server, conn):
    """Stop a greenlet handling a request and close its connection

    This code is lifted from eventlet so as not to depend on undocumented
    functions in the library.

    :param client: greenlet that handled the request; waited on here.
    :param server: accept-loop greenlet, killed if the handler failed.
    :param conn: client socket, always closed once the handler is done.
    """
    try:
        try:
            client.wait()
        finally:
            # Close the connection no matter how the handler finished.
            conn.close()
    except greenlet.GreenletExit:
        # Handler was killed deliberately (e.g. shutdown) — not an error.
        pass
    except Exception:
        # Propagate unexpected handler failures into the accept loop.
        greenthread.kill(server, *sys.exc_info())
def patch_sendfile():
    """Swap gunicorn's wsgi.sendfile for the green-friendly wrapper,
    but only when the platform provided a native sendfile at all."""
    from gunicorn.http import wsgi
    if o_sendfile is None:
        return
    wsgi.sendfile = _guv_sendfile
class GuvWorker(AsyncWorker):
    """Gunicorn async worker that serves requests on guv green threads."""

    def patch(self):
        # Monkey-patch blocking stdlib modules (os excluded) and install
        # the cooperative sendfile replacement.
        guv.monkey_patch(os=False)
        patch_sendfile()

    def init_process(self):
        hubs.use_hub()
        self.patch()
        super().init_process()

    def timeout_ctx(self):
        # Bounds a keep-alive read. Second arg False: presumably the
        # timeout expires silently instead of raising (eventlet-style
        # Timeout semantics) — confirm against guv's Timeout docs.
        return guv.Timeout(self.cfg.keepalive or None, False)

    def handle(self, server_sock, client_sock, addr):
        # Wrap the socket in TLS before delegating to the shared handler.
        if self.cfg.is_ssl:
            client_sock = guv.wrap_ssl(client_sock, server_side=True, **self.cfg.ssl_options)

        super().handle(server_sock, client_sock, addr)

    def run(self):
        acceptors = []
        # Spawn one accept loop per listening socket.
        for sock in self.sockets:
            gsock = gsocket(sock.FAMILY, socket.SOCK_STREAM, fileno=sock.fileno())
            gsock.setblocking(1)
            hfun = partial(self.handle, gsock)
            acceptor = guv.spawn(_guv_serve, gsock, hfun, self.worker_connections)
            acceptors.append(acceptor)
            guv.gyield()

        try:
            # Heartbeat loop: keep notifying the arbiter until shutdown.
            while self.alive:
                self.notify()
                guv.sleep(self.timeout / 2)
        except (KeyboardInterrupt, SystemExit):
            log.debug('KeyboardInterrupt, exiting')

        self.notify()
        try:
            # Graceful shutdown: stop accepting, then wait (bounded by
            # graceful_timeout) for in-flight requests to drain.
            with guv.Timeout(self.cfg.graceful_timeout) as t:
                for a in acceptors:
                    a.kill(guv.StopServe())
                for a in acceptors:
                    a.wait()
        except guv.Timeout as te:
            # Re-raise timeouts that aren't ours; otherwise hard-kill.
            if te != t:
                raise
            for a in acceptors:
                a.kill()

        log.debug('GuvWorker exited')
| {
"repo_name": "veegee/guv",
"path": "guv/support/gunicorn_worker.py",
"copies": "1",
"size": "9354",
"license": "mit",
"hash": -1412758236255324000,
"line_mean": 31.9366197183,
"line_max": 97,
"alpha_frac": 0.5379516784,
"autogenerated": false,
"ratio": 4.267335766423358,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5305287444823358,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.