hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7f8d6ebe30091a2ef3c3ea36b101629fb53ff29 | 14,172 | py | Python | capablerobot_camera/deserializers/max9286.py | CapableRobot/CapableRobot_Camera_Python | 3880e04e243ad21783c6a67563d83519a23d3eb4 | [
"MIT"
] | null | null | null | capablerobot_camera/deserializers/max9286.py | CapableRobot/CapableRobot_Camera_Python | 3880e04e243ad21783c6a67563d83519a23d3eb4 | [
"MIT"
] | null | null | null | capablerobot_camera/deserializers/max9286.py | CapableRobot/CapableRobot_Camera_Python | 3880e04e243ad21783c6a67563d83519a23d3eb4 | [
"MIT"
] | null | null | null | # The MIT License (MIT)
#
# Copyright (c) 2019 Chris Osterwood for Capable Robot Components
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import time
import logging
from adafruit_bus_device.i2c_device import I2CDevice
from ..support import MAX20087
from ..util import *
_DELAY = 0.01  # seconds; base delay used when polling for GMSL link lock

## MAX9286 register address map (names follow the Maxim MAX9286 datasheet).
## Only the registers used by this driver are listed.
_REG_LINKEN = 0x00        # link enables, CSI clock source select, internal VSYNC
_REG_GPIO_CFG = 0x01      # GPIO / trigger configuration
_REG_CTRLCNEN = 0x0A      # forward (upper nibble) / reverse (lower nibble) control channel enables
_REG_SYNC = 0x0C
_REG_FLT_MON = 0x0D       # line fault monitor enables and HSYNC/VSYNC glitch filtering
_REG_ERROR_GPIO = 0x0F    # error output / auto error reset / GPIO output enables
_REG_CSI = 0x12           # CSI-2 lane count, double input mode, pixel data type
_REG_CSI_REMAP = 0x14     # physical-to-logical CSI-2 lane remapping
_REG_CSI_CHANNEL = 0x15   # CSI-2 output enable and virtual channel selection
_REG_HIM = 0x1C           # high-immunity mode enables, one bit per link
_REG_ID = 0x1E            # device ID; reads 0x40 on a MAX9286
_REG_REV = 0x1F           # silicon revision
_REG_STAT_HLOCKED = 0x21  # per-link line tracking lock status
_REG_STAT_LINK = 0x22     # per-link line buffer overflow / line error status
_REG_STAT_PRBS_ERR = 0x23 # PRBS error counters, one register per link (0x23..0x26)
_REG_STAT_DET_ERR = 0x28  # detected error counters, one register per link (0x28..0x2B)
_REG_STAT_COR_ERR = 0x2C  # corrected error counters, one register per link (0x2C..0x2F)
_REG_GPIO = 0x27          # bit 7 read as overall GMSL lock status (see MAX9286.locked)
_REG_REV_FAST = 0x3F      # bit 7 enables reverse control channel fast mode
class MAX9286:
def __init__(self, i2c_bus, addr=0x48):
"""
Construct a new MAX9286 instance
:param i2c_bus busio.I2C: I2C Bus object that communication should take place over
:param addr int: I2C Address (7 bit) of the MAX9286
"""
self.i2c_device = I2CDevice(i2c_bus, addr)
self.power = MAX20087(i2c_bus, addr=0x2c)
self._links = []
self._errors = []
## Turn off all line fault monitoring (required if resistors are not in place)
self.configure_line_fault_monitor(links=[])
def _write_register(self, reg, value):
seq = bytearray([reg, value & 0xFF])
try:
with self.i2c_device as i2c:
i2c.write(seq)
except OSError:
self._errors.append("I2C_WRITE")
return None
return True
def _read_register(self, reg):
outbuf = bytearray(1)
outbuf[0] = reg
inbuf = bytearray(1)
try:
with self.i2c_device as i2c:
i2c.write_then_readinto(outbuf, inbuf, stop=False)
except OSError:
self._errors.append("I2C_READ")
return None
if inbuf == None:
return None
return inbuf[0]
@property
def id(self):
"""
Get part SKU and silicon revision.
:return: (string, int) | Part SKU, silicon revision
"""
if self._read_register(_REG_ID) != 0x40:
raise ValueError("Device is not a MAX9286")
rev = self._read_register(_REG_REV)
return "MAX9286", rev
def poc(self, outputs, delay=0.5):
self.power.set_config(outputs=outputs)
time.sleep(delay)
def enable_links(self, links=[0], clock=0b111, internal_vsync=True):
"""
Enables one ore more GMSL links and their forward / reverse control channels.
If link 0 is enabled and this method is later called with `links=[1]`, then link 0 will become disabled.
:param links Array[int]: List of active links. Valid range 0 thru 3.
:param clock int: CSI clock source. Value of 0b1xx will enable auto-detection.
:param internal_vsync bool: When false, VSYNC comes from the camera.
True is only valid when FSYNCMODE is not set to 0b11 (e.g. external frame sync supplied by MCU).
:return: (int, int) | Contents of LINK_ENABLE and CONTROL_CHANNEL registers after writes completed.
"""
self._links = links
## Clock can be set to channel number, or 0b111 means to auto-select
## When internal_vsync = True, chip will generate VS when FSYNCMODE not set to 11
link3 = 3 in links
link2 = 2 in links
link1 = 1 in links
link0 = 0 in links
linken = clock << 5 | \
internal_vsync << 4 | \
link3 << 3 | \
link2 << 2 | \
link1 << 1 | \
link0
self._write_register(_REG_LINKEN, linken)
## Upper nibble : forward control channel from serializer (receiving)
## Lower nibble : reverse control channel to serializer (sending)
ctrlen = link3 << 7 | \
link2 << 6 | \
link1 << 5 | \
link0 << 4 | \
link3 << 3 | \
link2 << 2 | \
link1 << 1 | \
link0
self._write_register(_REG_CTRLCNEN, ctrlen)
return linken, ctrlen
def enable_link(self, link=0):
"""
Calls `enable_links(links=[link])`, waits, and checks to see if link is locked.
Method will log error messages if link does not lock within 50 ms.
:param link int: Link to enable and check for lock on.
"""
self.enable_links(self._links + [link])
## wait 10ms for link to lock
time.sleep(_DELAY)
## Check link lock and wait for it
idx = 0
while not self.locked:
time.sleep(_DELAY*5)
idx += 1
if idx > 10:
logging.warn("Link not locked")
break
@property
def locked(self):
"""
Deserializer GMSL lock state.
:return: bool | Deserializer GMSL lock state
"""
value = self._read_register(_REG_GPIO)
if value == None:
return None
return (value >> 7) == True
def configure_line_fault_monitor(self, links=[0], hsync_track=False, glitch_filter=True):
"""
Configure line fault monitor settings.
:param links Array[int]: List of links that are active and should have LMN enabled on.
:param hsync_track bool: HSYNC / line valid tracking.
:param glitch_filter bool: HSYNC & VSYNC glitch filtering. Default when BWS net is low on boot.
"""
link3 = 3 in links
link2 = 2 in links
link1 = 1 in links
link0 = 0 in links
value = link3 << 7 | \
link2 << 6 | \
link1 << 5 | \
link0 << 4 | \
hsync_track << 2 | \
glitch_filter << 1 | \
glitch_filter
## HSYNC / line valid tracking is disabled by default
## Bit 1 is for HSYNC. Default of 0 when BWS = open, 1 otherwise.
## Bit 0 is for VSYNC. Default of 0 when BWS = open, 1 otherwise.
self._write_register(_REG_FLT_MON, value)
def configure_csi(self, lanes=1, double_load=False, pixel_type="YUV422_10"):
"""
Configure CSI-2 bus parameters.
:param lanes int: Number of lanes to emit data over. Valid range is 1 thru 4.
:param double_load bool: Enable double input mode.
Single-mode operation is compatible with all GMSL devices and serializers, yielding one parallel word for each serial word. Double mode serializes two half-width parallel words for each serial word, resulting in a 2x increase in the parallel word rate range (compared to single mode)
:param pixel_type str: Imager pixel encoding.
There are 12 valid values:
- `RGB888 RGB565 RGB666`
- `YUV422_8 YUV422_10`
- `RAW8/16 RAW10/20 RAW11/12 RAW14`
- `USER24 USER12 USER8`
"""
valid_types = [
"RGB888", "RGB565", "RGB666",
"YUV422_8", "YUV422_10",
"RAW8/16", "RAW10/20", "RAW11/12", "RAW14",
"USER24", "USER12", "USER8"
]
if not pixel_type in valid_types:
raise ValueError("Unkown pixel type. Valid choices: [{}]".format(" ".join(valid_types)))
value = (lanes-1) << 6 | \
double_load << 5 | \
double_load << 4 | \
valid_types.index(pixel_type)
self._write_register(_REG_CSI, value)
def remap_csi_lanes(self, order=[0,1,2,3]):
"""
Remap physical and logical CSI-2 lanes.
This functionality allows PCB layout to be optimized by swapping lanes.
:param order [int,int,int,int]: Logical order to apply to the physical lanes.
"""
if len(order) != 4:
raise ValueError("Expect mapping for all 4 CSI lanes, found {}.".format(len(order)))
for lane in [0,1,2,3]:
if lane not in order:
raise ValueError("No assignment for lane {}".format(lane))
value = order[3] << 6 | \
order[2] << 4 | \
order[1] << 2 | \
order[0]
self._write_register(_REG_CSI_REMAP, value)
def enable_csi(self, virtual_channel=0, enable=True):
"""
Turn on the CSI-2 output bus.
:param virtual_channel int: Virtual channel to emit data on. Valid options include 'auto' and 0 thru 3.
:param enable bool: Flag to enable / disable the CSI-2 output bus.
"""
vc_type = 0
if virtual_channel == 'auto':
vc_type = 1
virtual_channel = 0
value = virtual_channel << 5 | \
vc_type << 4 | \
enable << 3 | \
0b011 ## reserved value in datasheet
## Bits 0, 1, 2, & 7 are reserved
## When vc_type == 1, virtual channel is set according to the link number
## Otherwise, channel is set via bits 5 & 6
self._write_register(_REG_CSI_CHANNEL, value)
def configure_trig(self, ext_trig=True):
if ext_trig:
logging.debug("Enabling external trigger")
self._write_register(_REG_GPIO_CFG, 226)
else:
logging.debug("Disabling external trigger")
self._write_register(_REG_GPIO_CFG, 34)
def configure_error_gpio(self, auto_error_reset=False, gpio0=True, gpio1=True):
current = self._read_register(_REG_ERROR_GPIO)
desired = current
if auto_error_reset:
desired = set_bit(desired, 5)
else:
desired = clear_bit(desired, 5)
if gpio1:
desired = set_bit(desired, 1)
else:
desired = clear_bit(desired, 1)
if gpio0:
desired = set_bit(desired, 0)
else:
desired = clear_bit(desired, 0)
if desired != current:
logging.debug("REG ERROR_GPIO change {} -> {}".format(hex(current), hex(desired)))
self._write_register(_REG_ERROR_GPIO, desired)
def enable_reverse_channel_fast_mode(self):
current = self._read_register(_REG_REV_FAST)
desired = set_bit(current, 7)
if desired != current:
logging.debug("REG REV_FAST {}".format(hex(value)))
self._write_register(_REG_REV_FAST, desired)
def disable_reverse_channel_fast_mode(self):
current = self._read_register(_REG_REV_FAST)
desired = clear_bit(current, 7)
if desired != current:
logging.debug("REG REV_FAST {}".format(hex(value)))
self._write_register(_REG_REV_FAST, desired)
def errors(self, prbs=False):
"""
Fetch any errors within various MAX9286 error registers.
:param prbs bool: Query for PRBS errors. Only valid to do during PRBS testing.
:return: Array[string] | Short, textual representation of various error states.
"""
out = []
value = self._read_register(_REG_STAT_HLOCKED)
if value != None:
for link in self._links:
if value != None and (value >> link) & 0x01:
out.append("LINK{}_LINE_TRACKING_UNLOCK".format(link))
value = self._read_register(_REG_STAT_LINK)
if value != None:
for link in self._links:
if (value >> (link + 4)) & 0x01:
out.append("LINK{}_LINE_BUFFER_OVERLFOW".format(link))
if (value >>link) & 0x01:
out.append("LINK{}_LINE_ERROR".format(link))
if prbs:
for link in self._links:
value = self._read_register(_REG_STAT_PRBS_ERR + link)
if value != None and value > 0:
out.append("LINK{}_PRBS_ERROR {}".format(link, value))
for link in self._links:
value = self._read_register(_REG_STAT_DET_ERR + link)
if value != None and value > 0:
out.append("LINK{}_DETECTED_ERRORS {}".format(link, value))
value = self._read_register(_REG_STAT_COR_ERR + link)
if value != None and value > 0:
out.append("LINK{}_CORRECTED_ERRORS {}".format(link, value))
for error in self._errors:
out.append(error)
self._errors = []
return out
def enable_him(self):
value = self._read_register(_REG_HIM)
link3 = 3 in self._links
link2 = 2 in self._links
link1 = 1 in self._links
link0 = 0 in self._links
if link0:
value = set_bit(value, 4)
if link1:
value = set_bit(value, 5)
if link2:
value = set_bit(value, 6)
if link3:
value = set_bit(value, 7)
logging.info("REG HIM {}".format(hex(value)))
self._write_register(_REG_HIM, value)
| 33.034965 | 292 | 0.58411 |
import time
import logging
from adafruit_bus_device.i2c_device import I2CDevice
from ..support import MAX20087
from ..util import *
_DELAY = 0.01
_REG_LINKEN = 0x00
_REG_GPIO_CFG = 0x01
_REG_CTRLCNEN = 0x0A
_REG_SYNC = 0x0C
_REG_FLT_MON = 0x0D
_REG_ERROR_GPIO = 0x0F
_REG_CSI = 0x12
_REG_CSI_REMAP = 0x14
_REG_CSI_CHANNEL = 0x15
_REG_HIM = 0x1C
_REG_ID = 0x1E
_REG_REV = 0x1F
_REG_STAT_HLOCKED = 0x21
_REG_STAT_LINK = 0x22
_REG_STAT_PRBS_ERR = 0x23
_REG_STAT_DET_ERR = 0x28
_REG_STAT_COR_ERR = 0x2C
_REG_GPIO = 0x27
_REG_REV_FAST = 0x3F
class MAX9286:
def __init__(self, i2c_bus, addr=0x48):
self.i2c_device = I2CDevice(i2c_bus, addr)
self.power = MAX20087(i2c_bus, addr=0x2c)
self._links = []
self._errors = []
(self, reg, value):
seq = bytearray([reg, value & 0xFF])
try:
with self.i2c_device as i2c:
i2c.write(seq)
except OSError:
self._errors.append("I2C_WRITE")
return None
return True
def _read_register(self, reg):
outbuf = bytearray(1)
outbuf[0] = reg
inbuf = bytearray(1)
try:
with self.i2c_device as i2c:
i2c.write_then_readinto(outbuf, inbuf, stop=False)
except OSError:
self._errors.append("I2C_READ")
return None
if inbuf == None:
return None
return inbuf[0]
@property
def id(self):
if self._read_register(_REG_ID) != 0x40:
raise ValueError("Device is not a MAX9286")
rev = self._read_register(_REG_REV)
return "MAX9286", rev
def poc(self, outputs, delay=0.5):
self.power.set_config(outputs=outputs)
time.sleep(delay)
def enable_links(self, links=[0], clock=0b111, internal_vsync=True):
self._links = links
| \
internal_vsync << 4 | \
link3 << 3 | \
link2 << 2 | \
link1 << 1 | \
link0
self._write_register(_REG_LINKEN, linken)
4 | \
link3 << 3 | \
link2 << 2 | \
link1 << 1 | \
link0
self._write_register(_REG_CTRLCNEN, ctrlen)
return linken, ctrlen
def enable_link(self, link=0):
self.enable_links(self._links + [link])
t self.locked:
time.sleep(_DELAY*5)
idx += 1
if idx > 10:
logging.warn("Link not locked")
break
@property
def locked(self):
value = self._read_register(_REG_GPIO)
if value == None:
return None
return (value >> 7) == True
def configure_line_fault_monitor(self, links=[0], hsync_track=False, glitch_filter=True):
link3 = 3 in links
link2 = 2 in links
link1 = 1 in links
link0 = 0 in links
value = link3 << 7 | \
link2 << 6 | \
link1 << 5 | \
link0 << 4 | \
hsync_track << 2 | \
glitch_filter << 1 | \
glitch_filter
= [
"RGB888", "RGB565", "RGB666",
"YUV422_8", "YUV422_10",
"RAW8/16", "RAW10/20", "RAW11/12", "RAW14",
"USER24", "USER12", "USER8"
]
if not pixel_type in valid_types:
raise ValueError("Unkown pixel type. Valid choices: [{}]".format(" ".join(valid_types)))
value = (lanes-1) << 6 | \
double_load << 5 | \
double_load << 4 | \
valid_types.index(pixel_type)
self._write_register(_REG_CSI, value)
def remap_csi_lanes(self, order=[0,1,2,3]):
if len(order) != 4:
raise ValueError("Expect mapping for all 4 CSI lanes, found {}.".format(len(order)))
for lane in [0,1,2,3]:
if lane not in order:
raise ValueError("No assignment for lane {}".format(lane))
value = order[3] << 6 | \
order[2] << 4 | \
order[1] << 2 | \
order[0]
self._write_register(_REG_CSI_REMAP, value)
def enable_csi(self, virtual_channel=0, enable=True):
vc_type = 0
if virtual_channel == 'auto':
vc_type = 1
virtual_channel = 0
value = virtual_channel << 5 | \
vc_type << 4 | \
enable << 3 | \
0b011 g("Enabling external trigger")
self._write_register(_REG_GPIO_CFG, 226)
else:
logging.debug("Disabling external trigger")
self._write_register(_REG_GPIO_CFG, 34)
def configure_error_gpio(self, auto_error_reset=False, gpio0=True, gpio1=True):
current = self._read_register(_REG_ERROR_GPIO)
desired = current
if auto_error_reset:
desired = set_bit(desired, 5)
else:
desired = clear_bit(desired, 5)
if gpio1:
desired = set_bit(desired, 1)
else:
desired = clear_bit(desired, 1)
if gpio0:
desired = set_bit(desired, 0)
else:
desired = clear_bit(desired, 0)
if desired != current:
logging.debug("REG ERROR_GPIO change {} -> {}".format(hex(current), hex(desired)))
self._write_register(_REG_ERROR_GPIO, desired)
def enable_reverse_channel_fast_mode(self):
current = self._read_register(_REG_REV_FAST)
desired = set_bit(current, 7)
if desired != current:
logging.debug("REG REV_FAST {}".format(hex(value)))
self._write_register(_REG_REV_FAST, desired)
def disable_reverse_channel_fast_mode(self):
current = self._read_register(_REG_REV_FAST)
desired = clear_bit(current, 7)
if desired != current:
logging.debug("REG REV_FAST {}".format(hex(value)))
self._write_register(_REG_REV_FAST, desired)
def errors(self, prbs=False):
out = []
value = self._read_register(_REG_STAT_HLOCKED)
if value != None:
for link in self._links:
if value != None and (value >> link) & 0x01:
out.append("LINK{}_LINE_TRACKING_UNLOCK".format(link))
value = self._read_register(_REG_STAT_LINK)
if value != None:
for link in self._links:
if (value >> (link + 4)) & 0x01:
out.append("LINK{}_LINE_BUFFER_OVERLFOW".format(link))
if (value >>link) & 0x01:
out.append("LINK{}_LINE_ERROR".format(link))
if prbs:
for link in self._links:
value = self._read_register(_REG_STAT_PRBS_ERR + link)
if value != None and value > 0:
out.append("LINK{}_PRBS_ERROR {}".format(link, value))
for link in self._links:
value = self._read_register(_REG_STAT_DET_ERR + link)
if value != None and value > 0:
out.append("LINK{}_DETECTED_ERRORS {}".format(link, value))
value = self._read_register(_REG_STAT_COR_ERR + link)
if value != None and value > 0:
out.append("LINK{}_CORRECTED_ERRORS {}".format(link, value))
for error in self._errors:
out.append(error)
self._errors = []
return out
def enable_him(self):
value = self._read_register(_REG_HIM)
link3 = 3 in self._links
link2 = 2 in self._links
link1 = 1 in self._links
link0 = 0 in self._links
if link0:
value = set_bit(value, 4)
if link1:
value = set_bit(value, 5)
if link2:
value = set_bit(value, 6)
if link3:
value = set_bit(value, 7)
logging.info("REG HIM {}".format(hex(value)))
self._write_register(_REG_HIM, value)
| true | true |
f7f8d7f86f7bfd88c08580e4fd8235435dcc681c | 516 | py | Python | tests/test_render.py | Bezier89/conda-build | 95a118f8f06230120514fe0066a52a152ec2349b | [
"BSD-3-Clause"
] | null | null | null | tests/test_render.py | Bezier89/conda-build | 95a118f8f06230120514fe0066a52a152ec2349b | [
"BSD-3-Clause"
] | null | null | null | tests/test_render.py | Bezier89/conda-build | 95a118f8f06230120514fe0066a52a152ec2349b | [
"BSD-3-Clause"
] | null | null | null | import os
from conda_build import api
def test_output_with_noarch_says_noarch(testing_metadata):
    """Builds marked ``noarch: python`` must produce output paths under a 'noarch' dir."""
    testing_metadata.meta['build']['noarch'] = 'python'
    paths = api.get_output_file_path(testing_metadata)
    noarch_segment = "{0}noarch{0}".format(os.path.sep)
    assert noarch_segment in paths[0]
def test_output_with_noarch_python_says_noarch(testing_metadata):
    """Builds marked ``noarch_python: True`` must produce output paths under a 'noarch' dir."""
    testing_metadata.meta['build']['noarch_python'] = True
    paths = api.get_output_file_path(testing_metadata)
    noarch_segment = "{0}noarch{0}".format(os.path.sep)
    assert noarch_segment in paths[0]
| 34.4 | 65 | 0.763566 | import os
from conda_build import api
def test_output_with_noarch_says_noarch(testing_metadata):
testing_metadata.meta['build']['noarch'] = 'python'
output = api.get_output_file_path(testing_metadata)
assert os.path.sep + "noarch" + os.path.sep in output[0]
def test_output_with_noarch_python_says_noarch(testing_metadata):
testing_metadata.meta['build']['noarch_python'] = True
output = api.get_output_file_path(testing_metadata)
assert os.path.sep + "noarch" + os.path.sep in output[0]
| true | true |
f7f8d874c7f1ef1a0ee56b54a7dc60dd2fd12539 | 3,380 | py | Python | src/alembic_utils/pg_view.py | gyana/alembic_utils | a4bc7f5f025335faad7b178eb84ab78093e525ec | [
"MIT"
] | null | null | null | src/alembic_utils/pg_view.py | gyana/alembic_utils | a4bc7f5f025335faad7b178eb84ab78093e525ec | [
"MIT"
] | null | null | null | src/alembic_utils/pg_view.py | gyana/alembic_utils | a4bc7f5f025335faad7b178eb84ab78093e525ec | [
"MIT"
] | null | null | null | # pylint: disable=unused-argument,invalid-name,line-too-long
from __future__ import annotations
from typing import List
from parse import parse
from sqlalchemy import text as sql_text
from alembic_utils.exceptions import SQLParseFailure
from alembic_utils.replaceable_entity import ReplaceableEntity
class PGView(ReplaceableEntity):
    """A PostgreSQL View compatible with `alembic revision --autogenerate`

    **Parameters:**

    * **schema** - *str*: A SQL schema name
    * **signature** - *str*: A SQL view's call signature
    * **definition** - *str*: The SQL select statement body of the view
    """

    @classmethod
    def from_sql(cls, sql: str) -> PGView:
        """Create an instance from a SQL string.

        :raises SQLParseFailure: If *sql* does not match the
            ``create view <schema>.<signature> as <definition>`` template.
        """
        template = "create{}view{:s}{schema}.{signature}{:s}as{:s}{definition}"
        result = parse(template, sql, case_sensitive=False)
        if result is not None:
            # If the signature includes column e.g. my_view (col1, col2, col3) remove them
            signature = result["signature"].split("(")[0]
            return cls(
                schema=result["schema"],
                # strip quote characters
                signature=signature.replace('"', ""),
                definition=result["definition"],
            )

        raise SQLParseFailure(f'Failed to parse SQL into PGView """{sql}"""')

    def to_sql_statement_create(self) -> str:
        """Generates a SQL "create view" statement"""
        return sql_text(
            f'CREATE VIEW {self.literal_schema}."{self.signature}" AS {self.definition}'
        )

    def to_sql_statement_drop(self) -> str:
        """Generates a SQL "drop view" statement"""
        return sql_text(f'DROP VIEW {self.literal_schema}."{self.signature}"')

    def to_sql_statement_create_or_replace(self) -> str:
        """Generates a SQL "create or replace view" statement"""
        return sql_text(
            f'CREATE OR REPLACE VIEW {self.literal_schema}."{self.signature}" AS {self.definition}'
        )

    @classmethod
    def from_database(cls, connection, schema) -> List[PGView]:
        """Get a list of all views defined in *schema* of the database.

        NOTE(review): *schema* is interpolated directly into the SQL; it is
        expected to come from trusted migration configuration, not user input.
        """
        sql = sql_text(
            f"""
        select
            schemaname schema_name,
            viewname view_name,
            definition
        from
            pg_views
        where
            schemaname not in ('pg_catalog', 'information_schema')
            and schemaname::text = '{schema}';
        """
        )
        rows = connection.execute(sql).fetchall()
        # Fixed: docstring previously said "functions"; also dropped a
        # dead-code loop that asserted each element was not None (the
        # comprehension can never produce None entries).
        return [PGView(x[0], x[1], x[2]) for x in rows]

    def get_compare_identity_query(self) -> str:
        """Return SQL string that returns 1 row for existing DB object"""
        return f"""
        select
            -- Schema is appended in python
            viewname view_name
        from
            pg_views
        where
            schemaname::text = '{self.schema}';
        """

    def get_compare_definition_query(self) -> str:
        """Return SQL string that returns 1 row for existing DB object"""
        return f"""
        select
            -- Schema is appended in python
            viewname view_name,
            definition
        from
            pg_views
        where
            schemaname::text = '{self.schema}';
        """
| 32.5 | 99 | 0.59497 |
from __future__ import annotations
from typing import List
from parse import parse
from sqlalchemy import text as sql_text
from alembic_utils.exceptions import SQLParseFailure
from alembic_utils.replaceable_entity import ReplaceableEntity
class PGView(ReplaceableEntity):
@classmethod
def from_sql(cls, sql: str) -> PGView:
template = "create{}view{:s}{schema}.{signature}{:s}as{:s}{definition}"
result = parse(template, sql, case_sensitive=False)
if result is not None:
signature = result["signature"].split("(")[0]
return cls(
schema=result["schema"],
signature=signature.replace('"', ""),
definition=result["definition"],
)
raise SQLParseFailure(f'Failed to parse SQL into PGView """{sql}"""')
def to_sql_statement_create(self) -> str:
return sql_text(
f'CREATE VIEW {self.literal_schema}."{self.signature}" AS {self.definition}'
)
def to_sql_statement_drop(self) -> str:
return sql_text(f'DROP VIEW {self.literal_schema}."{self.signature}"')
def to_sql_statement_create_or_replace(self) -> str:
return sql_text(
f'CREATE OR REPLACE VIEW {self.literal_schema}."{self.signature}" AS {self.definition}'
)
@classmethod
def from_database(cls, connection, schema) -> List[PGView]:
sql = sql_text(
f"""
select
schemaname schema_name,
viewname view_name,
definition
from
pg_views
where
schemaname not in ('pg_catalog', 'information_schema')
and schemaname::text = '{schema}';
"""
)
rows = connection.execute(sql).fetchall()
db_views = [PGView(x[0], x[1], x[2]) for x in rows]
for view in db_views:
assert view is not None
return db_views
def get_compare_identity_query(self) -> str:
return f"""
select
-- Schema is appended in python
viewname view_name
from
pg_views
where
schemaname::text = '{self.schema}';
"""
def get_compare_definition_query(self) -> str:
return f"""
select
-- Schema is appended in python
viewname view_name,
definition
from
pg_views
where
schemaname::text = '{self.schema}';
"""
| true | true |
f7f8d877672e9bc13db5730cbd60b24743cfcd47 | 1,214 | py | Python | test/unit/test_column.py | mv1742/dbt-spark | a8a85c54d10920af1c5efcbb4d2a51eb7cfcad11 | [
"Apache-2.0"
] | 92 | 2019-03-23T07:23:55.000Z | 2021-06-15T18:18:32.000Z | test/unit/test_column.py | mv1742/dbt-spark | a8a85c54d10920af1c5efcbb4d2a51eb7cfcad11 | [
"Apache-2.0"
] | 156 | 2019-03-21T03:26:58.000Z | 2021-06-29T15:30:51.000Z | test/unit/test_column.py | mv1742/dbt-spark | a8a85c54d10920af1c5efcbb4d2a51eb7cfcad11 | [
"Apache-2.0"
] | 58 | 2019-04-12T09:09:43.000Z | 2021-06-24T15:25:11.000Z | import unittest
from dbt.adapters.spark import SparkColumn
class TestSparkColumn(unittest.TestCase):
    """Unit tests for SparkColumn.convert_table_stats parsing."""

    def test_convert_table_stats_with_no_statistics(self):
        # A missing statistics string yields an empty stats mapping.
        parsed = SparkColumn.convert_table_stats(None)
        self.assertDictEqual({}, parsed)

    def test_convert_table_stats_with_bytes(self):
        # A bytes-only statistics string yields the four bytes stat keys.
        expected = {
            'stats:bytes:description': '',
            'stats:bytes:include': True,
            'stats:bytes:label': 'bytes',
            'stats:bytes:value': 123456789,
        }
        parsed = SparkColumn.convert_table_stats("123456789 bytes")
        self.assertDictEqual(expected, parsed)

    def test_convert_table_stats_with_bytes_and_rows(self):
        # Both bytes and rows statistics are parsed into separate key groups.
        expected = {
            'stats:bytes:description': '',
            'stats:bytes:include': True,
            'stats:bytes:label': 'bytes',
            'stats:bytes:value': 1234567890,
            'stats:rows:description': '',
            'stats:rows:include': True,
            'stats:rows:label': 'rows',
            'stats:rows:value': 12345678,
        }
        parsed = SparkColumn.convert_table_stats("1234567890 bytes, 12345678 rows")
        self.assertDictEqual(expected, parsed)
| 31.128205 | 79 | 0.556013 | import unittest
from dbt.adapters.spark import SparkColumn
class TestSparkColumn(unittest.TestCase):
def test_convert_table_stats_with_no_statistics(self):
self.assertDictEqual(
SparkColumn.convert_table_stats(None),
{}
)
def test_convert_table_stats_with_bytes(self):
self.assertDictEqual(
SparkColumn.convert_table_stats("123456789 bytes"),
{
'stats:bytes:description': '',
'stats:bytes:include': True,
'stats:bytes:label': 'bytes',
'stats:bytes:value': 123456789
}
)
def test_convert_table_stats_with_bytes_and_rows(self):
self.assertDictEqual(
SparkColumn.convert_table_stats("1234567890 bytes, 12345678 rows"),
{
'stats:bytes:description': '',
'stats:bytes:include': True,
'stats:bytes:label': 'bytes',
'stats:bytes:value': 1234567890,
'stats:rows:description': '',
'stats:rows:include': True,
'stats:rows:label': 'rows',
'stats:rows:value': 12345678
}
)
| true | true |
f7f8d8d82dec9b4a1cc6352811a0559852bc9b6d | 3,250 | py | Python | test/integration/test_attendance.py | PatrickLaban/cfapi | aeecf4a034a9dda3dd033f241f7c37e52a8c02c9 | [
"MIT"
] | 1 | 2021-09-05T17:21:49.000Z | 2021-09-05T17:21:49.000Z | test/integration/test_attendance.py | PatrickLaban/cfapi | aeecf4a034a9dda3dd033f241f7c37e52a8c02c9 | [
"MIT"
] | null | null | null | test/integration/test_attendance.py | PatrickLaban/cfapi | aeecf4a034a9dda3dd033f241f7c37e52a8c02c9 | [
"MIT"
] | null | null | null | import json
from test.factories import AttendanceFactory, OrganizationFactory
from test.harness import IntegrationTest
from app import db, Attendance
class TestAttendance(IntegrationTest):
    """Integration tests for the /api attendance endpoints.

    Fixed: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    replaced with ``assertEqual``.  Shared factory setup extracted to a helper.
    """

    _CFAPI_BASE = "https://www.codeforamerica.org/api/organizations/"

    def _make_org_with_attendance(self, name, slug):
        """Create an Organization and a matching Attendance row (uncommitted)."""
        OrganizationFactory(name=name)
        AttendanceFactory(organization_name=name, organization_url=self._CFAPI_BASE + slug)

    def test_attendance(self):
        """GET /api/attendance aggregates total and weekly counts across all orgs."""
        self._make_org_with_attendance("Code for San Francisco", "Code-for-San-Francisco")
        self._make_org_with_attendance("Open Oakland", "Open-Oakland")
        db.session.commit()

        response = self.app.get('/api/attendance')
        self.assertEqual(response.status_code, 200)
        response = json.loads(response.data)
        self.assertIsInstance(response, dict)
        self.assertTrue("total" in response.keys())
        self.assertTrue("weekly" in response.keys())

        # Recompute the expected aggregates straight from the database
        total = 0
        weekly = {}
        for att in Attendance.query.all():
            total += att.total
            for week, count in att.weekly.items():
                weekly[week] = weekly.get(week, 0) + count

        self.assertEqual(response["total"], total)
        self.assertEqual(response["weekly"], weekly)

    def test_orgs_attendance(self):
        """GET /api/organizations/attendance returns per-org attendance records."""
        self._make_org_with_attendance("Code for San Francisco", "Code-for-San-Francisco")
        self._make_org_with_attendance("Open Oakland", "Open-Oakland")
        db.session.commit()

        response = self.app.get('/api/organizations/attendance')
        self.assertEqual(response.status_code, 200)
        response = json.loads(response.data)
        self.assertIsInstance(response, list)
        for field in ("organization_name", "cfapi_url", "total", "weekly"):
            self.assertTrue(field in response[0].keys())

    def test_org_attendance(self):
        """GET /api/organizations/<name>/attendance returns one org's attendance."""
        self._make_org_with_attendance("Code for San Francisco", "Code-for-San-Francisco")
        db.session.commit()

        response = self.app.get('/api/organizations/Code-for-San-Francisco/attendance')
        self.assertEqual(response.status_code, 200)
        response = json.loads(response.data)
        self.assertIsInstance(response, dict)
        for field in ("organization_name", "cfapi_url", "total", "weekly"):
            self.assertTrue(field in response.keys())
| 43.333333 | 102 | 0.675692 | import json
from test.factories import AttendanceFactory, OrganizationFactory
from test.harness import IntegrationTest
from app import db, Attendance
class TestAttendance(IntegrationTest):
def test_attendance(self):
cfsf = OrganizationFactory(name="Code for San Francisco")
url = "https://www.codeforamerica.org/api/organizations/Code-for-San-Francisco"
cfsf_att = AttendanceFactory(organization_name="Code for San Francisco", organization_url=url)
oakland = OrganizationFactory(name="Open Oakland")
url = "https://www.codeforamerica.org/api/organizations/Open-Oakland"
oakland_att = AttendanceFactory(organization_name="Open Oakland", organization_url=url)
db.session.commit()
response = self.app.get('/api/attendance')
self.assertEquals(response.status_code, 200)
response = json.loads(response.data)
self.assertIsInstance(response, dict)
self.assertTrue("total" in response.keys())
self.assertTrue("weekly" in response.keys())
attendance = Attendance.query.all()
total = 0
weekly = {}
for att in attendance:
total += att.total
for week in att.weekly.keys():
if week in weekly.keys():
weekly[week] += att.weekly[week]
else:
weekly[week] = att.weekly[week]
self.assertEqual(response["total"],total)
self.assertEqual(response["weekly"],weekly)
def test_orgs_attendance(self):
OrganizationFactory(name="Code for San Francisco")
url = "https://www.codeforamerica.org/api/organizations/Code-for-San-Francisco"
AttendanceFactory(organization_name="Code for San Francisco", organization_url=url)
OrganizationFactory(name="Open Oakland")
url = "https://www.codeforamerica.org/api/organizations/Open-Oakland"
AttendanceFactory(organization_name="Open Oakland", organization_url=url)
db.session.commit()
response = self.app.get('/api/organizations/attendance')
self.assertEquals(response.status_code, 200)
response = json.loads(response.data)
self.assertIsInstance(response, list)
self.assertTrue("organization_name" in response[0].keys())
self.assertTrue("cfapi_url" in response[0].keys())
self.assertTrue("total" in response[0].keys())
self.assertTrue("weekly" in response[0].keys())
def test_org_attendance(self):
OrganizationFactory(name="Code for San Francisco")
url = "https://www.codeforamerica.org/api/organizations/Code-for-San-Francisco"
AttendanceFactory(organization_name="Code for San Francisco", organization_url=url)
db.session.commit()
response = self.app.get('/api/organizations/Code-for-San-Francisco/attendance')
self.assertEquals(response.status_code, 200)
response = json.loads(response.data)
self.assertIsInstance(response, dict)
self.assertTrue("organization_name" in response.keys())
self.assertTrue("cfapi_url" in response.keys())
self.assertTrue("total" in response.keys())
self.assertTrue("weekly" in response.keys())
| true | true |
f7f8d9686f7b6252a64bd3c45d4c9fbc91c7ffad | 6,238 | py | Python | src/concurrency/managers.py | GStepien/CSToolkit | 7a6872356f71843816a53c42b9abf5ae1ed97adf | [
"BSD-3-Clause"
] | null | null | null | src/concurrency/managers.py | GStepien/CSToolkit | 7a6872356f71843816a53c42b9abf5ae1ed97adf | [
"BSD-3-Clause"
] | null | null | null | src/concurrency/managers.py | GStepien/CSToolkit | 7a6872356f71843816a53c42b9abf5ae1ed97adf | [
"BSD-3-Clause"
] | null | null | null | """
Note: Regular Managers cannot be pickled. This module adds support for picklable
managers so that subprocesses can create data structures managed by a common parent manager.
"""
from __future__ import annotations
import abc
from typing import Any, Optional, final, Callable, Tuple, Final, Protocol, runtime_checkable
import multiprocessing.managers as mp_mngr
import utils.types.containers as con
from concurrency import cs
class Im:
class PicklableSyncManagedMixin(cs.Im.HasCSLMixin):
__f_picklable_sync_manager: Final[Optional[Im.PicklableSyncManager]]
def __init__(self,
*args: Any,
csl: cs.En.CSL,
picklable_sync_manager: Optional[Im.PicklableSyncManager],
**kwargs: Any):
if (csl >= cs.En.CSL.MULTI_PROCESS) != (picklable_sync_manager is not None):
raise ValueError(f"A {Im.PicklableSyncManager.__name__} must be provided "
f"if and only if csl >= {cs.En.CSL.MULTI_PROCESS.name}.")
self.__f_picklable_sync_manager = picklable_sync_manager
super().__init__(*args,
csl=csl,
**kwargs)
@final
def _c_get_manager(self) -> Optional[Im.PicklableSyncManager]:
return self.__f_picklable_sync_manager
class PicklableSyncManager(mp_mngr.SyncManager):
__f_address: Optional[Any]
__f_is_proxy: Optional[bool]
__f_was_pickled: bool
def __init__(self,
address: Optional[Any] = None,
authkey: Optional[bytes] = None):
if authkey is not None:
raise NotImplementedError(f"'authkey' support not (yet) implemented for "
f"{Im.PicklableSyncManager.__name__}.")
super().__init__(address=address,
authkey=authkey)
self.__c_init(address=address)
@final
def __c_init(self,
address: Optional[Any]) -> None:
self.__f_address = address
self.__f_is_proxy = None
self.__f_was_pickled = False
@final
@property
def is_proxy(self) -> Optional[bool]:
"""
None = Not decided yet (neither start() nor connect() called yet), False = Is server, True = Is Proxy
"""
return self.__f_is_proxy
@final
@property
def init_address(self) -> Optional[Any]:
return self.__f_address
def connect(self) -> None:
if self.__f_address is None:
raise ValueError(f"Cannot connect to 'None' address.")
super().connect()
self.__f_is_proxy = True
def start(self,
initializer: Optional[Callable[..., Any]] = None,
initargs: con.Pr.Iterable[Any] = ()) -> None:
if self.__f_was_pickled:
raise ValueError(f"Cannot start a manager that has been pickled and unpickled before. "
f"Start original manger instance and use this instance to connect to it.")
super().start(initializer=initializer,
initargs=initargs)
self.__f_is_proxy = False
def __getstate__(self) -> Tuple[Any, ...]:
address: Any
if self.__f_is_proxy is True:
assert self.__f_address is not None and self.__f_address == self.address
# If this is a proxy, it must be connected to some BaseManager Server (i.e., an address must have
# been provided in the first place),
# get address of this server
address = self.__f_address
elif self.__f_is_proxy is False:
address = self.address
assert address is not None and (self.__f_address is None or self.__f_address == address)
# If this is a server, it must have an address on which it is listening (either a provided one
# or an address chosen by the server if no address was provided in the first place)
# get address of this server
else:
assert self.__f_is_proxy is None and self.__f_address == self.address
# The above must be True if provided address is None (server not started yet -> has no address yet)
# as well as if an address was provided.
# This instance has neither been started nor connected. Pickling only makes
# sense if we have an address to connect to after unpickling (otherwise we have two independent
# manager server instances):
if self.__f_address is None:
raise ValueError(f"Cannot pickle manager instance if no server address is known (yet). "
f"Start this instance and retry.")
else:
address = self.__f_address
assert address is not None
return (address,
self.__f_is_proxy)
def __setstate__(self, state: Tuple[Any, ...]) -> None:
assert (isinstance(state, tuple)
and len(state) == 2
and state[0] is not None
and (state[1] is None or isinstance(state[1], bool)))
address: Final[Any] = state[0]
is_proxy: Final[Optional[bool]] = state[1]
super().__init__(address=address,
authkey=None)
self.__c_init(address=address)
assert self.__f_address == address and self.__f_is_proxy is None and not self.__f_was_pickled
self.__f_was_pickled = True
if isinstance(is_proxy, bool):
# Connect to the address of the other manager instance (independent whether
# the other one is itself a proxy or a server instance).
self.__f_is_proxy = True
self.connect()
else:
assert is_proxy is None
# Nothing else to do here
| 42.148649 | 115 | 0.569894 |
from __future__ import annotations
import abc
from typing import Any, Optional, final, Callable, Tuple, Final, Protocol, runtime_checkable
import multiprocessing.managers as mp_mngr
import utils.types.containers as con
from concurrency import cs
class Im:
class PicklableSyncManagedMixin(cs.Im.HasCSLMixin):
__f_picklable_sync_manager: Final[Optional[Im.PicklableSyncManager]]
def __init__(self,
*args: Any,
csl: cs.En.CSL,
picklable_sync_manager: Optional[Im.PicklableSyncManager],
**kwargs: Any):
if (csl >= cs.En.CSL.MULTI_PROCESS) != (picklable_sync_manager is not None):
raise ValueError(f"A {Im.PicklableSyncManager.__name__} must be provided "
f"if and only if csl >= {cs.En.CSL.MULTI_PROCESS.name}.")
self.__f_picklable_sync_manager = picklable_sync_manager
super().__init__(*args,
csl=csl,
**kwargs)
@final
def _c_get_manager(self) -> Optional[Im.PicklableSyncManager]:
return self.__f_picklable_sync_manager
class PicklableSyncManager(mp_mngr.SyncManager):
__f_address: Optional[Any]
__f_is_proxy: Optional[bool]
__f_was_pickled: bool
def __init__(self,
address: Optional[Any] = None,
authkey: Optional[bytes] = None):
if authkey is not None:
raise NotImplementedError(f"'authkey' support not (yet) implemented for "
f"{Im.PicklableSyncManager.__name__}.")
super().__init__(address=address,
authkey=authkey)
self.__c_init(address=address)
@final
def __c_init(self,
address: Optional[Any]) -> None:
self.__f_address = address
self.__f_is_proxy = None
self.__f_was_pickled = False
@final
@property
def is_proxy(self) -> Optional[bool]:
return self.__f_is_proxy
@final
@property
def init_address(self) -> Optional[Any]:
return self.__f_address
def connect(self) -> None:
if self.__f_address is None:
raise ValueError(f"Cannot connect to 'None' address.")
super().connect()
self.__f_is_proxy = True
def start(self,
initializer: Optional[Callable[..., Any]] = None,
initargs: con.Pr.Iterable[Any] = ()) -> None:
if self.__f_was_pickled:
raise ValueError(f"Cannot start a manager that has been pickled and unpickled before. "
f"Start original manger instance and use this instance to connect to it.")
super().start(initializer=initializer,
initargs=initargs)
self.__f_is_proxy = False
def __getstate__(self) -> Tuple[Any, ...]:
address: Any
if self.__f_is_proxy is True:
assert self.__f_address is not None and self.__f_address == self.address
address = self.__f_address
elif self.__f_is_proxy is False:
address = self.address
assert address is not None and (self.__f_address is None or self.__f_address == address)
else:
assert self.__f_is_proxy is None and self.__f_address == self.address
if self.__f_address is None:
raise ValueError(f"Cannot pickle manager instance if no server address is known (yet). "
f"Start this instance and retry.")
else:
address = self.__f_address
assert address is not None
return (address,
self.__f_is_proxy)
def __setstate__(self, state: Tuple[Any, ...]) -> None:
assert (isinstance(state, tuple)
and len(state) == 2
and state[0] is not None
and (state[1] is None or isinstance(state[1], bool)))
address: Final[Any] = state[0]
is_proxy: Final[Optional[bool]] = state[1]
super().__init__(address=address,
authkey=None)
self.__c_init(address=address)
assert self.__f_address == address and self.__f_is_proxy is None and not self.__f_was_pickled
self.__f_was_pickled = True
if isinstance(is_proxy, bool):
self.__f_is_proxy = True
self.connect()
else:
assert is_proxy is None
| true | true |
f7f8d9ca1460dacb3395d80fc8f87522919650b4 | 7,565 | py | Python | main/handler.py | t24kc/raspberry-pi | 505a504019165fb4a66c13bc27eff443bc083b1d | [
"MIT"
] | 1 | 2019-10-05T21:17:23.000Z | 2019-10-05T21:17:23.000Z | main/handler.py | t24kc/raspberry-pi | 505a504019165fb4a66c13bc27eff443bc083b1d | [
"MIT"
] | 1 | 2021-02-08T21:01:02.000Z | 2021-02-08T21:01:02.000Z | main/handler.py | t24kc/raspberry-pi | 505a504019165fb4a66c13bc27eff443bc083b1d | [
"MIT"
] | null | null | null | from datetime import datetime
from time import sleep
from lib.mail import Mail
from lib.spread_sheet import SpreadSheet
from sensor.SHT31 import SHT31
from sensor.BH1750FVI import BH1750FVI
from sensor.VL6180 import VL6180X
from sensor.CO2MINI import CO2MINI
from sensor.relay_module import RelayModule
import matplotlib.pyplot as plt
import schedule
import yaml
DEFAULT_COLUMNS = [
"Time",
"Distance(mm)",
"Light(lux)",
"Light(klux/h)",
"Temperature(C)",
"Humidity(%)",
"CO2(ppm)",
"WaterFlag",
]
DEFAULT_DATA_IMAGE_PATH = "data/figure.png"
class Scheduler(object):
def __init__(self, config):
self.params = {
"distance": None,
"light": None,
"light_klux": None,
"light_total": 0,
"temperature": None,
"humidity": None,
"co2": None,
"alert_remaining": None,
}
self._config = config
self._full_alert_remaining()
self._mail_client = Mail(
self._config["google"]["credentials_path"],
self._config["google"]["token_path"],
)
self._spread_sheet_client = SpreadSheet(
self._config["google"]["service_account_path"],
self._config["google"]["spread_sheet_id"],
)
if not self._spread_sheet_client.get_label_value("A1"):
self._spread_sheet_client.append_row(DEFAULT_COLUMNS)
self._vl6180x_sensor = VL6180X()
self._bh1750fvi_sensor = BH1750FVI()
self._sht31_sensor = SHT31()
self._relay_module = RelayModule()
self._co2mini_sensor = CO2MINI()
def monitoring_job(self):
self._fetch_params()
self._logging_params()
self._alert_params()
if self._is_water_flag():
self.turn_on_water()
def mail_job(self):
dframe = self._spread_sheet_client.get_dataframe(diff_days=7)
kwargs = {"kind": "line", "use_index": True, "rot": 45}
setting_list = [
{"title": "Light(lux)", "x": "Time", "y": "Light(lux)"},
{"title": "CO2(ppm)", "x": "Time", "y": "CO2(ppm)"},
{"title": "Temperature(C)", "x": "Time", "y": "Temperature(C)"},
{"title": "Humidity(%)", "x": "Time", "y": "Humidity(%)"},
]
fig, axes = plt.subplots(
ncols=2, nrows=2, figsize=(20, 15), sharex="col")
for ax, setting in zip(axes.ravel(), setting_list):
dframe.plot(
setting["x"], setting["y"], ax=ax, **kwargs, title=setting["title"]
)
plt.savefig(DEFAULT_DATA_IMAGE_PATH)
self._send_mail(
self._config["mail"]["summary"]["subject"],
self._config["mail"]["summary"]["body"],
DEFAULT_DATA_IMAGE_PATH,
)
def _fetch_params(self):
light = self._bh1750fvi_sensor.get_light()
light_klux = (
light *
self._config["scheduler"]["monitoring_interval_minutes"] / 60000
)
self._co2mini_sensor.read_data()
self.params.update(
{
"distance": self._vl6180x_sensor.get_distance(),
"light": light,
"light_klux": light_klux,
"temperature": self._sht31_sensor.get_temperature(),
"humidity": self._sht31_sensor.get_humidity(),
"co2": self._co2mini_sensor.get_co2(),
}
)
self.params["light_total"] += light_klux
def _logging_params(self):
current_datetime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
values = [
current_datetime,
round(self.params["distance"], 1),
round(self.params["light"], 1),
round(self.params["light_klux"], 1),
round(self.params["temperature"], 1),
round(self.params["humidity"], 1),
round(self.params["co2"], 1),
int(self._is_water_flag()),
]
self._spread_sheet_client.append_row(values)
print(values)
def _alert_params(self):
if not self._is_alert_flag():
self._full_alert_remaining()
return
self.params["alert_remaining"] -= 1
if self.params["alert_remaining"] > 0:
return
body = ""
if self._is_temperature_upper_limit():
body = self._config["mail"]["alert"]["body"]["temperature_upper"].format(
self.params["temperature"]
)
elif self._is_temperature_lower_limit():
body = self._config["mail"]["alert"]["body"]["temperature_lower"].format(
self.params["temperature"]
)
elif self._is_co2_upper_limit():
body = self._config["mail"]["alert"]["body"]["co2_upper"].format(
self.params["co2"]
)
elif self._is_co2_lower_limit():
body = self._config["mail"]["alert"]["body"]["co2_lower"].format(
self.params["co2"]
)
self._send_mail(self._config["mail"]["alert"]["subject"], body)
self._full_alert_remaining()
def _send_mail(self, subject, body, image_file=None):
if image_file:
message = self._mail_client.create_message_with_image(
self._config["mail"]["to_address"], subject, body, image_file
)
else:
message = self._mail_client.create_message(
self._config["mail"]["to_address"], subject, body
)
self._mail_client.send_message(message)
def _full_alert_remaining(self):
self.params["alert_remaining"] = self._config["alert"]["consecutive_time"]
def _is_alert_flag(self):
return (
self._is_temperature_upper_limit()
or self._is_temperature_lower_limit()
or self._is_co2_upper_limit()
or self._is_co2_lower_limit()
)
def _is_temperature_upper_limit(self):
return (
self._config["alert"]["temperature_upper_limit"]
< self.params["temperature"]
)
def _is_temperature_lower_limit(self):
return (
self.params["temperature"]
< self._config["alert"]["temperature_lower_limit"]
)
def _is_co2_upper_limit(self):
return self._config["alert"]["co2_upper_limit"] < self.params["co2"]
def _is_co2_lower_limit(self):
return self.params["co2"] < self._config["alert"]["co2_lower_limit"]
def _is_water_flag(self):
return (
self.params["light_total"] > self._config["sensor"]["solar_radiation_limit"]
)
def turn_on_water(self):
self.params["light_total"] = 0
self._relay_module.setup()
self._relay_module.turn_on_water(
self._config["sensor"]["water_turn_on_time"])
def turn_off_water(self):
self._relay_module.turn_off_water()
def cleanup(self):
self._relay_module.cleanup()
def main():
with open("config.yaml") as file:
config = yaml.full_load(file)
scheduler = Scheduler(config)
schedule.every(config["scheduler"]["monitoring_interval_minutes"]).minutes.do(
scheduler.monitoring_job
)
schedule.every(config["scheduler"]["summary_mail_interval_days"]).days.do(
scheduler.mail_job
)
try:
while True:
schedule.run_pending()
sleep(1)
except KeyboardInterrupt:
scheduler.cleanup()
pass
if __name__ == "__main__":
main()
| 31.785714 | 88 | 0.578982 | from datetime import datetime
from time import sleep
from lib.mail import Mail
from lib.spread_sheet import SpreadSheet
from sensor.SHT31 import SHT31
from sensor.BH1750FVI import BH1750FVI
from sensor.VL6180 import VL6180X
from sensor.CO2MINI import CO2MINI
from sensor.relay_module import RelayModule
import matplotlib.pyplot as plt
import schedule
import yaml
DEFAULT_COLUMNS = [
"Time",
"Distance(mm)",
"Light(lux)",
"Light(klux/h)",
"Temperature(C)",
"Humidity(%)",
"CO2(ppm)",
"WaterFlag",
]
DEFAULT_DATA_IMAGE_PATH = "data/figure.png"
class Scheduler(object):
def __init__(self, config):
self.params = {
"distance": None,
"light": None,
"light_klux": None,
"light_total": 0,
"temperature": None,
"humidity": None,
"co2": None,
"alert_remaining": None,
}
self._config = config
self._full_alert_remaining()
self._mail_client = Mail(
self._config["google"]["credentials_path"],
self._config["google"]["token_path"],
)
self._spread_sheet_client = SpreadSheet(
self._config["google"]["service_account_path"],
self._config["google"]["spread_sheet_id"],
)
if not self._spread_sheet_client.get_label_value("A1"):
self._spread_sheet_client.append_row(DEFAULT_COLUMNS)
self._vl6180x_sensor = VL6180X()
self._bh1750fvi_sensor = BH1750FVI()
self._sht31_sensor = SHT31()
self._relay_module = RelayModule()
self._co2mini_sensor = CO2MINI()
def monitoring_job(self):
self._fetch_params()
self._logging_params()
self._alert_params()
if self._is_water_flag():
self.turn_on_water()
def mail_job(self):
dframe = self._spread_sheet_client.get_dataframe(diff_days=7)
kwargs = {"kind": "line", "use_index": True, "rot": 45}
setting_list = [
{"title": "Light(lux)", "x": "Time", "y": "Light(lux)"},
{"title": "CO2(ppm)", "x": "Time", "y": "CO2(ppm)"},
{"title": "Temperature(C)", "x": "Time", "y": "Temperature(C)"},
{"title": "Humidity(%)", "x": "Time", "y": "Humidity(%)"},
]
fig, axes = plt.subplots(
ncols=2, nrows=2, figsize=(20, 15), sharex="col")
for ax, setting in zip(axes.ravel(), setting_list):
dframe.plot(
setting["x"], setting["y"], ax=ax, **kwargs, title=setting["title"]
)
plt.savefig(DEFAULT_DATA_IMAGE_PATH)
self._send_mail(
self._config["mail"]["summary"]["subject"],
self._config["mail"]["summary"]["body"],
DEFAULT_DATA_IMAGE_PATH,
)
def _fetch_params(self):
light = self._bh1750fvi_sensor.get_light()
light_klux = (
light *
self._config["scheduler"]["monitoring_interval_minutes"] / 60000
)
self._co2mini_sensor.read_data()
self.params.update(
{
"distance": self._vl6180x_sensor.get_distance(),
"light": light,
"light_klux": light_klux,
"temperature": self._sht31_sensor.get_temperature(),
"humidity": self._sht31_sensor.get_humidity(),
"co2": self._co2mini_sensor.get_co2(),
}
)
self.params["light_total"] += light_klux
def _logging_params(self):
current_datetime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
values = [
current_datetime,
round(self.params["distance"], 1),
round(self.params["light"], 1),
round(self.params["light_klux"], 1),
round(self.params["temperature"], 1),
round(self.params["humidity"], 1),
round(self.params["co2"], 1),
int(self._is_water_flag()),
]
self._spread_sheet_client.append_row(values)
print(values)
def _alert_params(self):
if not self._is_alert_flag():
self._full_alert_remaining()
return
self.params["alert_remaining"] -= 1
if self.params["alert_remaining"] > 0:
return
body = ""
if self._is_temperature_upper_limit():
body = self._config["mail"]["alert"]["body"]["temperature_upper"].format(
self.params["temperature"]
)
elif self._is_temperature_lower_limit():
body = self._config["mail"]["alert"]["body"]["temperature_lower"].format(
self.params["temperature"]
)
elif self._is_co2_upper_limit():
body = self._config["mail"]["alert"]["body"]["co2_upper"].format(
self.params["co2"]
)
elif self._is_co2_lower_limit():
body = self._config["mail"]["alert"]["body"]["co2_lower"].format(
self.params["co2"]
)
self._send_mail(self._config["mail"]["alert"]["subject"], body)
self._full_alert_remaining()
def _send_mail(self, subject, body, image_file=None):
if image_file:
message = self._mail_client.create_message_with_image(
self._config["mail"]["to_address"], subject, body, image_file
)
else:
message = self._mail_client.create_message(
self._config["mail"]["to_address"], subject, body
)
self._mail_client.send_message(message)
def _full_alert_remaining(self):
self.params["alert_remaining"] = self._config["alert"]["consecutive_time"]
def _is_alert_flag(self):
return (
self._is_temperature_upper_limit()
or self._is_temperature_lower_limit()
or self._is_co2_upper_limit()
or self._is_co2_lower_limit()
)
def _is_temperature_upper_limit(self):
return (
self._config["alert"]["temperature_upper_limit"]
< self.params["temperature"]
)
def _is_temperature_lower_limit(self):
return (
self.params["temperature"]
< self._config["alert"]["temperature_lower_limit"]
)
def _is_co2_upper_limit(self):
return self._config["alert"]["co2_upper_limit"] < self.params["co2"]
def _is_co2_lower_limit(self):
return self.params["co2"] < self._config["alert"]["co2_lower_limit"]
def _is_water_flag(self):
return (
self.params["light_total"] > self._config["sensor"]["solar_radiation_limit"]
)
def turn_on_water(self):
self.params["light_total"] = 0
self._relay_module.setup()
self._relay_module.turn_on_water(
self._config["sensor"]["water_turn_on_time"])
def turn_off_water(self):
self._relay_module.turn_off_water()
def cleanup(self):
self._relay_module.cleanup()
def main():
with open("config.yaml") as file:
config = yaml.full_load(file)
scheduler = Scheduler(config)
schedule.every(config["scheduler"]["monitoring_interval_minutes"]).minutes.do(
scheduler.monitoring_job
)
schedule.every(config["scheduler"]["summary_mail_interval_days"]).days.do(
scheduler.mail_job
)
try:
while True:
schedule.run_pending()
sleep(1)
except KeyboardInterrupt:
scheduler.cleanup()
pass
if __name__ == "__main__":
main()
| true | true |
f7f8da7af22881c2f4baf51a409c9e4fba74d88a | 179 | py | Python | team-violet-code/storage-provider/src/__init__.py | shaunkane21/omh-dsu-ri | c00e1bccaa29c4efafbb270e5660d062f591c98b | [
"Apache-2.0"
] | null | null | null | team-violet-code/storage-provider/src/__init__.py | shaunkane21/omh-dsu-ri | c00e1bccaa29c4efafbb270e5660d062f591c98b | [
"Apache-2.0"
] | null | null | null | team-violet-code/storage-provider/src/__init__.py | shaunkane21/omh-dsu-ri | c00e1bccaa29c4efafbb270e5660d062f591c98b | [
"Apache-2.0"
] | null | null | null | ''' all controllers for various collections of database '''
import os
import glob
__all__ = [os.path.basename(f)[:-3]
for f in glob.glob(os.path.dirname(__file__) + "/*.py")]
| 29.833333 | 60 | 0.687151 | import os
import glob
__all__ = [os.path.basename(f)[:-3]
for f in glob.glob(os.path.dirname(__file__) + "/*.py")]
| true | true |
f7f8daa2a2a01f21ca0d4d4284828b76074d55bd | 630 | py | Python | mmf/utils/phoc/build_phoc.py | anas-awadalla/mmf | 306f8f758831b2abf2c7ef5a8f010670a2cb33ed | [
"BSD-3-Clause"
] | 3,252 | 2018-07-27T02:32:24.000Z | 2020-05-07T17:54:46.000Z | mmf/utils/phoc/build_phoc.py | anas-awadalla/mmf | 306f8f758831b2abf2c7ef5a8f010670a2cb33ed | [
"BSD-3-Clause"
] | 914 | 2020-05-07T18:36:26.000Z | 2022-03-31T05:45:26.000Z | mmf/utils/phoc/build_phoc.py | anas-awadalla/mmf | 306f8f758831b2abf2c7ef5a8f010670a2cb33ed | [
"BSD-3-Clause"
] | 490 | 2020-05-07T20:05:10.000Z | 2022-03-31T14:17:23.000Z | import numpy as np
from .cphoc import build_phoc as _build_phoc_raw
_alphabet = {
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k",
"l",
"m",
"n",
"o",
"p",
"q",
"r",
"s",
"t",
"u",
"v",
"w",
"x",
"y",
"z",
"0",
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
} # NoQA
def build_phoc(token):
token = token.lower().strip()
token = "".join([c for c in token if c in _alphabet])
phoc = _build_phoc_raw(token)
phoc = np.array(phoc, dtype=np.float32)
return phoc
| 12.115385 | 57 | 0.384127 | import numpy as np
from .cphoc import build_phoc as _build_phoc_raw
_alphabet = {
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k",
"l",
"m",
"n",
"o",
"p",
"q",
"r",
"s",
"t",
"u",
"v",
"w",
"x",
"y",
"z",
"0",
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
}
def build_phoc(token):
token = token.lower().strip()
token = "".join([c for c in token if c in _alphabet])
phoc = _build_phoc_raw(token)
phoc = np.array(phoc, dtype=np.float32)
return phoc
| true | true |
f7f8dc3b8dcffaef8d601fb6423f82d8ba44405e | 19,934 | py | Python | tests/model/test_grouping.py | diegodelemos/asclepias-broker | 363ebf620ceea8cd4bd387d414c9cfaca226ce35 | [
"MIT"
] | null | null | null | tests/model/test_grouping.py | diegodelemos/asclepias-broker | 363ebf620ceea8cd4bd387d414c9cfaca226ce35 | [
"MIT"
] | null | null | null | tests/model/test_grouping.py | diegodelemos/asclepias-broker | 363ebf620ceea8cd4bd387d414c9cfaca226ce35 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2018 CERN.
#
# Asclepias Broker is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Test broker model."""
from helpers import assert_grouping, create_objects_from_relations, \
generate_payloads
from asclepias_broker.api import EventAPI
from asclepias_broker.api.ingestion import get_group_from_id, \
get_or_create_groups, merge_identity_groups, merge_version_groups
from asclepias_broker.models import Group, GroupM2M, GroupMetadata, \
GroupRelationship, GroupRelationshipM2M, GroupRelationshipMetadata, \
GroupType, Identifier, Identifier2Group, Relation, Relationship, \
Relationship2GroupRelationship
def _handle_events(evtsrc):
    """Build event payloads from ``evtsrc`` and feed each one to the broker.

    ``evtsrc`` is a list of shorthand event rows understood by
    ``generate_payloads`` (see ``helpers``); every generated payload is
    ingested synchronously via ``EventAPI.handle_event``.
    """
    for payload in generate_payloads(evtsrc):
        EventAPI.handle_event(payload)
def off_test_simple_id_group_merge(db):
    """Test simple ID groups merging."""

    def _assert_counts(groups, identifiers, id2groups):
        # All three tables are checked together at every step.
        assert Group.query.count() == groups
        assert Identifier.query.count() == identifiers
        assert Identifier2Group.query.count() == id2groups

    # A-IsIdenticalTo-B -> identity group {'A', 'B'}
    _handle_events([['C', 'A', 'IsIdenticalTo', 'B', '2018-01-01']])
    # Exactly one Version group and one Identity group must exist.
    Group.query.filter_by(type=GroupType.Version).one()
    Group.query.filter_by(type=GroupType.Identity).one()
    assert Relationship.query.count() == 1
    _assert_counts(groups=2, identifiers=2, id2groups=2)

    # A-IsIdenticalTo-C extends the group -> {'A', 'B', 'C'}
    _handle_events([['C', 'A', 'IsIdenticalTo', 'C', '2018-01-01']])
    _assert_counts(groups=1, identifiers=3, id2groups=3)

    # D-IsIdenticalTo-E creates a second group -> {'A', 'B', 'C'}, {'D', 'E'}
    _handle_events([['C', 'D', 'IsIdenticalTo', 'E', '2018-01-01']])
    _assert_counts(groups=2, identifiers=5, id2groups=5)

    # A-IsIdenticalTo-D merges both groups -> {'A', 'B', 'C', 'D', 'E'}
    _handle_events([['C', 'A', 'IsIdenticalTo', 'D', '2018-01-01']])
    _assert_counts(groups=1, identifiers=5, id2groups=5)
def test_get_or_create_groups(db):
    """Test creating groups (Identity and Version) for an identifier."""
    identifier = Identifier(value='A', scheme='doi')
    db.session.add(identifier)
    # No grouping objects may exist before the first call.
    assert Group.query.count() == 0
    assert GroupM2M.query.count() == 0
    assert Identifier2Group.query.count() == 0

    identity_group, version_group = get_or_create_groups(identifier)
    db.session.commit()

    def _assert_single_grouping(ident, id_group, ver_group):
        # Exactly one Identity/Version group pair, wired to ``ident``.
        assert Group.query.count() == 2
        assert GroupM2M.query.count() == 1
        assert Identifier2Group.query.count() == 1
        assert Group.query.filter_by(type=GroupType.Identity).one() == id_group
        assert Group.query.filter_by(type=GroupType.Version).one() == ver_group
        id2g = Identifier2Group.query.one()
        g2g = GroupM2M.query.one()
        assert id2g.identifier == ident
        assert id2g.group == id_group
        # The Version group contains the Identity group as its subgroup.
        assert g2g.group == ver_group
        assert g2g.subgroup == id_group

    _assert_single_grouping(identifier, identity_group, version_group)

    # Fetching the identifier again and re-running must be idempotent:
    # nothing new is created, the same groups are returned.
    refetched = Identifier.get('A', 'doi')
    assert refetched
    identity_group, version_group = get_or_create_groups(identifier)
    db.session.commit()
    _assert_single_grouping(refetched, identity_group, version_group)

    # A second, unrelated identifier gets a group pair of its own.
    other = Identifier(value='B', scheme='doi')
    db.session.add(other)
    identity_group, version_group = get_or_create_groups(other)
    assert Group.query.count() == 4
    assert GroupM2M.query.count() == 2
    assert Identifier2Group.query.count() == 2
def test_merge_version_groups(db):
    """Test group merging.

    Note: This test is merging Version groups. This does not automatically
    merge the Identity groups below!

    Each expected state is expressed as a ``grouping`` tuple checked by
    ``helpers.assert_grouping``. From the structure used here it appears to
    hold (confirm against ``assert_grouping``): (1) groups — identifier
    values for Identity groups, then index lists into that same list for
    Version groups; (2) relationships — identifier-level, then
    Identity-group-level, then Version-group-level, referenced by index;
    (3) a mapping from each higher-level relationship to the lower-level
    relationships it aggregates.
    """
    # Seed the graph: C cites A and B; A and B cite D; E and A each have a
    # Cites plus an IsRelatedTo relation (to B and F respectively).
    rels = [
        ('C', Relation.Cites, 'A'),
        ('C', Relation.Cites, 'B'),
        ('A', Relation.Cites, 'D'),
        ('B', Relation.Cites, 'D'),
        ('E', Relation.Cites, 'B'),
        ('E', Relation.IsRelatedTo, 'B'),
        ('A', Relation.Cites, 'F'),
        ('A', Relation.IsRelatedTo, 'F'),
    ]
    create_objects_from_relations(rels)

    # Initial state: every identifier sits in its own Identity group
    # (Idx 0-5) and its own singleton Version group (Idx 6-11).
    grouping = (
        [
            ['A'],
            ['B'],
            ['C'],
            ['D'],
            ['E'],
            ['F'],  # Idx=5
            [0],
            [1],
            [2],
            [3],
            [4],
            [5],
        ],
        [
            ('C', Relation.Cites, 'A'),
            ('C', Relation.Cites, 'B'),
            ('A', Relation.Cites, 'D'),
            ('B', Relation.Cites, 'D'),
            ('E', Relation.Cites, 'B'),
            ('E', Relation.IsRelatedTo, 'B'),
            ('A', Relation.Cites, 'F'),
            ('A', Relation.IsRelatedTo, 'F'),
            # Identity group relations:
            (2, Relation.Cites, 0),
            (2, Relation.Cites, 1),
            (0, Relation.Cites, 3),
            (1, Relation.Cites, 3),
            (4, Relation.Cites, 1),
            (4, Relation.IsRelatedTo, 1),
            (0, Relation.Cites, 5),
            (0, Relation.IsRelatedTo, 5),
            # Version group relations:
            (8, Relation.Cites, 6),
            (8, Relation.Cites, 7),
            (6, Relation.Cites, 9),
            (7, Relation.Cites, 9),
            (10, Relation.Cites, 7),
            (10, Relation.IsRelatedTo, 7),
            (6, Relation.Cites, 11),
            (6, Relation.IsRelatedTo, 11),
        ],
        [
            (8, [0]),
            (9, [1]),
            (10, [2]),
            (11, [3]),
            (12, [4]),
            (13, [5]),
            (14, [6]),
            (15, [7]),
            (16, [8]),
            (17, [9]),
            (18, [10]),
            (19, [11]),
            (20, [12]),
            (21, [13]),
            (22, [14]),
            (23, [15]),
        ]
    )
    assert_grouping(grouping)

    # Merge Version groups of A and B
    # This merges only the version groups, not Identity groups
    id_grp1 = get_group_from_id('A', group_type=GroupType.Version)
    id_grp2 = get_group_from_id('B', group_type=GroupType.Version)
    merge_version_groups(id_grp1, id_grp2)
    db.session.commit()
    # Version groups and relations after merging:
    # C-Cites-AB (squashed C-Cites-A and C-Cites-B)
    # AB-Cites-D (squashed A-Cites-D and B-Cites-D)
    # E-Cites-AB
    # E-IsRelatedTo-AB (not squashed with above, because of different relation)
    # AB-Cites-F
    # AB-IsRelatedTo-F (not squashed with above, because of different relation)
    grouping = (
        [
            ['A'],
            ['B'],
            ['C'],
            ['D'],
            ['E'],
            ['F'],  # Idx=5
            [0, 1],  # {AB}
            [2],  # {C}
            [3],  # {D}
            [4],  # {E}
            [5],  # {F}
        ],
        [
            ('C', Relation.Cites, 'A'),
            ('C', Relation.Cites, 'B'),
            ('A', Relation.Cites, 'D'),
            ('B', Relation.Cites, 'D'),
            ('E', Relation.Cites, 'B'),
            ('E', Relation.IsRelatedTo, 'B'),
            ('A', Relation.Cites, 'F'),
            ('A', Relation.IsRelatedTo, 'F'),
            # Identity group relations:
            (2, Relation.Cites, 0),  # C-Cites-A Idx=8
            (2, Relation.Cites, 1),  # C-Cites-B
            (0, Relation.Cites, 3),  # A-Cites-D
            (1, Relation.Cites, 3),  # B-Cites-D
            (4, Relation.Cites, 1),  # E-Cites-B
            (4, Relation.IsRelatedTo, 1),  # E-IsRelatedTo-B
            (0, Relation.Cites, 5),  # A-Cites-F
            (0, Relation.IsRelatedTo, 5),  # A-IsRelatedTo-F
            # Version group relations:
            (7, Relation.Cites, 6),  # C-Cites-AB Idx=16
            (6, Relation.Cites, 8),  # AB-Cites-D
            (9, Relation.Cites, 6),  # E-Cites-AB
            (9, Relation.IsRelatedTo, 6),  # E-IsRelatedTo-AB
            (6, Relation.Cites, 10),  # AB-Cites-F
            (6, Relation.IsRelatedTo, 10),  # AB-IsRelatedTo-F
        ],
        [
            (8, [0]),
            (9, [1]),
            (10, [2]),
            (11, [3]),
            (12, [4]),
            (13, [5]),
            (14, [6]),
            (15, [7]),
            # The squashed version-group relations now aggregate two
            # identity-group relations each:
            (16, [8, 9]),
            (17, [10, 11]),
            (18, [12]),
            (19, [13]),
            (20, [14]),
            (21, [15]),
        ]
    )
    assert_grouping(grouping)

    # Merge Version groups of C and D and also E and F
    id_grp1 = get_group_from_id('C', group_type=GroupType.Version)
    id_grp2 = get_group_from_id('D', group_type=GroupType.Version)
    merge_version_groups(id_grp1, id_grp2)
    id_grp1 = get_group_from_id('E', group_type=GroupType.Version)
    id_grp2 = get_group_from_id('F', group_type=GroupType.Version)
    merge_version_groups(id_grp1, id_grp2)
    db.session.commit()
    # Version groups and relations after merging:
    # CD-Cites-AB (squashed C-Cites-A and C-Cites-B)
    # AB-Cites-CD (squashed A-Cites-D and B-Cites-D)
    # EF-Cites-AB
    # EF-IsRelatedTo-AB
    # AB-Cites-EF
    # AB-IsRelatedTo-EF
    grouping = (
        [
            ['A'],
            ['B'],
            ['C'],
            ['D'],
            ['E'],
            ['F'],  # Idx=5
            [0, 1],  # {AB}
            [2, 3],  # {CD}
            [4, 5],  # {EF}
        ],
        [
            ('C', Relation.Cites, 'A'),
            ('C', Relation.Cites, 'B'),
            ('A', Relation.Cites, 'D'),
            ('B', Relation.Cites, 'D'),
            ('E', Relation.Cites, 'B'),
            ('E', Relation.IsRelatedTo, 'B'),
            ('A', Relation.Cites, 'F'),
            ('A', Relation.IsRelatedTo, 'F'),
            # Identity group relations (unchanged by version-group merges):
            (2, Relation.Cites, 0),  # C-Cites-A Idx=8
            (2, Relation.Cites, 1),  # C-Cites-B
            (0, Relation.Cites, 3),  # A-Cites-D
            (1, Relation.Cites, 3),  # B-Cites-D
            (4, Relation.Cites, 1),  # E-Cites-B
            (4, Relation.IsRelatedTo, 1),  # E-IsRelatedTo-B
            (0, Relation.Cites, 5),  # A-Cites-F
            (0, Relation.IsRelatedTo, 5),  # A-IsRelatedTo-F
            # Version group relations:
            (7, Relation.Cites, 6),  # CD-Cites-AB Idx=16
            (6, Relation.Cites, 7),  # AB-Cites-CD
            (8, Relation.Cites, 6),  # EF-Cites-AB
            (8, Relation.IsRelatedTo, 6),  # EF-IsRelatedTo-AB
            (6, Relation.Cites, 8),  # AB-Cites-EF
            (6, Relation.IsRelatedTo, 8),  # AB-IsRelatedTo-EF
        ],
        [
            (8, [0]),
            (9, [1]),
            (10, [2]),
            (11, [3]),
            (12, [4]),
            (13, [5]),
            (14, [6]),
            (15, [7]),
            (16, [8, 9]),
            (17, [10, 11]),
            (18, [12]),
            (19, [13]),
            (20, [14]),
            (21, [15]),
        ]
    )
    assert_grouping(grouping)
def test_merge_identity_groups(db):
    """Test merging of Identity groups.
    Note: This test merges Identity groups until only one is left.
    Merging two Identity groups also collapses their Version groups
    and squashes the affected group relationships (see expected
    groupings below).
    """
    rels = [
        ('A', Relation.Cites, 'C'),
        ('B', Relation.Cites, 'C'),
        ('D', Relation.Cites, 'A'),
        ('D', Relation.Cites, 'B'),
    ]
    # Per-relationship metadata triples: (source object, link, target object).
    metadata = [
        (
            {'Title': 'Title of A v1',
             'Type': {'Name': 'literature',
                      'SubType': 'journal article',
                      'SubTypeSchema': 'datacite'},
             'PublicationDate': '2018-01-01',
             'Creator': [{'Name': 'Creator A v1',
                          'Identifier': [{'ID': '0000-0001-2345-6789',
                                          'IDScheme': 'orcid'}]}]},
            {'LinkPublicationDate': '2018-01-01',
             'LinkProvider': [{'Name': 'Foobar'}]},
            {'Title': 'Title of C v1',
             'Type': {'Name': 'literature',
                      'SubType': 'journal article',
                      'SubTypeSchema': 'datacite'},
             'PublicationDate': '2018-01-01',
             'Creator': [{'Name': 'Creator C v1',
                          'Identifier': [{'ID': '0000-0001-2345-6789',
                                          'IDScheme': 'orcid'}]}]}
        ),
        (
            {'Title': 'Title of B v1',
             'Type': {'Name': 'literature',
                      'SubType': 'journal article',
                      'SubTypeSchema': 'datacite'},
             'PublicationDate': '2018-01-01',
             'Creator': [{'Name': 'Creator B v1',
                          'Identifier': [{'ID': '0000-0001-2345-6789',
                                          'IDScheme': 'orcid'}]}]},
            {'LinkPublicationDate': '2018-01-01',
             'LinkProvider': [{'Name': 'Foobar'}]},
            {'Title': 'Title of C v2',
             'Type': {'Name': 'literature',
                      'SubType': 'journal article',
                      'SubTypeSchema': 'datacite'},
             'PublicationDate': '2018-01-01',
             'Creator': [{'Name': 'Creator C v2',
                          'Identifier': [{'ID': '0000-0001-2345-6789',
                                          'IDScheme': 'orcid'}]}]}
        ),
        (
            {'Title': 'Title of D v1',
             'Type': {'Name': 'literature',
                      'SubType': 'journal article',
                      'SubTypeSchema': 'datacite'},
             'PublicationDate': '2018-01-01',
             'Creator': [{'Name': 'Creator D v1',
                          'Identifier': [{'ID': '0000-0001-2345-6789',
                                          'IDScheme': 'orcid'}]}]},
            {'LinkPublicationDate': '2018-01-01',
             'LinkProvider': [{'Name': 'Foobar'}]},
            {'Title': 'Title of A v2',
             'Type': {'Name': 'literature',
                      'SubType': 'journal article',
                      'SubTypeSchema': 'datacite'},
             'PublicationDate': '2018-01-01',
             'Creator': [{'Name': 'Creator A v2',
                          'Identifier': [{'ID': '0000-0001-2345-6789',
                                          'IDScheme': 'orcid'}]}]}
        ),
        (
            {'Title': 'Title of D v2',
             'Type': {'Name': 'literature',
                      'SubType': 'journal article',
                      'SubTypeSchema': 'datacite'},
             'PublicationDate': '2018-01-01',
             'Creator': [{'Name': 'Creator D v2',
                          'Identifier': [{'ID': '0000-0001-2345-6789',
                                          'IDScheme': 'orcid'}]}]},
            {'LinkPublicationDate': '2018-01-01',
             'LinkProvider': [{'Name': 'Foobar'}]},
            {'Title': 'Title of B v2',
             'Type': {'Name': 'literature',
                      'SubType': 'journal article',
                      'SubTypeSchema': 'datacite'},
             'PublicationDate': '2018-01-01',
             'Creator': [{'Name': 'Creator B v2',
                          'Identifier': [{'ID': '0000-0001-2345-6789',
                                          'IDScheme': 'orcid'}]}]}
        )
    ]
    create_objects_from_relations(rels, metadata=metadata)
    grouping = (
        # Groups and GroupM2M
        [
            # Identity groups
            ['A'],
            ['B'],
            ['C'],
            ['D'],
            # Version groups
            [0],
            [1],
            [2],
            [3],
        ],
        # Relationships
        [
            # Identifier relationships
            ('A', Relation.Cites, 'C'),
            ('B', Relation.Cites, 'C'),
            ('D', Relation.Cites, 'A'),
            ('D', Relation.Cites, 'B'),
            # Identity group relationships:
            (0, Relation.Cites, 2),
            (1, Relation.Cites, 2),
            (3, Relation.Cites, 0),
            (3, Relation.Cites, 1),
            # Version group relationships:
            (4, Relation.Cites, 6),
            (5, Relation.Cites, 6),
            (7, Relation.Cites, 4),
            (7, Relation.Cites, 5),
        ],
        # Relationships M2M
        [
            (4, [0]),
            (5, [1]),
            (6, [2]),
            (7, [3]),
            (8, [4]),
            (9, [5]),
            (10, [6]),
            (11, [7]),
        ]
    )
    assert_grouping(grouping)
    # Merge the Identity groups of A and B (this also collapses their
    # Version groups, as the expected grouping below shows).
    id_grp1 = get_group_from_id('A')
    id_grp2 = get_group_from_id('B')
    merge_identity_groups(id_grp1, id_grp2)
    db.session.commit()
    # Groups and relations after merging:
    # AB-Cites-C (squashed A-Cites-C and B-Cites-C)
    # D-Cites-AB (squashed D-Cites-A and D-Cites-B)
    grouping = (
        [
            ['A', 'B'],
            ['C'],
            ['D'],
            [0], # {AB}
            [1], # {C}
            [2], # {D}
        ],
        [
            ('A', Relation.Cites, 'C'),
            ('B', Relation.Cites, 'C'),
            ('D', Relation.Cites, 'A'),
            ('D', Relation.Cites, 'B'),
            # Identity group relations:
            (0, Relation.Cites, 1), # AB-Cites-C
            (2, Relation.Cites, 0), # D-Cites-AB
            # Version group relations:
            (3, Relation.Cites, 4), # AB-Cites-C
            (5, Relation.Cites, 3), # D-Cites-AB
        ],
        [
            (4, [0, 1, ]),
            (5, [2, 3, ]),
            (6, [4]),
            (7, [5]),
        ]
    )
    assert_grouping(grouping)
    # Merge the Identity groups of C and D
    id_grp1 = get_group_from_id('C')
    id_grp2 = get_group_from_id('D')
    merge_identity_groups(id_grp1, id_grp2)
    db.session.commit()
    grouping = (
        [
            ['A', 'B'],
            ['C', 'D'],
            [0], # {AB}
            [1], # {CD}
        ],
        [
            ('A', Relation.Cites, 'C'),
            ('B', Relation.Cites, 'C'),
            ('D', Relation.Cites, 'A'),
            ('D', Relation.Cites, 'B'),
            # Identity group relations:
            (0, Relation.Cites, 1), # AB-Cites-CD
            (1, Relation.Cites, 0), # CD-Cites-AB
            # Version group relations:
            (2, Relation.Cites, 3), # AB-Cites-CD
            (3, Relation.Cites, 2), # CD-Cites-AB
        ],
        [
            (4, [0, 1, ]),
            (5, [2, 3, ]),
            (6, [4]),
            (7, [5]),
        ]
    )
    assert_grouping(grouping)
    # Both identifiers now resolve to one shared data record; its Title
    # shows which object's metadata won the merge ('Title of B v2').
    id_grp1 = get_group_from_id('A').data
    id_grp2 = get_group_from_id('B').data
    assert id_grp1 == id_grp2 and id_grp1.json['Title'] == 'Title of B v2'
    # Merge the two remaining Identity groups ({A,B} and {C,D}).
    id_grp1 = get_group_from_id('A')
    id_grp2 = get_group_from_id('C')
    merge_identity_groups(id_grp1, id_grp2)
    db.session.commit()
    grouping = (
        [
            ['A', 'B', 'C', 'D'],
            [0], # {ABCD}
        ],
        [
            ('A', Relation.Cites, 'C'),
            ('B', Relation.Cites, 'C'),
            ('D', Relation.Cites, 'A'),
            ('D', Relation.Cites, 'B'),
            # No group relations for only one identity and one version
        ],
        [] # No relations M2M
    )
    id_grp1 = get_group_from_id('A').data
    id_grp2 = get_group_from_id('B').data
    id_grp3 = get_group_from_id('C').data
    id_grp4 = get_group_from_id('D').data
    # All metadata should be merged to that of the last "D" object
    assert id_grp1 == id_grp2 == id_grp3 == id_grp4 and \
        id_grp1.json['Title'] == 'Title of D v2'
    assert_grouping(grouping)
| 32.94876 | 79 | 0.467242 |
from helpers import assert_grouping, create_objects_from_relations, \
generate_payloads
from asclepias_broker.api import EventAPI
from asclepias_broker.api.ingestion import get_group_from_id, \
get_or_create_groups, merge_identity_groups, merge_version_groups
from asclepias_broker.models import Group, GroupM2M, GroupMetadata, \
GroupRelationship, GroupRelationshipM2M, GroupRelationshipMetadata, \
GroupType, Identifier, Identifier2Group, Relation, Relationship, \
Relationship2GroupRelationship
def _handle_events(evtsrc):
    """Generate payloads from *evtsrc* and feed each one to the event API."""
    for payload in generate_payloads(evtsrc):
        EventAPI.handle_event(payload)
def off_test_simple_id_group_merge(db):
    """Exercise identifier-group merging driven by IsIdenticalTo events.

    NOTE(review): the 'off_' prefix keeps pytest from collecting this
    test -- presumably disabled on purpose; confirm before renaming.
    """
    evtsrc = [
        ['C', 'A', 'IsIdenticalTo', 'B', '2018-01-01'],
    ]
    _handle_events(evtsrc)
    # A == B: one Identity group and one Version group exist.
    assert Group.query.count() == 2
    Group.query.filter_by(type=GroupType.Version).one()
    Group.query.filter_by(type=GroupType.Identity).one()
    assert Identifier.query.count() == 2
    assert Relationship.query.count() == 1
    assert Identifier2Group.query.count() == 2
    evtsrc = [
        ['C', 'A', 'IsIdenticalTo', 'C', '2018-01-01'],
    ]
    _handle_events(evtsrc)
    # C is identical to A, so it joins the existing group.
    assert Group.query.count() == 1
    assert Identifier.query.count() == 3
    assert Identifier2Group.query.count() == 3
    evtsrc = [
        ['C', 'D', 'IsIdenticalTo', 'E', '2018-01-01'],
    ]
    _handle_events(evtsrc)
    # D == E forms a second, separate group.
    assert Group.query.count() == 2
    assert Identifier.query.count() == 5
    assert Identifier2Group.query.count() == 5
    evtsrc = [
        ['C', 'A', 'IsIdenticalTo', 'D', '2018-01-01'],
    ]
    _handle_events(evtsrc)
    # A == D links the two groups, collapsing them into one.
    assert Group.query.count() == 1
    assert Identifier.query.count() == 5
    assert Identifier2Group.query.count() == 5
def test_get_or_create_groups(db):
    """get_or_create_groups() creates -- or reuses -- the Identity and
    Version groups of an identifier, wiring them via Identifier2Group
    and GroupM2M."""
    id1 = Identifier(value='A', scheme='doi')
    db.session.add(id1)
    # Nothing exists before the first call.
    assert not Group.query.count()
    assert not GroupM2M.query.count()
    assert not Identifier2Group.query.count()
    id_g, ver_g = get_or_create_groups(id1)
    db.session.commit()
    def _check_groups(identifier, id_g, ver_g):
        # Exactly one Identity and one Version group, linked as:
        # identifier -> Identity group -> (subgroup of) Version group.
        assert Group.query.count() == 2
        assert GroupM2M.query.count() == 1
        assert Identifier2Group.query.count() == 1
        assert Group.query.filter_by(type=GroupType.Identity).one() == id_g
        assert Group.query.filter_by(type=GroupType.Version).one() == ver_g
        id2g = Identifier2Group.query.one()
        g2g = GroupM2M.query.one()
        assert id2g.identifier == identifier
        assert id2g.group == id_g
        assert g2g.group == ver_g
        assert g2g.subgroup == id_g
    _check_groups(id1, id_g, ver_g)
    id2 = Identifier.get('A', 'doi')
    assert id2
    # A second call for the same identifier must reuse the existing groups.
    id_g, ver_g = get_or_create_groups(id1)
    db.session.commit()
    _check_groups(id2, id_g, ver_g)
    # A different identifier gets its own pair of groups.
    id3 = Identifier(value='B', scheme='doi')
    db.session.add(id3)
    id_g, ver_g = get_or_create_groups(id3)
    assert Group.query.count() == 4
    assert GroupM2M.query.count() == 2
    assert Identifier2Group.query.count() == 2
def test_merge_version_groups(db):
rels = [
('C', Relation.Cites, 'A'),
('C', Relation.Cites, 'B'),
('A', Relation.Cites, 'D'),
('B', Relation.Cites, 'D'),
('E', Relation.Cites, 'B'),
('E', Relation.IsRelatedTo, 'B'),
('A', Relation.Cites, 'F'),
('A', Relation.IsRelatedTo, 'F'),
]
create_objects_from_relations(rels)
grouping = (
[
['A'],
['B'],
['C'],
['D'],
['E'],
['F'],
[0],
[1],
[2],
[3],
[4],
[5],
],
[
('C', Relation.Cites, 'A'),
('C', Relation.Cites, 'B'),
('A', Relation.Cites, 'D'),
('B', Relation.Cites, 'D'),
('E', Relation.Cites, 'B'),
('E', Relation.IsRelatedTo, 'B'),
('A', Relation.Cites, 'F'),
('A', Relation.IsRelatedTo, 'F'),
(2, Relation.Cites, 0),
(2, Relation.Cites, 1),
(0, Relation.Cites, 3),
(1, Relation.Cites, 3),
(4, Relation.Cites, 1),
(4, Relation.IsRelatedTo, 1),
(0, Relation.Cites, 5),
(0, Relation.IsRelatedTo, 5),
(8, Relation.Cites, 6),
(8, Relation.Cites, 7),
(6, Relation.Cites, 9),
(7, Relation.Cites, 9),
(10, Relation.Cites, 7),
(10, Relation.IsRelatedTo, 7),
(6, Relation.Cites, 11),
(6, Relation.IsRelatedTo, 11),
],
[
(8, [0]),
(9, [1]),
(10, [2]),
(11, [3]),
(12, [4]),
(13, [5]),
(14, [6]),
(15, [7]),
(16, [8]),
(17, [9]),
(18, [10]),
(19, [11]),
(20, [12]),
(21, [13]),
(22, [14]),
(23, [15]),
]
)
assert_grouping(grouping)
id_grp1 = get_group_from_id('A', group_type=GroupType.Version)
id_grp2 = get_group_from_id('B', group_type=GroupType.Version)
merge_version_groups(id_grp1, id_grp2)
db.session.commit()
grouping = (
[
['A'],
['B'],
['C'],
['D'],
['E'],
['F'],
[0, 1],
[2],
[3],
[4],
[5],
],
[
('C', Relation.Cites, 'A'),
('C', Relation.Cites, 'B'),
('A', Relation.Cites, 'D'),
('B', Relation.Cites, 'D'),
('E', Relation.Cites, 'B'),
('E', Relation.IsRelatedTo, 'B'),
('A', Relation.Cites, 'F'),
('A', Relation.IsRelatedTo, 'F'),
(2, Relation.Cites, 0),
(2, Relation.Cites, 1),
(0, Relation.Cites, 3),
(1, Relation.Cites, 3),
(4, Relation.Cites, 1),
(4, Relation.IsRelatedTo, 1),
(0, Relation.Cites, 5),
(0, Relation.IsRelatedTo, 5),
(7, Relation.Cites, 6),
(6, Relation.Cites, 8),
(9, Relation.Cites, 6),
(9, Relation.IsRelatedTo, 6),
(6, Relation.Cites, 10),
(6, Relation.IsRelatedTo, 10),
],
[
(8, [0]),
(9, [1]),
(10, [2]),
(11, [3]),
(12, [4]),
(13, [5]),
(14, [6]),
(15, [7]),
(16, [8, 9]),
(17, [10, 11]),
(18, [12]),
(19, [13]),
(20, [14]),
(21, [15]),
]
)
assert_grouping(grouping)
id_grp1 = get_group_from_id('C', group_type=GroupType.Version)
id_grp2 = get_group_from_id('D', group_type=GroupType.Version)
merge_version_groups(id_grp1, id_grp2)
id_grp1 = get_group_from_id('E', group_type=GroupType.Version)
id_grp2 = get_group_from_id('F', group_type=GroupType.Version)
merge_version_groups(id_grp1, id_grp2)
db.session.commit()
grouping = (
[
['A'],
['B'],
['C'],
['D'],
['E'],
['F'],
[0, 1],
[2, 3],
[4, 5],
],
[
('C', Relation.Cites, 'A'),
('C', Relation.Cites, 'B'),
('A', Relation.Cites, 'D'),
('B', Relation.Cites, 'D'),
('E', Relation.Cites, 'B'),
('E', Relation.IsRelatedTo, 'B'),
('A', Relation.Cites, 'F'),
('A', Relation.IsRelatedTo, 'F'),
(2, Relation.Cites, 0),
(2, Relation.Cites, 1),
(0, Relation.Cites, 3),
(1, Relation.Cites, 3),
(4, Relation.Cites, 1),
(4, Relation.IsRelatedTo, 1),
(0, Relation.Cites, 5),
(0, Relation.IsRelatedTo, 5),
(7, Relation.Cites, 6),
(6, Relation.Cites, 7),
(8, Relation.Cites, 6),
(8, Relation.IsRelatedTo, 6),
(6, Relation.Cites, 8),
(6, Relation.IsRelatedTo, 8),
],
[
(8, [0]),
(9, [1]),
(10, [2]),
(11, [3]),
(12, [4]),
(13, [5]),
(14, [6]),
(15, [7]),
(16, [8, 9]),
(17, [10, 11]),
(18, [12]),
(19, [13]),
(20, [14]),
(21, [15]),
]
)
assert_grouping(grouping)
def test_merge_identity_groups(db):
rels = [
('A', Relation.Cites, 'C'),
('B', Relation.Cites, 'C'),
('D', Relation.Cites, 'A'),
('D', Relation.Cites, 'B'),
]
metadata = [
(
{'Title': 'Title of A v1',
'Type': {'Name': 'literature',
'SubType': 'journal article',
'SubTypeSchema': 'datacite'},
'PublicationDate': '2018-01-01',
'Creator': [{'Name': 'Creator A v1',
'Identifier': [{'ID': '0000-0001-2345-6789',
'IDScheme': 'orcid'}]}]},
{'LinkPublicationDate': '2018-01-01',
'LinkProvider': [{'Name': 'Foobar'}]},
{'Title': 'Title of C v1',
'Type': {'Name': 'literature',
'SubType': 'journal article',
'SubTypeSchema': 'datacite'},
'PublicationDate': '2018-01-01',
'Creator': [{'Name': 'Creator C v1',
'Identifier': [{'ID': '0000-0001-2345-6789',
'IDScheme': 'orcid'}]}]}
),
(
{'Title': 'Title of B v1',
'Type': {'Name': 'literature',
'SubType': 'journal article',
'SubTypeSchema': 'datacite'},
'PublicationDate': '2018-01-01',
'Creator': [{'Name': 'Creator B v1',
'Identifier': [{'ID': '0000-0001-2345-6789',
'IDScheme': 'orcid'}]}]},
{'LinkPublicationDate': '2018-01-01',
'LinkProvider': [{'Name': 'Foobar'}]},
{'Title': 'Title of C v2',
'Type': {'Name': 'literature',
'SubType': 'journal article',
'SubTypeSchema': 'datacite'},
'PublicationDate': '2018-01-01',
'Creator': [{'Name': 'Creator C v2',
'Identifier': [{'ID': '0000-0001-2345-6789',
'IDScheme': 'orcid'}]}]}
),
(
{'Title': 'Title of D v1',
'Type': {'Name': 'literature',
'SubType': 'journal article',
'SubTypeSchema': 'datacite'},
'PublicationDate': '2018-01-01',
'Creator': [{'Name': 'Creator D v1',
'Identifier': [{'ID': '0000-0001-2345-6789',
'IDScheme': 'orcid'}]}]},
{'LinkPublicationDate': '2018-01-01',
'LinkProvider': [{'Name': 'Foobar'}]},
{'Title': 'Title of A v2',
'Type': {'Name': 'literature',
'SubType': 'journal article',
'SubTypeSchema': 'datacite'},
'PublicationDate': '2018-01-01',
'Creator': [{'Name': 'Creator A v2',
'Identifier': [{'ID': '0000-0001-2345-6789',
'IDScheme': 'orcid'}]}]}
),
(
{'Title': 'Title of D v2',
'Type': {'Name': 'literature',
'SubType': 'journal article',
'SubTypeSchema': 'datacite'},
'PublicationDate': '2018-01-01',
'Creator': [{'Name': 'Creator D v2',
'Identifier': [{'ID': '0000-0001-2345-6789',
'IDScheme': 'orcid'}]}]},
{'LinkPublicationDate': '2018-01-01',
'LinkProvider': [{'Name': 'Foobar'}]},
{'Title': 'Title of B v2',
'Type': {'Name': 'literature',
'SubType': 'journal article',
'SubTypeSchema': 'datacite'},
'PublicationDate': '2018-01-01',
'Creator': [{'Name': 'Creator B v2',
'Identifier': [{'ID': '0000-0001-2345-6789',
'IDScheme': 'orcid'}]}]}
)
]
create_objects_from_relations(rels, metadata=metadata)
grouping = (
[
['A'],
['B'],
['C'],
['D'],
[0],
[1],
[2],
[3],
],
[
('A', Relation.Cites, 'C'),
('B', Relation.Cites, 'C'),
('D', Relation.Cites, 'A'),
('D', Relation.Cites, 'B'),
(0, Relation.Cites, 2),
(1, Relation.Cites, 2),
(3, Relation.Cites, 0),
(3, Relation.Cites, 1),
(4, Relation.Cites, 6),
(5, Relation.Cites, 6),
(7, Relation.Cites, 4),
(7, Relation.Cites, 5),
],
[
(4, [0]),
(5, [1]),
(6, [2]),
(7, [3]),
(8, [4]),
(9, [5]),
(10, [6]),
(11, [7]),
]
)
assert_grouping(grouping)
id_grp1 = get_group_from_id('A')
id_grp2 = get_group_from_id('B')
merge_identity_groups(id_grp1, id_grp2)
db.session.commit()
grouping = (
[
['A', 'B'],
['C'],
['D'],
[0],
[1],
[2],
],
[
('A', Relation.Cites, 'C'),
('B', Relation.Cites, 'C'),
('D', Relation.Cites, 'A'),
('D', Relation.Cites, 'B'),
(0, Relation.Cites, 1),
(2, Relation.Cites, 0),
(3, Relation.Cites, 4),
(5, Relation.Cites, 3),
],
[
(4, [0, 1, ]),
(5, [2, 3, ]),
(6, [4]),
(7, [5]),
]
)
assert_grouping(grouping)
id_grp1 = get_group_from_id('C')
id_grp2 = get_group_from_id('D')
merge_identity_groups(id_grp1, id_grp2)
db.session.commit()
grouping = (
[
['A', 'B'],
['C', 'D'],
[0],
[1],
],
[
('A', Relation.Cites, 'C'),
('B', Relation.Cites, 'C'),
('D', Relation.Cites, 'A'),
('D', Relation.Cites, 'B'),
(0, Relation.Cites, 1),
(1, Relation.Cites, 0),
(2, Relation.Cites, 3),
(3, Relation.Cites, 2),
],
[
(4, [0, 1, ]),
(5, [2, 3, ]),
(6, [4]),
(7, [5]),
]
)
assert_grouping(grouping)
id_grp1 = get_group_from_id('A').data
id_grp2 = get_group_from_id('B').data
assert id_grp1 == id_grp2 and id_grp1.json['Title'] == 'Title of B v2'
id_grp1 = get_group_from_id('A')
id_grp2 = get_group_from_id('C')
merge_identity_groups(id_grp1, id_grp2)
db.session.commit()
grouping = (
[
['A', 'B', 'C', 'D'],
[0],
],
[
('A', Relation.Cites, 'C'),
('B', Relation.Cites, 'C'),
('D', Relation.Cites, 'A'),
('D', Relation.Cites, 'B'),
],
[]
)
id_grp1 = get_group_from_id('A').data
id_grp2 = get_group_from_id('B').data
id_grp3 = get_group_from_id('C').data
id_grp4 = get_group_from_id('D').data
assert id_grp1 == id_grp2 == id_grp3 == id_grp4 and \
id_grp1.json['Title'] == 'Title of D v2'
assert_grouping(grouping)
| true | true |
f7f8dc4eea8d9545658fcc1bcc7abc41265a8a74 | 3,649 | py | Python | fire/main_test.py | loynoir/python-fire | 1f99e282f079bea992105f609ad1a70ed60d6635 | [
"Apache-2.0"
] | null | null | null | fire/main_test.py | loynoir/python-fire | 1f99e282f079bea992105f609ad1a70ed60d6635 | [
"Apache-2.0"
] | null | null | null | fire/main_test.py | loynoir/python-fire | 1f99e282f079bea992105f609ad1a70ed60d6635 | [
"Apache-2.0"
] | 1 | 2021-04-10T14:17:27.000Z | 2021-04-10T14:17:27.000Z | # Copyright (C) 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test using Fire via `python -m fire`."""
import os
import tempfile
from fire import __main__
from fire import testutils
class MainModuleTest(testutils.BaseTestCase):
    """Tests to verify the behavior of __main__ (python -m fire)."""
    def testNameSetting(self):
        # Confirm one of the usage lines has the gettempdir member.
        with self.assertOutputMatches("gettempdir"):
            __main__.main(["__main__.py", "tempfile"])
    def testArgPassing(self):
        # Args after the target are forwarded to os.path.join, both in
        # dotted form ("os.path") and via the "-" component separator.
        expected = os.path.join("part1", "part2", "part3")
        with self.assertOutputMatches("%s\n" % expected):
            __main__.main(["__main__.py", "os.path", "join", "part1", "part2", "part3"])
        with self.assertOutputMatches("%s\n" % expected):
            __main__.main(
                ["__main__.py", "os", "path", "-", "join", "part1", "part2", "part3"]
            )
class MainModuleFileTest(testutils.BaseTestCase):
    """Tests to verify correct import behavior for file executables."""
    def setUp(self):
        super(MainModuleFileTest, self).setUp()
        # A real .py file defining Foo.double, plus a suffix-less temp file.
        self.file = tempfile.NamedTemporaryFile(suffix=".py")
        self.file.write(b"class Foo:\n  def double(self, n):\n    return 2 * n\n")
        self.file.flush()
        self.file2 = tempfile.NamedTemporaryFile()
    def testFileNameFire(self):
        # Confirm that the file is correctly imported and doubles the number.
        with self.assertOutputMatches("4"):
            __main__.main(["__main__.py", self.file.name, "Foo", "double", "--n", "2"])
    def testFileNameFailure(self):
        # Confirm that an existing file without a .py suffix raises a ValueError.
        with self.assertRaises(ValueError):
            __main__.main(["__main__.py", self.file2.name, "Foo", "double", "--n", "2"])
    def testFileNameModuleDuplication(self):
        # Confirm that a file that masks a module still loads the module.
        with self.assertOutputMatches("gettempdir"):
            dirname = os.path.dirname(self.file.name)
            with testutils.ChangeDirectory(dirname):
                with open("tempfile", "w"):
                    __main__.main(
                        [
                            "__main__.py",
                            "tempfile",
                        ]
                    )
                os.remove("tempfile")
    def testFileNameModuleFileFailure(self):
        # Confirm that an invalid file that masks a non-existent module fails.
        with self.assertRaisesRegex(
            ValueError, r"Fire can only be called on \.py files\."
        ):  # pylint: disable=line-too-long, # pytype: disable=attribute-error
            dirname = os.path.dirname(self.file.name)
            with testutils.ChangeDirectory(dirname):
                with open("foobar", "w"):
                    __main__.main(
                        [
                            "__main__.py",
                            "foobar",
                        ]
                    )
                os.remove("foobar")
if __name__ == "__main__":
testutils.main()
| 37.234694 | 88 | 0.597698 |
import os
import tempfile
from fire import __main__
from fire import testutils
class MainModuleTest(testutils.BaseTestCase):
    """Tests to verify the behavior of __main__ (python -m fire)."""
    def testNameSetting(self):
        # One of the printed usage lines should mention the gettempdir member.
        with self.assertOutputMatches("gettempdir"):
            __main__.main(["__main__.py", "tempfile"])
    def testArgPassing(self):
        # Arguments are forwarded to os.path.join in both invocation forms.
        expected = os.path.join("part1", "part2", "part3")
        with self.assertOutputMatches("%s\n" % expected):
            __main__.main(["__main__.py", "os.path", "join", "part1", "part2", "part3"])
        with self.assertOutputMatches("%s\n" % expected):
            __main__.main(
                ["__main__.py", "os", "path", "-", "join", "part1", "part2", "part3"]
            )
class MainModuleFileTest(testutils.BaseTestCase):
    """Tests to verify correct import behavior for file executables."""
    def setUp(self):
        super(MainModuleFileTest, self).setUp()
        # A real .py file defining Foo.double, plus a suffix-less temp file.
        self.file = tempfile.NamedTemporaryFile(suffix=".py")
        self.file.write(b"class Foo:\n  def double(self, n):\n    return 2 * n\n")
        self.file.flush()
        self.file2 = tempfile.NamedTemporaryFile()
    def testFileNameFire(self):
        # The .py file is imported and Foo.double doubles the argument.
        with self.assertOutputMatches("4"):
            __main__.main(["__main__.py", self.file.name, "Foo", "double", "--n", "2"])
    def testFileNameFailure(self):
        # An existing file without a .py suffix raises a ValueError.
        with self.assertRaises(ValueError):
            __main__.main(["__main__.py", self.file2.name, "Foo", "double", "--n", "2"])
    def testFileNameModuleDuplication(self):
        # A file that masks a module still loads the module.
        with self.assertOutputMatches("gettempdir"):
            dirname = os.path.dirname(self.file.name)
            with testutils.ChangeDirectory(dirname):
                with open("tempfile", "w"):
                    __main__.main(
                        [
                            "__main__.py",
                            "tempfile",
                        ]
                    )
                os.remove("tempfile")
    def testFileNameModuleFileFailure(self):
        # An invalid file that masks a non-existent module fails.
        with self.assertRaisesRegex(
            ValueError, r"Fire can only be called on \.py files\."
        ):
            # Restored line: comment stripping had mangled this assignment
            # into "): rname(self.file.name)", breaking the syntax.
            dirname = os.path.dirname(self.file.name)
            with testutils.ChangeDirectory(dirname):
                with open("foobar", "w"):
                    __main__.main(
                        [
                            "__main__.py",
                            "foobar",
                        ]
                    )
                os.remove("foobar")
if __name__ == "__main__":
testutils.main()
| true | true |
f7f8dd91c4b452880c9229d78c718790c4e62939 | 2,842 | py | Python | meeting/hooks.py | patilpuja/Meeting | 2394eb0af4ca8d06f270aabefa138eeda9fb7088 | [
"MIT"
] | null | null | null | meeting/hooks.py | patilpuja/Meeting | 2394eb0af4ca8d06f270aabefa138eeda9fb7088 | [
"MIT"
] | null | null | null | meeting/hooks.py | patilpuja/Meeting | 2394eb0af4ca8d06f270aabefa138eeda9fb7088 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import __version__ as app_version
# Frappe app metadata, read by the framework when the app is installed/loaded.
app_name = "meeting"
app_title = "Meeting_App"
app_publisher = "DPI"
app_description = "meeting_app"
app_icon = "octicon octicon-file-directory"
app_color = "grey"
app_email = "pooja@gmail.com"
app_license = "MIT"
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/meeting/css/meeting.css"
# app_include_js = "/assets/meeting/js/meeting.js"
# include js, css files in header of web template
# web_include_css = "/assets/meeting/css/meeting.css"
# web_include_js = "/assets/meeting/js/meeting.js"
# include js in page
# page_js = {"page" : "public/js/file.js"}
# include js in doctype views
# doctype_js = {"doctype" : "public/js/doctype.js"}
# doctype_list_js = {"doctype" : "public/js/doctype_list.js"}
# doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"}
# doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"}
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {
# "Role": "home_page"
# }
# Website user home page (by function)
# get_website_user_home_page = "meeting.utils.get_home_page"
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Installation
# ------------
# before_install = "meeting.install.before_install"
# after_install = "meeting.install.after_install"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "meeting.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
# permission_query_conditions = {
# "Event": "frappe.desk.doctype.event.event.get_permission_query_conditions",
# }
#
# has_permission = {
# "Event": "frappe.desk.doctype.event.event.has_permission",
# }
# Document Events
# ---------------
# Hook on document methods and events
# Create an orientation meeting right after a new User document is inserted.
doc_events = {
	"User": {
		"after_insert": "meeting.api.make_orientation_meeting"
	}
}
# doc_events = {
# "*": {
# "on_update": "method",
# "on_cancel": "method",
# "on_trash": "method"
# }
# }
# Scheduled Tasks
# ---------------
# scheduler_events = {
# "all": [
# "meeting.tasks.all"
# ],
# "daily": [
# "meeting.tasks.daily"
# ],
# "hourly": [
# "meeting.tasks.hourly"
# ],
# "weekly": [
# "meeting.tasks.weekly"
# ]
# "monthly": [
# "meeting.tasks.monthly"
# ]
# }
# Testing
# -------
# before_tests = "meeting.install.before_tests"
# Overriding Whitelisted Methods
# ------------------------------
#
# override_whitelisted_methods = {
# "frappe.desk.doctype.event.event.get_events": "meeting.event.get_events"
# }
| 22.203125 | 78 | 0.663969 |
from __future__ import unicode_literals
from . import __version__ as app_version
# Frappe app metadata, read by the framework when the app is installed/loaded.
app_name = "meeting"
app_title = "Meeting_App"
app_publisher = "DPI"
app_description = "meeting_app"
app_icon = "octicon octicon-file-directory"
app_color = "grey"
app_email = "pooja@gmail.com"
app_license = "MIT"
# Create an orientation meeting right after a new User document is inserted.
doc_events = {
	"User": {
		"after_insert": "meeting.api.make_orientation_meeting"
	}
}
| true | true |
f7f8deb246360d15f4a58e2f3c3f90b3607a00a1 | 9,306 | py | Python | contrib/tools/python3/src/Lib/multiprocessing/spawn.py | jochenater/catboost | de2786fbc633b0d6ea6a23b3862496c6151b95c2 | [
"Apache-2.0"
] | 6,989 | 2017-07-18T06:23:18.000Z | 2022-03-31T15:58:36.000Z | contrib/tools/python3/src/Lib/multiprocessing/spawn.py | birichie/catboost | de75c6af12cf490700e76c22072fbdc15b35d679 | [
"Apache-2.0"
] | 1,978 | 2017-07-18T09:17:58.000Z | 2022-03-31T14:28:43.000Z | contrib/tools/python3/src/Lib/multiprocessing/spawn.py | birichie/catboost | de75c6af12cf490700e76c22072fbdc15b35d679 | [
"Apache-2.0"
] | 1,228 | 2017-07-18T09:03:13.000Z | 2022-03-29T05:57:40.000Z | #
# Code used to start processes when using the spawn or forkserver
# start methods.
#
# multiprocessing/spawn.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
import os
import sys
import runpy
import types
from . import get_start_method, set_start_method
from . import process
from .context import reduction
from . import util
# Public names exported from this module.
__all__ = ['_main', 'freeze_support', 'set_executable', 'get_executable',
           'get_preparation_data', 'get_command_line', 'import_main_path']
#
# _python_exe is the assumed path to the python executable.
# People embedding Python want to modify it.
#
if sys.platform != 'win32':
    WINEXE = False
    WINSERVICE = False
else:
    # Frozen Windows executable / running as pythonservice.exe, respectively.
    WINEXE = getattr(sys, 'frozen', False)
    WINSERVICE = sys.executable.lower().endswith("pythonservice.exe")
if WINSERVICE:
    # sys.executable is the service binary; spawn children with the real
    # python.exe from the installation prefix instead.
    _python_exe = os.path.join(sys.exec_prefix, 'python.exe')
else:
    _python_exe = sys.executable
def set_executable(exe):
    """Override the Python executable used to launch spawned children."""
    global _python_exe
    _python_exe = exe
def get_executable():
    """Return the Python executable used to launch spawned children."""
    return _python_exe
#
#
#
def is_forking(argv):
    '''
    Return whether commandline indicates we are forking
    '''
    # The spawn command line places the '--multiprocessing-fork' marker at
    # argv[1]; everything after it is "name=value" bootstrap data (see
    # get_command_line() and freeze_support()).  Return the boolean test
    # directly instead of the verbose if/else-True/False form.
    return len(argv) >= 2 and argv[1] == '--multiprocessing-fork'
def freeze_support():
    '''
    Run the child-process bootstrap if this process was launched with the
    --multiprocessing-fork marker; otherwise do nothing.
    '''
    if not is_forking(sys.argv):
        return
    # Arguments after the fork marker are "name=value" pairs where the
    # value is either the literal string 'None' or a decimal integer.
    kwds = {}
    for arg in sys.argv[2:]:
        name, value = arg.split('=')
        kwds[name] = None if value == 'None' else int(value)
    spawn_main(**kwds)
    sys.exit()
def get_command_line(**kwds):
    '''
    Return the argv prefix used for spawning a child process; *kwds* are
    forwarded to spawn_main() as "name=value" keyword arguments.
    '''
    if False and getattr(sys, 'frozen', False):
        # Frozen-executable path, deliberately disabled by the
        # 'False and' guard; kept for reference.
        return ([sys.executable, '--multiprocessing-fork'] +
                ['%s=%r' % item for item in kwds.items()])
    args = ', '.join('%s=%r' % item for item in kwds.items())
    prog = 'from multiprocessing.spawn import spawn_main; spawn_main(%s)' % args
    opts = util._args_from_interpreter_flags()
    return [_python_exe] + opts + ['-c', prog, '--multiprocessing-fork']
def spawn_main(pipe_handle, parent_pid=None, tracker_fd=None):
    '''
    Run code specified by data received over pipe

    Child-process entry point (referenced by get_command_line()): turns
    the inherited pipe handle into a readable fd, runs _main() on it and
    exits with the code it returns.
    '''
    assert is_forking(sys.argv), "Not forking"
    if sys.platform == 'win32':
        import msvcrt
        import _winapi
        if parent_pid is not None:
            # Open the parent process both as the source for duplicating
            # the pipe handle and as a parent-liveness sentinel.
            source_process = _winapi.OpenProcess(
                _winapi.SYNCHRONIZE | _winapi.PROCESS_DUP_HANDLE,
                False, parent_pid)
        else:
            source_process = None
        new_handle = reduction.duplicate(pipe_handle,
                                         source_process=source_process)
        fd = msvcrt.open_osfhandle(new_handle, os.O_RDONLY)
        parent_sentinel = source_process
    else:
        from . import resource_tracker
        # Adopt the resource tracker fd handed down by the parent.
        resource_tracker._resource_tracker._fd = tracker_fd
        fd = pipe_handle
        # Duplicate the fd: _main() closes `fd` when done, while the
        # duplicate stays open as the sentinel passed to _bootstrap().
        parent_sentinel = os.dup(pipe_handle)
    exitcode = _main(fd, parent_sentinel)
    sys.exit(exitcode)
def _main(fd, parent_sentinel):
    """Read preparation data and the process object from *fd*, then run
    the process; returns the exit code from its _bootstrap()."""
    with os.fdopen(fd, 'rb', closefd=True) as from_parent:
        # While _inheriting is set, _check_not_importing_main() refuses to
        # start new processes (guards against re-importing __main__).
        process.current_process()._inheriting = True
        try:
            # The parent sends two pickles: the preparation dict first
            # (see get_preparation_data()), then the process object.
            preparation_data = reduction.pickle.load(from_parent)
            prepare(preparation_data)
            self = reduction.pickle.load(from_parent)
        finally:
            del process.current_process()._inheriting
    return self._bootstrap(parent_sentinel)
def _check_not_importing_main():
    # Raise if we are inside a spawned child that is still importing its
    # __main__ module; process.current_process()._inheriting is set by
    # _main() for exactly that window.
    if getattr(process.current_process(), '_inheriting', False):
        raise RuntimeError('''
        An attempt has been made to start a new process before the
        current process has finished its bootstrapping phase.
        This probably means that you are not using fork to start your
        child processes and you have forgotten to use the proper idiom
        in the main module:
            if __name__ == '__main__':
                freeze_support()
                ...
        The "freeze_support()" line can be omitted if the program
        is not going to be frozen to produce an executable.''')
def get_preparation_data(name):
    '''
    Return info about parent needed by child to unpickle process object

    The returned dict is pickled to the child and consumed by prepare().
    '''
    _check_not_importing_main()
    d = dict(
        log_to_stderr=util._log_to_stderr,
        authkey=process.current_process().authkey,
        )
    if util._logger is not None:
        d['log_level'] = util._logger.getEffectiveLevel()
    # Replace the '' entry (current directory) in sys.path, if any, with
    # the parent's original directory so the child resolves it the same way.
    sys_path=sys.path.copy()
    try:
        i = sys_path.index('')
    except ValueError:
        pass
    else:
        sys_path[i] = process.ORIGINAL_DIR
    d.update(
        name=name,
        sys_path=sys_path,
        sys_argv=sys.argv,
        orig_dir=process.ORIGINAL_DIR,
        dir=os.getcwd(),
        start_method=get_start_method(),
        )
    # Figure out whether to initialise main in the subprocess as a module
    # or through direct execution (or to leave it alone entirely)
    main_module = sys.modules['__main__']
    main_mod_name = getattr(main_module.__spec__, "name", None)
    if main_mod_name is not None:
        d['init_main_from_name'] = main_mod_name
    elif sys.platform != 'win32' or (not WINEXE and not WINSERVICE):
        main_path = getattr(main_module, '__file__', None)
        if main_path is not None:
            # Anchor a relative __main__ path to the parent's original
            # directory before handing it to the child.
            if (not os.path.isabs(main_path) and
                process.ORIGINAL_DIR is not None):
                main_path = os.path.join(process.ORIGINAL_DIR, main_path)
            d['init_main_from_path'] = os.path.normpath(main_path)
    return d
#
# Prepare current process
#
# Previous __main__ modules, retained here by the _fixup_main_from_*
# helpers after they install a replacement module.
old_main_modules = []
def prepare(data):
    '''
    Try to get current process ready to unpickle process object

    *data* is the dict produced by get_preparation_data(); every key is
    optional and, when present, restores one piece of the parent's
    interpreter state in this child.
    '''
    if 'name' in data:
        process.current_process().name = data['name']
    if 'authkey' in data:
        process.current_process().authkey = data['authkey']
    if 'log_to_stderr' in data and data['log_to_stderr']:
        util.log_to_stderr()
    if 'log_level' in data:
        util.get_logger().setLevel(data['log_level'])
    if 'sys_path' in data:
        sys.path = data['sys_path']
    if 'sys_argv' in data:
        sys.argv = data['sys_argv']
    if 'dir' in data:
        os.chdir(data['dir'])
    if 'orig_dir' in data:
        process.ORIGINAL_DIR = data['orig_dir']
    if 'start_method' in data:
        set_start_method(data['start_method'], force=True)
    # __main__ is restored last, after sys.path and the working directory
    # have been set up above.
    if 'init_main_from_name' in data:
        _fixup_main_from_name(data['init_main_from_name'])
    elif 'init_main_from_path' in data:
        _fixup_main_from_path(data['init_main_from_path'])
# Multiprocessing module helpers to fix up the main module in
# spawned subprocesses
def _fixup_main_from_name(mod_name):
    """Re-run the parent's __main__ module (given by name) as __mp_main__."""
    # __main__.py files for packages, directories, zip archives, etc, run
    # their "main only" code unconditionally, so we don't even try to
    # populate anything in __main__, nor do we make any changes to
    # __main__ attributes
    current_main = sys.modules['__main__']
    if mod_name == "__main__" or mod_name.endswith(".__main__"):
        return
    # If this process was forked, __main__ may already be populated
    if getattr(current_main.__spec__, "name", None) == mod_name:
        return
    # Otherwise, __main__ may contain some non-main code where we need to
    # support unpickling it properly. We rerun it as __mp_main__ and make
    # the normal __main__ an alias to that
    old_main_modules.append(current_main)
    main_module = types.ModuleType("__mp_main__")
    main_content = runpy.run_module(mod_name,
                                    run_name="__mp_main__",
                                    alter_sys=True)
    main_module.__dict__.update(main_content)
    sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module
def _fixup_main_from_path(main_path):
    """Re-run the parent's __main__ script (given by path) as __mp_main__."""
    # If this process was forked, __main__ may already be populated
    current_main = sys.modules['__main__']
    # Unfortunately, the main ipython launch script historically had no
    # "if __name__ == '__main__'" guard, so we work around that
    # by treating it like a __main__.py file
    # See https://github.com/ipython/ipython/issues/4698
    main_name = os.path.splitext(os.path.basename(main_path))[0]
    if main_name == 'ipython':
        return
    # Otherwise, if __file__ already has the setting we expect,
    # there's nothing more to do
    if getattr(current_main, '__file__', None) == main_path:
        return
    # If the parent process has sent a path through rather than a module
    # name we assume it is an executable script that may contain
    # non-main code that needs to be executed
    old_main_modules.append(current_main)
    main_module = types.ModuleType("__mp_main__")
    main_content = runpy.run_path(main_path,
                                  run_name="__mp_main__")
    main_module.__dict__.update(main_content)
    sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module
def import_main_path(main_path):
    '''
    Set sys.modules['__main__'] to module at main_path
    '''
    # Thin public wrapper over the path-based fix-up used during spawn.
    _fixup_main_from_path(main_path)
| 31.228188 | 77 | 0.651408 |
import os
import sys
import runpy
import types
from . import get_start_method, set_start_method
from . import process
from .context import reduction
from . import util
__all__ = ['_main', 'freeze_support', 'set_executable', 'get_executable',
'get_preparation_data', 'get_command_line', 'import_main_path']
if sys.platform != 'win32':
WINEXE = False
WINSERVICE = False
else:
WINEXE = getattr(sys, 'frozen', False)
WINSERVICE = sys.executable.lower().endswith("pythonservice.exe")
if WINSERVICE:
_python_exe = os.path.join(sys.exec_prefix, 'python.exe')
else:
_python_exe = sys.executable
def set_executable(exe):
    """Override the Python interpreter used to launch spawned children."""
    global _python_exe
    _python_exe = exe
def get_executable():
    """Return the interpreter path currently used for spawned children."""
    return _python_exe
def is_forking(argv):
    """Return whether commandline *argv* indicates we are forking.

    True exactly when the second argument is ``--multiprocessing-fork``.
    """
    # Idiom fix: return the boolean expression directly instead of the
    # `if cond: return True / else: return False` pattern.
    return len(argv) >= 2 and argv[1] == '--multiprocessing-fork'
def freeze_support():
    """Run the spawned child's code if invoked with --multiprocessing-fork.

    Parses ``name=value`` arguments (``None`` or ints) from argv, hands
    them to spawn_main() and exits; does nothing on a normal command line.
    """
    if is_forking(sys.argv):
        kwds = {}
        for arg in sys.argv[2:]:
            name, value = arg.split('=')
            if value == 'None':
                kwds[name] = None
            else:
                kwds[name] = int(value)
        spawn_main(**kwds)
        sys.exit()
def get_command_line(**kwds):
    """Return the command line used for spawning a child process."""
    # NOTE(review): the `False and` makes the frozen-executable branch
    # unreachable; it looks deliberately disabled -- confirm intent.
    if False and getattr(sys, 'frozen', False):
        return ([sys.executable, '--multiprocessing-fork'] +
                ['%s=%r' % item for item in kwds.items()])
    else:
        # Launch a fresh interpreter that calls spawn_main() with the
        # keyword arguments rendered into the -c program.
        prog = 'from multiprocessing.spawn import spawn_main; spawn_main(%s)'
        prog %= ', '.join('%s=%r' % item for item in kwds.items())
        opts = util._args_from_interpreter_flags()
        return [_python_exe] + opts + ['-c', prog, '--multiprocessing-fork']
def spawn_main(pipe_handle, parent_pid=None, tracker_fd=None):
    """Run code specified by data received over a pipe from the parent."""
    assert is_forking(sys.argv), "Not forking"
    if sys.platform == 'win32':
        import msvcrt
        import _winapi
        if parent_pid is not None:
            # Open the parent so its pipe handle can be duplicated into
            # this process and used as the parent-alive sentinel.
            source_process = _winapi.OpenProcess(
                _winapi.SYNCHRONIZE | _winapi.PROCESS_DUP_HANDLE,
                False, parent_pid)
        else:
            source_process = None
        new_handle = reduction.duplicate(pipe_handle,
                                         source_process=source_process)
        fd = msvcrt.open_osfhandle(new_handle, os.O_RDONLY)
        parent_sentinel = source_process
    else:
        from . import resource_tracker
        resource_tracker._resource_tracker._fd = tracker_fd
        fd = pipe_handle
        # Duplicate so closing the read end does not invalidate the sentinel.
        parent_sentinel = os.dup(pipe_handle)
    exitcode = _main(fd, parent_sentinel)
    sys.exit(exitcode)
def _main(fd, parent_sentinel):
    """Unpickle preparation data and the Process object from *fd*, run it."""
    with os.fdopen(fd, 'rb', closefd=True) as from_parent:
        # _inheriting guards against starting processes mid-bootstrap
        # (checked by _check_not_importing_main()).
        process.current_process()._inheriting = True
        try:
            preparation_data = reduction.pickle.load(from_parent)
            prepare(preparation_data)
            self = reduction.pickle.load(from_parent)
        finally:
            del process.current_process()._inheriting
    return self._bootstrap(parent_sentinel)
def _check_not_importing_main():
    # Raise if a process is being started while the child is still
    # importing the main module (missing ``if __name__ == '__main__'``).
    if getattr(process.current_process(), '_inheriting', False):
        raise RuntimeError('''
        An attempt has been made to start a new process before the
        current process has finished its bootstrapping phase.

        This probably means that you are not using fork to start your
        child processes and you have forgotten to use the proper idiom
        in the main module:

            if __name__ == '__main__':
                freeze_support()
                ...

        The "freeze_support()" line can be omitted if the program
        is not going to be frozen to produce an executable.''')
def get_preparation_data(name):
    '''
    Return info about parent needed by child to unpickle process object
    '''
    _check_not_importing_main()
    d = dict(
        log_to_stderr=util._log_to_stderr,
        authkey=process.current_process().authkey,
        )
    if util._logger is not None:
        d['log_level'] = util._logger.getEffectiveLevel()
    sys_path=sys.path.copy()
    try:
        i = sys_path.index('')
    except ValueError:
        pass
    else:
        # Replace the '' (cwd) entry with the parent's original directory
        # so imports resolve the same way in the child.
        sys_path[i] = process.ORIGINAL_DIR
    d.update(
        name=name,
        sys_path=sys_path,
        sys_argv=sys.argv,
        orig_dir=process.ORIGINAL_DIR,
        dir=os.getcwd(),
        start_method=get_start_method(),
        )
    # Figure out whether to initialise main in the subprocess as a module
    # or through direct execution (or to leave it alone entirely)
    main_module = sys.modules['__main__']
    main_mod_name = getattr(main_module.__spec__, "name", None)
    if main_mod_name is not None:
        d['init_main_from_name'] = main_mod_name
    elif sys.platform != 'win32' or (not WINEXE and not WINSERVICE):
        main_path = getattr(main_module, '__file__', None)
        if main_path is not None:
            if (not os.path.isabs(main_path) and
                        process.ORIGINAL_DIR is not None):
                main_path = os.path.join(process.ORIGINAL_DIR, main_path)
            d['init_main_from_path'] = os.path.normpath(main_path)
    return d
old_main_modules = []
def prepare(data):
    """Get the current process ready to unpickle the Process object.

    Applies the preparation data built by the parent; the __main__
    fix-ups run last, after sys.path has been restored.
    """
    if 'name' in data:
        process.current_process().name = data['name']
    if 'authkey' in data:
        process.current_process().authkey = data['authkey']
    if 'log_to_stderr' in data and data['log_to_stderr']:
        util.log_to_stderr()
    if 'log_level' in data:
        util.get_logger().setLevel(data['log_level'])
    if 'sys_path' in data:
        sys.path = data['sys_path']
    if 'sys_argv' in data:
        sys.argv = data['sys_argv']
    if 'dir' in data:
        os.chdir(data['dir'])
    if 'orig_dir' in data:
        process.ORIGINAL_DIR = data['orig_dir']
    if 'start_method' in data:
        set_start_method(data['start_method'], force=True)
    if 'init_main_from_name' in data:
        _fixup_main_from_name(data['init_main_from_name'])
    elif 'init_main_from_path' in data:
        _fixup_main_from_path(data['init_main_from_path'])
def _fixup_main_from_name(mod_name):
    """Re-run the parent's __main__ module (given by name) as __mp_main__."""
    # __main__.py files run their "main only" code unconditionally, so do
    # not populate anything in __main__, nor do we make any changes to
    # __main__ attributes
    current_main = sys.modules['__main__']
    if mod_name == "__main__" or mod_name.endswith(".__main__"):
        return
    # If this process was forked, __main__ may already be populated
    if getattr(current_main.__spec__, "name", None) == mod_name:
        return
    # Otherwise, __main__ may contain some non-main code where we need to
    # support unpickling it properly. We rerun it as __mp_main__ and make
    # the normal __main__ an alias to that
    old_main_modules.append(current_main)
    main_module = types.ModuleType("__mp_main__")
    main_content = runpy.run_module(mod_name,
                                    run_name="__mp_main__",
                                    alter_sys=True)
    main_module.__dict__.update(main_content)
    sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module
def _fixup_main_from_path(main_path):
    """Re-run the parent's __main__ script (given by path) as __mp_main__."""
    # If this process was forked, __main__ may already be populated
    current_main = sys.modules['__main__']
    # Unfortunately, the main ipython launch script historically had no
    # "if __name__ == '__main__'" guard, so we work around that
    # by treating it like a __main__.py file
    # See https://github.com/ipython/ipython/issues/4698
    main_name = os.path.splitext(os.path.basename(main_path))[0]
    if main_name == 'ipython':
        return
    # Otherwise, if __file__ already has the setting we expect,
    # there's nothing more to do
    if getattr(current_main, '__file__', None) == main_path:
        return
    # A path (not a module name) is assumed to be an executable script
    # with possible non-main code that must be executed.
    old_main_modules.append(current_main)
    main_module = types.ModuleType("__mp_main__")
    main_content = runpy.run_path(main_path,
                                  run_name="__mp_main__")
    main_module.__dict__.update(main_content)
    sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module
def import_main_path(main_path):
    """Set sys.modules['__main__'] to the module at *main_path*."""
    _fixup_main_from_path(main_path)
| true | true |
f7f8dec58445e5be8843c73bc4033625170f83dc | 2,768 | py | Python | scripts/west_commands/export.py | Weining2019/zephyr | 0ccbe8ab6aedf100b9d31177e0b48eaa3d112e5c | [
"Apache-2.0"
] | 5 | 2020-07-07T00:52:37.000Z | 2022-03-27T12:56:16.000Z | scripts/west_commands/export.py | Weining2019/zephyr | 0ccbe8ab6aedf100b9d31177e0b48eaa3d112e5c | [
"Apache-2.0"
] | 3 | 2021-08-11T18:14:01.000Z | 2021-08-11T22:41:10.000Z | scripts/west_commands/export.py | Weining2019/zephyr | 0ccbe8ab6aedf100b9d31177e0b48eaa3d112e5c | [
"Apache-2.0"
] | 5 | 2020-12-05T01:03:13.000Z | 2022-01-28T13:39:17.000Z | # Copyright (c) 2020 Nordic Semiconductor ASA
#
# SPDX-License-Identifier: Apache-2.0
import argparse
from pathlib import Path
from shutil import rmtree
from subprocess import CalledProcessError
from west.commands import WestCommand
from west import log
from zcmake import run_cmake
EXPORT_DESCRIPTION = '''\
This command registers the current Zephyr installation as a CMake
config package in the CMake user package registry.
In Windows, the CMake user package registry is found in:
HKEY_CURRENT_USER\\Software\\Kitware\\CMake\\Packages\\
In Linux and MacOS, the CMake user package registry is found in:
~/.cmake/packages/'''
class ZephyrExport(WestCommand):
    """West command that registers Zephyr as a CMake config package."""

    def __init__(self):
        super().__init__(
            'zephyr-export',
            # Keep this in sync with the string in west-commands.yml.
            'export Zephyr installation as a CMake config package',
            EXPORT_DESCRIPTION,
            accepts_unknown_args=False)

    def do_add_parser(self, parser_adder):
        """Register this command's argparse sub-parser with west."""
        parser = parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description)
        return parser

    def do_run(self, args, unknown_args):
        """Export both the zephyr and zephyrunittest CMake packages."""
        # The 'share' subdirectory of the top level zephyr repository.
        share = Path(__file__).parents[2] / 'share'
        run_cmake_and_clean_up(share / 'zephyr-package' / 'cmake')
        run_cmake_and_clean_up(share / 'zephyrunittest-package' / 'cmake')
def run_cmake_and_clean_up(path):
    """Run the package installation script at *path*, cleaning up afterwards.

    Filtering out lines that start with -- ignores the normal CMake
    status messages and instead only prints the important information.
    """
    # Bug fix: 'lines' was previously unbound in the 'finally' block when
    # run_cmake() raised, so a NameError masked the real CMake error.
    lines = []
    try:
        lines = run_cmake(['-S', str(path), '-B', str(path)],
                          capture_output=True)
    finally:
        msg = [line for line in lines if not line.startswith('-- ')]
        log.inf('\n'.join(msg))
        clean_up(path)
def clean_up(path):
    """Make *path* pristine again, falling back to manual removal."""
    try:
        run_cmake(['-P', str(path / 'pristine.cmake')],
                  capture_output=True)
    except CalledProcessError:
        # Do our best to clean up even though CMake failed.
        log.wrn(f'Failed to make {path} pristine; '
                'removing known generated files...')
        for subpath in ['CMakeCache.txt', 'CMakeFiles', 'build.ninja',
                        'cmake_install.cmake', 'rules.ninja']:
            remove_if_exists(Path(path) / subpath)
def remove_if_exists(pathobj):
    """Delete a file or directory tree, logging each removal.

    Silently does nothing if *pathobj* does not exist.
    """
    if pathobj.is_file():
        log.inf(f'- removing: {pathobj}')
        pathobj.unlink()
    elif pathobj.is_dir():
        log.inf(f'- removing: {pathobj}')
        rmtree(pathobj)
| 32.564706 | 74 | 0.65354 |
import argparse
from pathlib import Path
from shutil import rmtree
from subprocess import CalledProcessError
from west.commands import WestCommand
from west import log
from zcmake import run_cmake
EXPORT_DESCRIPTION = '''\
This command registers the current Zephyr installation as a CMake
config package in the CMake user package registry.
In Windows, the CMake user package registry is found in:
HKEY_CURRENT_USER\\Software\\Kitware\\CMake\\Packages\\
In Linux and MacOS, the CMake user package registry is found in:
~/.cmake/packages/'''
class ZephyrExport(WestCommand):
    """West command that registers Zephyr as a CMake config package."""

    def __init__(self):
        super().__init__(
            'zephyr-export',
            'export Zephyr installation as a CMake config package',
            EXPORT_DESCRIPTION,
            accepts_unknown_args=False)

    def do_add_parser(self, parser_adder):
        """Register this command's argparse sub-parser with west."""
        parser = parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description)
        return parser

    def do_run(self, args, unknown_args):
        """Export both the zephyr and zephyrunittest CMake packages."""
        share = Path(__file__).parents[2] / 'share'
        run_cmake_and_clean_up(share / 'zephyr-package' / 'cmake')
        run_cmake_and_clean_up(share / 'zephyrunittest-package' / 'cmake')
def run_cmake_and_clean_up(path):
    """Run the package installation script at *path*, cleaning up afterwards.

    Lines starting with ``--`` (normal CMake status output) are filtered
    out so only the important information is printed.
    """
    # Bug fix: 'lines' was previously unbound in the 'finally' block when
    # run_cmake() raised, so a NameError masked the real CMake error.
    lines = []
    try:
        lines = run_cmake(['-S', str(path), '-B', str(path)],
                          capture_output=True)
    finally:
        msg = [line for line in lines if not line.startswith('-- ')]
        log.inf('\n'.join(msg))
        clean_up(path)
def clean_up(path):
    """Make *path* pristine again, falling back to manual removal."""
    try:
        run_cmake(['-P', str(path / 'pristine.cmake')],
                  capture_output=True)
    except CalledProcessError:
        # Best-effort cleanup even though CMake failed.
        log.wrn(f'Failed to make {path} pristine; '
                'removing known generated files...')
        for subpath in ['CMakeCache.txt', 'CMakeFiles', 'build.ninja',
                        'cmake_install.cmake', 'rules.ninja']:
            remove_if_exists(Path(path) / subpath)
def remove_if_exists(pathobj):
    """Delete a file or directory tree, logging each removal.

    Silently does nothing if *pathobj* does not exist.
    """
    if pathobj.is_file():
        log.inf(f'- removing: {pathobj}')
        pathobj.unlink()
    elif pathobj.is_dir():
        log.inf(f'- removing: {pathobj}')
        rmtree(pathobj)
| true | true |
f7f8df1646228e276bbcb4fed58b38a6f7b1427f | 3,542 | py | Python | app/main.py | tum-ai/expingo-inpainting-service | 657f65316c179f85507350d55e4ab4ac429552a0 | [
"MIT"
] | null | null | null | app/main.py | tum-ai/expingo-inpainting-service | 657f65316c179f85507350d55e4ab4ac429552a0 | [
"MIT"
] | 3 | 2020-11-08T12:33:08.000Z | 2020-11-15T23:20:06.000Z | app/main.py | tum-ai/expingo-inpainting-service | 657f65316c179f85507350d55e4ab4ac429552a0 | [
"MIT"
] | null | null | null | import numpy as np
import cv2
import uvicorn
import tensorflow as tf
import neuralgym as ng
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from fastapi import FastAPI, UploadFile, File
from fastapi import HTTPException
from inpaint.inpainting_model import InpaintCAModel
class PaintRequest(BaseModel):
    """Inpainting request: base64 data-URL strings for image and mask."""
    # Both fields carry "data:...;base64,<payload>" strings; the handler
    # strips everything before the first comma.
    image: str
    mask: str
FLAGS = ng.Config('inpaint.yml')
MODEL_DIR = "../model_logs/places2"
MODEL = InpaintCAModel()
app = FastAPI()
origins = [
"*"
]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"]
)
@app.get("/")
async def root():
return {"message": "Hello World"}
@app.post("/inpaint/")
async def create_upload_file(request: PaintRequest):
import base64
import io
from PIL import Image
image = request.image
mask = request.mask
image = image.split(",", 1)[1]
mask = mask.split(",", 1)[1]
base64_decoded_image = base64.b64decode(image)
image = Image.open(io.BytesIO(base64_decoded_image))
image = np.array(image)
base64_decoded_mask = base64.b64decode(mask)
mask = Image.open(io.BytesIO(base64_decoded_mask))
mask = np.array(mask)
# mask is always PNG, image might have only 3 dimensions.
mask = mask[:, :, :3]
if image.shape[2] == 4:
image = image[:, :, :3]
# Catch weird error that image is turned if format is jpg and upright
if image.shape[0] == mask.shape[1] and image.shape[1] == mask.shape[0]:
image = np.flip(np.transpose(image, (1, 0, 2)), axis=1)
if image.shape != mask.shape:
raise HTTPException(
status_code=400,
detail=f"Image and Mask have unequal shape. {image.shape} vs {mask.shape}")
# Image and Mask must be same dimension by now. Both have dimensions (x, y, 3)
h, w, _ = image.shape
grid = 8
image = image[:h // grid * grid, :w // grid * grid, :]
mask = mask[:h // grid * grid, :w // grid * grid, :]
print('Shape of image: {}'.format(image.shape))
image = np.expand_dims(image, 0)
mask = np.expand_dims(mask, 0)
print(image.shape)
print(mask.shape)
input_image = np.concatenate([image, mask], axis=2)
print(input_image.shape)
sess_config = tf.ConfigProto()
sess_config.gpu_options.allow_growth = True
with tf.Session(config=sess_config) as sess:
input_image = tf.constant(input_image, dtype=tf.float32)
output = MODEL.build_server_graph(FLAGS, input_image)
output = (output + 1.) * 127.5
output = tf.reverse(output, [-1])
output = tf.saturate_cast(output, tf.uint8)
# load pretrained model
vars_list = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
assign_ops = []
for var in vars_list:
vname = var.name
from_name = vname
var_value = tf.contrib.framework.load_variable(MODEL_DIR, from_name)
assign_ops.append(tf.assign(var, var_value))
sess.run(assign_ops)
print('Model loaded.')
result = sess.run(output)
cv2.imwrite("output.png", result[0])
tf.reset_default_graph()
#return FileResponse("output.png", media_type="image/png")
with open("output.png", "rb") as image_file:
image_string = "data:image/png;base64,{}".format(base64.b64encode(image_file.read()).decode())
return {
"image": image_string
}
if __name__ == '__main__':
    # Run a local development server when executed directly.
    uvicorn.run(app, host="0.0.0.0", port=8080)
| 28.796748 | 102 | 0.649068 | import numpy as np
import cv2
import uvicorn
import tensorflow as tf
import neuralgym as ng
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from fastapi import FastAPI, UploadFile, File
from fastapi import HTTPException
from inpaint.inpainting_model import InpaintCAModel
class PaintRequest(BaseModel):
    """Inpainting request: base64 data-URL strings for image and mask."""
    image: str
    mask: str
FLAGS = ng.Config('inpaint.yml')
MODEL_DIR = "../model_logs/places2"
MODEL = InpaintCAModel()
app = FastAPI()
origins = [
"*"
]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"]
)
@app.get("/")
async def root():
return {"message": "Hello World"}
@app.post("/inpaint/")
async def create_upload_file(request: PaintRequest):
import base64
import io
from PIL import Image
image = request.image
mask = request.mask
image = image.split(",", 1)[1]
mask = mask.split(",", 1)[1]
base64_decoded_image = base64.b64decode(image)
image = Image.open(io.BytesIO(base64_decoded_image))
image = np.array(image)
base64_decoded_mask = base64.b64decode(mask)
mask = Image.open(io.BytesIO(base64_decoded_mask))
mask = np.array(mask)
mask = mask[:, :, :3]
if image.shape[2] == 4:
image = image[:, :, :3]
if image.shape[0] == mask.shape[1] and image.shape[1] == mask.shape[0]:
image = np.flip(np.transpose(image, (1, 0, 2)), axis=1)
if image.shape != mask.shape:
raise HTTPException(
status_code=400,
detail=f"Image and Mask have unequal shape. {image.shape} vs {mask.shape}")
h, w, _ = image.shape
grid = 8
image = image[:h // grid * grid, :w // grid * grid, :]
mask = mask[:h // grid * grid, :w // grid * grid, :]
print('Shape of image: {}'.format(image.shape))
image = np.expand_dims(image, 0)
mask = np.expand_dims(mask, 0)
print(image.shape)
print(mask.shape)
input_image = np.concatenate([image, mask], axis=2)
print(input_image.shape)
sess_config = tf.ConfigProto()
sess_config.gpu_options.allow_growth = True
with tf.Session(config=sess_config) as sess:
input_image = tf.constant(input_image, dtype=tf.float32)
output = MODEL.build_server_graph(FLAGS, input_image)
output = (output + 1.) * 127.5
output = tf.reverse(output, [-1])
output = tf.saturate_cast(output, tf.uint8)
vars_list = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
assign_ops = []
for var in vars_list:
vname = var.name
from_name = vname
var_value = tf.contrib.framework.load_variable(MODEL_DIR, from_name)
assign_ops.append(tf.assign(var, var_value))
sess.run(assign_ops)
print('Model loaded.')
result = sess.run(output)
cv2.imwrite("output.png", result[0])
tf.reset_default_graph()
with open("output.png", "rb") as image_file:
image_string = "data:image/png;base64,{}".format(base64.b64encode(image_file.read()).decode())
return {
"image": image_string
}
if __name__ == '__main__':
    # Run a local development server when executed directly.
    uvicorn.run(app, host="0.0.0.0", port=8080)
| true | true |
f7f8e0c398f5f0c5fbab1bf4f63ac794b3017391 | 10,586 | py | Python | kinto/core/utils.py | swhgoon/kinto | 10001d44bb08e4fbc74da31a41a4eaa461e0fd7f | [
"Apache-2.0"
] | null | null | null | kinto/core/utils.py | swhgoon/kinto | 10001d44bb08e4fbc74da31a41a4eaa461e0fd7f | [
"Apache-2.0"
] | null | null | null | kinto/core/utils.py | swhgoon/kinto | 10001d44bb08e4fbc74da31a41a4eaa461e0fd7f | [
"Apache-2.0"
] | 1 | 2020-07-15T04:27:08.000Z | 2020-07-15T04:27:08.000Z | import ast
import hashlib
import hmac
import os
import re
import six
import time
from base64 import b64decode, b64encode
from binascii import hexlify
from six.moves.urllib import parse as urlparse
from enum import Enum
# ujson is not installable with pypy
try: # pragma: no cover
import ujson as json # NOQA
def json_serializer(v, **kw):
return json.dumps(v, escape_forward_slashes=False)
except ImportError: # pragma: no cover
import json # NOQA
json_serializer = json.dumps
try:
# Register psycopg2cffi as psycopg2
from psycopg2cffi import compat
except ImportError: # pragma: no cover
pass
else: # pragma: no cover
compat.register()
try:
import sqlalchemy
except ImportError: # pragma: no cover
sqlalchemy = None
from pyramid import httpexceptions
from pyramid.request import Request, apply_request_extensions
from pyramid.settings import aslist
from pyramid.view import render_view_to_response
from cornice import cors
from colander import null
def strip_whitespace(v):
    """Strip spaces, tabs and newlines from both ends of a string.

    :param str v: the string to strip (colander ``null`` passes through).
    :rtype: str
    """
    if v is null:
        return v
    return v.strip(' \t\n\r')
def msec_time():
    """Return the current epoch time in milliseconds (floored).

    :rtype: int
    """
    milliseconds_per_second = 1000.0
    return int(time.time() * milliseconds_per_second)
def classname(obj):
    """Return the lowercased class name of *obj*.

    :rtype: str
    """
    name = obj.__class__.__name__
    return name.lower()
def merge_dicts(a, b):
    """Recursively fill *a* with entries from *b* without overwriting.

    :param dict a: the dict that will be altered with values of `b`.
    :rtype: None
    """
    for key in b:
        value = b[key]
        if not isinstance(value, dict):
            a.setdefault(key, value)
        else:
            merge_dicts(a.setdefault(key, {}), value)
def random_bytes_hex(bytes_length):
    """Return a hexstring of bytes_length cryptographic-friendly random bytes.

    :param integer bytes_length: number of random bytes.
    :rtype: str
    """
    raw = os.urandom(bytes_length)
    return hexlify(raw).decode('utf-8')
def native_value(value):
    """Convert a string to the native python value it represents.

    :param str value: value to interprete.
    :returns: the value coerced to python type
    """
    if isinstance(value, six.string_types):
        lowered = value.lower()
        if lowered in ('on', 'true', 'yes'):
            value = True
        elif lowered in ('off', 'false', 'no'):
            value = False
        try:
            return ast.literal_eval(value)
        except (ValueError, SyntaxError):
            pass
    return value
def read_env(key, value):
    """Read the setting ``key`` from environment variables.

    :param key: the setting name
    :param value: default value if undefined in environment
    :returns: the value from environment, coerced to python type
    """
    normalized = key.replace('.', '_').replace('-', '_').upper()
    raw = os.getenv(normalized, value)
    return native_value(raw)
def encode64(content, encoding='utf-8'):
    """Encode some content in base64.

    :rtype: str
    """
    raw = content.encode(encoding)
    return b64encode(raw).decode(encoding)
def decode64(encoded_content, encoding='utf-8'):
    """Decode some base64 encoded content.

    :rtype: str
    """
    raw = encoded_content.encode(encoding)
    return b64decode(raw).decode(encoding)
def hmac_digest(secret, message, encoding='utf-8'):
    """Return hex digest of a message HMAC using secret"""
    if isinstance(secret, six.text_type):
        secret = secret.encode(encoding)
    mac = hmac.new(secret, message.encode(encoding), hashlib.sha256)
    return mac.hexdigest()
def dict_subset(d, keys):
    """Return a dict with the specified keys.

    Dotted keys (``"a.b"``) select sub-keys of nested dict values.
    """
    result = {}
    for key in keys:
        if '.' not in key:
            if key in d:
                result[key] = d[key]
            continue
        field, subfield = key.split('.', 1)
        value = d.get(field)
        if isinstance(value, dict):
            nested = dict_subset(value, [subfield])
            result.setdefault(field, {}).update(nested)
        elif field in d:
            result[field] = value
    return result
class COMPARISON(Enum):
    """Comparison operators used by filtering/querystring parsing."""
    LT = '<'
    MIN = '>='
    MAX = '<='
    NOT = '!='
    EQ = '=='
    GT = '>'
    IN = 'in'
    EXCLUDE = 'exclude'
def reapply_cors(request, response):
    """Reapply cors headers to the new response with regards to the request.

    We need to re-apply the CORS checks done by Cornice, in case we're
    recreating the response from scratch.
    """
    service = request.current_service
    if service:
        request.info['cors_checked'] = False
        cors.apply_cors_post_request(service, request, response)
        response = cors.ensure_origin(service, request, response)
    else:
        # No existing service is concerned, and Cornice is not implied.
        origin = request.headers.get('Origin')
        if origin:
            settings = request.registry.settings
            allowed_origins = set(aslist(settings['cors_origins']))
            required_origins = {'*', decode_header(origin)}
            if allowed_origins.intersection(required_origins):
                origin = encode_header(origin)
                response.headers['Access-Control-Allow-Origin'] = origin
        # Import service here because kinto.core import utils
        from kinto.core import Service
        if Service.default_cors_headers:
            headers = ','.join(Service.default_cors_headers)
            response.headers['Access-Control-Expose-Headers'] = headers
    return response
def current_service(request):
    """Return the Cornice service matching the specified request.

    :returns: the service or None if unmatched.
    :rtype: cornice.Service
    """
    route = request.matched_route
    if not route:
        return None
    try:
        return request.registry.cornice_services[route.pattern]
    except KeyError:
        return None
def current_resource_name(request):
    """Return the name used when the kinto.core resource was registered along its
    viewset.

    :returns: the resource identifier.
    :rtype: str
    """
    service = current_service(request)
    resource_name = service.viewset.get_name(service.resource)
    return resource_name
def build_request(original, dict_obj):
    """
    Transform a dict object into a ``pyramid.request.Request`` object.

    It sets a ``parent`` attribute on the resulting request assigned with
    the `original` request specified.

    :param original: the original request.
    :param dict_obj: a dict object with the sub-request specifications
        (``path``, and optionally ``method``, ``headers``, ``body``).
    """
    # Prepend the API version prefix (e.g. '/v1') if missing.
    api_prefix = '/%s' % original.upath_info.split('/')[1]
    path = dict_obj['path']
    if not path.startswith(api_prefix):
        path = api_prefix + path
    path = path.encode('utf-8')
    method = dict_obj.get('method') or 'GET'
    headers = dict(original.headers)
    headers.update(**dict_obj.get('headers') or {})
    # Body can have different length, do not use original header.
    headers.pop('Content-Length', None)
    payload = dict_obj.get('body') or ''
    # Payload is always a dict (from ``BatchRequestSchema.body``).
    # Send it as JSON for subrequests.
    if isinstance(payload, dict):
        headers['Content-Type'] = encode_header(
            'application/json; charset=utf-8')
        payload = json.dumps(payload)
    if six.PY3:  # pragma: no cover
        path = path.decode('latin-1')
    request = Request.blank(path=path,
                            headers=headers,
                            POST=payload,
                            method=method)
    request.registry = original.registry
    apply_request_extensions(request)
    # This is used to distinguish subrequests from direct incoming requests.
    # See :func:`kinto.core.initialization.setup_logging()`
    request.parent = original
    return request
def build_response(response, request):
    """
    Transform a ``pyramid.response.Response`` object into a serializable dict.

    :param response: a response object, returned by Pyramid.
    :param request: the request that was used to get the response.
    """
    dict_obj = {}
    dict_obj['path'] = urlparse.unquote(request.path)
    dict_obj['status'] = response.status_code
    dict_obj['headers'] = dict(response.headers)
    body = ''
    if request.method != 'HEAD':
        # XXX : Pyramid should not have built response body for HEAD!
        try:
            body = response.json
        except ValueError:
            # Not JSON; fall back to the raw body.
            body = response.body
    dict_obj['body'] = body
    return dict_obj
def follow_subrequest(request, subrequest, **kwargs):
    """Run a subrequest (e.g. batch), and follow the redirection if any.

    :rtype: tuple
    :returns: the response and the redirection request (or `subrequest`
              if no redirection happened.)
    """
    try:
        try:
            return request.invoke_subrequest(subrequest, **kwargs), subrequest
        except Exception as e:
            # Render exceptions into responses; re-raise server errors.
            resp = render_view_to_response(e, subrequest)
            if not resp or resp.status_code >= 500:
                raise e
            raise resp
    except httpexceptions.HTTPRedirection as e:
        # Replay the subrequest against the redirect target once.
        new_location = e.headers['Location']
        new_request = Request.blank(path=new_location,
                                    headers=subrequest.headers,
                                    POST=subrequest.body,
                                    method=subrequest.method)
        new_request.bound_data = subrequest.bound_data
        new_request.parent = getattr(subrequest, 'parent', None)
        return request.invoke_subrequest(new_request, **kwargs), new_request
def encode_header(value, encoding='utf-8'):
    """Make sure the value is of type ``str`` in both PY2 and PY3."""
    value_type = type(value)
    if value_type != str:
        # Test for Python3: bytes must be decoded to native str.
        if value_type == six.binary_type:  # pragma: no cover
            value = value.decode(encoding)
        # Test for Python2: unicode must be encoded to native str.
        elif value_type == six.text_type:  # pragma: no cover
            value = value.encode(encoding)
    return value
def decode_header(value, encoding='utf-8'):
    """Make sure the header is an unicode string."""
    # Only bytes need decoding; text passes through unchanged.
    if type(value) == six.binary_type:
        value = value.decode(encoding)
    return value
def strip_uri_prefix(path):
    """
    Remove potential version prefix in URI (e.g. ``/v1/buckets`` ->
    ``/buckets``).
    """
    return re.sub(r'^(/v\d+)?', '', six.text_type(path))
| 29.162534 | 81 | 0.636501 | import ast
import hashlib
import hmac
import os
import re
import six
import time
from base64 import b64decode, b64encode
from binascii import hexlify
from six.moves.urllib import parse as urlparse
from enum import Enum
try:
import ujson as json
def json_serializer(v, **kw):
return json.dumps(v, escape_forward_slashes=False)
except ImportError:
import json
json_serializer = json.dumps
try:
from psycopg2cffi import compat
except ImportError:
pass
else:
compat.register()
try:
import sqlalchemy
except ImportError:
sqlalchemy = None
from pyramid import httpexceptions
from pyramid.request import Request, apply_request_extensions
from pyramid.settings import aslist
from pyramid.view import render_view_to_response
from cornice import cors
from colander import null
def strip_whitespace(v):
    """Strip spaces/tabs/newlines from both ends; colander null passes through."""
    return v.strip(' \t\n\r') if v is not null else v
def msec_time():
    """Return current epoch time in milliseconds (floored int)."""
    return int(time.time() * 1000.0)
def classname(obj):
    """Return the lowercased class name of *obj*."""
    return obj.__class__.__name__.lower()
def merge_dicts(a, b):
    """Recursively fill *a* with entries from *b* without overwriting."""
    for k, v in b.items():
        if isinstance(v, dict):
            merge_dicts(a.setdefault(k, {}), v)
        else:
            a.setdefault(k, v)
def random_bytes_hex(bytes_length):
    """Return a hexstring of *bytes_length* cryptographic random bytes."""
    return hexlify(os.urandom(bytes_length)).decode('utf-8')
def native_value(value):
    """Convert a string to the native python value it represents."""
    if isinstance(value, six.string_types):
        if value.lower() in ['on', 'true', 'yes']:
            value = True
        elif value.lower() in ['off', 'false', 'no']:
            value = False
        try:
            return ast.literal_eval(value)
        except (ValueError, SyntaxError):
            pass
    return value
def read_env(key, value):
    """Read setting *key* from the environment, coerced; *value* is default."""
    envkey = key.replace('.', '_').replace('-', '_').upper()
    return native_value(os.getenv(envkey, value))
def encode64(content, encoding='utf-8'):
    """Encode some content in base64; returns str."""
    return b64encode(content.encode(encoding)).decode(encoding)
def decode64(encoded_content, encoding='utf-8'):
    """Decode some base64 encoded content; returns str."""
    return b64decode(encoded_content.encode(encoding)).decode(encoding)
def hmac_digest(secret, message, encoding='utf-8'):
    """Return hex digest of a message HMAC-SHA256 using *secret*."""
    if isinstance(secret, six.text_type):
        secret = secret.encode(encoding)
    return hmac.new(secret,
                    message.encode(encoding),
                    hashlib.sha256).hexdigest()
def dict_subset(d, keys):
    """Return a dict with the specified keys; dotted keys select sub-keys."""
    result = {}
    for key in keys:
        if '.' in key:
            field, subfield = key.split('.', 1)
            if isinstance(d.get(field), dict):
                subvalue = dict_subset(d[field], [subfield])
                result.setdefault(field, {}).update(subvalue)
            elif field in d:
                result[field] = d[field]
        else:
            if key in d:
                result[key] = d[key]
    return result
class COMPARISON(Enum):
    """Comparison operators used by filtering/querystring parsing."""
    LT = '<'
    MIN = '>='
    MAX = '<='
    NOT = '!='
    EQ = '=='
    GT = '>'
    IN = 'in'
    EXCLUDE = 'exclude'
def reapply_cors(request, response):
    """Reapply CORS headers to *response* and return it.

    Used for responses produced outside the normal Cornice service cycle
    (e.g. error handlers), where the CORS post-processing did not run.
    """
    service = request.current_service
    if service:
        # Force the Cornice CORS checks to run again for this response.
        request.info['cors_checked'] = False
        cors.apply_cors_post_request(service, request, response)
        response = cors.ensure_origin(service, request, response)
    else:
        # No service matched: honour the Origin header manually against
        # the configured whitelist ('*' always allowed).
        origin = request.headers.get('Origin')
        if origin:
            settings = request.registry.settings
            allowed_origins = set(aslist(settings['cors_origins']))
            required_origins = {'*', decode_header(origin)}
            if allowed_origins.intersection(required_origins):
                origin = encode_header(origin)
                response.headers['Access-Control-Allow-Origin'] = origin
    # Local import: kinto.core imports this module, so importing at module
    # load time would create a circular import.
    from kinto.core import Service
    if Service.default_cors_headers:
        headers = ','.join(Service.default_cors_headers)
        response.headers['Access-Control-Expose-Headers'] = headers
    return response
def current_service(request):
    """Return the Cornice service matching *request*, or ``None``.

    ``None`` is returned when no route matched, or when the matched
    route pattern is not backed by a registered Cornice service.
    """
    if not request.matched_route:
        return None
    registered = request.registry.cornice_services
    pattern = request.matched_route.pattern
    try:
        return registered[pattern]
    except KeyError:
        return None
def current_resource_name(request):
    """Return the name of the resource exposed by the matched service."""
    service = current_service(request)
    viewset = service.viewset
    return viewset.get_name(service.resource)
def build_request(original, dict_obj):
    """Build a Pyramid sub-request from a batch sub-request definition.

    :param original: the parent (batch) request; supplies the API version
        prefix, the default headers and the registry.
    :param dict_obj: sub-request spec with ``path`` and optional ``method``,
        ``headers`` and ``body``.
    :returns: a new :class:`pyramid.request.Request` whose ``parent`` is
        set to *original*.
    """
    api_prefix = '/%s' % original.upath_info.split('/')[1]
    # Make sure the sub-request path carries the API version prefix.
    path = dict_obj['path']
    if not path.startswith(api_prefix):
        path = api_prefix + path
    path = path.encode('utf-8')
    method = dict_obj.get('method') or 'GET'
    # Start from the parent headers; Content-Length is dropped so WebOb
    # recomputes it from the new payload.
    headers = dict(original.headers)
    headers.update(**dict_obj.get('headers') or {})
    headers.pop('Content-Length', None)
    payload = dict_obj.get('body') or ''
    # A dict body is serialized as JSON with the matching Content-Type.
    if isinstance(payload, dict):
        headers['Content-Type'] = encode_header(
            'application/json; charset=utf-8')
        payload = json.dumps(payload)
    # WebOb on Python 3 expects a native (latin-1 decoded) str path,
    # per the WSGI encoding dance.
    if six.PY3:
        path = path.decode('latin-1')
    request = Request.blank(path=path,
                            headers=headers,
                            POST=payload,
                            method=method)
    request.registry = original.registry
    apply_request_extensions(request)
    # Link back to the parent request so sub-views can reach it.
    request.parent = original
    return request
def build_response(response, request):
    """Serialize a sub-request *response* into a batch response mapping.

    :returns: a dict with ``path``, ``status``, ``headers`` and ``body``
        (parsed JSON when possible, the raw body otherwise; always empty
        for HEAD requests).
    """
    dict_obj = {}
    dict_obj['path'] = urlparse.unquote(request.path)
    dict_obj['status'] = response.status_code
    dict_obj['headers'] = dict(response.headers)
    body = ''
    if request.method != 'HEAD':
        # Prefer the decoded JSON body; fall back to the raw payload when
        # the response is not valid JSON.
        try:
            body = response.json
        except ValueError:
            body = response.body
    dict_obj['body'] = body
    return dict_obj
def follow_subrequest(request, subrequest, **kwargs):
    """Invoke *subrequest* and follow a single HTTP redirection, if any.

    :returns: a ``(response, request)`` tuple where ``request`` is either
        the original *subrequest* or the request replayed against the
        redirect target.
    """
    try:
        try:
            return request.invoke_subrequest(subrequest, **kwargs), subrequest
        except Exception as e:
            # Render the error through the views registered for this
            # exception type; unhandled errors (or 5xx renderings) keep
            # propagating as the original exception.
            resp = render_view_to_response(e, subrequest)
            if not resp or resp.status_code >= 500:
                raise e
            # Pyramid HTTP exception responses are raisable: re-raise the
            # rendered error so the redirect handler below can catch it.
            raise resp
    except httpexceptions.HTTPRedirection as e:
        # Replay the sub-request once against the redirect location,
        # preserving headers, body, method and the bound data.
        new_location = e.headers['Location']
        new_request = Request.blank(path=new_location,
                                    headers=subrequest.headers,
                                    POST=subrequest.body,
                                    method=subrequest.method)
        new_request.bound_data = subrequest.bound_data
        new_request.parent = getattr(subrequest, 'parent', None)
        return request.invoke_subrequest(new_request, **kwargs), new_request
def encode_header(value, encoding='utf-8'):
    """Coerce a header value to the native ``str`` type of this Python."""
    if type(value) == str:
        # Already native: nothing to do.
        return value
    if type(value) == six.binary_type:
        # Bytes header (Python 3): decode to text.
        return value.decode(encoding)
    if type(value) == six.text_type:
        # Unicode header on Python 2: encode to the native bytes str.
        return value.encode(encoding)
    return value
def decode_header(value, encoding='utf-8'):
    """Coerce a header value to a unicode string."""
    if type(value) == six.binary_type:
        return value.decode(encoding)
    return value
def strip_uri_prefix(path):
    """Remove the API version prefix (e.g. ``/v1``) from *path*."""
    text_path = six.text_type(path)
    return re.sub(r'^(/v\d+)?', '', text_path)
| true | true |
f7f8e150c8827dd48b3e52b4c49d30c6140d0fc8 | 4,652 | py | Python | google/cloud/security/scanner/scanners/forwarding_rule_scanner.py | pombredanne/forseti-security | 68a9a88243460065e00b6c131b3d9abd0331fb37 | [
"Apache-2.0"
] | 1 | 2018-03-26T08:15:21.000Z | 2018-03-26T08:15:21.000Z | google/cloud/security/scanner/scanners/forwarding_rule_scanner.py | pombredanne/forseti-security | 68a9a88243460065e00b6c131b3d9abd0331fb37 | [
"Apache-2.0"
] | null | null | null | google/cloud/security/scanner/scanners/forwarding_rule_scanner.py | pombredanne/forseti-security | 68a9a88243460065e00b6c131b3d9abd0331fb37 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Scanner for the Forwarding Rules rules engine."""
from google.cloud.security.common.util import log_util
from google.cloud.security.common.data_access import forwarding_rules_dao
from google.cloud.security.scanner.audit import forwarding_rule_rules_engine
from google.cloud.security.scanner.scanners import base_scanner
LOGGER = log_util.get_logger(__name__)
class ForwardingRuleScanner(base_scanner.BaseScanner):
    """Pipeline for forwarding rules from dao"""
    def __init__(self, global_configs, scanner_configs, snapshot_timestamp,
                 rules):
        """Initialization.
        Args:
            global_configs (dict): Global configurations.
            scanner_configs (dict): Scanner configurations.
            snapshot_timestamp (str): Timestamp, formatted as YYYYMMDDTHHMMSSZ.
            rules (str): Fully-qualified path and filename of the rules file.
        """
        super(ForwardingRuleScanner, self).__init__(
            global_configs,
            scanner_configs,
            snapshot_timestamp,
            rules)
        # The rule book is parsed once here and reused for every
        # forwarding rule scanned in run().
        self.rules_engine = forwarding_rule_rules_engine.\
            ForwardingRuleRulesEngine(
                rules_file_path=self.rules,
                snapshot_timestamp=self.snapshot_timestamp)
        self.rules_engine.build_rule_book(self.global_configs)
    def run(self):
        """Run the scanner: retrieve rules, audit them and store violations."""
        forwarding_rules = self._retrieve()
        all_violations = self._find_violations(forwarding_rules)
        self._output_results(all_violations)
    @staticmethod
    def _flatten_violations(violations):
        """Flatten RuleViolations into a dict for each RuleViolation member.
        Args:
            violations (list): The RuleViolations to flatten.
        Yields:
            dict: Iterator of RuleViolations as a dict per member.
        """
        for violation in violations:
            violation_data = {}
            violation_data['violation_type'] = violation.violation_type
            violation_data['target'] = violation.target
            violation_data['load_balancing_scheme'] = \
                violation.load_balancing_scheme
            violation_data['port'] = violation.port
            violation_data['port_range'] = violation.port_range
            violation_data['ip_protocol'] = violation.ip_protocol
            violation_data['ip_address'] = violation.ip_address
            # NOTE(review): rule_name mirrors violation_type below --
            # confirm the engine's violation tuple really has no separate
            # rule_name field.
            yield {
                'resource_id': violation.resource_id,
                'resource_type': violation.resource_type,
                'rule_index': violation.rule_index,
                'rule_name': violation.violation_type,
                'violation_type': violation.violation_type,
                'violation_data': violation_data,
            }
    def _output_results(self, all_violations):
        """Output results.
        Args:
            all_violations (list): All violations
        """
        all_violations = self._flatten_violations(all_violations)
        self._output_results_to_db(all_violations)
    def _retrieve(self):
        """Runs the data collection.
        Returns:
            list: forwarding rule list.
        """
        forwarding_rules = forwarding_rules_dao \
            .ForwardingRulesDao(self.global_configs) \
            .get_forwarding_rules(self.snapshot_timestamp)
        return forwarding_rules
    def _find_violations(self, forwarding_rules):
        """Find violations in forwarding rules.
        Args:
            forwarding_rules (list): Forwarding rule to find violations in
        Returns:
            list: A list of forwarding rule violations
        """
        all_violations = []
        LOGGER.info('Finding Forwarding Rule Violations...')
        for forwarding_rule in forwarding_rules:
            LOGGER.debug('%s', forwarding_rule)
            violations = self.rules_engine.find_policy_violations(
                forwarding_rule)
            LOGGER.debug(violations)
            if violations is not None:
                # NOTE(review): `append` (not `extend`) nests each rule's
                # result, so this returns a list of per-rule results --
                # confirm _flatten_violations / consumers expect that.
                all_violations.append(violations)
        return all_violations
| 37.821138 | 79 | 0.663371 |
from google.cloud.security.common.util import log_util
from google.cloud.security.common.data_access import forwarding_rules_dao
from google.cloud.security.scanner.audit import forwarding_rule_rules_engine
from google.cloud.security.scanner.scanners import base_scanner
LOGGER = log_util.get_logger(__name__)
class ForwardingRuleScanner(base_scanner.BaseScanner):
def __init__(self, global_configs, scanner_configs, snapshot_timestamp,
rules):
super(ForwardingRuleScanner, self).__init__(
global_configs,
scanner_configs,
snapshot_timestamp,
rules)
self.rules_engine = forwarding_rule_rules_engine.\
ForwardingRuleRulesEngine(
rules_file_path=self.rules,
snapshot_timestamp=self.snapshot_timestamp)
self.rules_engine.build_rule_book(self.global_configs)
def run(self):
forwarding_rules = self._retrieve()
all_violations = self._find_violations(forwarding_rules)
self._output_results(all_violations)
@staticmethod
def _flatten_violations(violations):
for violation in violations:
violation_data = {}
violation_data['violation_type'] = violation.violation_type
violation_data['target'] = violation.target
violation_data['load_balancing_scheme'] = \
violation.load_balancing_scheme
violation_data['port'] = violation.port
violation_data['port_range'] = violation.port_range
violation_data['ip_protocol'] = violation.ip_protocol
violation_data['ip_address'] = violation.ip_address
yield {
'resource_id': violation.resource_id,
'resource_type': violation.resource_type,
'rule_index': violation.rule_index,
'rule_name': violation.violation_type,
'violation_type': violation.violation_type,
'violation_data': violation_data,
}
def _output_results(self, all_violations):
all_violations = self._flatten_violations(all_violations)
self._output_results_to_db(all_violations)
def _retrieve(self):
forwarding_rules = forwarding_rules_dao \
.ForwardingRulesDao(self.global_configs) \
.get_forwarding_rules(self.snapshot_timestamp)
return forwarding_rules
def _find_violations(self, forwarding_rules):
all_violations = []
LOGGER.info('Finding Forwarding Rule Violations...')
for forwarding_rule in forwarding_rules:
LOGGER.debug('%s', forwarding_rule)
violations = self.rules_engine.find_policy_violations(
forwarding_rule)
LOGGER.debug(violations)
if violations is not None:
all_violations.append(violations)
return all_violations
| true | true |
f7f8e209d791db1e7d711c2c75fca6e125fce1a5 | 10,809 | py | Python | latent_programmer/models/relative_attention.py | GurcanDurukan/google-research | 85fcb6fedaeb9a91e6a27ac469773771fca41634 | [
"Apache-2.0"
] | 1 | 2021-09-30T22:00:31.000Z | 2021-09-30T22:00:31.000Z | latent_programmer/models/relative_attention.py | GurcanDurukan/google-research | 85fcb6fedaeb9a91e6a27ac469773771fca41634 | [
"Apache-2.0"
] | 6 | 2022-02-10T00:01:02.000Z | 2022-02-10T07:24:45.000Z | latent_programmer/models/relative_attention.py | LaudateCorpus1/google-research | f349d160966ae1e63755ee2c97481f59331da4b5 | [
"Apache-2.0"
] | 2 | 2021-09-03T13:42:04.000Z | 2021-09-14T21:20:51.000Z | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements relative self-attention."""
# pylint: disable=attribute-defined-outside-init,g-bare-generic
# pytype: disable=wrong-arg-count
# pytype: disable=wrong-keyword-args
# pytype: disable=attribute-error
import functools
from typing import Any, Callable, Iterable, Optional
from flax.linen import attention
from flax.linen import initializers
from flax.linen import linear
from flax.linen import module
from jax import lax
import jax.numpy as jnp
PRNGKey = Any
Shape = Iterable[int]
Dtype = Any
Array = Any
def make_relative_position_bucket(relative_position, causal=False,
                                  num_buckets=32, max_distance=128):
  """Map relative positions to bucket indices for relative attention.

  Buckets are exact for distances below ``num_buckets // 2`` (or a quarter
  in the causal case) and logarithmically spaced up to `max_distance`;
  anything farther shares the final bucket.  In the causal setting half of
  the buckets are reserved for positions in the future.

  Adapted from the Mesh TensorFlow T5 implementation.

  Args:
    relative_position: array of `memory_position - query_position` offsets.
    causal: whether attention is restricted to past tokens.
    num_buckets: total number of buckets.
    max_distance: offsets beyond this all map to the last bucket.

  Returns:
    An int32 array of bucket indices with the shape of `relative_position`.
  """
  bucket_offset = 0
  if causal:
    # Split the buckets evenly between past and future offsets.
    num_buckets //= 2
    bucket_offset = (relative_position > 0) * num_buckets
    distance = jnp.abs(relative_position)
  else:
    # Only offsets into the past are bucketed; the rest collapse to 0.
    distance = -jnp.clip(relative_position, a_max=0)
  max_exact = num_buckets // 2
  # Logarithmic bucketing for distances in [max_exact, max_distance).
  log_ratio = jnp.log(distance / max_exact) / jnp.log(max_distance / max_exact)
  large_bucket = max_exact + log_ratio * (num_buckets - max_exact)
  large_bucket = jnp.clip(large_bucket, a_max=num_buckets - 1)
  bucket = jnp.where(distance < max_exact, distance, large_bucket)
  return (bucket_offset + bucket).astype(jnp.int32)
class RelativeMultiHeadDotProductAttention(module.Module):
  """Dot-product attention with relative positional encodings.
  Attributes:
    num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])
      should be divisible by the number of heads.
    dtype: the dtype of the computation (default: float32)
    qkv_features: dimension of the key, query, and value.
    out_features: dimension of the last projection
    broadcast_dropout: bool: use a broadcasted dropout along batch dims.
    dropout_rate: dropout rate
    deterministic: if false, the attention weight is masked randomly
      using dropout, whereas if true, the attention weights
      are deterministic.
    precision: numerical precision of the computation see `jax.lax.Precision`
      for details.
    kernel_init: initializer for the kernel of the Dense layers.
    bias_init: initializer for the bias of the Dense layers.
    use_bias: bool: whether pointwise QKVO dense transforms use bias.
    decode: whether to prepare and use an autoregressive cache.
    causal: whether to only attend to past tokens.
    num_relative_position_buckets: number of buckets for relative positions
      for attention.
  """
  num_heads: int
  dtype: Dtype = jnp.float32
  qkv_features: Optional[int] = None
  out_features: Optional[int] = None
  broadcast_dropout: bool = True
  dropout_rate: float = 0.
  deterministic: Optional[bool] = None
  precision: Any = None
  kernel_init: Callable[[PRNGKey, Shape, Dtype], Array] = (
      linear.default_kernel_init)
  bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = initializers.zeros
  use_bias: bool = True
  decode: bool = False
  num_relative_position_buckets: int = 32
  causal: bool = False
  @module.compact
  def __call__(self,
               inputs_q,
               inputs_kv,
               mask = None,
               deterministic = None):
    """Applies multi-head dot product attention on the input data.
    Projects the inputs into multi-headed query, key, and value vectors,
    applies dot-product attention and project the results to an output vector.
    Args:
      inputs_q: input queries of shape
        `[batch_sizes..., length, features]`.
      inputs_kv: key/values of shape
        `[batch_sizes..., length, features]`.
      mask: attention mask of shape
        `[batch_sizes..., num_heads, query_length, key/value_length]`.
        Attention weights are masked out if their corresponding mask value
        is `False`.
      deterministic: if false, the attention weight is masked randomly
        using dropout, whereas if true, the attention weights
        are deterministic.
    Returns:
      output of shape `[batch_sizes..., length, features]`.
    """
    if self.dropout_rate > 0.:  # Require `deterministic` only if using dropout.
      deterministic = module.merge_param('deterministic', self.deterministic,
                                         deterministic)
    features = self.out_features or inputs_q.shape[-1]
    qkv_features = self.qkv_features or inputs_q.shape[-1]
    assert qkv_features % self.num_heads == 0, (
        'Memory dimension must be divisible by number of heads.')
    head_dim = qkv_features // self.num_heads
    dense = functools.partial(linear.DenseGeneral,
                              axis=-1,
                              features=(self.num_heads, head_dim),
                              kernel_init=self.kernel_init,
                              bias_init=self.bias_init,
                              use_bias=self.use_bias,
                              precision=self.precision)
    # Learned per-head attention bias, looked up by relative-position bucket.
    relative_attention_embed = linear.Embed(
        num_embeddings=self.num_relative_position_buckets,
        features=self.num_heads,
        embedding_init=initializers.normal(stddev=1.0),
        dtype=self.dtype)
    # project inputs_q to multi-headed q/k/v
    # dimensions are then [batch..., length, n_heads, n_features_per_head]
    query, key, value = (dense(dtype=self.dtype, name='query')(inputs_q),
                         dense(dtype=self.dtype, name='key')(inputs_kv),
                         dense(dtype=self.dtype, name='value')(inputs_kv))
    query_length = inputs_q.shape[-2]
    key_length = inputs_kv.shape[-2]
    # relative_position[i, j] = j - i (memory index minus query index).
    context_position = jnp.arange(query_length, dtype=jnp.int32)[:, None]
    memory_position = jnp.arange(key_length, dtype=jnp.int32)[None, :]
    relative_position = memory_position - context_position
    relative_position_bucket = make_relative_position_bucket(
        relative_position,
        causal=self.causal,
        num_buckets=self.num_relative_position_buckets)
    bias = relative_attention_embed(relative_position_bucket)
    # Rearrange to [batch=1, heads, q_length, kv_length]; broadcast over batch.
    bias = bias.transpose((2, 0, 1))[None, :, :, :]
    # During fast autoregressive decoding, we feed one position at a time,
    # and cache the keys and values step by step.
    if self.decode:
      # detect if we're initializing by absence of existing cache data.
      is_initialized = self.has_variable('cache', 'cached_key')
      cached_key = self.variable('cache', 'cached_key',
                                 jnp.zeros, key.shape, key.dtype)
      cached_value = self.variable('cache', 'cached_value',
                                   jnp.zeros, value.shape, value.dtype)
      cache_index = self.variable('cache', 'cache_index',
                                  lambda: jnp.array(0, dtype=jnp.int32))
      if is_initialized:
        *batch_dims, max_length, num_heads, depth_per_head = (
            cached_key.value.shape)
        # shape check of cached keys against query input
        expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)
        if expected_shape != query.shape:
          raise ValueError('Autoregressive cache shape error, '
                           'expected query shape %s instead got %s.' %
                           (expected_shape, query.shape))
        # update key, value caches with our new 1d spatial slices
        cur_index = cache_index.value
        indices = (0,) * len(batch_dims) + (cur_index, 0, 0)
        key = lax.dynamic_update_slice(cached_key.value, key, indices)
        value = lax.dynamic_update_slice(cached_value.value, value, indices)
        cached_key.value = key
        cached_value.value = value
        cache_index.value = cache_index.value + 1
        # causal mask for cached decoder self-attention:
        # our single query position should only attend to those key
        # positions that have already been generated and cached,
        # not the remaining zero elements.
        mask = attention.combine_masks(
            mask,
            jnp.broadcast_to(jnp.arange(max_length) <= cur_index,
                             tuple(batch_dims) + (1, 1, max_length)))
        # Keep only the bias row for the current decoding position.
        bias = lax.dynamic_slice(
            bias,
            (0, 0, cur_index, 0),
            (1, self.num_heads, 1, max_length))
    # Convert the boolean attention mask to an attention bias.
    if mask is not None:
      # attention mask in the form of attention bias
      bias += lax.select(
          mask > 0,
          jnp.full(mask.shape, 0.).astype(self.dtype),
          jnp.full(mask.shape, -1e10).astype(self.dtype))
    dropout_rng = None
    if not deterministic and self.dropout_rate > 0.:
      dropout_rng = self.make_rng('dropout')
    # apply attention
    x = attention.dot_product_attention(
        query,
        key,
        value,
        bias=bias,
        dropout_rng=dropout_rng,
        dropout_rate=self.dropout_rate,
        broadcast_dropout=self.broadcast_dropout,
        deterministic=deterministic,
        dtype=self.dtype,
        precision=self.precision)  # pytype: disable=wrong-keyword-args
    # back to the original inputs dimensions
    out = linear.DenseGeneral(features=features,
                              axis=(-2, -1),
                              kernel_init=self.kernel_init,
                              bias_init=self.bias_init,
                              use_bias=self.use_bias,
                              dtype=self.dtype,
                              precision=self.precision,
                              name='out')(x)
    return out
class RelativeSelfAttention(RelativeMultiHeadDotProductAttention):
  """Self-attention special case."""
  @module.compact
  def __call__(self, inputs_q, mask = None,
               deterministic = None):
    # Queries, keys and values all come from the same input tensor.
    return super().__call__(inputs_q, inputs_q, mask,
                            deterministic=deterministic)
| 41.413793 | 134 | 0.655472 |
import functools
from typing import Any, Callable, Iterable, Optional
from flax.linen import attention
from flax.linen import initializers
from flax.linen import linear
from flax.linen import module
from jax import lax
import jax.numpy as jnp
PRNGKey = Any
Shape = Iterable[int]
Dtype = Any
Array = Any
def make_relative_position_bucket(relative_position, causal=False,
num_buckets=32, max_distance=128):
relative_buckets = 0
if causal:
num_buckets //= 2
relative_buckets += (relative_position > 0) * num_buckets
relative_position = jnp.abs(relative_position)
else:
relative_position = -jnp.clip(relative_position, a_max=0)
max_exact = num_buckets // 2
is_small = relative_position < max_exact
relative_position_if_large = max_exact + (
jnp.log(relative_position / max_exact) / jnp.log(max_distance / max_exact)
* (num_buckets - max_exact)
)
relative_position_if_large = jnp.clip(relative_position_if_large,
a_max=num_buckets - 1)
relative_buckets += jnp.where(is_small, relative_position,
relative_position_if_large)
return relative_buckets.astype(jnp.int32)
class RelativeMultiHeadDotProductAttention(module.Module):
num_heads: int
dtype: Dtype = jnp.float32
qkv_features: Optional[int] = None
out_features: Optional[int] = None
broadcast_dropout: bool = True
dropout_rate: float = 0.
deterministic: Optional[bool] = None
precision: Any = None
kernel_init: Callable[[PRNGKey, Shape, Dtype], Array] = (
linear.default_kernel_init)
bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = initializers.zeros
use_bias: bool = True
decode: bool = False
num_relative_position_buckets: int = 32
causal: bool = False
@module.compact
def __call__(self,
inputs_q,
inputs_kv,
mask = None,
deterministic = None):
if self.dropout_rate > 0.:
deterministic = module.merge_param('deterministic', self.deterministic,
deterministic)
features = self.out_features or inputs_q.shape[-1]
qkv_features = self.qkv_features or inputs_q.shape[-1]
assert qkv_features % self.num_heads == 0, (
'Memory dimension must be divisible by number of heads.')
head_dim = qkv_features // self.num_heads
dense = functools.partial(linear.DenseGeneral,
axis=-1,
features=(self.num_heads, head_dim),
kernel_init=self.kernel_init,
bias_init=self.bias_init,
use_bias=self.use_bias,
precision=self.precision)
relative_attention_embed = linear.Embed(
num_embeddings=self.num_relative_position_buckets,
features=self.num_heads,
embedding_init=initializers.normal(stddev=1.0),
dtype=self.dtype)
query, key, value = (dense(dtype=self.dtype, name='query')(inputs_q),
dense(dtype=self.dtype, name='key')(inputs_kv),
dense(dtype=self.dtype, name='value')(inputs_kv))
query_length = inputs_q.shape[-2]
key_length = inputs_kv.shape[-2]
context_position = jnp.arange(query_length, dtype=jnp.int32)[:, None]
memory_position = jnp.arange(key_length, dtype=jnp.int32)[None, :]
relative_position = memory_position - context_position
relative_position_bucket = make_relative_position_bucket(
relative_position,
causal=self.causal,
num_buckets=self.num_relative_position_buckets)
bias = relative_attention_embed(relative_position_bucket)
bias = bias.transpose((2, 0, 1))[None, :, :, :]
if self.decode:
is_initialized = self.has_variable('cache', 'cached_key')
cached_key = self.variable('cache', 'cached_key',
jnp.zeros, key.shape, key.dtype)
cached_value = self.variable('cache', 'cached_value',
jnp.zeros, value.shape, value.dtype)
cache_index = self.variable('cache', 'cache_index',
lambda: jnp.array(0, dtype=jnp.int32))
if is_initialized:
*batch_dims, max_length, num_heads, depth_per_head = (
cached_key.value.shape)
# shape check of cached keys against query input
expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)
if expected_shape != query.shape:
raise ValueError('Autoregressive cache shape error, '
'expected query shape %s instead got %s.' %
(expected_shape, query.shape))
# update key, value caches with our new 1d spatial slices
cur_index = cache_index.value
indices = (0,) * len(batch_dims) + (cur_index, 0, 0)
key = lax.dynamic_update_slice(cached_key.value, key, indices)
value = lax.dynamic_update_slice(cached_value.value, value, indices)
cached_key.value = key
cached_value.value = value
cache_index.value = cache_index.value + 1
# causal mask for cached decoder self-attention:
# our single query position should only attend to those key
# positions that have already been generated and cached,
# not the remaining zero elements.
mask = attention.combine_masks(
mask,
jnp.broadcast_to(jnp.arange(max_length) <= cur_index,
tuple(batch_dims) + (1, 1, max_length)))
bias = lax.dynamic_slice(
bias,
(0, 0, cur_index, 0),
(1, self.num_heads, 1, max_length))
# Convert the boolean attention mask to an attention bias.
if mask is not None:
# attention mask in the form of attention bias
bias += lax.select(
mask > 0,
jnp.full(mask.shape, 0.).astype(self.dtype),
jnp.full(mask.shape, -1e10).astype(self.dtype))
dropout_rng = None
if not deterministic and self.dropout_rate > 0.:
dropout_rng = self.make_rng('dropout')
# apply attention
x = attention.dot_product_attention(
query,
key,
value,
bias=bias,
dropout_rng=dropout_rng,
dropout_rate=self.dropout_rate,
broadcast_dropout=self.broadcast_dropout,
deterministic=deterministic,
dtype=self.dtype,
precision=self.precision) # pytype: disable=wrong-keyword-args
# back to the original inputs dimensions
out = linear.DenseGeneral(features=features,
axis=(-2, -1),
kernel_init=self.kernel_init,
bias_init=self.bias_init,
use_bias=self.use_bias,
dtype=self.dtype,
precision=self.precision,
name='out')(x)
return out
class RelativeSelfAttention(RelativeMultiHeadDotProductAttention):
@module.compact
def __call__(self, inputs_q, mask = None,
deterministic = None):
return super().__call__(inputs_q, inputs_q, mask,
deterministic=deterministic)
| true | true |
f7f8e30f155d1aeea266bf1aeebc782ce4d04b0b | 7,432 | py | Python | sdk/python/pulumi_azure_nextgen/compute/latest/get_image.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_nextgen/compute/latest/get_image.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_nextgen/compute/latest/get_image.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetImageResult',
'AwaitableGetImageResult',
'get_image',
]
warnings.warn("""The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-nextgen:compute:getImage'.""", DeprecationWarning)
@pulumi.output_type
class GetImageResult:
"""
The source user image virtual hard disk. The virtual hard disk will be copied before being attached to the virtual machine. If SourceImage is provided, the destination virtual hard drive must not exist.
"""
def __init__(__self__, extended_location=None, hyper_v_generation=None, id=None, location=None, name=None, provisioning_state=None, source_virtual_machine=None, storage_profile=None, tags=None, type=None):
if extended_location and not isinstance(extended_location, dict):
raise TypeError("Expected argument 'extended_location' to be a dict")
pulumi.set(__self__, "extended_location", extended_location)
if hyper_v_generation and not isinstance(hyper_v_generation, str):
raise TypeError("Expected argument 'hyper_v_generation' to be a str")
pulumi.set(__self__, "hyper_v_generation", hyper_v_generation)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if source_virtual_machine and not isinstance(source_virtual_machine, dict):
raise TypeError("Expected argument 'source_virtual_machine' to be a dict")
pulumi.set(__self__, "source_virtual_machine", source_virtual_machine)
if storage_profile and not isinstance(storage_profile, dict):
raise TypeError("Expected argument 'storage_profile' to be a dict")
pulumi.set(__self__, "storage_profile", storage_profile)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="extendedLocation")
def extended_location(self) -> Optional['outputs.ExtendedLocationResponse']:
"""
The extended location of the Image.
"""
return pulumi.get(self, "extended_location")
@property
@pulumi.getter(name="hyperVGeneration")
def hyper_v_generation(self) -> Optional[str]:
"""
Gets the HyperVGenerationType of the VirtualMachine created from the image
"""
return pulumi.get(self, "hyper_v_generation")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="sourceVirtualMachine")
def source_virtual_machine(self) -> Optional['outputs.SubResourceResponse']:
"""
The source virtual machine from which Image is created.
"""
return pulumi.get(self, "source_virtual_machine")
@property
@pulumi.getter(name="storageProfile")
def storage_profile(self) -> Optional['outputs.ImageStorageProfileResponse']:
"""
Specifies the storage settings for the virtual machine disks.
"""
return pulumi.get(self, "storage_profile")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
class AwaitableGetImageResult(GetImageResult):
    """Awaitable variant of GetImageResult, so callers may ``await`` it."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # Generator protocol without ever yielding: the result is already
        # resolved, so awaiting completes immediately and returns a plain
        # GetImageResult copy of this object's fields.
        if False:
            yield self
        return GetImageResult(
            extended_location=self.extended_location,
            hyper_v_generation=self.hyper_v_generation,
            id=self.id,
            location=self.location,
            name=self.name,
            provisioning_state=self.provisioning_state,
            source_virtual_machine=self.source_virtual_machine,
            storage_profile=self.storage_profile,
            tags=self.tags,
            type=self.type)
def get_image(expand: Optional[str] = None,
              image_name: Optional[str] = None,
              resource_group_name: Optional[str] = None,
              opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetImageResult:
    """
    The source user image virtual hard disk. The virtual hard disk will be copied before being attached to the virtual machine. If SourceImage is provided, the destination virtual hard drive must not exist.
    Latest API Version: 2020-12-01.


    :param str expand: The expand expression to apply on the operation.
    :param str image_name: The name of the image.
    :param str resource_group_name: The name of the resource group.
    """
    # Deprecated alias of the top-level 'getImage' function; warn on every call.
    pulumi.log.warn("get_image is deprecated: The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-nextgen:compute:getImage'.")
    __args__ = dict()
    __args__['expand'] = expand
    __args__['imageName'] = image_name
    __args__['resourceGroupName'] = resource_group_name
    # Default the invoke options and pin the SDK version when unset.
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Invoke the provider and wrap the result so callers may await it.
    __ret__ = pulumi.runtime.invoke('azure-nextgen:compute/latest:getImage', __args__, opts=opts, typ=GetImageResult).value
    return AwaitableGetImageResult(
        extended_location=__ret__.extended_location,
        hyper_v_generation=__ret__.hyper_v_generation,
        id=__ret__.id,
        location=__ret__.location,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        source_virtual_machine=__ret__.source_virtual_machine,
        storage_profile=__ret__.storage_profile,
        tags=__ret__.tags,
        type=__ret__.type)
| 38.910995 | 209 | 0.66873 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
# Public API of this deprecated module shim.
__all__ = [
    'GetImageResult',
    'AwaitableGetImageResult',
    'get_image',
]
# Emit a DeprecationWarning at import time: this 'latest' module is deprecated.
warnings.warn("""The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-nextgen:compute:getImage'.""", DeprecationWarning)
@pulumi.output_type
class GetImageResult:
    """Values returned by the getImage invoke.

    Auto-generated pulumi output type: the constructor validates the raw
    engine-supplied values (dict/str, if present) and stores them with
    pulumi.set; the properties read them back with pulumi.get. The
    @pulumi.getter(name=...) decorators map snake_case attributes to the
    provider's camelCase wire names.
    """
    def __init__(__self__, extended_location=None, hyper_v_generation=None, id=None, location=None, name=None, provisioning_state=None, source_virtual_machine=None, storage_profile=None, tags=None, type=None):
        if extended_location and not isinstance(extended_location, dict):
            raise TypeError("Expected argument 'extended_location' to be a dict")
        pulumi.set(__self__, "extended_location", extended_location)
        if hyper_v_generation and not isinstance(hyper_v_generation, str):
            raise TypeError("Expected argument 'hyper_v_generation' to be a str")
        pulumi.set(__self__, "hyper_v_generation", hyper_v_generation)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if source_virtual_machine and not isinstance(source_virtual_machine, dict):
            raise TypeError("Expected argument 'source_virtual_machine' to be a dict")
        pulumi.set(__self__, "source_virtual_machine", source_virtual_machine)
        if storage_profile and not isinstance(storage_profile, dict):
            raise TypeError("Expected argument 'storage_profile' to be a dict")
        pulumi.set(__self__, "storage_profile", storage_profile)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="extendedLocation")
    def extended_location(self) -> Optional['outputs.ExtendedLocationResponse']:
        return pulumi.get(self, "extended_location")
    @property
    @pulumi.getter(name="hyperVGeneration")
    def hyper_v_generation(self) -> Optional[str]:
        return pulumi.get(self, "hyper_v_generation")
    @property
    @pulumi.getter
    def id(self) -> str:
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def location(self) -> str:
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> str:
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter(name="sourceVirtualMachine")
    def source_virtual_machine(self) -> Optional['outputs.SubResourceResponse']:
        return pulumi.get(self, "source_virtual_machine")
    @property
    @pulumi.getter(name="storageProfile")
    def storage_profile(self) -> Optional['outputs.ImageStorageProfileResponse']:
        return pulumi.get(self, "storage_profile")
    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> str:
        return pulumi.get(self, "type")
class AwaitableGetImageResult(GetImageResult):
    """Awaitable variant of GetImageResult returned by get_image()."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The dead `yield` turns __await__ into a generator function so the
        # result can be awaited; awaiting resolves immediately to a plain
        # GetImageResult copy of this instance.
        if False:
            yield self
        return GetImageResult(
            extended_location=self.extended_location,
            hyper_v_generation=self.hyper_v_generation,
            id=self.id,
            location=self.location,
            name=self.name,
            provisioning_state=self.provisioning_state,
            source_virtual_machine=self.source_virtual_machine,
            storage_profile=self.storage_profile,
            tags=self.tags,
            type=self.type)
def get_image(expand: Optional[str] = None,
              image_name: Optional[str] = None,
              resource_group_name: Optional[str] = None,
              opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetImageResult:
    """Look up a compute Image through the deprecated 'latest' API version.

    :param str expand: The expand expression to apply on the operation.
    :param str image_name: The name of the image.
    :param str resource_group_name: The name of the resource group.
    """
    pulumi.log.warn("get_image is deprecated: The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-nextgen:compute:getImage'.")
    # Arguments use the provider's camelCase wire names.
    __args__ = dict()
    __args__['expand'] = expand
    __args__['imageName'] = image_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:compute/latest:getImage', __args__, opts=opts, typ=GetImageResult).value
    return AwaitableGetImageResult(
        extended_location=__ret__.extended_location,
        hyper_v_generation=__ret__.hyper_v_generation,
        id=__ret__.id,
        location=__ret__.location,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        source_virtual_machine=__ret__.source_virtual_machine,
        storage_profile=__ret__.storage_profile,
        tags=__ret__.tags,
        type=__ret__.type)
| true | true |
f7f8e3c3e3f8a8fa75035d3ef14d861b7c2f6cf0 | 1,570 | py | Python | dmrg_helpers/extract/estimator_name.py | iglpdc/dmrg_helpers | df27ff06395c0a4779c2d2723d57524da55cc14a | [
"MIT"
] | 1 | 2019-09-26T13:06:04.000Z | 2019-09-26T13:06:04.000Z | dmrg_helpers/extract/estimator_name.py | iglpdc/dmrg_helpers | df27ff06395c0a4779c2d2723d57524da55cc14a | [
"MIT"
] | null | null | null | dmrg_helpers/extract/estimator_name.py | iglpdc/dmrg_helpers | df27ff06395c0a4779c2d2723d57524da55cc14a | [
"MIT"
] | null | null | null | '''A class for estimator names.
'''
from dmrg_helpers.extract.tuple_to_key import tuple_to_key
from sqlite3 import register_adapter, register_converter
class EstimatorName(object):
    """Represents the name of an estimator stored in the database.

    An estimator (correlator) name is just the ordered collection of
    single-site operator names that compose it.

    Parameters
    ----------
    operators: a tuple of strings.
        The names of the several single-site operators that compose the
        correlator.
    """
    def __init__(self, operators):
        super(EstimatorName, self).__init__()
        # Stored as-is; serialization to the database format happens in
        # adapt_estimator_name.
        self.operators = operators
def adapt_estimator_name(estimator_name):
    """Serialize an EstimatorName into its database string form.

    sqlite3 calls this automatically (via register_adapter) whenever an
    EstimatorName is bound to a query parameter.

    Parameters
    ----------
    estimator_name: an EstimatorName.
        The estimator name you want to adapt.

    Returns
    -------
    a string in the format to be stored in the database.
    """
    db_key = tuple_to_key(estimator_name.operators)
    return db_key
def convert_estimator_name(s):
    """Rebuild an EstimatorName from its database representation.

    sqlite3 calls this automatically (via register_converter) for columns
    declared with the 'estimator_name' type.

    Parameters
    ----------
    s : a string
        An estimator name as stored in the database (':'-separated).

    Returns
    -------
    an EstimatorName object.
    """
    return EstimatorName(s.split(':'))
# Teach sqlite3 how to serialize EstimatorName query parameters and how to
# deserialize columns declared with the 'estimator_name' type.
register_adapter(EstimatorName, adapt_estimator_name)
register_converter('estimator_name', convert_estimator_name)
| 28.035714 | 78 | 0.696178 | from dmrg_helpers.extract.tuple_to_key import tuple_to_key
from sqlite3 import register_adapter, register_converter
class EstimatorName(object):
    """Stores the single-site operator names that compose an estimator."""
    def __init__(self, operators):
        super(EstimatorName, self).__init__()
        self.operators = operators
def adapt_estimator_name(estimator_name):
    """Serialize an EstimatorName into its database string form (sqlite3 adapter)."""
    return tuple_to_key(estimator_name.operators)
def convert_estimator_name(s):
    """Rebuild an EstimatorName from its ':'-separated database form (sqlite3 converter)."""
    operators = s.split(':')
    return EstimatorName(operators)
# Hook the adapter/converter pair into sqlite3's type machinery.
register_adapter(EstimatorName, adapt_estimator_name)
register_converter('estimator_name', convert_estimator_name)
| true | true |
f7f8e40e478faeef4cc94983cb3df4e20d3c6a14 | 38 | py | Python | python/testData/refactoring/introduceVariable/leftQuoteSubstring.after.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/refactoring/introduceVariable/leftQuoteSubstring.after.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/refactoring/introduceVariable/leftQuoteSubstring.after.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | a = "hello"
print(a + " world" + "!")
| 12.666667 | 25 | 0.447368 | a = "hello"
print(a + " world" + "!")
| true | true |
f7f8e425bf5d7b764bc951d3653fe2c9c693bfa4 | 8,499 | py | Python | tests/originalTests/wizard_test.py | Sikiru98/jsonpickle-master | ebaf6ee73827535dba50c3f8cf53e7a3b52cb310 | [
"BSD-3-Clause"
] | 956 | 2015-06-24T15:04:55.000Z | 2022-03-30T06:25:04.000Z | scripts/external_libs/jsonpickle-2.0.0/tests/wizard_test.py | hjat2005/trex-core | 400f03c86c844a0096dff3f6b13e58a808aaefff | [
"Apache-2.0"
] | 782 | 2015-09-20T15:19:00.000Z | 2022-03-31T23:52:05.000Z | scripts/external_libs/jsonpickle-2.0.0/tests/wizard_test.py | hjat2005/trex-core | 400f03c86c844a0096dff3f6b13e58a808aaefff | [
"Apache-2.0"
] | 429 | 2015-06-27T19:34:21.000Z | 2022-03-23T11:02:51.000Z | """Wizard tests from petrounias.org
http://www.petrounias.org/articles/2014/09/16/pickling-python-collections-with-non-built-in-type-keys-and-cycles/
Includes functionality to assist with adding compatibility to jsonpickle.
"""
from __future__ import absolute_import, division, unicode_literals
import unittest
import collections
from jsonpickle import encode, decode
class World(object):
    """Registry of every Wizard that has been created in this world."""

    def __init__(self):
        # Populated by Wizard.__init__, which appends each new wizard here.
        self.wizards = []
class Wizard(object):
    """A named spell caster; registers itself with the given world.

    spells maps target Wizard -> list of Spell, in casting order
    (OrderedDict preserves the order targets were first hit).
    """
    def __init__(self, world, name):
        self.name = name
        self.spells = collections.OrderedDict()
        world.wizards.append(self)
    def __hash__(self):
        # Hash depends only on the name, so equal-named wizards collide by design.
        return hash('Wizard %s' % self.name)
    def __eq__(self, other):
        # NOTE(review): zip truncates to the shorter spell dict, so a wizard
        # with extra targets can still compare equal — presumably acceptable
        # for these tests; confirm before reusing elsewhere.
        for (ka, va), (kb, vb) in zip(self.spells.items(), other.spells.items()):
            if ka.name != kb.name:
                print('Wizards differ: %s != %s' % (ka.name, kb.name))
                return False
            for sa, sb in zip(va, vb):
                if sa != sb:
                    print('Spells differ: %s != %s' % (sa.name, sb.name))
                    return False
        return self.name == other.name
    def __cmp__(self, other):
        # Python 2 ordering hook; `cmp` is undefined on Python 3 (hence the
        # noqa markers), so this method is effectively dead there.
        for (ka, va), (kb, vb) in zip(self.spells.items(), other.spells.items()):
            cmp_name = cmp(ka.name, kb.name)  # noqa: F821
            if cmp_name != 0:
                print('Wizards cmp: %s != %s' % (ka.name, kb.name))
                return cmp_name
            for sa, sb in zip(va, vb):
                cmp_spell = cmp(sa, sb)  # noqa: F821
                if cmp_spell != 0:
                    print('Spells cmp: %s != %s' % (sa.name, sb.name))
                    return cmp_spell
        return cmp(self.name, other.name)  # noqa: F821
class Spell(object):
    """A spell cast by one Wizard on another; registers itself on the caster."""
    def __init__(self, caster, target, name):
        self.caster = caster
        self.target = target
        self.name = name
        # EAFP: create the caster's per-target spell list on first use.
        try:
            spells = caster.spells[target]
        except KeyError:
            spells = caster.spells[target] = []
        spells.append(self)
    def __hash__(self):
        # Identity for hashing purposes is the (name, caster, target) triple.
        return hash(
            'Spell %s by %s on %s' % (self.name, self.caster.name, self.target.name)
        )
    def __eq__(self, other):
        return (
            self.name == other.name
            and self.caster.name == other.caster.name
            and self.target.name == other.target.name
        )
    def __cmp__(self, other):
        # Python 2 ordering hook; `cmp` does not exist on Python 3.
        return (
            cmp(self.name, other.name)  # noqa: F821
            or cmp(self.caster.name, other.caster.name)  # noqa: F821
            or cmp(self.target.name, other.target.name)  # noqa: F821
        )  # noqa: F821
def hashsum(items):
    """Return the sum of the hashes of *items* (0 for an empty iterable)."""
    # Generator expression avoids materializing an intermediate list.
    return sum(hash(x) for x in items)
def compare_spells(a, b):
    """Return True if the paired keys of dicts *a* and *b* compare equal.

    NOTE(review): only keys are compared — the paired values (va, vb) are
    never checked — and zip truncates to the shorter dict, so dicts of
    different sizes can still compare True. Presumably intentional for this
    test helper; confirm before relying on it.
    """
    for (ka, va), (kb, vb) in zip(a.items(), b.items()):
        if ka != kb:
            print('Keys differ: %s != %s' % (ka, kb))
            return False
    return True
class MagicTestCase(unittest.TestCase):
    """Exercises custom-keyed dicts with cycles, before and after jsonpickle."""
    def test_without_pickling(self):
        """Baseline: the object graph behaves as expected without pickling."""
        world = World()
        wizard_merlin = Wizard(world, 'Merlin')
        wizard_morgana = Wizard(world, 'Morgana')
        spell_a = Spell(wizard_merlin, wizard_morgana, 'magic-missile')
        spell_b = Spell(wizard_merlin, wizard_merlin, 'stone-skin')
        spell_c = Spell(wizard_morgana, wizard_merlin, 'geas')
        self.assertEqual(wizard_merlin.spells[wizard_morgana][0], spell_a)
        self.assertEqual(wizard_merlin.spells[wizard_merlin][0], spell_b)
        self.assertEqual(wizard_morgana.spells[wizard_merlin][0], spell_c)
        # Merlin has cast Magic Missile on Morgana, and Stone Skin on himself
        self.assertEqual(wizard_merlin.spells[wizard_morgana][0].name, 'magic-missile')
        self.assertEqual(wizard_merlin.spells[wizard_merlin][0].name, 'stone-skin')
        # Morgana has cast Geas on Merlin
        self.assertEqual(wizard_morgana.spells[wizard_merlin][0].name, 'geas')
        # Merlin's first target was Morgana
        merlin_spells = wizard_merlin.spells
        merlin_spells_keys = list(merlin_spells.keys())
        self.assertTrue(merlin_spells_keys[0] in wizard_merlin.spells)
        self.assertEqual(merlin_spells_keys[0], wizard_morgana)
        # Merlin's second target was himself
        self.assertTrue(merlin_spells_keys[1] in wizard_merlin.spells)
        self.assertEqual(merlin_spells_keys[1], wizard_merlin)
        # Morgana's first target was Merlin
        morgana_spells_keys = list(wizard_morgana.spells.keys())
        self.assertTrue(morgana_spells_keys[0] in wizard_morgana.spells)
        self.assertEqual(morgana_spells_keys[0], wizard_merlin)
        # Merlin's first spell cast with himself as target is in the dictionary,
        # first by looking up directly with Merlin's instance object...
        self.assertEqual(wizard_merlin, wizard_merlin.spells[wizard_merlin][0].target)
        # ...and then with the instance object directly from the dictionary keys
        self.assertEqual(wizard_merlin, merlin_spells[merlin_spells_keys[1]][0].target)
        # Ensure Merlin's object is unique...
        self.assertEqual(id(wizard_merlin), id(merlin_spells_keys[1]))
        # ...and consistently hashed
        self.assertEqual(hash(wizard_merlin), hash(merlin_spells_keys[1]))
    def test_with_pickling(self):
        """Round-trip the cyclic graph through jsonpickle and re-verify it."""
        world = World()
        wizard_merlin = Wizard(world, 'Merlin')
        wizard_morgana = Wizard(world, 'Morgana')
        wizard_morgana_prime = Wizard(world, 'Morgana')
        self.assertEqual(wizard_morgana.__dict__, wizard_morgana_prime.__dict__)
        spell_a = Spell(wizard_merlin, wizard_morgana, 'magic-missile')
        spell_b = Spell(wizard_merlin, wizard_merlin, 'stone-skin')
        spell_c = Spell(wizard_morgana, wizard_merlin, 'geas')
        self.assertEqual(wizard_merlin.spells[wizard_morgana][0], spell_a)
        self.assertEqual(wizard_merlin.spells[wizard_merlin][0], spell_b)
        self.assertEqual(wizard_morgana.spells[wizard_merlin][0], spell_c)
        # keys=True is required so non-string dict keys (Wizard) survive.
        flat_world = encode(world, keys=True)
        u_world = decode(flat_world, keys=True)
        u_wizard_merlin = u_world.wizards[0]
        u_wizard_morgana = u_world.wizards[1]
        morgana_spells_encoded = encode(wizard_morgana.spells, keys=True)
        morgana_spells_decoded = decode(morgana_spells_encoded, keys=True)
        self.assertEqual(wizard_morgana.spells, morgana_spells_decoded)
        morgana_encoded = encode(wizard_morgana, keys=True)
        morgana_decoded = decode(morgana_encoded, keys=True)
        self.assertEqual(wizard_morgana, morgana_decoded)
        self.assertEqual(hash(wizard_morgana), hash(morgana_decoded))
        self.assertEqual(wizard_morgana.spells, morgana_decoded.spells)
        # Merlin has cast Magic Missile on Morgana, and Stone Skin on himself
        merlin_spells = u_wizard_merlin.spells
        self.assertEqual(merlin_spells[u_wizard_morgana][0].name, 'magic-missile')
        self.assertEqual(merlin_spells[u_wizard_merlin][0].name, 'stone-skin')
        # Morgana has cast Geas on Merlin
        self.assertEqual(u_wizard_morgana.spells[u_wizard_merlin][0].name, 'geas')
        # Merlin's first target was Morgana
        merlin_spells_keys = list(u_wizard_merlin.spells.keys())
        self.assertTrue(merlin_spells_keys[0] in u_wizard_merlin.spells)
        self.assertEqual(merlin_spells_keys[0], u_wizard_morgana)
        # Merlin's second target was himself
        self.assertTrue(merlin_spells_keys[1] in u_wizard_merlin.spells)
        self.assertEqual(merlin_spells_keys[1], u_wizard_merlin)
        # Morgana's first target was Merlin
        morgana_spells_keys = list(u_wizard_morgana.spells.keys())
        self.assertTrue(morgana_spells_keys[0] in u_wizard_morgana.spells)
        self.assertEqual(morgana_spells_keys[0], u_wizard_merlin)
        # Merlin's first spell cast with himself as target is in the dict.
        # First try the lookup with Merlin's instance object
        self.assertEqual(u_wizard_merlin, merlin_spells[u_wizard_merlin][0].target)
        # Next try the lookup with the object from the dictionary keys.
        self.assertEqual(
            u_wizard_merlin, merlin_spells[merlin_spells_keys[1]][0].target
        )
        # Ensure Merlin's object is unique and consistently hashed.
        self.assertEqual(id(u_wizard_merlin), id(merlin_spells_keys[1]))
        self.assertEqual(hash(u_wizard_merlin), hash(merlin_spells_keys[1]))
# Allow running this test module directly with `python wizard_test.py`.
if __name__ == '__main__':
    unittest.main()
| 39.714953 | 113 | 0.65643 | from __future__ import absolute_import, division, unicode_literals
import unittest
import collections
from jsonpickle import encode, decode
class World(object):
    """Holds every Wizard; Wizard.__init__ registers each new wizard here."""
    def __init__(self):
        self.wizards = []
class Wizard(object):
    """A named caster; `spells` maps target Wizard -> ordered list of Spell."""
    def __init__(self, world, name):
        self.name = name
        self.spells = collections.OrderedDict()
        world.wizards.append(self)
    def __hash__(self):
        # Hash is derived from the name only.
        return hash('Wizard %s' % self.name)
    def __eq__(self, other):
        # NOTE(review): zip truncates to the shorter spells dict, so extra
        # targets on one side are ignored — confirm this is intended.
        for (ka, va), (kb, vb) in zip(self.spells.items(), other.spells.items()):
            if ka.name != kb.name:
                print('Wizards differ: %s != %s' % (ka.name, kb.name))
                return False
            for sa, sb in zip(va, vb):
                if sa != sb:
                    print('Spells differ: %s != %s' % (sa.name, sb.name))
                    return False
        return self.name == other.name
    def __cmp__(self, other):
        # Python 2 ordering hook; `cmp` is undefined on Python 3, so this
        # method would raise NameError if ever invoked there.
        for (ka, va), (kb, vb) in zip(self.spells.items(), other.spells.items()):
            cmp_name = cmp(ka.name, kb.name)
            if cmp_name != 0:
                print('Wizards cmp: %s != %s' % (ka.name, kb.name))
                return cmp_name
            for sa, sb in zip(va, vb):
                cmp_spell = cmp(sa, sb)
                if cmp_spell != 0:
                    print('Spells cmp: %s != %s' % (sa.name, sb.name))
                    return cmp_spell
        return cmp(self.name, other.name)
class Spell(object):
    """A spell cast by `caster` on `target`; appends itself to the caster's list."""
    def __init__(self, caster, target, name):
        self.caster = caster
        self.target = target
        self.name = name
        # EAFP: create the per-target list on first cast against that target.
        try:
            spells = caster.spells[target]
        except KeyError:
            spells = caster.spells[target] = []
        spells.append(self)
    def __hash__(self):
        # Hash identity is the (name, caster name, target name) triple.
        return hash(
            'Spell %s by %s on %s' % (self.name, self.caster.name, self.target.name)
        )
    def __eq__(self, other):
        return (
            self.name == other.name
            and self.caster.name == other.caster.name
            and self.target.name == other.target.name
        )
    def __cmp__(self, other):
        # Python 2 ordering hook; `cmp` does not exist on Python 3.
        return (
            cmp(self.name, other.name)
            or cmp(self.caster.name, other.caster.name)
            or cmp(self.target.name, other.target.name)
        )
def hashsum(items):
    """Return the sum of the hashes of *items* (0 for an empty iterable)."""
    # Generator expression: no throwaway list (ruff PERF401/C4 style).
    return sum(hash(x) for x in items)
def compare_spells(a, b):
    """Return True if the zipped keys of dicts *a* and *b* compare equal.

    NOTE(review): values (va, vb) are never compared and zip truncates to the
    shorter dict — differently sized dicts can compare True.
    """
    for (ka, va), (kb, vb) in zip(a.items(), b.items()):
        if ka != kb:
            print('Keys differ: %s != %s' % (ka, kb))
            return False
    return True
class MagicTestCase(unittest.TestCase):
    """Checks cyclic, custom-keyed object graphs before/after jsonpickle."""
    def test_without_pickling(self):
        """Baseline behavior of the object graph with no serialization."""
        world = World()
        wizard_merlin = Wizard(world, 'Merlin')
        wizard_morgana = Wizard(world, 'Morgana')
        spell_a = Spell(wizard_merlin, wizard_morgana, 'magic-missile')
        spell_b = Spell(wizard_merlin, wizard_merlin, 'stone-skin')
        spell_c = Spell(wizard_morgana, wizard_merlin, 'geas')
        self.assertEqual(wizard_merlin.spells[wizard_morgana][0], spell_a)
        self.assertEqual(wizard_merlin.spells[wizard_merlin][0], spell_b)
        self.assertEqual(wizard_morgana.spells[wizard_merlin][0], spell_c)
        self.assertEqual(wizard_merlin.spells[wizard_morgana][0].name, 'magic-missile')
        self.assertEqual(wizard_merlin.spells[wizard_merlin][0].name, 'stone-skin')
        self.assertEqual(wizard_morgana.spells[wizard_merlin][0].name, 'geas')
        merlin_spells = wizard_merlin.spells
        merlin_spells_keys = list(merlin_spells.keys())
        self.assertTrue(merlin_spells_keys[0] in wizard_merlin.spells)
        self.assertEqual(merlin_spells_keys[0], wizard_morgana)
        # Merlin's second target was himself
        self.assertTrue(merlin_spells_keys[1] in wizard_merlin.spells)
        self.assertEqual(merlin_spells_keys[1], wizard_merlin)
        morgana_spells_keys = list(wizard_morgana.spells.keys())
        self.assertTrue(morgana_spells_keys[0] in wizard_morgana.spells)
        self.assertEqual(morgana_spells_keys[0], wizard_merlin)
        # Merlin's first spell cast with himself as target is in the dictionary,
        self.assertEqual(wizard_merlin, wizard_merlin.spells[wizard_merlin][0].target)
        # ...and then with the instance object directly from the dictionary keys
        self.assertEqual(wizard_merlin, merlin_spells[merlin_spells_keys[1]][0].target)
        # Ensure Merlin's object is unique...
        self.assertEqual(id(wizard_merlin), id(merlin_spells_keys[1]))
        self.assertEqual(hash(wizard_merlin), hash(merlin_spells_keys[1]))
    def test_with_pickling(self):
        """Round-trip the graph through jsonpickle (keys=True) and re-verify."""
        world = World()
        wizard_merlin = Wizard(world, 'Merlin')
        wizard_morgana = Wizard(world, 'Morgana')
        wizard_morgana_prime = Wizard(world, 'Morgana')
        self.assertEqual(wizard_morgana.__dict__, wizard_morgana_prime.__dict__)
        spell_a = Spell(wizard_merlin, wizard_morgana, 'magic-missile')
        spell_b = Spell(wizard_merlin, wizard_merlin, 'stone-skin')
        spell_c = Spell(wizard_morgana, wizard_merlin, 'geas')
        self.assertEqual(wizard_merlin.spells[wizard_morgana][0], spell_a)
        self.assertEqual(wizard_merlin.spells[wizard_merlin][0], spell_b)
        self.assertEqual(wizard_morgana.spells[wizard_merlin][0], spell_c)
        flat_world = encode(world, keys=True)
        u_world = decode(flat_world, keys=True)
        u_wizard_merlin = u_world.wizards[0]
        u_wizard_morgana = u_world.wizards[1]
        morgana_spells_encoded = encode(wizard_morgana.spells, keys=True)
        morgana_spells_decoded = decode(morgana_spells_encoded, keys=True)
        self.assertEqual(wizard_morgana.spells, morgana_spells_decoded)
        morgana_encoded = encode(wizard_morgana, keys=True)
        morgana_decoded = decode(morgana_encoded, keys=True)
        self.assertEqual(wizard_morgana, morgana_decoded)
        self.assertEqual(hash(wizard_morgana), hash(morgana_decoded))
        self.assertEqual(wizard_morgana.spells, morgana_decoded.spells)
        merlin_spells = u_wizard_merlin.spells
        self.assertEqual(merlin_spells[u_wizard_morgana][0].name, 'magic-missile')
        self.assertEqual(merlin_spells[u_wizard_merlin][0].name, 'stone-skin')
        self.assertEqual(u_wizard_morgana.spells[u_wizard_merlin][0].name, 'geas')
        merlin_spells_keys = list(u_wizard_merlin.spells.keys())
        self.assertTrue(merlin_spells_keys[0] in u_wizard_merlin.spells)
        self.assertEqual(merlin_spells_keys[0], u_wizard_morgana)
        # Merlin's second target was himself
        self.assertTrue(merlin_spells_keys[1] in u_wizard_merlin.spells)
        self.assertEqual(merlin_spells_keys[1], u_wizard_merlin)
        morgana_spells_keys = list(u_wizard_morgana.spells.keys())
        self.assertTrue(morgana_spells_keys[0] in u_wizard_morgana.spells)
        self.assertEqual(morgana_spells_keys[0], u_wizard_merlin)
        # Merlin's first spell cast with himself as target is in the dict.
        self.assertEqual(u_wizard_merlin, merlin_spells[u_wizard_merlin][0].target)
        # Next try the lookup with the object from the dictionary keys.
        self.assertEqual(
            u_wizard_merlin, merlin_spells[merlin_spells_keys[1]][0].target
        )
        # Ensure Merlin's object is unique and consistently hashed.
        self.assertEqual(id(u_wizard_merlin), id(merlin_spells_keys[1]))
        self.assertEqual(hash(u_wizard_merlin), hash(merlin_spells_keys[1]))
# Run the suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
| true | true |
f7f8e441fd5ef08542f58f79c890f052003abc09 | 1,225 | py | Python | kpurawler.py | rendybjunior/kpurawler | 14d1693a59d64ce0ef6b4977af8b5ebfbc6b383e | [
"Apache-2.0"
] | 2 | 2019-04-04T15:18:14.000Z | 2019-04-19T10:41:02.000Z | kpurawler.py | rendybjunior/kpurawler | 14d1693a59d64ce0ef6b4977af8b5ebfbc6b383e | [
"Apache-2.0"
] | null | null | null | kpurawler.py | rendybjunior/kpurawler | 14d1693a59d64ce0ef6b4977af8b5ebfbc6b383e | [
"Apache-2.0"
] | null | null | null | import sys
import time
import scrapy
import json
class DctSpider(scrapy.Spider):
    """Download the track-record PDF of every candidate (DCT) in one dapil.

    Fetches the candidate-list JSON for an electoral district from the KPU
    site, then requests each candidate's document number 2 (track record)
    and saves it as <calon_id>.pdf in the working directory.
    """
    name = 'dct_crawler'
    dapil_id = '4782'  # ID Dapil Cianjur 2
    # Millisecond timestamp, mimicking the site's cache-busting AJAX parameter.
    current_ts = round(time.time() * 1000)
    start_urls = ['https://infopemilu.kpu.go.id/pileg2019/pencalonan/pengajuan-calon/' +
                  str(dapil_id) + '/calonDct.json?_=' + str(current_ts)]

    def parse(self, response):
        """Parse the candidate-list JSON and queue one PDF request per candidate."""
        rows = json.loads(response.text)
        for row in rows:
            calon_id = row.get('id')
            next_page = ("https://silonpemilu.kpu.go.id/publik/calon/" +
                         str(calon_id) + "/2")  # 2 denotes 3rd file we download: track record
            yield scrapy.Request(next_page, callback=self.save_pdf)

    def save_pdf(self, response):
        """Write the response body to <calon_id>.pdf, skipping empty or error responses."""
        path = response.url.split('/')[-2] + ".pdf"
        if response.status == 200:
            download_size = len(response.body)
            if download_size == 0:
                # Logger.warn is a deprecated alias (removed in Python 3.13);
                # use warning() instead.
                self.logger.warning('Zero kb response on %s', path)
            else:
                self.logger.info('Saving PDF %s', path)
                with open(path, 'wb') as f:
                    f.write(response.body)
        else:
            self.logger.warning('Not success response %s', response.status)
import time
import scrapy
import json
class DctSpider(scrapy.Spider):
    """Scrapy spider that saves each candidate's track-record PDF for one dapil."""
    name = 'dct_crawler'
    dapil_id = '4782'
    # Cache-busting millisecond timestamp appended to the JSON endpoint URL.
    current_ts = round(time.time() * 1000)
    start_urls = ['https://infopemilu.kpu.go.id/pileg2019/pencalonan/pengajuan-calon/' +
                  str(dapil_id) + '/calonDct.json?_=' + str(current_ts)]

    def parse(self, response):
        """Queue one track-record PDF request per candidate in the JSON list."""
        rows = json.loads(response.text)
        for row in rows:
            calon_id = row.get('id')
            # Document type "2" is the candidate's track-record file.
            next_page = ("https://silonpemilu.kpu.go.id/publik/calon/" +
                         str(calon_id) + "/2")
            yield scrapy.Request(next_page, callback=self.save_pdf)

    def save_pdf(self, response):
        """Persist a non-empty 200 response as <calon_id>.pdf."""
        path = response.url.split('/')[-2] + ".pdf"
        if response.status == 200:
            download_size = len(response.body)
            if download_size == 0:
                # warning() replaces the deprecated warn() alias.
                self.logger.warning('Zero kb response on %s', path)
            else:
                self.logger.info('Saving PDF %s', path)
                with open(path, 'wb') as f:
                    f.write(response.body)
        else:
            self.logger.warning('Not success response %s', response.status)
f7f8e4c84a19ab19e4a8a596b55b41d235ea9f09 | 568 | py | Python | webapp/migrations/versions/e9fbe7694450_add_required_grants.py | ramboldio/steuerlotse | eeebb91be6c13435d621d105ffdb19e972c8f649 | [
"MIT"
] | null | null | null | webapp/migrations/versions/e9fbe7694450_add_required_grants.py | ramboldio/steuerlotse | eeebb91be6c13435d621d105ffdb19e972c8f649 | [
"MIT"
] | null | null | null | webapp/migrations/versions/e9fbe7694450_add_required_grants.py | ramboldio/steuerlotse | eeebb91be6c13435d621d105ffdb19e972c8f649 | [
"MIT"
] | null | null | null | """Add required grants
Revision ID: e9fbe7694450
Revises: c0b039d92792
Create Date: 2021-04-19 12:59:52.861502
"""
from alembic import op
import sqlalchemy as sa
from app import app
# revision identifiers, used by Alembic.
revision = 'e9fbe7694450'
down_revision = 'c0b039d92792'
# This migration is not on a named branch and has no extra dependencies.
branch_labels = None
depends_on = None
def upgrade():
    """Grant the 'steuerlotse' role full access to the user table and its id sequence."""
    # Only staging/production databases have the steuerlotse role; local/dev
    # environments skip the grants entirely.
    if app.config['ENV'] in ('staging', 'production'):
        op.execute("""
            GRANT ALL ON TABLE "user" TO steuerlotse;
            GRANT ALL ON SEQUENCE user_id_seq TO steuerlotse;
        """)
def downgrade():
    """Intentionally a no-op: the granted privileges are left in place."""
    return None
| 18.322581 | 57 | 0.691901 | from alembic import op
import sqlalchemy as sa
from app import app
# Alembic revision identifiers for this migration.
revision = 'e9fbe7694450'
down_revision = 'c0b039d92792'
branch_labels = None
depends_on = None
def upgrade():
    """Grant the application role access to the user table and its id sequence."""
    # Grants only apply where the 'steuerlotse' role exists (staging/production).
    if app.config['ENV'] in ('staging', 'production'):
        op.execute("""
            GRANT ALL ON TABLE "user" TO steuerlotse;
            GRANT ALL ON SEQUENCE user_id_seq TO steuerlotse;
        """)
def downgrade():
    # No-op: the grants are not revoked on downgrade.
    pass
| true | true |
f7f8e5da038078ff467546f516187ea82b512845 | 41 | py | Python | back/old/similar.py | npavlinov/OxfordHack2018 | 271d3b857cb43dd45cf7897b3316699d562c20c5 | [
"MIT"
] | null | null | null | back/old/similar.py | npavlinov/OxfordHack2018 | 271d3b857cb43dd45cf7897b3316699d562c20c5 | [
"MIT"
] | null | null | null | back/old/similar.py | npavlinov/OxfordHack2018 | 271d3b857cb43dd45cf7897b3316699d562c20c5 | [
"MIT"
] | null | null | null |
def similarity(a, b):
    """Return the result of comparing *a* and *b* for equality (``a == b``)."""
    are_equal = (a == b)
    return are_equal
| 6.833333 | 21 | 0.560976 |
def similarity(a, b):
    """Return the result of the equality comparison ``a == b``."""
    return a==b
| true | true |
f7f8e63167f09b5a75d7932a285bf2185a61939f | 9,674 | py | Python | homeassistant/components/google.py | adolfoeliazat/voidhomecontrol | 6d733253811c553912e46e24debec818b28b0688 | [
"Apache-2.0"
] | 1 | 2021-08-06T09:54:39.000Z | 2021-08-06T09:54:39.000Z | homeassistant/components/google.py | adolfoeliazat/voidhomecontrol | 6d733253811c553912e46e24debec818b28b0688 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/google.py | adolfoeliazat/voidhomecontrol | 6d733253811c553912e46e24debec818b28b0688 | [
"Apache-2.0"
] | 1 | 2020-08-26T20:54:14.000Z | 2020-08-26T20:54:14.000Z | """
Support for Google - Calendar Event Devices.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/google/
NOTE TO OTHER DEVELOPERS: IF YOU ADD MORE SCOPES TO THE OAUTH THAN JUST
CALENDAR THEN USERS WILL NEED TO DELETE THEIR TOKEN_FILE. THEY WILL LOSE THEIR
REFRESH_TOKEN PIECE WHEN RE-AUTHENTICATING TO ADD MORE API ACCESS
IT'S BEST TO JUST HAVE SEPARATE OAUTH FOR DIFFERENT PIECES OF GOOGLE
"""
import logging
import os
import yaml
import voluptuous as vol
from voluptuous.error import Error as VoluptuousError
import homeassistant.helpers.config_validation as cv
import homeassistant.loader as loader
from homeassistant.setup import setup_component
from homeassistant.helpers import discovery
from homeassistant.helpers.entity import generate_entity_id
from homeassistant.helpers.event import track_time_change
from homeassistant.util import convert, dt
# Third-party packages Home Assistant installs for this component.
REQUIREMENTS = [
    'google-api-python-client==1.6.2',
    'oauth2client==4.0.0',
]
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'google'
ENTITY_ID_FORMAT = DOMAIN + '.{}'
# Configuration keys accepted in configuration.yaml / the devices YAML file.
CONF_CLIENT_ID = 'client_id'
CONF_CLIENT_SECRET = 'client_secret'
CONF_TRACK_NEW = 'track_new_calendar'
CONF_CAL_ID = 'cal_id'
CONF_DEVICE_ID = 'device_id'
CONF_NAME = 'name'
CONF_ENTITIES = 'entities'
CONF_TRACK = 'track'
CONF_SEARCH = 'search'
CONF_OFFSET = 'offset'
DEFAULT_CONF_TRACK_NEW = True
DEFAULT_CONF_OFFSET = '!!'
# Persistent-notification identifiers used during OAuth setup.
NOTIFICATION_ID = 'google_calendar_notification'
NOTIFICATION_TITLE = 'Google Calendar Setup'
GROUP_NAME_ALL_CALENDARS = "Google Calendar Sensors"
# Services this component registers.
SERVICE_SCAN_CALENDARS = 'scan_for_calendars'
SERVICE_FOUND_CALENDARS = 'found_calendar'
# Key under hass.data where discovered calendars are cached.
DATA_INDEX = 'google_calendars'
YAML_DEVICES = '{}_calendars.yaml'.format(DOMAIN)
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
TOKEN_FILE = '.{}.token'.format(DOMAIN)
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_CLIENT_ID): cv.string,
        vol.Required(CONF_CLIENT_SECRET): cv.string,
        vol.Optional(CONF_TRACK_NEW): cv.boolean,
    })
}, extra=vol.ALLOW_EXTRA)
# Schema for one tracked entity within a calendar entry.
_SINGLE_CALSEARCH_CONFIG = vol.Schema({
    vol.Required(CONF_NAME): cv.string,
    vol.Required(CONF_DEVICE_ID): cv.string,
    vol.Optional(CONF_TRACK): cv.boolean,
    vol.Optional(CONF_SEARCH): vol.Any(cv.string, None),
    vol.Optional(CONF_OFFSET): cv.string,
})
# Schema for a calendar entry in the devices YAML file.
DEVICE_SCHEMA = vol.Schema({
    vol.Required(CONF_CAL_ID): cv.string,
    vol.Required(CONF_ENTITIES, None):
        vol.All(cv.ensure_list, [_SINGLE_CALSEARCH_CONFIG]),
}, extra=vol.ALLOW_EXTRA)
def do_authentication(hass, config):
    """Notify user of actions and authenticate.
    Notify user of user_code and verification_url then poll
    until we have an access token.
    """
    from oauth2client.client import (
        OAuth2WebServerFlow,
        OAuth2DeviceCodeError,
        FlowExchangeError
    )
    from oauth2client.file import Storage
    oauth = OAuth2WebServerFlow(
        config[CONF_CLIENT_ID],
        config[CONF_CLIENT_SECRET],
        'https://www.googleapis.com/auth/calendar.readonly',
        'Home-Assistant.io',
    )
    persistent_notification = loader.get_component('persistent_notification')
    try:
        # Start the OAuth device flow: get a user code + verification URL.
        dev_flow = oauth.step1_get_device_and_user_codes()
    except OAuth2DeviceCodeError as err:
        persistent_notification.create(
            hass, 'Error: {}<br />You will need to restart hass after fixing.'
            ''.format(err),
            title=NOTIFICATION_TITLE,
            notification_id=NOTIFICATION_ID)
        return False
    persistent_notification.create(
        hass, 'In order to authorize Home-Assistant to view your calendars '
        'you must visit: <a href="{}" target="_blank">{}</a> and enter '
        'code: {}'.format(dev_flow.verification_url,
                          dev_flow.verification_url,
                          dev_flow.user_code),
        title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID
    )
    def step2_exchange(now):
        """Keep trying to validate the user_code until it expires."""
        # `listener` is bound below, after this closure is defined; it is only
        # called at poll time, so the late binding is safe.
        # NOTE(review): 'Authenication' typo in the user-facing message below;
        # left untouched here since it is a runtime string.
        if now >= dt.as_local(dev_flow.user_code_expiry):
            persistent_notification.create(
                hass, 'Authenication code expired, please restart '
                'Home-Assistant and try again',
                title=NOTIFICATION_TITLE,
                notification_id=NOTIFICATION_ID)
            listener()
        try:
            credentials = oauth.step2_exchange(device_flow_info=dev_flow)
        except FlowExchangeError:
            # not ready yet, call again
            return
        # Persist the credentials, finish setup, and stop polling.
        storage = Storage(hass.config.path(TOKEN_FILE))
        storage.put(credentials)
        do_setup(hass, config)
        listener()
        persistent_notification.create(
            hass, 'We are all setup now. Check {} for calendars that have '
            'been found'.format(YAML_DEVICES),
            title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID)
    # Poll at the interval Google asks for; calling `listener()` unsubscribes.
    listener = track_time_change(hass, step2_exchange,
                                 second=range(0, 60, dev_flow.interval))
    return True
def setup(hass, config):
    """Set up the Google platform."""
    # Make sure the calendar index exists before anything touches it.
    hass.data.setdefault(DATA_INDEX, {})
    domain_conf = config.get(DOMAIN, {})
    # A stored token means we are already authorized; otherwise kick off
    # the OAuth device flow first.
    if os.path.isfile(hass.config.path(TOKEN_FILE)):
        do_setup(hass, domain_conf)
    else:
        do_authentication(hass, domain_conf)
    return True
def setup_services(hass, track_new_found_calendars, calendar_service):
    """Set up the service listeners."""
    def _found_calendar(call):
        """Check if we know about a calendar and generate PLATFORM_DISCOVER."""
        calendar = get_calendar_info(hass, call.data)
        cal_id = calendar[CONF_CAL_ID]
        if hass.data[DATA_INDEX].get(cal_id) is not None:
            # Already known -- nothing to discover.
            return
        hass.data[DATA_INDEX][cal_id] = calendar
        update_config(hass.config.path(YAML_DEVICES),
                      hass.data[DATA_INDEX][cal_id])
        discovery.load_platform(hass, 'calendar', DOMAIN,
                                hass.data[DATA_INDEX][cal_id])

    hass.services.register(DOMAIN, SERVICE_FOUND_CALENDARS, _found_calendar,
                           None, schema=None)

    def _scan_for_calendars(service):
        """Scan for new calendars."""
        google = calendar_service.get()
        cal_list = google.calendarList()  # pylint: disable=no-member
        for calendar in cal_list.list().execute()['items']:
            calendar['track'] = track_new_found_calendars
            hass.services.call(DOMAIN, SERVICE_FOUND_CALENDARS, calendar)

    hass.services.register(DOMAIN, SERVICE_SCAN_CALENDARS,
                           _scan_for_calendars,
                           None, schema=None)
    return True
def do_setup(hass, config):
    """Run the setup after we have everything configured."""
    # Load calendars the user has configured.
    hass.data[DATA_INDEX] = load_config(hass.config.path(YAML_DEVICES))

    calendar_service = GoogleCalendarService(hass.config.path(TOKEN_FILE))
    track_new = convert(config.get(CONF_TRACK_NEW), bool,
                        DEFAULT_CONF_TRACK_NEW)
    setup_services(hass, track_new, calendar_service)

    # Ensure the calendar component itself is loaded before discovery.
    setup_component(hass, 'calendar', config)
    for calendar in hass.data[DATA_INDEX].values():
        discovery.load_platform(hass, 'calendar', DOMAIN, calendar)

    # Kick off a scan so any new calendars are picked up immediately.
    hass.services.call(DOMAIN, SERVICE_SCAN_CALENDARS, None)
    return True
class GoogleCalendarService(object):
    """Calendar service interface to Google."""

    def __init__(self, token_file):
        """Init the Google Calendar service with a stored-token path."""
        self.token_file = token_file

    def get(self):
        """Get the calendar service from the storage file token."""
        # Imported lazily so merely constructing the wrapper never pulls
        # in the Google client libraries.
        import httplib2
        from oauth2client.file import Storage
        from googleapiclient import discovery as google_discovery
        stored_credentials = Storage(self.token_file).get()
        authorized_http = stored_credentials.authorize(httplib2.Http())
        return google_discovery.build(
            'calendar', 'v3', http=authorized_http, cache_discovery=False)
def get_calendar_info(hass, calendar):
    """Convert data from Google into DEVICE_SCHEMA."""
    entity = {
        CONF_TRACK: calendar['track'],
        CONF_NAME: calendar['summary'],
        CONF_DEVICE_ID: generate_entity_id(
            '{}', calendar['summary'], hass=hass),
    }
    return DEVICE_SCHEMA({
        CONF_CAL_ID: calendar['id'],
        CONF_ENTITIES: [entity],
    })
def load_config(path):
    """Load the google_calendar_devices.yaml.

    Returns a dict mapping cal_id -> validated device config.  Entries
    that fail DEVICE_SCHEMA validation are skipped with a warning; a
    missing or empty file yields an empty dict.
    """
    calendars = {}
    try:
        with open(path) as config_file:
            # safe_load: the device file contains only plain data, and
            # yaml.load without an explicit Loader can construct
            # arbitrary Python objects.
            data = yaml.safe_load(config_file)
        # Guard against an empty file: safe_load returns None, which the
        # original code would have iterated and crashed on (TypeError is
        # not caught below).
        for calendar in data or ():
            try:
                calendars[calendar[CONF_CAL_ID]] = DEVICE_SCHEMA(calendar)
            except VoluptuousError as exception:
                # keep going
                _LOGGER.warning("Calendar Invalid Data: %s", exception)
    except FileNotFoundError:
        # No device file has been written yet.
        return {}
    return calendars
def update_config(path, calendar):
    """Write the google_calendar_devices.yaml."""
    # Serialize first, then append in a single write.
    serialized = yaml.dump([calendar], default_flow_style=False)
    with open(path, 'a') as out:
        out.write('\n' + serialized)
| 32.904762 | 79 | 0.666839 | import logging
import os
import yaml
import voluptuous as vol
from voluptuous.error import Error as VoluptuousError
import homeassistant.helpers.config_validation as cv
import homeassistant.loader as loader
from homeassistant.setup import setup_component
from homeassistant.helpers import discovery
from homeassistant.helpers.entity import generate_entity_id
from homeassistant.helpers.event import track_time_change
from homeassistant.util import convert, dt
REQUIREMENTS = [
'google-api-python-client==1.6.2',
'oauth2client==4.0.0',
]
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'google'
ENTITY_ID_FORMAT = DOMAIN + '.{}'
CONF_CLIENT_ID = 'client_id'
CONF_CLIENT_SECRET = 'client_secret'
CONF_TRACK_NEW = 'track_new_calendar'
CONF_CAL_ID = 'cal_id'
CONF_DEVICE_ID = 'device_id'
CONF_NAME = 'name'
CONF_ENTITIES = 'entities'
CONF_TRACK = 'track'
CONF_SEARCH = 'search'
CONF_OFFSET = 'offset'
DEFAULT_CONF_TRACK_NEW = True
DEFAULT_CONF_OFFSET = '!!'
NOTIFICATION_ID = 'google_calendar_notification'
NOTIFICATION_TITLE = 'Google Calendar Setup'
GROUP_NAME_ALL_CALENDARS = "Google Calendar Sensors"
SERVICE_SCAN_CALENDARS = 'scan_for_calendars'
SERVICE_FOUND_CALENDARS = 'found_calendar'
DATA_INDEX = 'google_calendars'
YAML_DEVICES = '{}_calendars.yaml'.format(DOMAIN)
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
TOKEN_FILE = '.{}.token'.format(DOMAIN)
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Optional(CONF_TRACK_NEW): cv.boolean,
})
}, extra=vol.ALLOW_EXTRA)
_SINGLE_CALSEARCH_CONFIG = vol.Schema({
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_DEVICE_ID): cv.string,
vol.Optional(CONF_TRACK): cv.boolean,
vol.Optional(CONF_SEARCH): vol.Any(cv.string, None),
vol.Optional(CONF_OFFSET): cv.string,
})
DEVICE_SCHEMA = vol.Schema({
vol.Required(CONF_CAL_ID): cv.string,
vol.Required(CONF_ENTITIES, None):
vol.All(cv.ensure_list, [_SINGLE_CALSEARCH_CONFIG]),
}, extra=vol.ALLOW_EXTRA)
def do_authentication(hass, config):
from oauth2client.client import (
OAuth2WebServerFlow,
OAuth2DeviceCodeError,
FlowExchangeError
)
from oauth2client.file import Storage
oauth = OAuth2WebServerFlow(
config[CONF_CLIENT_ID],
config[CONF_CLIENT_SECRET],
'https://www.googleapis.com/auth/calendar.readonly',
'Home-Assistant.io',
)
persistent_notification = loader.get_component('persistent_notification')
try:
dev_flow = oauth.step1_get_device_and_user_codes()
except OAuth2DeviceCodeError as err:
persistent_notification.create(
hass, 'Error: {}<br />You will need to restart hass after fixing.'
''.format(err),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID)
return False
persistent_notification.create(
hass, 'In order to authorize Home-Assistant to view your calendars '
'you must visit: <a href="{}" target="_blank">{}</a> and enter '
'code: {}'.format(dev_flow.verification_url,
dev_flow.verification_url,
dev_flow.user_code),
title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID
)
def step2_exchange(now):
if now >= dt.as_local(dev_flow.user_code_expiry):
persistent_notification.create(
hass, 'Authenication code expired, please restart '
'Home-Assistant and try again',
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID)
listener()
try:
credentials = oauth.step2_exchange(device_flow_info=dev_flow)
except FlowExchangeError:
return
storage = Storage(hass.config.path(TOKEN_FILE))
storage.put(credentials)
do_setup(hass, config)
listener()
persistent_notification.create(
hass, 'We are all setup now. Check {} for calendars that have '
'been found'.format(YAML_DEVICES),
title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID)
listener = track_time_change(hass, step2_exchange,
second=range(0, 60, dev_flow.interval))
return True
def setup(hass, config):
if DATA_INDEX not in hass.data:
hass.data[DATA_INDEX] = {}
conf = config.get(DOMAIN, {})
token_file = hass.config.path(TOKEN_FILE)
if not os.path.isfile(token_file):
do_authentication(hass, conf)
else:
do_setup(hass, conf)
return True
def setup_services(hass, track_new_found_calendars, calendar_service):
def _found_calendar(call):
calendar = get_calendar_info(hass, call.data)
if hass.data[DATA_INDEX].get(calendar[CONF_CAL_ID], None) is not None:
return
hass.data[DATA_INDEX].update({calendar[CONF_CAL_ID]: calendar})
update_config(
hass.config.path(YAML_DEVICES),
hass.data[DATA_INDEX][calendar[CONF_CAL_ID]]
)
discovery.load_platform(hass, 'calendar', DOMAIN,
hass.data[DATA_INDEX][calendar[CONF_CAL_ID]])
hass.services.register(
DOMAIN, SERVICE_FOUND_CALENDARS, _found_calendar,
None, schema=None)
def _scan_for_calendars(service):
service = calendar_service.get()
cal_list = service.calendarList()
calendars = cal_list.list().execute()['items']
for calendar in calendars:
calendar['track'] = track_new_found_calendars
hass.services.call(DOMAIN, SERVICE_FOUND_CALENDARS,
calendar)
hass.services.register(
DOMAIN, SERVICE_SCAN_CALENDARS,
_scan_for_calendars,
None, schema=None)
return True
def do_setup(hass, config):
hass.data[DATA_INDEX] = load_config(hass.config.path(YAML_DEVICES))
calendar_service = GoogleCalendarService(hass.config.path(TOKEN_FILE))
track_new_found_calendars = convert(config.get(CONF_TRACK_NEW),
bool, DEFAULT_CONF_TRACK_NEW)
setup_services(hass, track_new_found_calendars, calendar_service)
setup_component(hass, 'calendar', config)
for calendar in hass.data[DATA_INDEX].values():
discovery.load_platform(hass, 'calendar', DOMAIN, calendar)
hass.services.call(DOMAIN, SERVICE_SCAN_CALENDARS, None)
return True
class GoogleCalendarService(object):
def __init__(self, token_file):
self.token_file = token_file
def get(self):
import httplib2
from oauth2client.file import Storage
from googleapiclient import discovery as google_discovery
credentials = Storage(self.token_file).get()
http = credentials.authorize(httplib2.Http())
service = google_discovery.build(
'calendar', 'v3', http=http, cache_discovery=False)
return service
def get_calendar_info(hass, calendar):
calendar_info = DEVICE_SCHEMA({
CONF_CAL_ID: calendar['id'],
CONF_ENTITIES: [{
CONF_TRACK: calendar['track'],
CONF_NAME: calendar['summary'],
CONF_DEVICE_ID: generate_entity_id(
'{}', calendar['summary'], hass=hass),
}]
})
return calendar_info
def load_config(path):
calendars = {}
try:
with open(path) as file:
data = yaml.load(file)
for calendar in data:
try:
calendars.update({calendar[CONF_CAL_ID]:
DEVICE_SCHEMA(calendar)})
except VoluptuousError as exception:
_LOGGER.warning("Calendar Invalid Data: %s", exception)
except FileNotFoundError:
return {}
return calendars
def update_config(path, calendar):
with open(path, 'a') as out:
out.write('\n')
yaml.dump([calendar], out, default_flow_style=False)
| true | true |
f7f8e661b1844ac4cfad83acdfcf206762e9ef77 | 37,604 | py | Python | lacusClient_p2pTest/app_implementation/restfulServer/flask-restful/tests/test_api.py | tavog96/distribuidosProyecto | 8aee06ca580389412809353ac312c417aa1163fa | [
"MIT"
] | 2 | 2020-01-18T15:07:37.000Z | 2020-01-18T15:07:38.000Z | lacusClient_p2pTest/app_implementation/restfulServer/flask-restful/tests/test_api.py | tavog96/distribuidosProyecto | 8aee06ca580389412809353ac312c417aa1163fa | [
"MIT"
] | null | null | null | lacusClient_p2pTest/app_implementation/restfulServer/flask-restful/tests/test_api.py | tavog96/distribuidosProyecto | 8aee06ca580389412809353ac312c417aa1163fa | [
"MIT"
] | null | null | null | import unittest
import json
from flask import Flask, Blueprint, redirect, views, abort as flask_abort
from flask.signals import got_request_exception, signals_available
try:
from mock import Mock
except:
# python3
from unittest.mock import Mock
import flask
import werkzeug
from werkzeug.exceptions import HTTPException, Unauthorized, BadRequest, NotFound, _aborter
from werkzeug.http import quote_etag, unquote_etag
from flask_restful.utils import http_status_message, unpack
import flask_restful
import flask_restful.fields
from flask_restful import OrderedDict
from json import dumps, loads, JSONEncoder
#noinspection PyUnresolvedReferences
from nose.tools import assert_equals, assert_true, assert_false # you need it for tests in form of continuations
import six
def check_unpack(expected, value):
    """Assert that ``unpack`` produced the expected (data, code, headers) triple."""
    assert_equals(expected, value)
def test_unpack():
    """Nose generator test for flask_restful.utils.unpack.

    Each yielded triple is (check function, expected, actual); nose calls
    check_unpack(expected, actual) as a separate test case.
    """
    yield check_unpack, ("hey", 200, {}), unpack("hey")
    yield check_unpack, (("hey",), 200, {}), unpack(("hey",))
    yield check_unpack, ("hey", 201, {}), unpack(("hey", 201))
    yield check_unpack, ("hey", 201, "foo"), unpack(("hey", 201, "foo"))
    yield check_unpack, (["hey", 201], 200, {}), unpack(["hey", 201])
# Add a dummy Resource to verify that the app is properly set.
class HelloWorld(flask_restful.Resource):
    """Dummy resource used to verify that the app is properly set."""

    def get(self):
        # Empty JSON object is enough for routing/endpoint assertions.
        return {}
class BadMojoError(HTTPException):
    """Custom HTTPException used to exercise the Api ``errors`` mapping."""
    pass
# Resource that always errors out
class HelloBomb(flask_restful.Resource):
    """Resource that always errors out: GET raises BadMojoError."""

    def get(self):
        raise BadMojoError("It burns..")
class APITestCase(unittest.TestCase):
def test_http_code(self):
self.assertEquals(http_status_message(200), 'OK')
self.assertEquals(http_status_message(404), 'Not Found')
def test_unauthorized_no_challenge_by_default(self):
app = Flask(__name__)
api = flask_restful.Api(app)
response = Mock()
response.headers = {}
with app.test_request_context('/foo'):
response = api.unauthorized(response)
assert_false('WWW-Authenticate' in response.headers)
def test_unauthorized(self):
app = Flask(__name__)
api = flask_restful.Api(app, serve_challenge_on_401=True)
response = Mock()
response.headers = {}
with app.test_request_context('/foo'):
response = api.unauthorized(response)
self.assertEquals(response.headers['WWW-Authenticate'],
'Basic realm="flask-restful"')
def test_unauthorized_custom_realm(self):
app = Flask(__name__)
app.config['HTTP_BASIC_AUTH_REALM'] = 'Foo'
api = flask_restful.Api(app, serve_challenge_on_401=True)
response = Mock()
response.headers = {}
with app.test_request_context('/foo'):
response = api.unauthorized(response)
self.assertEquals(response.headers['WWW-Authenticate'], 'Basic realm="Foo"')
def test_handle_error_401_sends_challege_default_realm(self):
app = Flask(__name__)
api = flask_restful.Api(app, serve_challenge_on_401=True)
exception = HTTPException()
exception.code = 401
exception.data = {'foo': 'bar'}
with app.test_request_context('/foo'):
resp = api.handle_error(exception)
self.assertEquals(resp.status_code, 401)
self.assertEquals(resp.headers['WWW-Authenticate'],
'Basic realm="flask-restful"')
def test_handle_error_401_sends_challege_configured_realm(self):
app = Flask(__name__)
app.config['HTTP_BASIC_AUTH_REALM'] = 'test-realm'
api = flask_restful.Api(app, serve_challenge_on_401=True)
with app.test_request_context('/foo'):
resp = api.handle_error(Unauthorized())
self.assertEquals(resp.status_code, 401)
self.assertEquals(resp.headers['WWW-Authenticate'],
'Basic realm="test-realm"')
def test_handle_error_does_not_swallow_exceptions(self):
app = Flask(__name__)
api = flask_restful.Api(app)
exception = BadRequest('x')
with app.test_request_context('/foo'):
resp = api.handle_error(exception)
self.assertEquals(resp.status_code, 400)
self.assertEquals(resp.get_data(), b'{"message": "x"}\n')
def test_handle_error_does_not_swallow_custom_exceptions(self):
app = Flask(__name__)
errors = {'BadMojoError': {'status': 409, 'message': 'go away'}}
api = flask_restful.Api(app, errors=errors)
api.add_resource(HelloBomb, '/bomb')
app = app.test_client()
resp = app.get('/bomb')
self.assertEquals(resp.status_code, 409)
self.assertEquals(resp.content_type, api.default_mediatype)
resp_dict = json.loads(resp.data.decode())
self.assertEqual(resp_dict.get('status'), 409)
self.assertEqual(resp_dict.get('message'), 'go away')
def test_handle_error_does_not_swallow_abort_response(self):
class HelloBombAbort(flask_restful.Resource):
def get(self):
raise HTTPException(response=flask.make_response("{}", 403))
app = Flask(__name__)
api = flask_restful.Api(app)
api.add_resource(HelloBombAbort, '/bomb')
app = app.test_client()
resp = app.get('/bomb')
resp_dict = json.loads(resp.data.decode())
self.assertEquals(resp.status_code, 403)
self.assertDictEqual(resp_dict, {})
def test_marshal(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
marshal_dict = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal(marshal_dict, fields)
self.assertEquals(output, {'foo': 'bar'})
def test_marshal_with_envelope(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
marshal_dict = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal(marshal_dict, fields, envelope='hey')
self.assertEquals(output, {'hey': {'foo': 'bar'}})
def test_marshal_decorator(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
@flask_restful.marshal_with(fields)
def try_me():
return OrderedDict([('foo', 'bar'), ('bat', 'baz')])
self.assertEquals(try_me(), {'foo': 'bar'})
def test_marshal_decorator_with_envelope(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
@flask_restful.marshal_with(fields, envelope='hey')
def try_me():
return OrderedDict([('foo', 'bar'), ('bat', 'baz')])
self.assertEquals(try_me(), {'hey': {'foo': 'bar'}})
def test_marshal_decorator_tuple(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
@flask_restful.marshal_with(fields)
def try_me():
return OrderedDict([('foo', 'bar'), ('bat', 'baz')]), 200, {'X-test': 123}
self.assertEquals(try_me(), ({'foo': 'bar'}, 200, {'X-test': 123}))
def test_marshal_decorator_tuple_with_envelope(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
@flask_restful.marshal_with(fields, envelope='hey')
def try_me():
return OrderedDict([('foo', 'bar'), ('bat', 'baz')]), 200, {'X-test': 123}
self.assertEquals(try_me(), ({'hey': {'foo': 'bar'}}, 200, {'X-test': 123}))
def test_marshal_field_decorator(self):
field = flask_restful.fields.Raw
@flask_restful.marshal_with_field(field)
def try_me():
return 'foo'
self.assertEquals(try_me(), 'foo')
def test_marshal_field_decorator_tuple(self):
field = flask_restful.fields.Raw
@flask_restful.marshal_with_field(field)
def try_me():
return 'foo', 200, {'X-test': 123}
self.assertEquals(('foo', 200, {'X-test': 123}), try_me())
def test_marshal_field(self):
fields = OrderedDict({'foo': flask_restful.fields.Raw()})
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal(marshal_fields, fields)
self.assertEquals(output, {'foo': 'bar'})
def test_marshal_tuple(self):
fields = OrderedDict({'foo': flask_restful.fields.Raw})
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal((marshal_fields,), fields)
self.assertEquals(output, [{'foo': 'bar'}])
def test_marshal_tuple_with_envelope(self):
fields = OrderedDict({'foo': flask_restful.fields.Raw})
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal((marshal_fields,), fields, envelope='hey')
self.assertEquals(output, {'hey': [{'foo': 'bar'}]})
def test_marshal_nested(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested({
'fye': flask_restful.fields.String,
}))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', {'fye': 'fum'})])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', OrderedDict([('fye', 'fum')]))])
self.assertEquals(output, expected)
def test_marshal_nested_with_non_null(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested(
OrderedDict([
('fye', flask_restful.fields.String),
('blah', flask_restful.fields.String)
]), allow_null=False))
])
marshal_fields = [OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', None)])]
output = flask_restful.marshal(marshal_fields, fields)
expected = [OrderedDict([('foo', 'bar'), ('fee', OrderedDict([('fye', None), ('blah', None)]))])]
self.assertEquals(output, expected)
def test_marshal_nested_with_null(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested(
OrderedDict([
('fye', flask_restful.fields.String),
('blah', flask_restful.fields.String)
]), allow_null=True))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', None)])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', None)])
self.assertEquals(output, expected)
def test_allow_null_presents_data(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested(
OrderedDict([
('fye', flask_restful.fields.String),
('blah', flask_restful.fields.String)
]), allow_null=True))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', {'blah': 'cool'})])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', OrderedDict([('fye', None), ('blah', 'cool')]))])
self.assertEquals(output, expected)
def test_marshal_nested_property(self):
class TestObject(object):
@property
def fee(self):
return {'blah': 'cool'}
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested(
OrderedDict([
('fye', flask_restful.fields.String),
('blah', flask_restful.fields.String)
]), allow_null=True))
])
obj = TestObject()
obj.foo = 'bar'
obj.bat = 'baz'
output = flask_restful.marshal([obj], fields)
expected = [OrderedDict([('foo', 'bar'), ('fee', OrderedDict([('fye', None), ('blah', 'cool')]))])]
self.assertEquals(output, expected)
def test_marshal_list(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.List(flask_restful.fields.String))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', ['fye', 'fum'])])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', (['fye', 'fum']))])
self.assertEquals(output, expected)
def test_marshal_list_of_nesteds(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.List(flask_restful.fields.Nested({
'fye': flask_restful.fields.String
})))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', {'fye': 'fum'})])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', [OrderedDict([('fye', 'fum')])])])
self.assertEquals(output, expected)
def test_marshal_list_of_lists(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.List(flask_restful.fields.List(
flask_restful.fields.String)))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', [['fye'], ['fum']])])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', [['fye'], ['fum']])])
self.assertEquals(output, expected)
def test_marshal_nested_dict(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('bar', OrderedDict([
('a', flask_restful.fields.Raw),
('b', flask_restful.fields.Raw),
])),
])
marshal_fields = OrderedDict([('foo', 'foo-val'), ('bar', 'bar-val'), ('bat', 'bat-val'),
('a', 1), ('b', 2), ('c', 3)])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'foo-val'), ('bar', OrderedDict([('a', 1), ('b', 2)]))])
self.assertEquals(output, expected)
def test_api_representation(self):
app = Mock()
api = flask_restful.Api(app)
@api.representation('foo')
def foo():
pass
self.assertEquals(api.representations['foo'], foo)
def test_api_base(self):
app = Mock()
app.configure_mock(**{'record.side_effect': AttributeError})
api = flask_restful.Api(app)
self.assertEquals(api.urls, {})
self.assertEquals(api.prefix, '')
self.assertEquals(api.default_mediatype, 'application/json')
def test_api_delayed_initialization(self):
app = Flask(__name__)
api = flask_restful.Api()
api.add_resource(HelloWorld, '/', endpoint="hello")
api.init_app(app)
with app.test_client() as client:
self.assertEquals(client.get('/').status_code, 200)
def test_api_prefix(self):
app = Mock()
app.configure_mock(**{'record.side_effect': AttributeError})
api = flask_restful.Api(app, prefix='/foo')
self.assertEquals(api.prefix, '/foo')
def test_handle_server_error(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo"):
resp = api.handle_error(Exception())
self.assertEquals(resp.status_code, 500)
self.assertEquals(resp.data.decode(), dumps({
"message": "Internal Server Error"
}) + "\n")
def test_handle_error_with_code(self):
app = Flask(__name__)
api = flask_restful.Api(app, serve_challenge_on_401=True)
exception = Exception()
exception.code = "Not an integer"
exception.data = {'foo': 'bar'}
with app.test_request_context("/foo"):
resp = api.handle_error(exception)
self.assertEquals(resp.status_code, 500)
self.assertEquals(resp.data.decode(), dumps({"foo": "bar"}) + "\n")
def test_handle_auth(self):
app = Flask(__name__)
api = flask_restful.Api(app, serve_challenge_on_401=True)
with app.test_request_context("/foo"):
resp = api.handle_error(Unauthorized())
self.assertEquals(resp.status_code, 401)
expected_data = dumps({'message': Unauthorized.description}) + "\n"
self.assertEquals(resp.data.decode(), expected_data)
self.assertTrue('WWW-Authenticate' in resp.headers)
def test_handle_api_error(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Test(flask_restful.Resource):
def get(self):
flask.abort(404)
api.add_resource(Test(), '/api', endpoint='api')
app = app.test_client()
resp = app.get("/api")
assert_equals(resp.status_code, 404)
assert_equals('application/json', resp.headers['Content-Type'])
data = loads(resp.data.decode())
assert_true('message' in data)
def test_handle_non_api_error(self):
app = Flask(__name__)
flask_restful.Api(app)
app = app.test_client()
resp = app.get("/foo")
self.assertEquals(resp.status_code, 404)
self.assertEquals('text/html', resp.headers['Content-Type'])
def test_non_api_error_404_catchall(self):
app = Flask(__name__)
api = flask_restful.Api(app, catch_all_404s=True)
app = app.test_client()
resp = app.get("/foo")
self.assertEquals(api.default_mediatype, resp.headers['Content-Type'])
def test_handle_error_signal(self):
if not signals_available:
# This test requires the blinker lib to run.
print("Can't test signals without signal support")
return
app = Flask(__name__)
api = flask_restful.Api(app)
exception = BadRequest()
recorded = []
def record(sender, exception):
recorded.append(exception)
got_request_exception.connect(record, app)
try:
with app.test_request_context("/foo"):
api.handle_error(exception)
self.assertEquals(len(recorded), 1)
self.assertTrue(exception is recorded[0])
finally:
got_request_exception.disconnect(record, app)
def test_handle_error(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo"):
resp = api.handle_error(BadRequest())
self.assertEquals(resp.status_code, 400)
self.assertEquals(resp.data.decode(), dumps({
'message': BadRequest.description,
}) + "\n")
def test_error_router_falls_back_to_original(self):
"""Verify that if an exception occurs in the Flask-RESTful error handler,
the error_router will call the original flask error handler instead.
"""
app = Flask(__name__)
api = flask_restful.Api(app)
app.handle_exception = Mock()
api.handle_error = Mock(side_effect=Exception())
api._has_fr_route = Mock(return_value=True)
exception = Mock(spec=HTTPException)
with app.test_request_context('/foo'):
api.error_router(exception, app.handle_exception)
self.assertTrue(app.handle_exception.called_with(exception))
def test_media_types(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo", headers={
'Accept': 'application/json'
}):
self.assertEquals(api.mediatypes(), ['application/json'])
def test_media_types_method(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo", headers={
'Accept': 'application/xml; q=.5'
}):
self.assertEquals(api.mediatypes_method()(Mock()),
['application/xml', 'application/json'])
def test_media_types_q(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo", headers={
'Accept': 'application/json; q=1, application/xml; q=.5'
}):
self.assertEquals(api.mediatypes(),
['application/json', 'application/xml'])
def test_decorator(self):
def return_zero(func):
return 0
app = Mock(flask.Flask)
app.view_functions = {}
view = Mock()
api = flask_restful.Api(app)
api.decorators.append(return_zero)
api.output = Mock()
api.add_resource(view, '/foo', endpoint='bar')
app.add_url_rule.assert_called_with('/foo', view_func=0)
def test_add_resource_endpoint(self):
app = Mock()
app.view_functions = {}
view = Mock()
api = flask_restful.Api(app)
api.output = Mock()
api.add_resource(view, '/foo', endpoint='bar')
view.as_view.assert_called_with('bar')
def test_add_two_conflicting_resources_on_same_endpoint(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Foo1(flask_restful.Resource):
def get(self):
return 'foo1'
class Foo2(flask_restful.Resource):
def get(self):
return 'foo2'
api.add_resource(Foo1, '/foo', endpoint='bar')
self.assertRaises(ValueError, api.add_resource, Foo2, '/foo/toto', endpoint='bar')
def test_add_the_same_resource_on_same_endpoint(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Foo1(flask_restful.Resource):
def get(self):
return 'foo1'
api.add_resource(Foo1, '/foo', endpoint='bar')
api.add_resource(Foo1, '/foo/toto', endpoint='blah')
with app.test_client() as client:
foo1 = client.get('/foo')
self.assertEquals(foo1.data, b'"foo1"\n')
foo2 = client.get('/foo/toto')
self.assertEquals(foo2.data, b'"foo1"\n')
def test_add_resource(self):
app = Mock(flask.Flask)
app.view_functions = {}
api = flask_restful.Api(app)
api.output = Mock()
api.add_resource(views.MethodView, '/foo')
app.add_url_rule.assert_called_with('/foo',
view_func=api.output())
def test_resource_decorator(self):
app = Mock(flask.Flask)
app.view_functions = {}
api = flask_restful.Api(app)
api.output = Mock()
@api.resource('/foo', endpoint='bar')
class Foo(flask_restful.Resource):
pass
app.add_url_rule.assert_called_with('/foo',
view_func=api.output())
def test_add_resource_kwargs(self):
app = Mock(flask.Flask)
app.view_functions = {}
api = flask_restful.Api(app)
api.output = Mock()
api.add_resource(views.MethodView, '/foo', defaults={"bar": "baz"})
app.add_url_rule.assert_called_with('/foo',
view_func=api.output(),
defaults={"bar": "baz"})
def test_add_resource_forward_resource_class_parameters(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Foo(flask_restful.Resource):
def __init__(self, *args, **kwargs):
self.one = args[0]
self.two = kwargs['secret_state']
def get(self):
return "{0} {1}".format(self.one, self.two)
api.add_resource(Foo, '/foo',
resource_class_args=('wonderful',),
resource_class_kwargs={'secret_state': 'slurm'})
with app.test_client() as client:
foo = client.get('/foo')
self.assertEquals(foo.data, b'"wonderful slurm"\n')
def test_output_unpack(self):
def make_empty_response():
return {'foo': 'bar'}
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo"):
wrapper = api.output(make_empty_response)
resp = wrapper()
self.assertEquals(resp.status_code, 200)
self.assertEquals(resp.data.decode(), '{"foo": "bar"}\n')
def test_output_func(self):
def make_empty_response():
return flask.make_response('')
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo"):
wrapper = api.output(make_empty_response)
resp = wrapper()
self.assertEquals(resp.status_code, 200)
self.assertEquals(resp.data.decode(), '')
def test_resource(self):
app = Flask(__name__)
resource = flask_restful.Resource()
resource.get = Mock()
with app.test_request_context("/foo"):
resource.dispatch_request()
def test_resource_resp(self):
app = Flask(__name__)
resource = flask_restful.Resource()
resource.get = Mock()
with app.test_request_context("/foo"):
resource.get.return_value = flask.make_response('')
resource.dispatch_request()
def test_resource_text_plain(self):
app = Flask(__name__)
def text(data, code, headers=None):
return flask.make_response(six.text_type(data))
class Foo(flask_restful.Resource):
representations = {
'text/plain': text,
}
def get(self):
return 'hello'
with app.test_request_context("/foo", headers={'Accept': 'text/plain'}):
resource = Foo()
resp = resource.dispatch_request()
self.assertEquals(resp.data.decode(), 'hello')
def test_resource_error(self):
    """Dispatching to a resource with no handler for the method asserts."""
    app = Flask(__name__)
    res = flask_restful.Resource()
    with app.test_request_context("/foo"):
        with self.assertRaises(AssertionError):
            res.dispatch_request()
def test_resource_head(self):
    """HEAD on a resource without a handler also asserts."""
    app = Flask(__name__)
    res = flask_restful.Resource()
    with app.test_request_context("/foo", method="HEAD"):
        with self.assertRaises(AssertionError):
            res.dispatch_request()
def test_abort_data(self):
    """abort() attaches extra keyword data to the raised HTTPException.

    Rewritten with assertRaises: the old ``try/assert False/except
    Exception`` pattern caught its own AssertionError, so a non-raising
    abort() surfaced as a confusing AttributeError instead of a clean
    test failure.
    """
    with self.assertRaises(HTTPException) as cm:
        flask_restful.abort(404, foo='bar')
    self.assertEquals(cm.exception.data, {'foo': 'bar'})
def test_abort_no_data(self):
    """abort() without extra kwargs does not attach a ``data`` attribute.

    Bug fix: the original caught its own ``assert False`` AssertionError
    with ``except Exception`` — and since AssertionError has no ``data``
    attribute, the test passed even if abort() never raised at all.
    Using assertRaises makes a non-raising abort() a real failure.
    """
    with self.assertRaises(HTTPException) as cm:
        flask_restful.abort(404)
    self.assertEquals(False, hasattr(cm.exception, "data"))
def test_abort_custom_message(self):
    """abort(message=...) stores the custom message in the exception data.

    Rewritten with assertRaises so a non-raising abort() fails the test
    directly instead of being masked by the caught ``assert False``.
    """
    with self.assertRaises(HTTPException) as cm:
        flask_restful.abort(404, message="no user")
    assert_equals(cm.exception.data['message'], "no user")
def test_abort_type(self):
    """flask_restful.abort raises a werkzeug HTTPException."""
    with self.assertRaises(HTTPException):
        flask_restful.abort(404)
def test_endpoints(self):
    """_has_fr_route is true only for URLs routed to a flask-restful resource."""
    app = Flask(__name__)
    api = flask_restful.Api(app)
    api.add_resource(HelloWorld, '/ids/<int:id>', endpoint="hello")
    # Unregistered path: not a flask-restful route.
    with app.test_request_context('/foo'):
        self.assertFalse(api._has_fr_route())
    # Registered path: recognized as ours.
    with app.test_request_context('/ids/3'):
        self.assertTrue(api._has_fr_route())
def test_url_for(self):
    """api.url_for builds a URL for a resource from its route parameters."""
    app = Flask(__name__)
    api = flask_restful.Api(app)
    api.add_resource(HelloWorld, '/ids/<int:id>')
    with app.test_request_context('/foo'):
        built = api.url_for(HelloWorld, id=123)
        self.assertEqual(built, '/ids/123')
def test_url_for_with_blueprint(self):
    """url_for works when the Api object is mounted on a Blueprint."""
    blueprint = Blueprint('api', __name__)
    api = flask_restful.Api(blueprint)
    api.add_resource(HelloWorld, '/foo/<string:bar>')

    app = Flask(__name__)
    app.register_blueprint(blueprint)
    with app.test_request_context('/foo'):
        self.assertEqual(api.url_for(HelloWorld, bar='baz'), '/foo/baz')
def test_fr_405(self):
    """An unmapped method (POST) on a resource yields a JSON 405 with Allow."""
    app = Flask(__name__)
    api = flask_restful.Api(app)
    api.add_resource(HelloWorld, '/ids/<int:id>', endpoint="hello")
    app = app.test_client()
    resp = app.post('/ids/3')
    self.assertEquals(resp.status_code, 405)
    # The 405 body is rendered by flask-restful, so it uses the API mediatype.
    self.assertEquals(resp.content_type, api.default_mediatype)
    # Allow can be of the form 'GET, PUT, POST'
    allow = ', '.join(set(resp.headers.get_all('Allow')))
    allow = set(method.strip() for method in allow.split(','))
    # HEAD and OPTIONS are added implicitly alongside the resource's methods.
    self.assertEquals(allow,
                      {'HEAD', 'OPTIONS'}.union(HelloWorld.methods))
def test_exception_header_forwarded(self):
    """Test that HTTPException's headers are extended properly."""
    app = Flask(__name__)
    app.config['DEBUG'] = True
    api = flask_restful.Api(app)

    class NotModified(HTTPException):
        code = 304

        def __init__(self, etag, *args, **kwargs):
            super(NotModified, self).__init__(*args, **kwargs)
            self.etag = quote_etag(etag)

        def get_headers(self, *args, **kwargs):
            """Get a list of headers."""
            # The ETag header set here must survive flask-restful's
            # error-response rewriting.
            return [('ETag', self.etag)]

    class Foo1(flask_restful.Resource):
        def get(self):
            flask_abort(304, etag='myETag')

    api.add_resource(Foo1, '/foo')
    # NOTE(review): this mutates werkzeug's global abort mapping and is never
    # restored, so it leaks into other tests — confirm that is intentional.
    _aborter.mapping.update({304: NotModified})

    with app.test_client() as client:
        foo = client.get('/foo')
        self.assertEquals(foo.get_etag(),
                          unquote_etag(quote_etag('myETag')))
def test_exception_header_forwarding_doesnt_duplicate_headers(self):
    """HTTPException header forwarding must not add a second Content-Length.

    Regression test for
    https://github.com/flask-restful/flask-restful/issues/534
    """
    app = Flask(__name__)
    api = flask_restful.Api(app)
    with app.test_request_context('/'):
        response = api.handle_error(BadRequest())
        content_lengths = response.headers.getlist('Content-Length')
        self.assertEqual(len(content_lengths), 1)
def test_will_prettyprint_json_in_debug_mode(self):
    """In DEBUG mode the JSON body is indented across multiple lines."""
    app = Flask(__name__)
    app.config['DEBUG'] = True
    api = flask_restful.Api(app)

    class Foo1(flask_restful.Resource):
        def get(self):
            return {'foo': 'bar', 'baz': 'asdf'}

    api.add_resource(Foo1, '/foo', endpoint='bar')

    with app.test_client() as client:
        foo = client.get('/foo')

        # Python's dictionaries have random order (as of "new" Pythons,
        # anyway), so we can't verify the actual output here. We just
        # assert that they're properly prettyprinted.
        lines = foo.data.splitlines()
        lines = [line.decode() for line in lines]
        self.assertEquals("{", lines[0])
        # Indented members on lines 1 and 2, closing brace on line 3.
        self.assertTrue(lines[1].startswith('    '))
        self.assertTrue(lines[2].startswith('    '))
        self.assertEquals("}", lines[3])

        # Assert our trailing newline.
        self.assertTrue(foo.data.endswith(b'\n'))
def test_read_json_settings_from_config(self):
    """RESTFUL_JSON app-config options are passed through to json.dumps."""
    class TestConfig(object):
        # indent/sort_keys/separators map directly onto json.dumps kwargs;
        # the expected bytes below depend on exactly these values.
        RESTFUL_JSON = {'indent': 2,
                        'sort_keys': True,
                        'separators': (', ', ': ')}

    app = Flask(__name__)
    app.config.from_object(TestConfig)
    api = flask_restful.Api(app)

    class Foo(flask_restful.Resource):
        def get(self):
            return {'foo': 'bar', 'baz': 'qux'}

    api.add_resource(Foo, '/foo')

    with app.test_client() as client:
        data = client.get('/foo').data

    # sort_keys=True makes the output deterministic, so we can compare bytes.
    expected = b'{\n  "baz": "qux", \n  "foo": "bar"\n}\n'
    self.assertEquals(data, expected)
def test_use_custom_jsonencoder(self):
    """A custom JSONEncoder supplied via RESTFUL_JSON 'cls' is honored."""
    class CabageEncoder(JSONEncoder):
        def default(self, obj):
            # Serialize any non-JSON-native object as the string 'cabbage'.
            return 'cabbage'

    class TestConfig(object):
        RESTFUL_JSON = {'cls': CabageEncoder}

    app = Flask(__name__)
    app.config.from_object(TestConfig)
    api = flask_restful.Api(app)

    class Cabbage(flask_restful.Resource):
        def get(self):
            # object() is not JSON-serializable, forcing the encoder's
            # default() hook to run.
            return {'frob': object()}

    api.add_resource(Cabbage, '/cabbage')

    with app.test_client() as client:
        data = client.get('/cabbage').data

    expected = b'{"frob": "cabbage"}\n'
    self.assertEquals(data, expected)
def test_json_with_no_settings(self):
    """Without RESTFUL_JSON config, responses use compact default JSON."""
    app = Flask(__name__)
    api = flask_restful.Api(app)

    class Foo(flask_restful.Resource):
        def get(self):
            return {'foo': 'bar'}

    api.add_resource(Foo, '/foo')

    with app.test_client() as client:
        body = client.get('/foo').data
    self.assertEquals(body, b'{"foo": "bar"}\n')
def test_redirect(self):
    """A resource may return flask.redirect and the 302 passes through."""
    app = Flask(__name__)
    api = flask_restful.Api(app)

    class FooResource(flask_restful.Resource):
        def get(self):
            return redirect('/')

    api.add_resource(FooResource, '/api')

    client = app.test_client()
    response = client.get('/api')
    self.assertEquals(response.status_code, 302)
    # NOTE(review): newer werkzeug emits a relative Location ('/'); this
    # absolute form assumes an older werkzeug — confirm the pinned version.
    self.assertEquals(response.headers['Location'], 'http://localhost/')
def test_json_float_marshalled(self):
    """A marshalled Float field serializes as a JSON number, not a string."""
    app = Flask(__name__)
    api = flask_restful.Api(app)

    class FooResource(flask_restful.Resource):
        fields = {'foo': flask_restful.fields.Float}

        def get(self):
            return flask_restful.marshal({"foo": 3.0}, self.fields)

    api.add_resource(FooResource, '/api')

    client = app.test_client()
    response = client.get('/api')
    self.assertEquals(response.status_code, 200)
    self.assertEquals(response.data.decode('utf-8'), '{"foo": 3.0}\n')
def test_custom_error_message(self):
    """The 'errors' dict maps a custom exception name to a status/message."""
    errors = {
        'FooError': {
            'message': "api is foobar",
            'status': 418,
        }
    }

    class FooError(ValueError):
        pass

    app = Flask(__name__)
    api = flask_restful.Api(app, errors=errors)

    # These attributes would normally be attached by abort(); the errors
    # mapping (keyed by class name) must override both of them.
    exception = FooError()
    exception.code = 400
    exception.data = {'message': 'FooError'}

    with app.test_request_context("/foo"):
        resp = api.handle_error(exception)
        self.assertEquals(resp.status_code, 418)
        self.assertEqual(loads(resp.data.decode('utf8')), {"message": "api is foobar", "status": 418})
def test_calling_owns_endpoint_before_api_init(self):
    """owns_endpoint must not blow up before init_app has been called.

    Bug fix: the except branch used ``ae.message``, a Python-2-only
    attribute removed in Python 3 — on Py3 a caught AttributeError would
    itself raise AttributeError instead of failing the test cleanly.
    """
    api = flask_restful.Api()
    try:
        api.owns_endpoint('endpoint')
    except AttributeError as ae:
        self.fail(str(ae))
def test_selectively_apply_method_decorators(self):
    """A dict-valued method_decorators applies only to the listed methods."""
    def upper_deco(f):
        def upper(*args, **kwargs):
            return f(*args, **kwargs).upper()
        return upper

    class TestResource(flask_restful.Resource):
        # Only GET is decorated; POST must pass through untouched.
        method_decorators = {'get': [upper_deco]}

        def get(self):
            return 'get test'

        def post(self):
            return 'post test'

    app = Flask(__name__)

    with app.test_request_context('/', method='POST'):
        assert TestResource().dispatch_request() == 'post test'

    with app.test_request_context('/', method='GET'):
        assert TestResource().dispatch_request() == 'GET TEST'
def test_apply_all_method_decorators_if_not_mapping(self):
    """A list-valued method_decorators applies to every HTTP method."""
    def upper_deco(f):
        def upper(*args, **kwargs):
            return f(*args, **kwargs).upper()
        return upper

    class TestResource(flask_restful.Resource):
        method_decorators = [upper_deco]

        def get(self):
            return 'get test'

        def post(self):
            return 'post test'

    app = Flask(__name__)

    # Both verbs are uppercased because the decorator list is not a mapping.
    with app.test_request_context('/', method='POST'):
        assert TestResource().dispatch_request() == 'POST TEST'

    with app.test_request_context('/', method='GET'):
        assert TestResource().dispatch_request() == 'GET TEST'
def test_decorators_only_applied_at_dispatch(self):
    """method_decorators wrap handlers at dispatch time, not at call time."""
    def upper_deco(f):
        def upper(*args, **kwargs):
            return f(*args, **kwargs).upper()
        return upper

    class TestResource(flask_restful.Resource):
        method_decorators = [upper_deco]

        def get(self):
            return 'get test'

        def post(self):
            return 'post test'

    # Direct calls bypass dispatch_request, so the decorator must NOT fire.
    resource = TestResource()
    assert resource.get() == 'get test'
    assert resource.post() == 'post test'
# Allow running this test module directly (python tests.py) in addition to
# the usual nose/pytest collection.
if __name__ == '__main__':
    unittest.main()
| 35.475472 | 113 | 0.588395 | import unittest
import json
from flask import Flask, Blueprint, redirect, views, abort as flask_abort
from flask.signals import got_request_exception, signals_available
try:
from mock import Mock
except:
from unittest.mock import Mock
import flask
import werkzeug
from werkzeug.exceptions import HTTPException, Unauthorized, BadRequest, NotFound, _aborter
from werkzeug.http import quote_etag, unquote_etag
from flask_restful.utils import http_status_message, unpack
import flask_restful
import flask_restful.fields
from flask_restful import OrderedDict
from json import dumps, loads, JSONEncoder
from nose.tools import assert_equals, assert_true, assert_false
import six
def check_unpack(expected, value):
    # Helper for the nose yield-style generator test below: nose invokes
    # this once per yielded (expected, value) pair.
    assert_equals(expected, value)
def test_unpack():
    # Nose generator test: each yield is one case for utils.unpack, which
    # normalizes a view return value into a (data, code, headers) triple.
    yield check_unpack, ("hey", 200, {}), unpack("hey")
    # A 1-tuple is treated as the data itself, not unpacked further.
    yield check_unpack, (("hey",), 200, {}), unpack(("hey",))
    yield check_unpack, ("hey", 201, {}), unpack(("hey", 201))
    yield check_unpack, ("hey", 201, "foo"), unpack(("hey", 201, "foo"))
    # Only tuples participate in unpacking; a list passes through whole.
    yield check_unpack, (["hey", 201], 200, {}), unpack(["hey", 201])
class HelloWorld(flask_restful.Resource):
    """Minimal resource used as a fixture by routing/url_for tests."""

    def get(self):
        return {}
class BadMojoError(HTTPException):
    """Custom HTTPException fixture for the custom-error-mapping tests."""
    pass
class HelloBomb(flask_restful.Resource):
    """Resource fixture whose GET always raises BadMojoError."""

    def get(self):
        raise BadMojoError("It burns..")
class APITestCase(unittest.TestCase):
def test_http_code(self):
self.assertEquals(http_status_message(200), 'OK')
self.assertEquals(http_status_message(404), 'Not Found')
def test_unauthorized_no_challenge_by_default(self):
app = Flask(__name__)
api = flask_restful.Api(app)
response = Mock()
response.headers = {}
with app.test_request_context('/foo'):
response = api.unauthorized(response)
assert_false('WWW-Authenticate' in response.headers)
def test_unauthorized(self):
app = Flask(__name__)
api = flask_restful.Api(app, serve_challenge_on_401=True)
response = Mock()
response.headers = {}
with app.test_request_context('/foo'):
response = api.unauthorized(response)
self.assertEquals(response.headers['WWW-Authenticate'],
'Basic realm="flask-restful"')
def test_unauthorized_custom_realm(self):
app = Flask(__name__)
app.config['HTTP_BASIC_AUTH_REALM'] = 'Foo'
api = flask_restful.Api(app, serve_challenge_on_401=True)
response = Mock()
response.headers = {}
with app.test_request_context('/foo'):
response = api.unauthorized(response)
self.assertEquals(response.headers['WWW-Authenticate'], 'Basic realm="Foo"')
def test_handle_error_401_sends_challege_default_realm(self):
app = Flask(__name__)
api = flask_restful.Api(app, serve_challenge_on_401=True)
exception = HTTPException()
exception.code = 401
exception.data = {'foo': 'bar'}
with app.test_request_context('/foo'):
resp = api.handle_error(exception)
self.assertEquals(resp.status_code, 401)
self.assertEquals(resp.headers['WWW-Authenticate'],
'Basic realm="flask-restful"')
def test_handle_error_401_sends_challege_configured_realm(self):
app = Flask(__name__)
app.config['HTTP_BASIC_AUTH_REALM'] = 'test-realm'
api = flask_restful.Api(app, serve_challenge_on_401=True)
with app.test_request_context('/foo'):
resp = api.handle_error(Unauthorized())
self.assertEquals(resp.status_code, 401)
self.assertEquals(resp.headers['WWW-Authenticate'],
'Basic realm="test-realm"')
def test_handle_error_does_not_swallow_exceptions(self):
app = Flask(__name__)
api = flask_restful.Api(app)
exception = BadRequest('x')
with app.test_request_context('/foo'):
resp = api.handle_error(exception)
self.assertEquals(resp.status_code, 400)
self.assertEquals(resp.get_data(), b'{"message": "x"}\n')
def test_handle_error_does_not_swallow_custom_exceptions(self):
app = Flask(__name__)
errors = {'BadMojoError': {'status': 409, 'message': 'go away'}}
api = flask_restful.Api(app, errors=errors)
api.add_resource(HelloBomb, '/bomb')
app = app.test_client()
resp = app.get('/bomb')
self.assertEquals(resp.status_code, 409)
self.assertEquals(resp.content_type, api.default_mediatype)
resp_dict = json.loads(resp.data.decode())
self.assertEqual(resp_dict.get('status'), 409)
self.assertEqual(resp_dict.get('message'), 'go away')
def test_handle_error_does_not_swallow_abort_response(self):
class HelloBombAbort(flask_restful.Resource):
def get(self):
raise HTTPException(response=flask.make_response("{}", 403))
app = Flask(__name__)
api = flask_restful.Api(app)
api.add_resource(HelloBombAbort, '/bomb')
app = app.test_client()
resp = app.get('/bomb')
resp_dict = json.loads(resp.data.decode())
self.assertEquals(resp.status_code, 403)
self.assertDictEqual(resp_dict, {})
def test_marshal(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
marshal_dict = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal(marshal_dict, fields)
self.assertEquals(output, {'foo': 'bar'})
def test_marshal_with_envelope(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
marshal_dict = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal(marshal_dict, fields, envelope='hey')
self.assertEquals(output, {'hey': {'foo': 'bar'}})
def test_marshal_decorator(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
@flask_restful.marshal_with(fields)
def try_me():
return OrderedDict([('foo', 'bar'), ('bat', 'baz')])
self.assertEquals(try_me(), {'foo': 'bar'})
def test_marshal_decorator_with_envelope(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
@flask_restful.marshal_with(fields, envelope='hey')
def try_me():
return OrderedDict([('foo', 'bar'), ('bat', 'baz')])
self.assertEquals(try_me(), {'hey': {'foo': 'bar'}})
def test_marshal_decorator_tuple(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
@flask_restful.marshal_with(fields)
def try_me():
return OrderedDict([('foo', 'bar'), ('bat', 'baz')]), 200, {'X-test': 123}
self.assertEquals(try_me(), ({'foo': 'bar'}, 200, {'X-test': 123}))
def test_marshal_decorator_tuple_with_envelope(self):
fields = OrderedDict([('foo', flask_restful.fields.Raw)])
@flask_restful.marshal_with(fields, envelope='hey')
def try_me():
return OrderedDict([('foo', 'bar'), ('bat', 'baz')]), 200, {'X-test': 123}
self.assertEquals(try_me(), ({'hey': {'foo': 'bar'}}, 200, {'X-test': 123}))
def test_marshal_field_decorator(self):
field = flask_restful.fields.Raw
@flask_restful.marshal_with_field(field)
def try_me():
return 'foo'
self.assertEquals(try_me(), 'foo')
def test_marshal_field_decorator_tuple(self):
field = flask_restful.fields.Raw
@flask_restful.marshal_with_field(field)
def try_me():
return 'foo', 200, {'X-test': 123}
self.assertEquals(('foo', 200, {'X-test': 123}), try_me())
def test_marshal_field(self):
fields = OrderedDict({'foo': flask_restful.fields.Raw()})
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal(marshal_fields, fields)
self.assertEquals(output, {'foo': 'bar'})
def test_marshal_tuple(self):
fields = OrderedDict({'foo': flask_restful.fields.Raw})
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal((marshal_fields,), fields)
self.assertEquals(output, [{'foo': 'bar'}])
def test_marshal_tuple_with_envelope(self):
fields = OrderedDict({'foo': flask_restful.fields.Raw})
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz')])
output = flask_restful.marshal((marshal_fields,), fields, envelope='hey')
self.assertEquals(output, {'hey': [{'foo': 'bar'}]})
def test_marshal_nested(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested({
'fye': flask_restful.fields.String,
}))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', {'fye': 'fum'})])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', OrderedDict([('fye', 'fum')]))])
self.assertEquals(output, expected)
def test_marshal_nested_with_non_null(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested(
OrderedDict([
('fye', flask_restful.fields.String),
('blah', flask_restful.fields.String)
]), allow_null=False))
])
marshal_fields = [OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', None)])]
output = flask_restful.marshal(marshal_fields, fields)
expected = [OrderedDict([('foo', 'bar'), ('fee', OrderedDict([('fye', None), ('blah', None)]))])]
self.assertEquals(output, expected)
def test_marshal_nested_with_null(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested(
OrderedDict([
('fye', flask_restful.fields.String),
('blah', flask_restful.fields.String)
]), allow_null=True))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', None)])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', None)])
self.assertEquals(output, expected)
def test_allow_null_presents_data(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested(
OrderedDict([
('fye', flask_restful.fields.String),
('blah', flask_restful.fields.String)
]), allow_null=True))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', {'blah': 'cool'})])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', OrderedDict([('fye', None), ('blah', 'cool')]))])
self.assertEquals(output, expected)
def test_marshal_nested_property(self):
class TestObject(object):
@property
def fee(self):
return {'blah': 'cool'}
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.Nested(
OrderedDict([
('fye', flask_restful.fields.String),
('blah', flask_restful.fields.String)
]), allow_null=True))
])
obj = TestObject()
obj.foo = 'bar'
obj.bat = 'baz'
output = flask_restful.marshal([obj], fields)
expected = [OrderedDict([('foo', 'bar'), ('fee', OrderedDict([('fye', None), ('blah', 'cool')]))])]
self.assertEquals(output, expected)
def test_marshal_list(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.List(flask_restful.fields.String))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', ['fye', 'fum'])])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', (['fye', 'fum']))])
self.assertEquals(output, expected)
def test_marshal_list_of_nesteds(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.List(flask_restful.fields.Nested({
'fye': flask_restful.fields.String
})))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', {'fye': 'fum'})])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', [OrderedDict([('fye', 'fum')])])])
self.assertEquals(output, expected)
def test_marshal_list_of_lists(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('fee', flask_restful.fields.List(flask_restful.fields.List(
flask_restful.fields.String)))
])
marshal_fields = OrderedDict([('foo', 'bar'), ('bat', 'baz'), ('fee', [['fye'], ['fum']])])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'bar'), ('fee', [['fye'], ['fum']])])
self.assertEquals(output, expected)
def test_marshal_nested_dict(self):
fields = OrderedDict([
('foo', flask_restful.fields.Raw),
('bar', OrderedDict([
('a', flask_restful.fields.Raw),
('b', flask_restful.fields.Raw),
])),
])
marshal_fields = OrderedDict([('foo', 'foo-val'), ('bar', 'bar-val'), ('bat', 'bat-val'),
('a', 1), ('b', 2), ('c', 3)])
output = flask_restful.marshal(marshal_fields, fields)
expected = OrderedDict([('foo', 'foo-val'), ('bar', OrderedDict([('a', 1), ('b', 2)]))])
self.assertEquals(output, expected)
def test_api_representation(self):
app = Mock()
api = flask_restful.Api(app)
@api.representation('foo')
def foo():
pass
self.assertEquals(api.representations['foo'], foo)
def test_api_base(self):
app = Mock()
app.configure_mock(**{'record.side_effect': AttributeError})
api = flask_restful.Api(app)
self.assertEquals(api.urls, {})
self.assertEquals(api.prefix, '')
self.assertEquals(api.default_mediatype, 'application/json')
def test_api_delayed_initialization(self):
app = Flask(__name__)
api = flask_restful.Api()
api.add_resource(HelloWorld, '/', endpoint="hello")
api.init_app(app)
with app.test_client() as client:
self.assertEquals(client.get('/').status_code, 200)
def test_api_prefix(self):
app = Mock()
app.configure_mock(**{'record.side_effect': AttributeError})
api = flask_restful.Api(app, prefix='/foo')
self.assertEquals(api.prefix, '/foo')
def test_handle_server_error(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo"):
resp = api.handle_error(Exception())
self.assertEquals(resp.status_code, 500)
self.assertEquals(resp.data.decode(), dumps({
"message": "Internal Server Error"
}) + "\n")
def test_handle_error_with_code(self):
app = Flask(__name__)
api = flask_restful.Api(app, serve_challenge_on_401=True)
exception = Exception()
exception.code = "Not an integer"
exception.data = {'foo': 'bar'}
with app.test_request_context("/foo"):
resp = api.handle_error(exception)
self.assertEquals(resp.status_code, 500)
self.assertEquals(resp.data.decode(), dumps({"foo": "bar"}) + "\n")
def test_handle_auth(self):
app = Flask(__name__)
api = flask_restful.Api(app, serve_challenge_on_401=True)
with app.test_request_context("/foo"):
resp = api.handle_error(Unauthorized())
self.assertEquals(resp.status_code, 401)
expected_data = dumps({'message': Unauthorized.description}) + "\n"
self.assertEquals(resp.data.decode(), expected_data)
self.assertTrue('WWW-Authenticate' in resp.headers)
def test_handle_api_error(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Test(flask_restful.Resource):
def get(self):
flask.abort(404)
api.add_resource(Test(), '/api', endpoint='api')
app = app.test_client()
resp = app.get("/api")
assert_equals(resp.status_code, 404)
assert_equals('application/json', resp.headers['Content-Type'])
data = loads(resp.data.decode())
assert_true('message' in data)
def test_handle_non_api_error(self):
app = Flask(__name__)
flask_restful.Api(app)
app = app.test_client()
resp = app.get("/foo")
self.assertEquals(resp.status_code, 404)
self.assertEquals('text/html', resp.headers['Content-Type'])
def test_non_api_error_404_catchall(self):
app = Flask(__name__)
api = flask_restful.Api(app, catch_all_404s=True)
app = app.test_client()
resp = app.get("/foo")
self.assertEquals(api.default_mediatype, resp.headers['Content-Type'])
def test_handle_error_signal(self):
if not signals_available:
print("Can't test signals without signal support")
return
app = Flask(__name__)
api = flask_restful.Api(app)
exception = BadRequest()
recorded = []
def record(sender, exception):
recorded.append(exception)
got_request_exception.connect(record, app)
try:
with app.test_request_context("/foo"):
api.handle_error(exception)
self.assertEquals(len(recorded), 1)
self.assertTrue(exception is recorded[0])
finally:
got_request_exception.disconnect(record, app)
def test_handle_error(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo"):
resp = api.handle_error(BadRequest())
self.assertEquals(resp.status_code, 400)
self.assertEquals(resp.data.decode(), dumps({
'message': BadRequest.description,
}) + "\n")
def test_error_router_falls_back_to_original(self):
app = Flask(__name__)
api = flask_restful.Api(app)
app.handle_exception = Mock()
api.handle_error = Mock(side_effect=Exception())
api._has_fr_route = Mock(return_value=True)
exception = Mock(spec=HTTPException)
with app.test_request_context('/foo'):
api.error_router(exception, app.handle_exception)
self.assertTrue(app.handle_exception.called_with(exception))
def test_media_types(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo", headers={
'Accept': 'application/json'
}):
self.assertEquals(api.mediatypes(), ['application/json'])
def test_media_types_method(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo", headers={
'Accept': 'application/xml; q=.5'
}):
self.assertEquals(api.mediatypes_method()(Mock()),
['application/xml', 'application/json'])
def test_media_types_q(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo", headers={
'Accept': 'application/json; q=1, application/xml; q=.5'
}):
self.assertEquals(api.mediatypes(),
['application/json', 'application/xml'])
def test_decorator(self):
def return_zero(func):
return 0
app = Mock(flask.Flask)
app.view_functions = {}
view = Mock()
api = flask_restful.Api(app)
api.decorators.append(return_zero)
api.output = Mock()
api.add_resource(view, '/foo', endpoint='bar')
app.add_url_rule.assert_called_with('/foo', view_func=0)
def test_add_resource_endpoint(self):
app = Mock()
app.view_functions = {}
view = Mock()
api = flask_restful.Api(app)
api.output = Mock()
api.add_resource(view, '/foo', endpoint='bar')
view.as_view.assert_called_with('bar')
def test_add_two_conflicting_resources_on_same_endpoint(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Foo1(flask_restful.Resource):
def get(self):
return 'foo1'
class Foo2(flask_restful.Resource):
def get(self):
return 'foo2'
api.add_resource(Foo1, '/foo', endpoint='bar')
self.assertRaises(ValueError, api.add_resource, Foo2, '/foo/toto', endpoint='bar')
def test_add_the_same_resource_on_same_endpoint(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Foo1(flask_restful.Resource):
def get(self):
return 'foo1'
api.add_resource(Foo1, '/foo', endpoint='bar')
api.add_resource(Foo1, '/foo/toto', endpoint='blah')
with app.test_client() as client:
foo1 = client.get('/foo')
self.assertEquals(foo1.data, b'"foo1"\n')
foo2 = client.get('/foo/toto')
self.assertEquals(foo2.data, b'"foo1"\n')
def test_add_resource(self):
app = Mock(flask.Flask)
app.view_functions = {}
api = flask_restful.Api(app)
api.output = Mock()
api.add_resource(views.MethodView, '/foo')
app.add_url_rule.assert_called_with('/foo',
view_func=api.output())
def test_resource_decorator(self):
app = Mock(flask.Flask)
app.view_functions = {}
api = flask_restful.Api(app)
api.output = Mock()
@api.resource('/foo', endpoint='bar')
class Foo(flask_restful.Resource):
pass
app.add_url_rule.assert_called_with('/foo',
view_func=api.output())
def test_add_resource_kwargs(self):
app = Mock(flask.Flask)
app.view_functions = {}
api = flask_restful.Api(app)
api.output = Mock()
api.add_resource(views.MethodView, '/foo', defaults={"bar": "baz"})
app.add_url_rule.assert_called_with('/foo',
view_func=api.output(),
defaults={"bar": "baz"})
def test_add_resource_forward_resource_class_parameters(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Foo(flask_restful.Resource):
def __init__(self, *args, **kwargs):
self.one = args[0]
self.two = kwargs['secret_state']
def get(self):
return "{0} {1}".format(self.one, self.two)
api.add_resource(Foo, '/foo',
resource_class_args=('wonderful',),
resource_class_kwargs={'secret_state': 'slurm'})
with app.test_client() as client:
foo = client.get('/foo')
self.assertEquals(foo.data, b'"wonderful slurm"\n')
def test_output_unpack(self):
def make_empty_response():
return {'foo': 'bar'}
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo"):
wrapper = api.output(make_empty_response)
resp = wrapper()
self.assertEquals(resp.status_code, 200)
self.assertEquals(resp.data.decode(), '{"foo": "bar"}\n')
def test_output_func(self):
def make_empty_response():
return flask.make_response('')
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context("/foo"):
wrapper = api.output(make_empty_response)
resp = wrapper()
self.assertEquals(resp.status_code, 200)
self.assertEquals(resp.data.decode(), '')
def test_resource(self):
app = Flask(__name__)
resource = flask_restful.Resource()
resource.get = Mock()
with app.test_request_context("/foo"):
resource.dispatch_request()
def test_resource_resp(self):
app = Flask(__name__)
resource = flask_restful.Resource()
resource.get = Mock()
with app.test_request_context("/foo"):
resource.get.return_value = flask.make_response('')
resource.dispatch_request()
def test_resource_text_plain(self):
app = Flask(__name__)
def text(data, code, headers=None):
return flask.make_response(six.text_type(data))
class Foo(flask_restful.Resource):
representations = {
'text/plain': text,
}
def get(self):
return 'hello'
with app.test_request_context("/foo", headers={'Accept': 'text/plain'}):
resource = Foo()
resp = resource.dispatch_request()
self.assertEquals(resp.data.decode(), 'hello')
def test_resource_error(self):
app = Flask(__name__)
resource = flask_restful.Resource()
with app.test_request_context("/foo"):
self.assertRaises(AssertionError, lambda: resource.dispatch_request())
def test_resource_head(self):
app = Flask(__name__)
resource = flask_restful.Resource()
with app.test_request_context("/foo", method="HEAD"):
self.assertRaises(AssertionError, lambda: resource.dispatch_request())
def test_abort_data(self):
try:
flask_restful.abort(404, foo='bar')
assert False # We should never get here
except Exception as e:
self.assertEquals(e.data, {'foo': 'bar'})
def test_abort_no_data(self):
try:
flask_restful.abort(404)
assert False # We should never get here
except Exception as e:
self.assertEquals(False, hasattr(e, "data"))
def test_abort_custom_message(self):
try:
flask_restful.abort(404, message="no user")
assert False # We should never get here
except Exception as e:
assert_equals(e.data['message'], "no user")
def test_abort_type(self):
self.assertRaises(HTTPException, lambda: flask_restful.abort(404))
def test_endpoints(self):
app = Flask(__name__)
api = flask_restful.Api(app)
api.add_resource(HelloWorld, '/ids/<int:id>', endpoint="hello")
with app.test_request_context('/foo'):
self.assertFalse(api._has_fr_route())
with app.test_request_context('/ids/3'):
self.assertTrue(api._has_fr_route())
def test_url_for(self):
app = Flask(__name__)
api = flask_restful.Api(app)
api.add_resource(HelloWorld, '/ids/<int:id>')
with app.test_request_context('/foo'):
self.assertEqual(api.url_for(HelloWorld, id=123), '/ids/123')
def test_url_for_with_blueprint(self):
api_bp = Blueprint('api', __name__)
app = Flask(__name__)
api = flask_restful.Api(api_bp)
api.add_resource(HelloWorld, '/foo/<string:bar>')
app.register_blueprint(api_bp)
with app.test_request_context('/foo'):
self.assertEqual(api.url_for(HelloWorld, bar='baz'), '/foo/baz')
def test_fr_405(self):
app = Flask(__name__)
api = flask_restful.Api(app)
api.add_resource(HelloWorld, '/ids/<int:id>', endpoint="hello")
app = app.test_client()
resp = app.post('/ids/3')
self.assertEquals(resp.status_code, 405)
self.assertEquals(resp.content_type, api.default_mediatype)
# Allow can be of the form 'GET, PUT, POST'
allow = ', '.join(set(resp.headers.get_all('Allow')))
allow = set(method.strip() for method in allow.split(','))
self.assertEquals(allow,
{'HEAD', 'OPTIONS'}.union(HelloWorld.methods))
    def test_exception_header_forwarded(self):
        """Headers produced by an HTTPException subclass must reach the client."""
        app = Flask(__name__)
        app.config['DEBUG'] = True
        api = flask_restful.Api(app)
        class NotModified(HTTPException):
            code = 304
            def __init__(self, etag, *args, **kwargs):
                super(NotModified, self).__init__(*args, **kwargs)
                self.etag = quote_etag(etag)
            def get_headers(self, *args, **kwargs):
                # Overrides HTTPException.get_headers so the ETag is emitted.
                return [('ETag', self.etag)]
        class Foo1(flask_restful.Resource):
            def get(self):
                flask_abort(304, etag='myETag')
        api.add_resource(Foo1, '/foo')
        # Make flask_abort(304, ...) raise NotModified.  NOTE(review): this
        # mutates werkzeug's global aborter mapping and is not restored
        # afterwards, so it can leak into other tests.
        _aborter.mapping.update({304: NotModified})
        with app.test_client() as client:
            foo = client.get('/foo')
            self.assertEquals(foo.get_etag(),
                              unquote_etag(quote_etag('myETag')))
def test_exception_header_forwarding_doesnt_duplicate_headers(self):
app = Flask(__name__)
api = flask_restful.Api(app)
with app.test_request_context('/'):
r = api.handle_error(BadRequest())
self.assertEqual(len(r.headers.getlist('Content-Length')), 1)
    def test_will_prettyprint_json_in_debug_mode(self):
        """In DEBUG mode, JSON responses should be pretty-printed (indented)."""
        app = Flask(__name__)
        app.config['DEBUG'] = True
        api = flask_restful.Api(app)
        class Foo1(flask_restful.Resource):
            def get(self):
                return {'foo': 'bar', 'baz': 'asdf'}
        api.add_resource(Foo1, '/foo', endpoint='bar')
        with app.test_client() as client:
            foo = client.get('/foo')
            # Dict key order is not guaranteed across Python versions, so we
            # don't compare exact output; we only assert the pretty-printed
            # shape: braces on their own lines with indented members between,
            # and a trailing newline.
            lines = foo.data.splitlines()
            lines = [line.decode() for line in lines]
            self.assertEquals("{", lines[0])
            self.assertTrue(lines[1].startswith(' '))
            self.assertTrue(lines[2].startswith(' '))
            self.assertEquals("}", lines[3])
            self.assertTrue(foo.data.endswith(b'\n'))
def test_read_json_settings_from_config(self):
class TestConfig(object):
RESTFUL_JSON = {'indent': 2,
'sort_keys': True,
'separators': (', ', ': ')}
app = Flask(__name__)
app.config.from_object(TestConfig)
api = flask_restful.Api(app)
class Foo(flask_restful.Resource):
def get(self):
return {'foo': 'bar', 'baz': 'qux'}
api.add_resource(Foo, '/foo')
with app.test_client() as client:
data = client.get('/foo').data
expected = b'{\n "baz": "qux", \n "foo": "bar"\n}\n'
self.assertEquals(data, expected)
def test_use_custom_jsonencoder(self):
class CabageEncoder(JSONEncoder):
def default(self, obj):
return 'cabbage'
class TestConfig(object):
RESTFUL_JSON = {'cls': CabageEncoder}
app = Flask(__name__)
app.config.from_object(TestConfig)
api = flask_restful.Api(app)
class Cabbage(flask_restful.Resource):
def get(self):
return {'frob': object()}
api.add_resource(Cabbage, '/cabbage')
with app.test_client() as client:
data = client.get('/cabbage').data
expected = b'{"frob": "cabbage"}\n'
self.assertEquals(data, expected)
def test_json_with_no_settings(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class Foo(flask_restful.Resource):
def get(self):
return {'foo': 'bar'}
api.add_resource(Foo, '/foo')
with app.test_client() as client:
data = client.get('/foo').data
expected = b'{"foo": "bar"}\n'
self.assertEquals(data, expected)
def test_redirect(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class FooResource(flask_restful.Resource):
def get(self):
return redirect('/')
api.add_resource(FooResource, '/api')
app = app.test_client()
resp = app.get('/api')
self.assertEquals(resp.status_code, 302)
self.assertEquals(resp.headers['Location'], 'http://localhost/')
def test_json_float_marshalled(self):
app = Flask(__name__)
api = flask_restful.Api(app)
class FooResource(flask_restful.Resource):
fields = {'foo': flask_restful.fields.Float}
def get(self):
return flask_restful.marshal({"foo": 3.0}, self.fields)
api.add_resource(FooResource, '/api')
app = app.test_client()
resp = app.get('/api')
self.assertEquals(resp.status_code, 200)
self.assertEquals(resp.data.decode('utf-8'), '{"foo": 3.0}\n')
def test_custom_error_message(self):
errors = {
'FooError': {
'message': "api is foobar",
'status': 418,
}
}
class FooError(ValueError):
pass
app = Flask(__name__)
api = flask_restful.Api(app, errors=errors)
exception = FooError()
exception.code = 400
exception.data = {'message': 'FooError'}
with app.test_request_context("/foo"):
resp = api.handle_error(exception)
self.assertEquals(resp.status_code, 418)
self.assertEqual(loads(resp.data.decode('utf8')), {"message": "api is foobar", "status": 418})
def test_calling_owns_endpoint_before_api_init(self):
api = flask_restful.Api()
try:
api.owns_endpoint('endpoint')
except AttributeError as ae:
self.fail(ae.message)
def test_selectively_apply_method_decorators(self):
def upper_deco(f):
def upper(*args, **kwargs):
return f(*args, **kwargs).upper()
return upper
class TestResource(flask_restful.Resource):
method_decorators = {'get': [upper_deco]}
def get(self):
return 'get test'
def post(self):
return 'post test'
app = Flask(__name__)
with app.test_request_context('/', method='POST'):
r = TestResource().dispatch_request()
assert r == 'post test'
with app.test_request_context('/', method='GET'):
r = TestResource().dispatch_request()
assert r == 'GET TEST'
def test_apply_all_method_decorators_if_not_mapping(self):
def upper_deco(f):
def upper(*args, **kwargs):
return f(*args, **kwargs).upper()
return upper
class TestResource(flask_restful.Resource):
method_decorators = [upper_deco]
def get(self):
return 'get test'
def post(self):
return 'post test'
app = Flask(__name__)
with app.test_request_context('/', method='POST'):
r = TestResource().dispatch_request()
assert r == 'POST TEST'
with app.test_request_context('/', method='GET'):
r = TestResource().dispatch_request()
assert r == 'GET TEST'
def test_decorators_only_applied_at_dispatch(self):
def upper_deco(f):
def upper(*args, **kwargs):
return f(*args, **kwargs).upper()
return upper
class TestResource(flask_restful.Resource):
method_decorators = [upper_deco]
def get(self):
return 'get test'
def post(self):
return 'post test'
r = TestResource()
assert r.get() == 'get test'
assert r.post() == 'post test'
# Allow running this test module directly (in addition to via a test runner).
if __name__ == '__main__':
    unittest.main()
| true | true |
f7f8e7146cfa372ab73c9e73a218cb539729f7df | 2,792 | py | Python | src/blog_app/blog_app.py | diogoosorio/blog | e0f9878d44354b17f4fc4a06942bc8e166558489 | [
"MIT"
] | null | null | null | src/blog_app/blog_app.py | diogoosorio/blog | e0f9878d44354b17f4fc4a06942bc8e166558489 | [
"MIT"
] | null | null | null | src/blog_app/blog_app.py | diogoosorio/blog | e0f9878d44354b17f4fc4a06942bc8e166558489 | [
"MIT"
] | null | null | null | import uuid
import re
from flask import Flask, redirect, render_template, g, abort, request, make_response
from flask_ink.ink import Ink
from flask_caching import Cache
from .settings import SETTINGS, CACHE_SETTINGS
from .repository import LocalRepository
from .parsers import BlogParser
from .pagination import BlogPagination
def build_app():
    """Build the Flask application plus its cache backend and return both."""
    flask_app = Flask(__name__)
    flask_app.config.update(SETTINGS)
    cache_backend = Cache(flask_app, config=CACHE_SETTINGS)
    # Wire the Ink asset/templating extension into the app.
    Ink(flask_app)
    return [flask_app, cache_backend]
# Module-level application and cache singletons used by the handlers below.
app, cache = build_app()  # pylint: disable=invalid-name
@app.before_request
def before_request():
    """Attach per-request helpers to ``g``: content repository and page number."""
    repo_dir = app.config['REPO_DIRECTORY']
    g.repository = LocalRepository(repo_dir, BlogParser(), cache,
                                   app.config['PAGESIZE'])
    # Pagination: fall back to page 1 for a missing or non-numeric ?page= arg.
    raw_page = request.args.get('page')
    g.page = int(raw_page) if raw_page is not None and raw_page.isdigit() else 1
@app.route('/')
def index():
    """Permanently redirect the site root to the blog index."""
    return redirect('/blog', 301)
@app.route('/blog/')
@cache.cached(timeout=1200, query_string=True)
def blog():
    """Render the paginated blog index (cached per URL + query string).

    BUGFIX: ``@cache.cached`` was previously the outermost decorator, so
    ``app.route`` registered the *uncached* function and the cache wrapper
    was never invoked.  ``app.route`` must be outermost (as ``rss`` already
    does).  ``query_string=True`` makes the cache key include ``?page=`` so
    pagination isn't served a single cached page.
    """
    template_variables = g.repository.getfiles('entries', g.page)
    template_variables['pagination'] = BlogPagination(
        page=g.page,
        total=template_variables['total'],
        per_page=app.config['PAGESIZE']
    )
    # Unknown/overshot page numbers have no entries: 404 rather than render.
    if not template_variables['entries']:
        abort(404)
    return render_template('blog.html', **template_variables)
@app.route('/blog/rss/')
@cache.cached(timeout=1200)
def rss():
    """Render the blog's Atom feed (cached for 20 minutes)."""
    # Fetch the current page of entries BEFORE shrinking the page size below;
    # the statement order here matters.
    template_variables = g.repository.getfiles('entries', g.page)
    g.repository.pagesize = 1
    last_entry = g.repository.getfiles('entries', 1)
    last_entry = last_entry['entries'][0] if last_entry['entries'] else None
    # The uuid module is handed to the template (presumably for feed entry
    # ids — confirm against atom.xml).
    template_variables['uuid'] = uuid
    template_variables['last_entry'] = last_entry
    response = make_response(render_template('atom.xml', **template_variables))
    response.headers['Content-Type'] = 'application/atom+xml'
    return response
@app.errorhandler(404)
def page_not_found(_e):
    """404 handler that permanently redirects legacy /blog/entry/<slug> URLs."""
    path = request.path
    legacy = re.match(r'^/blog/entry/([\w-]+)/?$', path, re.I)
    if legacy:
        slug = legacy.group(1)
        # Only redirect when the slug actually exists in the repository.
        if g.repository.getfile('entries', slug):
            return redirect("/blog/{0}".format(slug), 301)
    return render_template('404.html', path=path), 404
@app.route(u'/blog/<post_name>')
@cache.memoize(timeout=3600)
def blog_detail(post_name):
    """Render a single blog entry, or 404 if the slug is unknown.

    BUGFIX: ``@cache.memoize`` was previously the outermost decorator, so
    ``app.route`` registered the *unmemoized* function and the cache was
    never consulted.  ``app.route`` must be outermost, as ``rss`` does with
    ``cache.cached``; memoize then keys the cache on ``post_name``.
    """
    entry = g.repository.getfile('entries', post_name)
    if not entry:
        abort(404)
    template_variables = {
        'entry': entry,
        'title': entry['meta'].get('title'),
        'description': entry['meta'].get('description')
    }
    return render_template('detail.html', **template_variables)
# Run the development server when executed directly.
if __name__ == '__main__':
    app.run(host=app.config['HOST'])
| 25.153153 | 86 | 0.681232 | import uuid
import re
from flask import Flask, redirect, render_template, g, abort, request, make_response
from flask_ink.ink import Ink
from flask_caching import Cache
from .settings import SETTINGS, CACHE_SETTINGS
from .repository import LocalRepository
from .parsers import BlogParser
from .pagination import BlogPagination
def build_app():
    """Build the Flask application plus its cache backend and return both."""
    flask_app = Flask(__name__)
    flask_app.config.update(SETTINGS)
    cache_backend = Cache(flask_app, config=CACHE_SETTINGS)
    # Wire the Ink asset/templating extension into the app.
    Ink(flask_app)
    return [flask_app, cache_backend]
# Module-level application and cache singletons used by the handlers below.
app, cache = build_app()
@app.before_request
def before_request():
    # Build a fresh content repository for this request and expose it on g.
    content_dir = app.config['REPO_DIRECTORY']
    parser = BlogParser()
    g.repository = LocalRepository(content_dir, parser, cache, app.config['PAGESIZE'])
    # Pagination: fall back to page 1 for a missing or non-numeric ?page= arg.
    page = request.args.get('page')
    page = int(page) if page is not None and page.isdigit() else 1
    g.page = page
@app.route('/')
def index():
    """Permanently redirect the site root to the blog index."""
    return redirect('/blog', 301)
@app.route('/blog/')
@cache.cached(timeout=1200, query_string=True)
def blog():
    """Render the paginated blog index (cached per URL + query string).

    BUGFIX: ``@cache.cached`` was previously the outermost decorator, so
    ``app.route`` registered the *uncached* function and the cache wrapper
    was never invoked.  ``app.route`` must be outermost (as ``rss`` already
    does).  ``query_string=True`` makes the cache key include ``?page=`` so
    pagination isn't served a single cached page.
    """
    template_variables = g.repository.getfiles('entries', g.page)
    template_variables['pagination'] = BlogPagination(
        page=g.page,
        total=template_variables['total'],
        per_page=app.config['PAGESIZE']
    )
    # Unknown/overshot page numbers have no entries: 404 rather than render.
    if not template_variables['entries']:
        abort(404)
    return render_template('blog.html', **template_variables)
@app.route('/blog/rss/')
@cache.cached(timeout=1200)
def rss():
    # Fetch the current page of entries BEFORE shrinking the page size below;
    # the statement order here matters.
    template_variables = g.repository.getfiles('entries', g.page)
    g.repository.pagesize = 1
    last_entry = g.repository.getfiles('entries', 1)
    last_entry = last_entry['entries'][0] if last_entry['entries'] else None
    # The uuid module is handed to the template (presumably for feed entry
    # ids — confirm against atom.xml).
    template_variables['uuid'] = uuid
    template_variables['last_entry'] = last_entry
    response = make_response(render_template('atom.xml', **template_variables))
    response.headers['Content-Type'] = 'application/atom+xml'
    return response
@app.errorhandler(404)
def page_not_found(_e):
    # Legacy URLs of the form /blog/entry/<slug> are permanently redirected
    # to the new /blog/<slug> scheme when the slug still exists.
    path = request.path
    legacy_match = re.match(r'^/blog/entry/([\w-]+)/?$', path, re.I)
    if legacy_match:
        slug = legacy_match.group(1)
        entry = g.repository.getfile('entries', slug)
        if entry:
            return redirect("/blog/{0}".format(slug), 301)
    return render_template('404.html', path=path), 404
@app.route(u'/blog/<post_name>')
@cache.memoize(timeout=3600)
def blog_detail(post_name):
    """Render a single blog entry, or 404 if the slug is unknown.

    BUGFIX: ``@cache.memoize`` was previously the outermost decorator, so
    ``app.route`` registered the *unmemoized* function and the cache was
    never consulted.  ``app.route`` must be outermost, as ``rss`` does with
    ``cache.cached``; memoize then keys the cache on ``post_name``.
    """
    entry = g.repository.getfile('entries', post_name)
    if not entry:
        abort(404)
    template_variables = {
        'entry': entry,
        'title': entry['meta'].get('title'),
        'description': entry['meta'].get('description')
    }
    return render_template('detail.html', **template_variables)
# Run the development server when executed directly.
if __name__ == '__main__':
    app.run(host=app.config['HOST'])
| true | true |
f7f8e777ca2a69e11f577cc6ce9cad4dff445c98 | 22,813 | py | Python | old/ms/openid/gaesession.py | jcnelson/syndicate | 4837265be3e0aa18cdf4ee50316dbfc2d1f06e5b | [
"Apache-2.0"
] | 16 | 2015-01-02T15:39:04.000Z | 2016-03-17T06:38:46.000Z | old/ms/openid/gaesession.py | jcnelson/syndicate | 4837265be3e0aa18cdf4ee50316dbfc2d1f06e5b | [
"Apache-2.0"
] | 37 | 2015-01-28T20:58:05.000Z | 2016-03-22T04:01:32.000Z | old/ms/openid/gaesession.py | jcnelson/syndicate | 4837265be3e0aa18cdf4ee50316dbfc2d1f06e5b | [
"Apache-2.0"
] | 8 | 2015-04-08T02:26:03.000Z | 2016-03-04T05:56:24.000Z | """
Copyright 2013 The Trustees of Princeton University
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""A fast, lightweight, and secure session WSGI middleware for use with GAE."""
"""copied from https://raw.github.com/dound/gae-sessions/master/gaesessions/__init__.py"""
from Cookie import CookieError, SimpleCookie
from base64 import b64decode, b64encode
import datetime
import hashlib
import hmac
import logging
import pickle
import os
import threading
import time
from google.appengine.api import memcache
from google.appengine.ext import db
# Configurable cookie options
COOKIE_NAME_PREFIX = "DgU" # identifies a cookie as being one used by gae-sessions (so you can set cookies too)
COOKIE_PATH = "/"
DEFAULT_COOKIE_ONLY_THRESH = 10240 # 10KB: GAE only allows ~16000B in HTTP header - leave ~6KB for other info
DEFAULT_LIFETIME = datetime.timedelta(days=7)
DEFAULT_LIFETIME_SEC = 24 * 3600 * 7
# constants
SID_LEN = 43 # timestamp (10 chars) + underscore + md5 (32 hex chars)
SIG_LEN = 44 # base 64 encoded HMAC-SHA256
MAX_COOKIE_LEN = 4096
EXPIRE_COOKIE_FMT = ' %s=; expires=Wed, 01-Jan-1970 00:00:00 GMT; Path=' + COOKIE_PATH
COOKIE_FMT = ' ' + COOKIE_NAME_PREFIX + '%02d="%s"; %sPath=' + COOKIE_PATH + '; HttpOnly'
COOKIE_FMT_SECURE = COOKIE_FMT + '; Secure'
COOKIE_DATE_FMT = '%a, %d-%b-%Y %H:%M:%S GMT'
COOKIE_OVERHEAD = len(COOKIE_FMT % (0, '', '')) + len('expires=Xxx, xx XXX XXXX XX:XX:XX GMT; ') + 150 # 150=safety margin (e.g., in case browser uses 4000 instead of 4096)
MAX_DATA_PER_COOKIE = MAX_COOKIE_LEN - COOKIE_OVERHEAD
SESSION_COOKIE_KEY = "syndicatemsopenidsessioncookiekey"
_tls = threading.local()
def get_current_session():
    """Returns the session associated with the current request.

    Cleanup: removed the dead local ``ses`` that fetched and discarded
    ``_tls.current_session`` before it was fetched again for the return.
    Raises AttributeError if SessionMiddleware has not yet installed a
    session for this thread.
    """
    return _tls.current_session
def set_current_session(session):
    """Sets the session associated with the current request."""
    # Stored on a thread-local so concurrent requests never share a session.
    _tls.current_session = session
def is_gaesessions_key(k):
    # True when cookie name k belongs to gae-sessions (prefix match).
    return k.startswith(COOKIE_NAME_PREFIX)
class SessionModel(db.Model):
    """Contains session data. key_name is the session ID and pdump contains a
    pickled dictionary which maps session variables to their values."""
    # "pickled+" session dict blob; see Session.__encode_data for the format.
    pdump = db.BlobProperty()
class Session(object):
    """Manages loading, reading/writing key-value pairs, and saving of a session.
    ``sid`` - if set, then the session for that sid (if any) is loaded. Otherwise,
    sid will be loaded from the HTTP_COOKIE (if any).
    """
    # Sentinel truthy value for self.dirty: data changed, but only the
    # cookie/memcache copy needs updating, not the datastore.
    DIRTY_BUT_DONT_PERSIST_TO_DB = 1
    def __init__(self, sid=None, lifetime=DEFAULT_LIFETIME, no_datastore=False,
                 cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH, cookie_key=None):
        self._accessed = False
        self.sid = None
        self.cookie_keys = []
        self.cookie_data = None # pickled payload destined for the cookie, if small enough
        self.data = {}
        self.dirty = False # has the session been changed?
        self.lifetime = lifetime
        self.no_datastore = no_datastore
        self.cookie_only_thresh = cookie_only_threshold
        self.base_key = cookie_key # secret used to HMAC-sign cookie payloads
        if sid:
            self.__set_sid(sid, False)
            self.data = None
        else:
            self.__read_cookie()
    @staticmethod
    def __compute_hmac(base_key, sid, text):
        """Computes the signature for text given base_key and sid."""
        key = base_key + sid
        return b64encode(hmac.new(key, text, hashlib.sha256).digest())
    def __read_cookie(self):
        """Reads the HTTP Cookie and loads the sid and data from it (if any)."""
        try:
            # check the cookie to see if a session has been started
            cookie = SimpleCookie(os.environ['HTTP_COOKIE'])
            self.cookie_keys = filter(is_gaesessions_key, cookie.keys())
            if not self.cookie_keys:
                return # no session yet
            self.cookie_keys.sort()
            data = ''.join(cookie[k].value for k in self.cookie_keys)
            i = SIG_LEN + SID_LEN
            sig, sid, b64pdump = data[:SIG_LEN], data[SIG_LEN:i], data[i:]
            pdump = b64decode(b64pdump)
            actual_sig = Session.__compute_hmac(self.base_key, sid, pdump)
            # NOTE(review): plain == is not a constant-time comparison;
            # consider hmac.compare_digest to avoid a timing side-channel.
            if sig == actual_sig:
                self.__set_sid(sid, False)
                # check for expiration and terminate the session if it has expired
                if self.get_expiration() != 0 and time.time() > self.get_expiration():
                    return self.terminate()
                if pdump:
                    self.data = self.__decode_data(pdump)
                else:
                    self.data = None # data is in memcache/db: load it on-demand
            else:
                logging.warn('cookie with invalid sig received from %s: %s' % (os.environ.get('REMOTE_ADDR'), b64pdump))
        except (CookieError, KeyError, IndexError, TypeError):
            # there is no cookie (i.e., no session) or the cookie is invalid
            self.terminate(False)
    def make_cookie_headers(self):
        """Returns a list of cookie headers to send (if any)."""
        # expire all cookies if the session has ended
        if not self.sid:
            return [EXPIRE_COOKIE_FMT % k for k in self.cookie_keys]
        if self.cookie_data is None:
            return [] # no cookie headers need to be sent
        # build the cookie header(s): includes sig, sid, and cookie_data
        if self.is_ssl_only():
            m = MAX_DATA_PER_COOKIE - 8
            fmt = COOKIE_FMT_SECURE
        else:
            m = MAX_DATA_PER_COOKIE
            fmt = COOKIE_FMT
        sig = Session.__compute_hmac(self.base_key, self.sid, self.cookie_data)
        cv = sig + self.sid + b64encode(self.cookie_data)
        # ceiling division: this module targets Python 2, where / on ints truncates
        num_cookies = 1 + (len(cv) - 1) / m
        if self.get_expiration() > 0:
            ed = "expires=%s; " % datetime.datetime.fromtimestamp(self.get_expiration()).strftime(COOKIE_DATE_FMT)
        else:
            ed = ''
        cookies = [fmt % (i, cv[i * m:i * m + m], ed) for i in xrange(num_cookies)]
        # expire old cookies which aren't needed anymore
        old_cookies = xrange(num_cookies, len(self.cookie_keys))
        key = COOKIE_NAME_PREFIX + '%02d'
        cookies_to_ax = [EXPIRE_COOKIE_FMT % (key % i) for i in old_cookies]
        return cookies + cookies_to_ax
    def is_active(self):
        """Returns True if this session is active (i.e., it has been assigned a
        session ID and will be or has been persisted)."""
        return self.sid is not None
    def is_ssl_only(self):
        """Returns True if cookies set by this session will include the "Secure"
        attribute so that the client will only send them over a secure channel
        like SSL)."""
        # index -33 is the separator between the timestamp and the md5 part of
        # the sid; 'S' marks SSL-only sids (see __make_sid).
        return self.sid is not None and self.sid[-33] == 'S'
    def is_accessed(self):
        """Returns True if any value of this session has been accessed."""
        return self._accessed
    def ensure_data_loaded(self):
        """Fetch the session data if it hasn't been retrieved it yet."""
        self._accessed = True
        if self.data is None and self.sid:
            self.__retrieve_data()
    def get_expiration(self):
        """Returns the timestamp at which this session will expire."""
        try:
            return int(self.sid[:-33])
        except:
            # sid is None or malformed; treat as "no expiration"
            return 0
    def __make_sid(self, expire_ts=None, ssl_only=False):
        """Returns a new session ID."""
        # make a random ID (random.randrange() is 10x faster but less secure?)
        if expire_ts is None:
            expire_dt = datetime.datetime.now() + self.lifetime
            expire_ts = int(time.mktime((expire_dt).timetuple()))
        else:
            expire_ts = int(expire_ts)
        if ssl_only:
            sep = 'S'
        else:
            sep = '_'
        return ('%010d' % expire_ts) + sep + hashlib.md5(os.urandom(16)).hexdigest()
    @staticmethod
    def __encode_data(d):
        """Returns a "pickled+" encoding of d. d values of type db.Model are
        protobuf encoded before pickling to minimize CPU usage & data size."""
        # separate protobufs so we'll know how to decode (they are just strings)
        eP = {} # for models encoded as protobufs
        eO = {} # for everything else
        for k, v in d.iteritems():
            if isinstance(v, db.Model):
                eP[k] = db.model_to_protobuf(v)
            else:
                eO[k] = v
        return pickle.dumps((eP, eO), 2)
    @staticmethod
    def __decode_data(pdump):
        """Returns a data dictionary after decoding it from "pickled+" form."""
        try:
            eP, eO = pickle.loads(pdump)
            for k, v in eP.iteritems():
                eO[k] = db.model_from_protobuf(v)
        except Exception, e:
            # corrupt/incompatible pickle: fall back to an empty session dict
            logging.warn("failed to decode session data: %s" % e)
            eO = {}
        return eO
    def regenerate_id(self, expiration_ts=None):
        """Assigns the session a new session ID (data carries over). This
        should be called whenever a user authenticates to prevent session
        fixation attacks.
        ``expiration_ts`` - The UNIX timestamp the session will expire at. If
        omitted, the session expiration time will not be changed.
        """
        if self.sid or expiration_ts is not None:
            self.ensure_data_loaded() # ensure we have the data before we delete it
            if expiration_ts is None:
                expiration_ts = self.get_expiration()
            self.__set_sid(self.__make_sid(expiration_ts, self.is_ssl_only()))
            self.dirty = True # ensure the data is written to the new session
    def start(self, expiration_ts=None, ssl_only=False):
        """Starts a new session. expiration specifies when it will expire. If
        expiration is not specified, then self.lifetime will used to
        determine the expiration date.
        Normally this method does not need to be called directly - a session is
        automatically started when the first value is added to the session.
        ``expiration_ts`` - The UNIX timestamp the session will expire at. If
        omitted, the session will expire after the default ``lifetime`` has past
        (as specified in ``SessionMiddleware``).
        ``ssl_only`` - Whether to specify the "Secure" attribute on the cookie
        so that the client will ONLY transfer the cookie over a secure channel.
        """
        self.dirty = True
        self.data = {}
        self.__set_sid(self.__make_sid(expiration_ts, ssl_only), True)
    def terminate(self, clear_data=True):
        """Deletes the session and its data, and expires the user's cookie."""
        if clear_data:
            self.__clear_data()
        self.sid = None
        self.data = {}
        self.dirty = False
        if self.cookie_keys:
            self.cookie_data = '' # trigger the cookies to expire
        else:
            self.cookie_data = None
    def __set_sid(self, sid, make_cookie=True):
        """Sets the session ID, deleting the old session if one existed. The
        session's data will remain intact (only the session ID changes)."""
        if self.sid:
            self.__clear_data()
        self.sid = sid
        self.db_key = db.Key.from_path(SessionModel.kind(), sid, namespace='')
        # set the cookie if requested
        if make_cookie:
            self.cookie_data = '' # trigger the cookie to be sent
    def __clear_data(self):
        """Deletes this session from memcache and the datastore."""
        if self.sid:
            memcache.delete(self.sid, namespace='') # not really needed; it'll go away on its own
            try:
                db.delete(self.db_key)
            except:
                pass # either it wasn't in the db (maybe cookie/memcache-only) or db is down => cron will expire it
    def __retrieve_data(self):
        """Sets the data associated with this session after retrieving it from
        memcache or the datastore. Assumes self.sid is set. Checks for session
        expiration after getting the data."""
        pdump = memcache.get(self.sid, namespace='')
        if pdump is None:
            # memcache lost it, go to the datastore
            if self.no_datastore:
                logging.info("can't find session data in memcache for sid=%s (using memcache only sessions)" % self.sid)
                self.terminate(False) # we lost it; just kill the session
                return
            session_model_instance = db.get(self.db_key)
            if session_model_instance:
                pdump = session_model_instance.pdump
            else:
                logging.error("can't find session data in the datastore for sid=%s" % self.sid)
                self.terminate(False) # we lost it; just kill the session
                return
        self.data = self.__decode_data(pdump)
    def save(self, persist_even_if_using_cookie=False):
        """Saves the data associated with this session IF any changes have been
        made (specifically, if any mutator methods like __setitem__ or the like
        is called).
        If the data is small enough it will be sent back to the user in a cookie
        instead of using memcache and the datastore. If `persist_even_if_using_cookie`
        evaluates to True, memcache and the datastore will also be used. If the
        no_datastore option is set, then the datastore will never be used.
        Normally this method does not need to be called directly - a session is
        automatically saved at the end of the request if any changes were made.
        """
        if not self.sid:
            return # no session is active
        if not self.dirty:
            return # nothing has changed
        dirty = self.dirty
        self.dirty = False # saving, so it won't be dirty anymore
        # do the pickling ourselves b/c we need it for the datastore anyway
        pdump = self.__encode_data(self.data)
        # persist via cookies if it is reasonably small
        if len(pdump) * 4 / 3 <= self.cookie_only_thresh: # 4/3 b/c base64 is ~33% bigger
            self.cookie_data = pdump
            if not persist_even_if_using_cookie:
                return
        elif self.cookie_keys:
            # latest data will only be in the backend, so expire data cookies we set
            self.cookie_data = ''
        memcache.set(self.sid, pdump, namespace='', time=self.get_expiration()) # may fail if memcache is down
        # persist the session to the datastore
        if not persist_even_if_using_cookie and (dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB or self.no_datastore):
            return
        try:
            SessionModel(key_name=self.sid, pdump=pdump).put()
        except Exception, e:
            logging.warning("unable to persist session to datastore for sid=%s (%s)" % (self.sid, e))
    # Users may interact with the session through a dictionary-like interface.
    def clear(self):
        """Removes all data from the session (but does not terminate it)."""
        if self.sid:
            self.data = {}
            self.dirty = True
    def get(self, key, default=None):
        """Retrieves a value from the session."""
        self.ensure_data_loaded()
        return self.data.get(key, default)
    def has_key(self, key):
        """Returns True if key is set."""
        self.ensure_data_loaded()
        return key in self.data
    def pop(self, key, default=None):
        """Removes key and returns its value, or default if key is not present."""
        self.ensure_data_loaded()
        self.dirty = True
        return self.data.pop(key, default)
    def pop_quick(self, key, default=None):
        """Removes key and returns its value, or default if key is not present.
        The change will only be persisted to memcache until another change
        necessitates a write to the datastore."""
        self.ensure_data_loaded()
        if self.dirty is False:
            self.dirty = Session.DIRTY_BUT_DONT_PERSIST_TO_DB
        return self.data.pop(key, default)
    def set_quick(self, key, value):
        """Set a value named key on this session. The change will only be
        persisted to memcache until another change necessitates a write to the
        datastore. This will start a session if one is not already active."""
        dirty = self.dirty
        self[key] = value
        if dirty is False or dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB:
            self.dirty = Session.DIRTY_BUT_DONT_PERSIST_TO_DB
    def __getitem__(self, key):
        """Returns the value associated with key on this session."""
        self.ensure_data_loaded()
        return self.data.__getitem__(key)
    def __setitem__(self, key, value):
        """Set a value named key on this session. This will start a session if
        one is not already active."""
        self.ensure_data_loaded()
        if not self.sid:
            self.start()
        self.data.__setitem__(key, value)
        self.dirty = True
    def __delitem__(self, key):
        """Deletes the value associated with key on this session."""
        self.ensure_data_loaded()
        self.data.__delitem__(key)
        self.dirty = True
    def __iter__(self):
        """Returns an iterator over the keys (names) of the stored values."""
        self.ensure_data_loaded()
        return self.data.iterkeys()
    def __contains__(self, key):
        """Returns True if key is present on this session."""
        self.ensure_data_loaded()
        return self.data.__contains__(key)
    def __str__(self):
        """Returns a string representation of the session."""
        if self.sid:
            self.ensure_data_loaded()
            return "SID=%s %s" % (self.sid, self.data)
        else:
            return "uninitialized session"
class SessionMiddleware(object):
    """WSGI middleware that adds session support.
    ``cookie_key`` - A key used to secure cookies so users cannot modify their
    content. Keys should be at least 32 bytes (RFC2104). Tip: generate your
    key using ``os.urandom(64)`` but do this OFFLINE and copy/paste the output
    into a string which you pass in as ``cookie_key``. If you use ``os.urandom()``
    to dynamically generate your key at runtime then any existing sessions will
    become junk every time your app starts up!
    ``lifetime`` - ``datetime.timedelta`` that specifies how long a session may last. Defaults to 7 days.
    ``no_datastore`` - By default all writes also go to the datastore in case
    memcache is lost. Set to True to never use the datastore. This improves
    write performance but sessions may be occassionally lost.
    ``cookie_only_threshold`` - A size in bytes. If session data is less than this
    threshold, then session data is kept only in a secure cookie. This avoids
    memcache/datastore latency which is critical for small sessions. Larger
    sessions are kept in memcache+datastore instead. Defaults to 10KB.
    """
    def __init__(self, app, cookie_key, lifetime=DEFAULT_LIFETIME, no_datastore=False, cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH):
        self.app = app
        self.lifetime = lifetime
        self.no_datastore = no_datastore
        self.cookie_only_thresh = cookie_only_threshold
        self.cookie_key = cookie_key
        if not self.cookie_key:
            raise ValueError("cookie_key MUST be specified")
        if len(self.cookie_key) < 32:
            raise ValueError("RFC2104 recommends you use at least a 32 character key. Try os.urandom(64) to make a key.")
    def __call__(self, environ, start_response):
        # initialize a session for the current user
        _tls.current_session = Session(lifetime=self.lifetime, no_datastore=self.no_datastore, cookie_only_threshold=self.cookie_only_thresh, cookie_key=self.cookie_key)
        # create a hook for us to insert a cookie into the response headers
        def my_start_response(status, headers, exc_info=None):
            _tls.current_session.save() # store the session if it was changed
            for ch in _tls.current_session.make_cookie_headers():
                headers.append(('Set-Cookie', ch))
            return start_response(status, headers, exc_info)
        # NOTE(review): this sets a Django settings env var on *every* request;
        # it looks like a leftover from the (commented-out) Django integration
        # below — confirm it is still needed here.
        os.environ['DJANGO_SETTINGS_MODULE'] = 'django_syndicate_web.settings'
        # let the app do its thing
        return self.app(environ, my_start_response)
"""
class DjangoSessionMiddleware(object):
#Django middleware that adds session support. You must specify the
#session configuration parameters by modifying the call to ``SessionMiddleware``
#in ``DjangoSessionMiddleware.__init__()`` since Django cannot call an
#initialization method with parameters.
def __init__(self):
fake_app = lambda environ, start_response: start_response
self.wrapped_wsgi_middleware = SessionMiddleware(fake_app, cookie_key=SESSION_COOKIE_KEY)
self.response_handler = None
def process_request(self, request):
self.response_handler = self.wrapped_wsgi_middleware(None, lambda status, headers, exc_info: headers)
request.session = get_current_session() # for convenience
def process_response(self, request, response):
if self.response_handler:
session_headers = self.response_handler(None, [], None)
for k, v in session_headers:
response[k] = v
self.response_handler = None
if hasattr(request, 'session') and request.session.is_accessed():
from django.utils.cache import patch_vary_headers
logging.info("Varying")
patch_vary_headers(response, ('Cookie',))
return response
"""
def delete_expired_sessions():
    """Deletes expired sessions from the datastore.
    Session key names begin with the session's 10-digit expiration timestamp,
    so every key that sorts before "now" belongs to an expired session.  At
    most 500 expired sessions are removed per call; returns True once all
    expired sessions have been removed.
    """
    cutoff = db.Key.from_path('SessionModel',
                              unicode(int(time.time())) + u'\ufffd',
                              namespace='')
    query = db.Query(SessionModel, keys_only=True, namespace='')
    query.filter('__key__ < ', cutoff)
    expired = query.fetch(500)
    db.delete(expired)
    logging.info('gae-sessions: deleted %d expired sessions from the datastore' % len(expired))
    return len(expired) < 500
| 42.168207 | 173 | 0.646517 | """
Copyright 2013 The Trustees of Princeton University
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""A fast, lightweight, and secure session WSGI middleware for use with GAE."""
"""copied from https://raw.github.com/dound/gae-sessions/master/gaesessions/__init__.py"""
from Cookie import CookieError, SimpleCookie
from base64 import b64decode, b64encode
import datetime
import hashlib
import hmac
import logging
import pickle
import os
import threading
import time
from google.appengine.api import memcache
from google.appengine.ext import db
COOKIE_NAME_PREFIX = "DgU"
COOKIE_PATH = "/"
DEFAULT_COOKIE_ONLY_THRESH = 10240
DEFAULT_LIFETIME = datetime.timedelta(days=7)
DEFAULT_LIFETIME_SEC = 24 * 3600 * 7
SID_LEN = 43
SIG_LEN = 44
MAX_COOKIE_LEN = 4096
EXPIRE_COOKIE_FMT = ' %s=; expires=Wed, 01-Jan-1970 00:00:00 GMT; Path=' + COOKIE_PATH
COOKIE_FMT = ' ' + COOKIE_NAME_PREFIX + '%02d="%s"; %sPath=' + COOKIE_PATH + '; HttpOnly'
COOKIE_FMT_SECURE = COOKIE_FMT + '; Secure'
COOKIE_DATE_FMT = '%a, %d-%b-%Y %H:%M:%S GMT'
COOKIE_OVERHEAD = len(COOKIE_FMT % (0, '', '')) + len('expires=Xxx, xx XXX XXXX XX:XX:XX GMT; ') + 150
MAX_DATA_PER_COOKIE = MAX_COOKIE_LEN - COOKIE_OVERHEAD
SESSION_COOKIE_KEY = "syndicatemsopenidsessioncookiekey"
_tls = threading.local()
def get_current_session():
"""Returns the session associated with the current request."""
ses = _tls.current_session
return _tls.current_session
def set_current_session(session):
"""Sets the session associated with the current request."""
_tls.current_session = session
def is_gaesessions_key(k):
return k.startswith(COOKIE_NAME_PREFIX)
class SessionModel(db.Model):
"""Contains session data. key_name is the session ID and pdump contains a
pickled dictionary which maps session variables to their values."""
pdump = db.BlobProperty()
class Session(object):
"""Manages loading, reading/writing key-value pairs, and saving of a session.
``sid`` - if set, then the session for that sid (if any) is loaded. Otherwise,
sid will be loaded from the HTTP_COOKIE (if any).
"""
DIRTY_BUT_DONT_PERSIST_TO_DB = 1
def __init__(self, sid=None, lifetime=DEFAULT_LIFETIME, no_datastore=False,
cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH, cookie_key=None):
self._accessed = False
self.sid = None
self.cookie_keys = []
self.cookie_data = None
self.data = {}
self.dirty = False
self.lifetime = lifetime
self.no_datastore = no_datastore
self.cookie_only_thresh = cookie_only_threshold
self.base_key = cookie_key
if sid:
self.__set_sid(sid, False)
self.data = None
else:
self.__read_cookie()
@staticmethod
def __compute_hmac(base_key, sid, text):
"""Computes the signature for text given base_key and sid."""
key = base_key + sid
return b64encode(hmac.new(key, text, hashlib.sha256).digest())
def __read_cookie(self):
"""Reads the HTTP Cookie and loads the sid and data from it (if any)."""
try:
cookie = SimpleCookie(os.environ['HTTP_COOKIE'])
self.cookie_keys = filter(is_gaesessions_key, cookie.keys())
if not self.cookie_keys:
return
self.cookie_keys.sort()
data = ''.join(cookie[k].value for k in self.cookie_keys)
i = SIG_LEN + SID_LEN
sig, sid, b64pdump = data[:SIG_LEN], data[SIG_LEN:i], data[i:]
pdump = b64decode(b64pdump)
actual_sig = Session.__compute_hmac(self.base_key, sid, pdump)
if sig == actual_sig:
self.__set_sid(sid, False)
if self.get_expiration() != 0 and time.time() > self.get_expiration():
return self.terminate()
if pdump:
self.data = self.__decode_data(pdump)
else:
self.data = None
else:
logging.warn('cookie with invalid sig received from %s: %s' % (os.environ.get('REMOTE_ADDR'), b64pdump))
except (CookieError, KeyError, IndexError, TypeError):
self.terminate(False)
def make_cookie_headers(self):
"""Returns a list of cookie headers to send (if any)."""
if not self.sid:
return [EXPIRE_COOKIE_FMT % k for k in self.cookie_keys]
if self.cookie_data is None:
return []
if self.is_ssl_only():
m = MAX_DATA_PER_COOKIE - 8
fmt = COOKIE_FMT_SECURE
else:
m = MAX_DATA_PER_COOKIE
fmt = COOKIE_FMT
sig = Session.__compute_hmac(self.base_key, self.sid, self.cookie_data)
cv = sig + self.sid + b64encode(self.cookie_data)
num_cookies = 1 + (len(cv) - 1) / m
if self.get_expiration() > 0:
ed = "expires=%s; " % datetime.datetime.fromtimestamp(self.get_expiration()).strftime(COOKIE_DATE_FMT)
else:
ed = ''
cookies = [fmt % (i, cv[i * m:i * m + m], ed) for i in xrange(num_cookies)]
old_cookies = xrange(num_cookies, len(self.cookie_keys))
key = COOKIE_NAME_PREFIX + '%02d'
cookies_to_ax = [EXPIRE_COOKIE_FMT % (key % i) for i in old_cookies]
return cookies + cookies_to_ax
def is_active(self):
"""Returns True if this session is active (i.e., it has been assigned a
session ID and will be or has been persisted)."""
return self.sid is not None
def is_ssl_only(self):
"""Returns True if cookies set by this session will include the "Secure"
attribute so that the client will only send them over a secure channel
like SSL)."""
return self.sid is not None and self.sid[-33] == 'S'
def is_accessed(self):
"""Returns True if any value of this session has been accessed."""
return self._accessed
def ensure_data_loaded(self):
"""Fetch the session data if it hasn't been retrieved it yet."""
self._accessed = True
if self.data is None and self.sid:
self.__retrieve_data()
def get_expiration(self):
"""Returns the timestamp at which this session will expire."""
try:
return int(self.sid[:-33])
except:
return 0
def __make_sid(self, expire_ts=None, ssl_only=False):
"""Returns a new session ID."""
if expire_ts is None:
expire_dt = datetime.datetime.now() + self.lifetime
expire_ts = int(time.mktime((expire_dt).timetuple()))
else:
expire_ts = int(expire_ts)
if ssl_only:
sep = 'S'
else:
sep = '_'
return ('%010d' % expire_ts) + sep + hashlib.md5(os.urandom(16)).hexdigest()
@staticmethod
def __encode_data(d):
"""Returns a "pickled+" encoding of d. d values of type db.Model are
protobuf encoded before pickling to minimize CPU usage & data size."""
eP = {} # for models encoded as protobufs
eO = {} # for everything else
for k, v in d.iteritems():
if isinstance(v, db.Model):
eP[k] = db.model_to_protobuf(v)
else:
eO[k] = v
return pickle.dumps((eP, eO), 2)
@staticmethod
def __decode_data(pdump):
"""Returns a data dictionary after decoding it from "pickled+" form."""
try:
eP, eO = pickle.loads(pdump)
for k, v in eP.iteritems():
eO[k] = db.model_from_protobuf(v)
except Exception, e:
logging.warn("failed to decode session data: %s" % e)
eO = {}
return eO
def regenerate_id(self, expiration_ts=None):
"""Assigns the session a new session ID (data carries over). This
should be called whenever a user authenticates to prevent session
fixation attacks.
``expiration_ts`` - The UNIX timestamp the session will expire at. If
omitted, the session expiration time will not be changed.
"""
if self.sid or expiration_ts is not None:
self.ensure_data_loaded() # ensure we have the data before we delete it
if expiration_ts is None:
expiration_ts = self.get_expiration()
self.__set_sid(self.__make_sid(expiration_ts, self.is_ssl_only()))
self.dirty = True # ensure the data is written to the new session
def start(self, expiration_ts=None, ssl_only=False):
"""Starts a new session. expiration specifies when it will expire. If
expiration is not specified, then self.lifetime will used to
determine the expiration date.
Normally this method does not need to be called directly - a session is
automatically started when the first value is added to the session.
``expiration_ts`` - The UNIX timestamp the session will expire at. If
omitted, the session will expire after the default ``lifetime`` has past
(as specified in ``SessionMiddleware``).
``ssl_only`` - Whether to specify the "Secure" attribute on the cookie
so that the client will ONLY transfer the cookie over a secure channel.
"""
self.dirty = True
self.data = {}
self.__set_sid(self.__make_sid(expiration_ts, ssl_only), True)
def terminate(self, clear_data=True):
"""Deletes the session and its data, and expires the user's cookie."""
if clear_data:
self.__clear_data()
self.sid = None
self.data = {}
self.dirty = False
if self.cookie_keys:
self.cookie_data = ''
else:
self.cookie_data = None
def __set_sid(self, sid, make_cookie=True):
"""Sets the session ID, deleting the old session if one existed. The
session's data will remain intact (only the session ID changes)."""
if self.sid:
self.__clear_data()
self.sid = sid
self.db_key = db.Key.from_path(SessionModel.kind(), sid, namespace='')
# set the cookie if requested
if make_cookie:
self.cookie_data = '' # trigger the cookie to be sent
def __clear_data(self):
"""Deletes this session from memcache and the datastore."""
if self.sid:
memcache.delete(self.sid, namespace='') # not really needed; it'll go away on its own
try:
db.delete(self.db_key)
except:
pass
def __retrieve_data(self):
"""Sets the data associated with this session after retrieving it from
memcache or the datastore. Assumes self.sid is set. Checks for session
expiration after getting the data."""
pdump = memcache.get(self.sid, namespace='')
if pdump is None:
# memcache lost it, go to the datastore
if self.no_datastore:
logging.info("can't find session data in memcache for sid=%s (using memcache only sessions)" % self.sid)
self.terminate(False)
return
session_model_instance = db.get(self.db_key)
if session_model_instance:
pdump = session_model_instance.pdump
else:
logging.error("can't find session data in the datastore for sid=%s" % self.sid)
self.terminate(False) # we lost it; just kill the session
return
self.data = self.__decode_data(pdump)
def save(self, persist_even_if_using_cookie=False):
"""Saves the data associated with this session IF any changes have been
made (specifically, if any mutator methods like __setitem__ or the like
is called).
If the data is small enough it will be sent back to the user in a cookie
instead of using memcache and the datastore. If `persist_even_if_using_cookie`
evaluates to True, memcache and the datastore will also be used. If the
no_datastore option is set, then the datastore will never be used.
Normally this method does not need to be called directly - a session is
automatically saved at the end of the request if any changes were made.
"""
if not self.sid:
return # no session is active
if not self.dirty:
return # nothing has changed
dirty = self.dirty
self.dirty = False # saving, so it won't be dirty anymore
pdump = self.__encode_data(self.data)
if len(pdump) * 4 / 3 <= self.cookie_only_thresh:
self.cookie_data = pdump
if not persist_even_if_using_cookie:
return
elif self.cookie_keys:
self.cookie_data = ''
memcache.set(self.sid, pdump, namespace='', time=self.get_expiration())
if not persist_even_if_using_cookie and (dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB or self.no_datastore):
return
try:
SessionModel(key_name=self.sid, pdump=pdump).put()
except Exception, e:
logging.warning("unable to persist session to datastore for sid=%s (%s)" % (self.sid, e))
def clear(self):
"""Removes all data from the session (but does not terminate it)."""
if self.sid:
self.data = {}
self.dirty = True
def get(self, key, default=None):
"""Retrieves a value from the session."""
self.ensure_data_loaded()
return self.data.get(key, default)
def has_key(self, key):
"""Returns True if key is set."""
self.ensure_data_loaded()
return key in self.data
def pop(self, key, default=None):
"""Removes key and returns its value, or default if key is not present."""
self.ensure_data_loaded()
self.dirty = True
return self.data.pop(key, default)
def pop_quick(self, key, default=None):
"""Removes key and returns its value, or default if key is not present.
The change will only be persisted to memcache until another change
necessitates a write to the datastore."""
self.ensure_data_loaded()
if self.dirty is False:
self.dirty = Session.DIRTY_BUT_DONT_PERSIST_TO_DB
return self.data.pop(key, default)
def set_quick(self, key, value):
"""Set a value named key on this session. The change will only be
persisted to memcache until another change necessitates a write to the
datastore. This will start a session if one is not already active."""
dirty = self.dirty
self[key] = value
if dirty is False or dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB:
self.dirty = Session.DIRTY_BUT_DONT_PERSIST_TO_DB
def __getitem__(self, key):
"""Returns the value associated with key on this session."""
self.ensure_data_loaded()
return self.data.__getitem__(key)
def __setitem__(self, key, value):
"""Set a value named key on this session. This will start a session if
one is not already active."""
self.ensure_data_loaded()
if not self.sid:
self.start()
self.data.__setitem__(key, value)
self.dirty = True
def __delitem__(self, key):
"""Deletes the value associated with key on this session."""
self.ensure_data_loaded()
self.data.__delitem__(key)
self.dirty = True
def __iter__(self):
"""Returns an iterator over the keys (names) of the stored values."""
self.ensure_data_loaded()
return self.data.iterkeys()
def __contains__(self, key):
"""Returns True if key is present on this session."""
self.ensure_data_loaded()
return self.data.__contains__(key)
def __str__(self):
"""Returns a string representation of the session."""
if self.sid:
self.ensure_data_loaded()
return "SID=%s %s" % (self.sid, self.data)
else:
return "uninitialized session"
class SessionMiddleware(object):
"""WSGI middleware that adds session support.
``cookie_key`` - A key used to secure cookies so users cannot modify their
content. Keys should be at least 32 bytes (RFC2104). Tip: generate your
key using ``os.urandom(64)`` but do this OFFLINE and copy/paste the output
into a string which you pass in as ``cookie_key``. If you use ``os.urandom()``
to dynamically generate your key at runtime then any existing sessions will
become junk every time your app starts up!
``lifetime`` - ``datetime.timedelta`` that specifies how long a session may last. Defaults to 7 days.
``no_datastore`` - By default all writes also go to the datastore in case
memcache is lost. Set to True to never use the datastore. This improves
write performance but sessions may be occassionally lost.
``cookie_only_threshold`` - A size in bytes. If session data is less than this
threshold, then session data is kept only in a secure cookie. This avoids
memcache/datastore latency which is critical for small sessions. Larger
sessions are kept in memcache+datastore instead. Defaults to 10KB.
"""
def __init__(self, app, cookie_key, lifetime=DEFAULT_LIFETIME, no_datastore=False, cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH):
self.app = app
self.lifetime = lifetime
self.no_datastore = no_datastore
self.cookie_only_thresh = cookie_only_threshold
self.cookie_key = cookie_key
if not self.cookie_key:
raise ValueError("cookie_key MUST be specified")
if len(self.cookie_key) < 32:
raise ValueError("RFC2104 recommends you use at least a 32 character key. Try os.urandom(64) to make a key.")
def __call__(self, environ, start_response):
_tls.current_session = Session(lifetime=self.lifetime, no_datastore=self.no_datastore, cookie_only_threshold=self.cookie_only_thresh, cookie_key=self.cookie_key)
def my_start_response(status, headers, exc_info=None):
_tls.current_session.save()
for ch in _tls.current_session.make_cookie_headers():
headers.append(('Set-Cookie', ch))
return start_response(status, headers, exc_info)
os.environ['DJANGO_SETTINGS_MODULE'] = 'django_syndicate_web.settings'
return self.app(environ, my_start_response)
"""
class DjangoSessionMiddleware(object):
#Django middleware that adds session support. You must specify the
#session configuration parameters by modifying the call to ``SessionMiddleware``
#in ``DjangoSessionMiddleware.__init__()`` since Django cannot call an
#initialization method with parameters.
def __init__(self):
fake_app = lambda environ, start_response: start_response
self.wrapped_wsgi_middleware = SessionMiddleware(fake_app, cookie_key=SESSION_COOKIE_KEY)
self.response_handler = None
def process_request(self, request):
self.response_handler = self.wrapped_wsgi_middleware(None, lambda status, headers, exc_info: headers)
request.session = get_current_session() # for convenience
def process_response(self, request, response):
if self.response_handler:
session_headers = self.response_handler(None, [], None)
for k, v in session_headers:
response[k] = v
self.response_handler = None
if hasattr(request, 'session') and request.session.is_accessed():
from django.utils.cache import patch_vary_headers
logging.info("Varying")
patch_vary_headers(response, ('Cookie',))
return response
"""
def delete_expired_sessions():
"""Deletes expired sessions from the datastore.
If there are more than 500 expired sessions, only 500 will be removed.
Returns True if all expired sessions have been removed.
"""
now_str = unicode(int(time.time()))
q = db.Query(SessionModel, keys_only=True, namespace='')
key = db.Key.from_path('SessionModel', now_str + u'\ufffd', namespace='')
q.filter('__key__ < ', key)
results = q.fetch(500)
db.delete(results)
logging.info('gae-sessions: deleted %d expired sessions from the datastore' % len(results))
return len(results) < 500
| false | true |
f7f8e864972c983bff3a89442b814c36fc2b717a | 1,018 | py | Python | setup.py | Skuldur/somnus | 45cec3e2ed3f27e2a024ea214651af30a0e6fe60 | [
"MIT"
] | 5 | 2020-07-13T13:41:36.000Z | 2021-05-22T18:50:17.000Z | setup.py | Skuldur/somnus | 45cec3e2ed3f27e2a024ea214651af30a0e6fe60 | [
"MIT"
] | null | null | null | setup.py | Skuldur/somnus | 45cec3e2ed3f27e2a024ea214651af30a0e6fe60 | [
"MIT"
] | 1 | 2020-07-18T16:50:15.000Z | 2020-07-18T16:50:15.000Z | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="somnus",
version="0.2.2",
author="Sigurður Skúli Sigurgeirsson",
author_email="siggiskuli@gmail.com",
description="Somnus is keyword detection made easy.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Skuldur/somnus",
packages=setuptools.find_packages(exclude=("tests",)),
install_requires=[
"numpy>=1.16.2",
"pydub>=0.23.1",
"pyaudio>=0.2.11",
"librosa>=0.8.0",
"tensorflow>=2.2.0",
"fire>=0.3.1",
"tqdm>=4.47.0"
],
classifiers=[
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points = {
'console_scripts': ['somnus=cli.cli:main'],
},
python_requires='>=3.6',
)
| 28.277778 | 58 | 0.59725 | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="somnus",
version="0.2.2",
author="Sigurður Skúli Sigurgeirsson",
author_email="siggiskuli@gmail.com",
description="Somnus is keyword detection made easy.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Skuldur/somnus",
packages=setuptools.find_packages(exclude=("tests",)),
install_requires=[
"numpy>=1.16.2",
"pydub>=0.23.1",
"pyaudio>=0.2.11",
"librosa>=0.8.0",
"tensorflow>=2.2.0",
"fire>=0.3.1",
"tqdm>=4.47.0"
],
classifiers=[
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points = {
'console_scripts': ['somnus=cli.cli:main'],
},
python_requires='>=3.6',
)
| true | true |
f7f8e8aa3b05ca83d51ea2b4294fe88b9c4d9386 | 4,743 | py | Python | test/python/pulse/test_channels.py | mstypulk/qiskit-terra | 058feb06657ec4b598cc65216288bdd984550d00 | [
"Apache-2.0"
] | null | null | null | test/python/pulse/test_channels.py | mstypulk/qiskit-terra | 058feb06657ec4b598cc65216288bdd984550d00 | [
"Apache-2.0"
] | null | null | null | test/python/pulse/test_channels.py | mstypulk/qiskit-terra | 058feb06657ec4b598cc65216288bdd984550d00 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2019, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
"""Test cases for the pulse channel group."""
import unittest
from qiskit.pulse.channels import AcquireChannel, MemorySlot, RegisterSlot, SnapshotChannel
from qiskit.pulse.channels import DeviceSpecification, Qubit
from qiskit.pulse.channels import DriveChannel, ControlChannel, MeasureChannel
from qiskit.pulse.exceptions import PulseError
from qiskit.test import QiskitTestCase
from qiskit.test.mock import FakeOpenPulse2Q
class TestAcquireChannel(QiskitTestCase):
"""AcquireChannel tests."""
def test_default(self):
"""Test default acquire channel.
"""
acquire_channel = AcquireChannel(123)
self.assertEqual(acquire_channel.index, 123)
self.assertEqual(acquire_channel.name, 'a123')
class TestMemorySlot(QiskitTestCase):
"""AcquireChannel tests."""
def test_default(self):
"""Test default memory slot.
"""
memory_slot = MemorySlot(123)
self.assertEqual(memory_slot.index, 123)
self.assertEqual(memory_slot.name, 'm123')
class TestRegisterSlot(QiskitTestCase):
"""RegisterSlot tests."""
def test_default(self):
"""Test default register slot.
"""
register_slot = RegisterSlot(123)
self.assertEqual(register_slot.index, 123)
self.assertEqual(register_slot.name, 'c123')
class TestSnapshotChannel(QiskitTestCase):
"""SnapshotChannel tests."""
def test_default(self):
"""Test default snapshot channel.
"""
snapshot_channel = SnapshotChannel()
self.assertEqual(snapshot_channel.index, 0)
self.assertEqual(snapshot_channel.name, 's0')
class TestDriveChannel(QiskitTestCase):
"""DriveChannel tests."""
def test_default(self):
"""Test default drive channel.
"""
drive_channel = DriveChannel(123)
self.assertEqual(drive_channel.index, 123)
self.assertEqual(drive_channel.name, 'd123')
class TestControlChannel(QiskitTestCase):
"""ControlChannel tests."""
def test_default(self):
"""Test default control channel.
"""
control_channel = ControlChannel(123)
self.assertEqual(control_channel.index, 123)
self.assertEqual(control_channel.name, 'u123')
class TestMeasureChannel(QiskitTestCase):
"""MeasureChannel tests."""
def test_default(self):
"""Test default measure channel.
"""
measure_channel = MeasureChannel(123)
self.assertEqual(measure_channel.index, 123)
self.assertEqual(measure_channel.name, 'm123')
class TestQubit(QiskitTestCase):
"""Qubit tests."""
def test_default(self):
"""Test default qubit.
"""
qubit = Qubit(1,
drive_channels=[DriveChannel(2, 1.2)],
control_channels=[ControlChannel(3)],
measure_channels=[MeasureChannel(4)],
acquire_channels=[AcquireChannel(5)])
self.assertEqual(qubit.drive, DriveChannel(2, 1.2))
self.assertEqual(qubit.control, ControlChannel(3))
self.assertEqual(qubit.measure, MeasureChannel(4))
self.assertEqual(qubit.acquire, AcquireChannel(5))
class TestDeviceSpecification(QiskitTestCase):
"""DeviceSpecification tests."""
def test_default(self):
"""Test default device specification.
"""
qubits = [
Qubit(0, drive_channels=[DriveChannel(0, 1.2)], acquire_channels=[AcquireChannel(0)]),
Qubit(1, drive_channels=[DriveChannel(1, 3.4)], acquire_channels=[AcquireChannel(1)])
]
registers = [RegisterSlot(i) for i in range(2)]
mem_slots = [MemorySlot(i) for i in range(2)]
spec = DeviceSpecification(qubits, registers, mem_slots)
self.assertEqual(spec.q[0].drive, DriveChannel(0, 1.2))
self.assertEqual(spec.q[1].acquire, AcquireChannel(1))
self.assertEqual(spec.mem[0], MemorySlot(0))
self.assertEqual(spec.c[1], RegisterSlot(1))
def test_creation_from_backend_with_zero_u_channels(self):
"""Test creation of device specification from backend with u_channels == 0.
"""
backend = FakeOpenPulse2Q()
# overwrite n_uchannel
backend._configuration.n_uchannels = 0
device = DeviceSpecification.create_from(backend)
self.assertEqual(device.q[0].drive, DriveChannel(0, 4.9, (4.5, 5.5)))
with self.assertRaises(PulseError):
device.q[0].control()
if __name__ == '__main__':
unittest.main()
| 30.018987 | 98 | 0.663293 |
import unittest
from qiskit.pulse.channels import AcquireChannel, MemorySlot, RegisterSlot, SnapshotChannel
from qiskit.pulse.channels import DeviceSpecification, Qubit
from qiskit.pulse.channels import DriveChannel, ControlChannel, MeasureChannel
from qiskit.pulse.exceptions import PulseError
from qiskit.test import QiskitTestCase
from qiskit.test.mock import FakeOpenPulse2Q
class TestAcquireChannel(QiskitTestCase):
def test_default(self):
acquire_channel = AcquireChannel(123)
self.assertEqual(acquire_channel.index, 123)
self.assertEqual(acquire_channel.name, 'a123')
class TestMemorySlot(QiskitTestCase):
def test_default(self):
memory_slot = MemorySlot(123)
self.assertEqual(memory_slot.index, 123)
self.assertEqual(memory_slot.name, 'm123')
class TestRegisterSlot(QiskitTestCase):
def test_default(self):
register_slot = RegisterSlot(123)
self.assertEqual(register_slot.index, 123)
self.assertEqual(register_slot.name, 'c123')
class TestSnapshotChannel(QiskitTestCase):
def test_default(self):
snapshot_channel = SnapshotChannel()
self.assertEqual(snapshot_channel.index, 0)
self.assertEqual(snapshot_channel.name, 's0')
class TestDriveChannel(QiskitTestCase):
def test_default(self):
drive_channel = DriveChannel(123)
self.assertEqual(drive_channel.index, 123)
self.assertEqual(drive_channel.name, 'd123')
class TestControlChannel(QiskitTestCase):
def test_default(self):
control_channel = ControlChannel(123)
self.assertEqual(control_channel.index, 123)
self.assertEqual(control_channel.name, 'u123')
class TestMeasureChannel(QiskitTestCase):
def test_default(self):
measure_channel = MeasureChannel(123)
self.assertEqual(measure_channel.index, 123)
self.assertEqual(measure_channel.name, 'm123')
class TestQubit(QiskitTestCase):
def test_default(self):
qubit = Qubit(1,
drive_channels=[DriveChannel(2, 1.2)],
control_channels=[ControlChannel(3)],
measure_channels=[MeasureChannel(4)],
acquire_channels=[AcquireChannel(5)])
self.assertEqual(qubit.drive, DriveChannel(2, 1.2))
self.assertEqual(qubit.control, ControlChannel(3))
self.assertEqual(qubit.measure, MeasureChannel(4))
self.assertEqual(qubit.acquire, AcquireChannel(5))
class TestDeviceSpecification(QiskitTestCase):
def test_default(self):
qubits = [
Qubit(0, drive_channels=[DriveChannel(0, 1.2)], acquire_channels=[AcquireChannel(0)]),
Qubit(1, drive_channels=[DriveChannel(1, 3.4)], acquire_channels=[AcquireChannel(1)])
]
registers = [RegisterSlot(i) for i in range(2)]
mem_slots = [MemorySlot(i) for i in range(2)]
spec = DeviceSpecification(qubits, registers, mem_slots)
self.assertEqual(spec.q[0].drive, DriveChannel(0, 1.2))
self.assertEqual(spec.q[1].acquire, AcquireChannel(1))
self.assertEqual(spec.mem[0], MemorySlot(0))
self.assertEqual(spec.c[1], RegisterSlot(1))
def test_creation_from_backend_with_zero_u_channels(self):
backend = FakeOpenPulse2Q()
backend._configuration.n_uchannels = 0
device = DeviceSpecification.create_from(backend)
self.assertEqual(device.q[0].drive, DriveChannel(0, 4.9, (4.5, 5.5)))
with self.assertRaises(PulseError):
device.q[0].control()
if __name__ == '__main__':
unittest.main()
| true | true |
f7f8e8bae3283b951970e2d74bb26605806e6e7d | 1,943 | py | Python | aiida_porousmaterials/parser/__init__.py | pzarabadip/aiida-porousmaterials | ec4fd944a3014656968e810a3222f1eae7bf9a66 | [
"MIT"
] | 1 | 2021-03-06T01:27:18.000Z | 2021-03-06T01:27:18.000Z | aiida_porousmaterials/parser/__init__.py | pzarabadip/aiida-porousmaterials | ec4fd944a3014656968e810a3222f1eae7bf9a66 | [
"MIT"
] | 1 | 2020-03-05T09:05:57.000Z | 2020-03-05T09:05:57.000Z | aiida_porousmaterials/parser/__init__.py | pzarabadip/aiida-porousmaterials | ec4fd944a3014656968e810a3222f1eae7bf9a66 | [
"MIT"
] | null | null | null | """PorousMaterials Output Parse"""
import os
import re
from aiida.common import NotExistent, OutputParsingError
from aiida.engine import ExitCode
from aiida.orm import Dict, SinglefileData
from aiida.parsers.parser import Parser
from aiida_porousmaterials.utils import parse_base_output
class PorousMaterialsParser(Parser):
"""
Parsing the PorousMaterials output.
"""
def parse(self, **kwargs):
"""
Receives in input a dictionary of retrieved nodes.
Does all the logic here.
"""
try:
output_folder = self.retrieved
except NotExistent:
return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER
output_folder_name = self.node.process_class.OUTPUT_FOLDER
if output_folder_name not in output_folder._repository.list_object_names(): # pylint: disable=protected-access
return self.exit_codes.ERROR_NO_OUTPUT_FILE
output_parameters = {}
ev_output_file = {}
output_files = output_folder._repository.list_object_names(self.node.process_class.OUTPUT_FOLDER) # pylint: disable=protected-access
for fname in output_files:
output_abs_path = os.path.join(
output_folder._repository._get_base_folder().abspath, # pylint: disable=protected-access
self.node.process_class.OUTPUT_FOLDER,
fname
)
ev_output_file[fname[:-4]] = SinglefileData(file=output_abs_path)
dict_key1 = fname[:-4].split('_')[-1]
dict_key2 = fname[:-4].split('_')[-2]
if dict_key1 not in output_parameters.keys():
output_parameters[dict_key1] = {}
output_parameters[dict_key1][dict_key2 + '_probe'] = parse_base_output(output_abs_path)
self.out('ev_output_file', ev_output_file)
self.out('output_parameters', Dict(dict=output_parameters))
return ExitCode(0)
# EOF
| 34.087719 | 141 | 0.671642 | import os
import re
from aiida.common import NotExistent, OutputParsingError
from aiida.engine import ExitCode
from aiida.orm import Dict, SinglefileData
from aiida.parsers.parser import Parser
from aiida_porousmaterials.utils import parse_base_output
class PorousMaterialsParser(Parser):
def parse(self, **kwargs):
try:
output_folder = self.retrieved
except NotExistent:
return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER
output_folder_name = self.node.process_class.OUTPUT_FOLDER
if output_folder_name not in output_folder._repository.list_object_names():
return self.exit_codes.ERROR_NO_OUTPUT_FILE
output_parameters = {}
ev_output_file = {}
output_files = output_folder._repository.list_object_names(self.node.process_class.OUTPUT_FOLDER)
for fname in output_files:
output_abs_path = os.path.join(
output_folder._repository._get_base_folder().abspath,
self.node.process_class.OUTPUT_FOLDER,
fname
)
ev_output_file[fname[:-4]] = SinglefileData(file=output_abs_path)
dict_key1 = fname[:-4].split('_')[-1]
dict_key2 = fname[:-4].split('_')[-2]
if dict_key1 not in output_parameters.keys():
output_parameters[dict_key1] = {}
output_parameters[dict_key1][dict_key2 + '_probe'] = parse_base_output(output_abs_path)
self.out('ev_output_file', ev_output_file)
self.out('output_parameters', Dict(dict=output_parameters))
return ExitCode(0)
| true | true |
f7f8ea1b011e8fda627b12f5f487a4c766951af9 | 1,454 | py | Python | setup.py | ghewgill/pydkim | 8ef3b455be730a31b10983a0b2340ffc7cabf88e | [
"Zlib"
] | 10 | 2016-05-09T13:54:55.000Z | 2022-01-24T09:58:59.000Z | setup.py | akheron/pydkim | 89b08ee3b346b447c11f92ac55a1a2fe6b7e03af | [
"Zlib"
] | null | null | null | setup.py | akheron/pydkim | 89b08ee3b346b447c11f92ac55a1a2fe6b7e03af | [
"Zlib"
] | 8 | 2015-10-31T16:00:16.000Z | 2020-03-28T22:11:26.000Z | #!/usr/bin/env python
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the author be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
#
# Copyright (c) 2008 Greg Hewgill http://hewgill.com
from distutils.core import setup
version = "0.3"
setup(
name = "pydkim",
version = version,
description = "DKIM (DomainKeys Identified Mail)",
long_description =
"""pydkim is a Python library that implements DKIM (DomainKeys
Identified Mail) email signing and verification.""",
author = "Greg Hewgill",
author_email = "greg@hewgill.com",
url = "http://hewgill.com/pydkim/",
license = "BSD-like",
py_modules = ["dkim"],
scripts = ["dkimsign.py", "dkimverify.py", "dkimsend.sh"],
)
| 37.282051 | 76 | 0.722146 |
from distutils.core import setup
version = "0.3"
setup(
name = "pydkim",
version = version,
description = "DKIM (DomainKeys Identified Mail)",
long_description =
"""pydkim is a Python library that implements DKIM (DomainKeys
Identified Mail) email signing and verification.""",
author = "Greg Hewgill",
author_email = "greg@hewgill.com",
url = "http://hewgill.com/pydkim/",
license = "BSD-like",
py_modules = ["dkim"],
scripts = ["dkimsign.py", "dkimverify.py", "dkimsend.sh"],
)
| true | true |
f7f8eaa4678a20123277c8c31ce50e217b24c8d7 | 1,687 | py | Python | day_08.py | balcortex/advent_of_code_2020 | e56a54c7578d3d70b0dcc451100b0bb65624a954 | [
"MIT"
] | null | null | null | day_08.py | balcortex/advent_of_code_2020 | e56a54c7578d3d70b0dcc451100b0bb65624a954 | [
"MIT"
] | null | null | null | day_08.py | balcortex/advent_of_code_2020 | e56a54c7578d3d70b0dcc451100b0bb65624a954 | [
"MIT"
] | null | null | null | with open("day_08_input.txt") as f:
txt = f.read()
# - - - Part 1 - - -
def acum(txt: str) -> int:
counter = 0
index = 0
memo = []
program = txt.split("\n")
while True:
ins, num = program[index].split()
num = int(num)
if index not in memo:
memo.append(index)
else:
break
if ins == "nop":
index += 1
continue
elif ins == "acc":
index += 1
counter += num
else:
index += num
return counter
print()
print("First solution")
print(acum(txt))
print()
def fix(txt: str) -> int:
counter = 0
index = 0
memo = []
changed_index = 0
program = txt.split("\n")
while True:
if index >= len(program):
break
ins, num = program[index].split()
num = int(num)
if index not in memo:
memo.append(index)
else:
counter = 0
index = 0
memo = []
program = txt.split("\n")
while "acc" in program[changed_index]:
changed_index += 1
if program[changed_index].split()[0] == "nop":
program[changed_index] = "jmp" + program[changed_index][3:]
else:
program[changed_index] = "nop" + program[changed_index][3:]
changed_index += 1
continue
if ins == "nop":
index += 1
continue
elif ins == "acc":
index += 1
counter += num
else:
index += num
return counter
print("Second solution")
print(fix(txt))
print() | 19.616279 | 75 | 0.459395 | with open("day_08_input.txt") as f:
txt = f.read()
def acum(txt: str) -> int:
counter = 0
index = 0
memo = []
program = txt.split("\n")
while True:
ins, num = program[index].split()
num = int(num)
if index not in memo:
memo.append(index)
else:
break
if ins == "nop":
index += 1
continue
elif ins == "acc":
index += 1
counter += num
else:
index += num
return counter
print()
print("First solution")
print(acum(txt))
print()
def fix(txt: str) -> int:
counter = 0
index = 0
memo = []
changed_index = 0
program = txt.split("\n")
while True:
if index >= len(program):
break
ins, num = program[index].split()
num = int(num)
if index not in memo:
memo.append(index)
else:
counter = 0
index = 0
memo = []
program = txt.split("\n")
while "acc" in program[changed_index]:
changed_index += 1
if program[changed_index].split()[0] == "nop":
program[changed_index] = "jmp" + program[changed_index][3:]
else:
program[changed_index] = "nop" + program[changed_index][3:]
changed_index += 1
continue
if ins == "nop":
index += 1
continue
elif ins == "acc":
index += 1
counter += num
else:
index += num
return counter
print("Second solution")
print(fix(txt))
print() | true | true |
f7f8ec507d9f47557eed00e504d016649ecb715a | 928 | py | Python | steps/split_feats_by_spk.py | ondrejklejch/learning_to_adapt | 6de0b98370769596da16a1688582925ea2e1fa29 | [
"Apache-2.0"
] | 18 | 2019-10-24T04:42:16.000Z | 2021-11-24T03:07:59.000Z | steps/split_feats_by_spk.py | choko/learning_to_adapt | 6de0b98370769596da16a1688582925ea2e1fa29 | [
"Apache-2.0"
] | null | null | null | steps/split_feats_by_spk.py | choko/learning_to_adapt | 6de0b98370769596da16a1688582925ea2e1fa29 | [
"Apache-2.0"
] | 4 | 2018-08-31T01:08:50.000Z | 2019-05-10T12:12:57.000Z | import sys
from collections import defaultdict
def save_feats(feats_dir, spk_id, feats):
for j in range(0, len(feats), 20):
with open('%s/feats_%.4d_%.4d.scp' % (feats_dir, spk_id, j), 'w') as f:
for line in feats[j:j + 20]:
print >> f, line
if __name__ == '__main__':
feats_rspecifier = sys.argv[1]
train_dir = sys.argv[2]
val_dir = sys.argv[3]
num_valid_spks = int(sys.argv[4])
feats = defaultdict(list)
with open(feats_rspecifier, 'r') as f:
for line in f:
line = line.strip()
utt, _ = line.split(None, 1)
spk = "_".join(utt.split('_')[:-1])
feats[spk].append(line)
for i, all_feats in enumerate(feats.values()[:-num_valid_spks]):
save_feats(train_dir, i + 1, all_feats)
for i, all_feats in enumerate(feats.values()[-num_valid_spks:]):
save_feats(val_dir, i + 1, all_feats)
| 29.935484 | 79 | 0.58944 | import sys
from collections import defaultdict
def save_feats(feats_dir, spk_id, feats):
for j in range(0, len(feats), 20):
with open('%s/feats_%.4d_%.4d.scp' % (feats_dir, spk_id, j), 'w') as f:
for line in feats[j:j + 20]:
print >> f, line
if __name__ == '__main__':
feats_rspecifier = sys.argv[1]
train_dir = sys.argv[2]
val_dir = sys.argv[3]
num_valid_spks = int(sys.argv[4])
feats = defaultdict(list)
with open(feats_rspecifier, 'r') as f:
for line in f:
line = line.strip()
utt, _ = line.split(None, 1)
spk = "_".join(utt.split('_')[:-1])
feats[spk].append(line)
for i, all_feats in enumerate(feats.values()[:-num_valid_spks]):
save_feats(train_dir, i + 1, all_feats)
for i, all_feats in enumerate(feats.values()[-num_valid_spks:]):
save_feats(val_dir, i + 1, all_feats)
| true | true |
f7f8ed932b7446f3626d5114e48e64208afa23ee | 28,897 | py | Python | src/bdbd/src/bdbd/analysis/motion/NewRaph10.py | rkent/BDBD | c5d391da84faf5607c443078781f8b4e1c017dd5 | [
"MIT"
] | null | null | null | src/bdbd/src/bdbd/analysis/motion/NewRaph10.py | rkent/BDBD | c5d391da84faf5607c443078781f8b4e1c017dd5 | [
"MIT"
] | null | null | null | src/bdbd/src/bdbd/analysis/motion/NewRaph10.py | rkent/BDBD | c5d391da84faf5607c443078781f8b4e1c017dd5 | [
"MIT"
] | null | null | null | # newton-raphson iteration of motion equations
import numpy as np
import rospy
import math
import time
from bdbd_common.utils import fstr, gstr
from bdbd_common.msg import LeftRights
from bdbd_common.geometry import lr_est, default_lr_model, D_TO_R
def estr(a):
return fstr(a, fmat='10.7g', n_per_line=10)
class NewRaph():
def __init__(self, n, dt
,lr_model=default_lr_model()
,start_pose=(0.0, 0.0, 0.0)
,start_twist=(0.0, 0.0, 0.0)
):
self.lr_model = lr_model
self.n = n
self.dt = dt
self.start_pose = start_pose
self.start_twist = start_twist
# prep constants for calculations
alr_model = np.array(self.lr_model)
self.bhes = (dt * alr_model[0], dt * alr_model[1], dt * alr_model[2])
(_, _, qhx) = self.bhes[0]
(_, _, qhy) = self.bhes[1]
(_, _, qho) = self.bhes[2]
#print('(bhxl, bhxr, qhx): ' + estr((bhxl, bhxr, qhx)))
#print('(bhyl, bhyr, qhy): ' + estr((bhyl, bhyr, qhy)))
#print('(bhol, bhor, qho): ' + estr((bhol, bhor, qho)))
(alphax, alphay, alphao) = 1.0 - np.array((qhx, qhy, qho))
#print('(alphax, alphay, alphao):' + estr((alphax, alphay, alphao)))
# alpha ** j
alphaxj = [1.0]
alphayj = [1.0]
alphaoj = [1.0]
betaj = [dt]
for i in range(1, n):
alphaxj.append(alphaxj[i-1] * alphax)
alphayj.append(alphayj[i-1] * alphay)
alphaoj.append(alphaoj[i-1] * alphao)
betaj.append(betaj[i-1] + dt * alphaoj[i])
self.alphaxj = np.array(alphaxj)
self.alphayj = np.array(alphayj)
self.alphaoj = np.array(alphaoj)
self.betaj = np.array(betaj)
#print('alphaxj:' + estr(self.alphaxj))
#print('alphayj:' + estr(self.alphayj))
#print('alphaoj:' + estr(self.alphaoj))
#print('betaj:' + estr(self.betaj))
def poses(self, ls, rs,
details=False
):
als = np.asarray(ls)
ars = np.asarray(rs)
self.als = als
self.ars = ars
#print('als:' + estr(als))
(px0, py0, theta0) = self.start_pose
(bhxl, bhxr, _) = self.bhes[0]
(bhyl, bhyr, _) = self.bhes[1]
(bhol, bhor, _) = self.bhes[2]
(vxw0, vyw0, omega0) = self.start_twist
n = self.n
dt = self.dt
alphaxj = self.alphaxj
alphayj = self.alphayj
alphaoj = self.alphaoj
# initial robot velocities
vx0 = vxw0 * math.cos(theta0) + vyw0 * math.cos(theta0)
vy0 = -vxw0 * math.sin(theta0) + vyw0 * math.cos(theta0)
# twist
vxj = np.empty(n)
vyj = np.empty(n)
omegaj = np.empty(n)
vxj[0] = vx0
vyj[0] = vy0
omegaj[0] = omega0
bmotorxj = bhxl * als + bhxr * ars
bmotoryj = bhyl * als + bhyr * ars
bmotoroj = bhol * als + bhor * ars
for i in range(1, n):
vxj[i] = vx0 * alphaxj[i] + np.dot(alphaxj[i-1::-1], bmotorxj[1:i+1])
vyj[i] = vy0 * alphayj[i] + np.dot(alphayj[i-1::-1], bmotoryj[1:i+1])
omegaj[i] = omega0 * alphaoj[i] + np.dot(alphaoj[i-1::-1], bmotoroj[1:i+1])
if details:
print(estr({'alphaoj[n-2::-1]': alphaoj[n-2::-1]}))
print(estr({'bmotoroj[1:n]': bmotoroj[1:n]}))
# pose
pxj = np.empty(n)
pyj = np.empty(n)
thetaj = np.empty(n)
pxj[0] = px0
pyj[0] = py0
thetaj[0] = theta0
for i in range(1, n):
thetaj[i] = theta0 + omega0 * (self.betaj[i] - dt) \
+ np.dot(self.betaj[i-1::-1], bmotoroj[1:i+1])
# intermediate values as vectors
cosj = np.cos(thetaj)
sinj = np.sin(thetaj)
vxcj = vxj * cosj
vxsj = vxj * sinj
vycj = vyj * cosj
vysj = vyj * sinj
vxwj = vxcj - vysj
vywj = vxsj + vycj
pxj[1:] = px0 + dt * np.cumsum(vxwj[1:])
pyj[1:] = py0 + dt * np.cumsum(vywj[1:])
# intermediate results
self.cosj = cosj
self.sinj = sinj
self.vxcj = vxcj
self.vxsj = vxsj
self.vycj = vycj
self.vysj = vysj
self.vxwj = vxwj
self.vywj = vywj
self.vxj = vxj
self.vyj = vyj
self.omegaj = omegaj
self.pxj = pxj
self.pyj = pyj
self.thetaj = thetaj
return (pxj, pyj, thetaj, vxj, vyj, omegaj)
def gradients(self):
# gradients
(bhxl, bhxr, _) = self.bhes[0]
(bhyl, bhyr, _) = self.bhes[1]
(bhol, bhor, _) = self.bhes[2]
n = self.n
dt = self.dt
alphaxj = self.alphaxj
alphayj = self.alphayj
betaj = self.betaj
cosj = self.cosj
sinj = self.sinj
vxcj = self.vxcj
vxsj = self.vxsj
vycj = self.vycj
vysj = self.vysj
dpxdl = np.zeros((n,n))
dpydl = np.zeros((n,n))
dpxdr = np.zeros((n,n))
dpydr = np.zeros((n,n))
for i in range(1, n):
# gradients
for k in range(1, i+1):
doto = np.dot((-vxsj[k:i+1] - vycj[k:i+1]), betaj[:i+1-k])
dotx = np.dot(cosj[k:i+1], alphaxj[:i+1-k])
doty = np.dot(-sinj[k:i+1], alphayj[:i+1-k])
dpxdl[i,k] = dt * (
+bhol * doto
+bhxl * dotx
+bhyl * doty
)
dpxdr[i,k] = dt * (
+bhor * doto
+bhxr * dotx
+bhyr * doty
)
#if i == 1 and k == 1:
# print(estr({'bhor': bhor, 'doto': doto, 'bhxr': bhxr, 'dotx': dotx,
# 'bhyr': bhyr, 'doty': doty}))
doto = np.dot((vxcj[k:i+1] - vysj[k:i+1]), betaj[:i+1-k])
dotx = np.dot(sinj[k:i+1], alphaxj[:i+1-k])
doty = np.dot(cosj[k:i+1], alphayj[:i+1-k])
dpydl[i,k] = dt * (
+bhol * doto
+bhxl * dotx
+bhyl * doty
)
dpydr[i,k] = dt * (
+bhor * doto
+bhxr * dotx
+bhyr * doty
)
self.dpxdl = dpxdl
self.dpydl = dpydl
self.dpxdr = dpxdr
self.dpydr = dpydr
return (dpxdl, dpxdr, dpydl, dpydr)
def seconds(self):
# second partial derivatives at final location
(bhxl, bhxr, _) = self.bhes[0]
(bhyl, bhyr, _) = self.bhes[1]
(bhol, bhor, _) = self.bhes[2]
n = self.n
dt = self.dt
alphaxj = self.alphaxj
alphayj = self.alphayj
betaj = self.betaj
cosj = self.cosj
sinj = self.sinj
vxwj = self.vxwj
vywj = self.vywj
d2pxdldl = np.zeros((n, n))
d2pxdldr = np.zeros((n, n))
d2pxdrdr = np.zeros((n, n))
d2pydldl = np.zeros((n, n))
d2pydldr = np.zeros((n, n))
d2pydrdr = np.zeros((n, n))
# This could be vectorized, but instead I do it discretely to more closely
# match the C++ version which is what we will actually use.
for j in range(1, n):
vxwdt = vxwj[j] * dt
vywdt = vywj[j] * dt
sdt = sinj[j] * dt
cdt = cosj[j] * dt
for k in range(1, j + 1):
betaljk = betaj[j-k] * bhol
betarjk = betaj[j-k] * bhor
alphaxljk = alphaxj[j-k] * bhxl
alphaxrjk = alphaxj[j-k] * bhxr
alphayljk = alphayj[j-k] * bhyl
alphayrjk = alphayj[j-k] * bhyr
for m in range(1, j + 1):
betaljm = betaj[j-m] * bhol
betarjm = betaj[j-m] * bhor
alphaxljm = alphaxj[j-m] * bhxl
alphaxrjm = alphaxj[j-m] * bhxr
alphayljm = alphaxj[j-m] * bhyl
alphayrjm = alphaxj[j-m] * bhyr
sumxll = (
-vxwdt * betaljk * betaljm
+sdt * (-betaljk * alphaxljm -alphaxljk * betaljm)
+cdt * (-betaljk * alphayljm -alphayljk * betaljm)
)
sumxlr = (
-vxwdt * betaljk * betarjm
+sdt * (-betaljk * alphaxrjm -alphaxljk * betarjm)
+cdt * (-betaljk * alphayrjm -alphayljk * betarjm)
)
sumxrr = (
-vxwdt * betarjk * betarjm
+sdt * (-betarjk * alphaxrjm -alphaxrjk * betarjm)
+cdt * (-betarjk * alphayrjm -alphayrjk * betarjm)
)
sumyll = (
-vywdt * betaljk * betaljm
+sdt * (-betaljk * alphayljm -alphayljk * betaljm)
+cdt * (betaljk * alphayljm +alphayljk * betaljm)
)
sumylr = (
-vywdt * betaljk * betarjm
+sdt * (-betaljk * alphayrjm -alphayljk * betarjm)
+cdt * (betaljk * alphayrjm +alphayljk * betarjm)
)
sumyrr = (
-vywdt * betarjk * betarjm
+sdt * (-betarjk * alphayrjm -alphayrjk * betarjm)
+cdt * (betarjk * alphayrjm +alphayrjk * betarjm)
)
#print('i,j,k,m', i, j, k, m)
d2pxdldl[k, m] += sumxll
d2pxdldr[k, m] += sumxlr
d2pxdrdr[k, m] += sumxrr
d2pydldl[k, m] += sumyll
d2pydldr[k, m] += sumylr
d2pydrdr[k, m] += sumyrr
self.d2pxdldl = d2pxdldl
self.d2pxdldr = d2pxdldr
self.d2pxdrdr = d2pxdrdr
self.d2pydldl = d2pydldl
self.d2pydldr = d2pydldr
self.d2pydrdr = d2pydrdr
return (d2pxdldl, d2pxdldr, d2pxdrdr, d2pydldl, d2pydldr, d2pydrdr)
def loss(self,
target_pose=(0.0, 0.0, 0.0),
target_twist=(0.0, 0.0, 0.0),
target_lr = (0.0, 0.0),
Wmax=1.0e-4,
Wjerk=1.0e-4,
Wback=1.0e-4,
mmax=1.0,
details=False
):
self.target_pose = target_pose
self.target_twist = target_twist
self.target_lr = target_lr
self.Wmax = Wmax
self.Wjerk = Wjerk
self.Wback = Wback
self.mmax = mmax
return self.reloss(details=details)
def reloss(self, details=False):
target_pose = self.target_pose
target_twist = self.target_twist
target_lr = self.target_lr
Wmax = self.Wmax
Wjerk = self.Wjerk
Wback = self.Wback
mmax = self.mmax
# given pose calculations, determine the loss
vxj = self.vxj
vyj = self.vyj
omegaj = self.omegaj
pxj = self.pxj
pyj = self.pyj
thetaj = self.thetaj
lefts = self.als
rights = self.ars
# values requiring summing over i
sumMax = 0.1 * Wmax * (
np.power(lefts, 10.0).sum() +np.power(rights, 10.0).sum()
) / mmax ** 10
# backing term
sumBack = 0.1 * Wback * np.power((lefts + rights).clip(max=0.0), 10).sum()
ldiff = lefts[1:] - lefts[:-1]
rdiff = rights[1:] - rights[:-1]
sumJerk = 0.5 * Wjerk * (np.square(ldiff).sum() + np.square(rdiff).sum())
# values based on final targets
vals = np.asarray([
pxj[-1]
, pyj[-1]
, thetaj[-1]
, vxj[-1]
, vyj[-1]
, omegaj[-1]
, lefts[-1]
, rights[-1]
])
targets = np.concatenate([target_pose, target_twist, target_lr])
#targets = np.concatenate([target_pose, target_twist[:1], target_lr])
diffs = vals - targets
# normalize theta difference from -pi to pi
diffs[2] = (diffs[2] + math.pi) % (2 * math.pi) - math.pi
sumTargets = 0.5 * np.square(diffs).sum()
loss = sumMax + sumJerk + sumTargets + sumBack
if details:
print('target losses: ' + estr(0.5 * np.square(vals - targets)))
print(estr({'loss': loss, 'sumMax': sumMax, 'sumJerk': sumJerk, 'sumTargets': sumTargets, 'sumBack': sumBack}))
print(fstr({'vals': vals}, fmat='15.12g'))
print(fstr({'targets': targets}))
print(fstr({'diffs': diffs}))
self.lossValue = loss
return loss
def jacobian(self):
# the 1st derivative of the loss function
vxj = self.vxj
vyj = self.vyj
omegaj = self.omegaj
pxj = self.pxj
pyj = self.pyj
thetaj = self.thetaj
(pxt, pyt, thetat) = self.target_pose
(vxt, vyt, omegat) = self.target_twist
(leftt, rightt) = self.target_lr
dpxdl = self.dpxdl
dpydl = self.dpydl
dpxdr = self.dpxdr
dpydr = self.dpydr
(bhxl, bhxr, _) = self.bhes[0]
(bhyl, bhyr, _) = self.bhes[1]
(bhol, bhor, _) = self.bhes[2]
alphaxj = self.alphaxj
alphayj = self.alphayj
alphaoj = self.alphaoj
betaj = self.betaj
Wmax = self.Wmax
Wjerk = self.Wjerk
Wback = self.Wback
mmax = self.mmax
lefts = self.als
rights = self.ars
leftsp9 = np.power(lefts / mmax, 9)
rightsp9 = np.power(rights / mmax, 9)
lprsp9 = np.power((lefts + rights).clip(max=0.0), 9)
n = len(lefts)
dlefts = np.zeros([n])
drights = np.zeros([n])
difft = (thetaj[-1] - thetat + math.pi) % (2 * math.pi) - math.pi
for k in range(1, n):
dlefts[k] = (
+(vxj[-1] - vxt) * bhxl * alphaxj[n-1-k]
+(vyj[-1] - vyt) * bhyl * alphayj[n-1-k]
+(omegaj[-1] - omegat) * bhol * alphaoj[n-1-k]
+(difft) * bhol * betaj[n-1-k]
+(pxj[-1] - pxt) * dpxdl[-1, k]
+(pyj[-1] - pyt) * dpydl[-1, k]
+Wmax * leftsp9[k] / mmax
+Wback * lprsp9[k]
+Wjerk * (2 * lefts[k] -lefts[k-1] -lefts[min(k+1, n-1)])
)
drights[k] = (
+(vxj[-1] - vxt) * bhxr * alphaxj[n-1-k]
+(vyj[-1] - vyt) * bhyr * alphayj[n-1-k]
+(omegaj[-1] - omegat) * bhor * alphaoj[n-1-k]
+(difft) * bhor * betaj[n-1-k]
+(pxj[-1] - pxt) * dpxdr[-1, k]
+(pyj[-1] - pyt) * dpydr[-1, k]
+Wmax * rightsp9[k]
+Wback * lprsp9[k]
+Wjerk * (2 * rights[k] -rights[k-1] -rights[min(k+1, n-1)])
)
# TODO: check this
dlefts[-1] += (lefts[-1] - leftt)
drights[-1] += (rights[-1] - rightt)
self.dlefts = dlefts
self.drights = drights
return (dlefts, drights)
def hessian(self):
# second derivative of the loss function
pxj = self.pxj
pyj = self.pyj
(pxt, pyt, _) = self.target_pose
dpxdl = self.dpxdl
dpydl = self.dpydl
dpxdr = self.dpxdr
dpydr = self.dpydr
(bhxl, bhxr, _) = self.bhes[0]
(bhyl, bhyr, _) = self.bhes[1]
(bhol, bhor, _) = self.bhes[2]
alphaxj = self.alphaxj
alphayj = self.alphayj
alphaoj = self.alphaoj
betaj = self.betaj
Wmax = self.Wmax
Wjerk = self.Wjerk
Wback = self.Wback
mmax = self.mmax
lefts = self.als
rights = self.ars
d2pxdldl = self.d2pxdldl
d2pxdldr = self.d2pxdldr
d2pxdrdr = self.d2pxdrdr
d2pydldl = self.d2pydldl
d2pydldr = self.d2pydldr
d2pydrdr = self.d2pydrdr
n = len(lefts) - 1
# We'll define this as 0 -> n-1 are lefts[1:], n -> 2n-1 are rights[1:]
hess = np.empty([2*n, 2*n])
# values that vary with each k, m value
deltapxn = pxj[-1] - pxt
deltapyn = pyj[-1] - pyt
for i in range(0, 2*n):
k = i % n + 1
kleft = (i < n)
if kleft:
dpxdu = dpxdl[n, k]
dpydu = dpydl[n, k]
dvxdu = alphaxj[n-k] * bhxl
dvydu = alphayj[n-k] * bhyl
domdu = alphaoj[n-k] * bhol
dthdu = betaj[n-k] * bhol
else:
dpxdu = dpxdr[n, k]
dpydu = dpydr[n, k]
dvxdu = alphaxj[n-k] * bhxr
dvydu = alphayj[n-k] * bhyr
domdu = alphaoj[n-k] * bhor
dthdu = betaj[n-k] * bhor
for j in range(0, 2*n):
m = j % n + 1
mleft = (j < n)
if mleft:
dpxds = dpxdl[n, m]
dpyds = dpydl[n, m]
dvxds = alphaxj[n-m] * bhxl
dvyds = alphayj[n-m] * bhyl
domds = alphaoj[n-m] * bhol
dthds = betaj[n-m] * bhol
if kleft:
d2px = d2pxdldl[k, m]
d2py = d2pydldl[k, m]
else:
# note d2pxdrdl[i,j] = d2pxdldr[j,i]
d2px = d2pxdldr[m, k]
d2py = d2pydldr[m, k]
else:
dpxds = dpxdr[n, m]
dpyds = dpydr[n, m]
dvxds = alphaxj[n-m] * bhxr
dvyds = alphayj[n-m] * bhyr
domds = alphaoj[n-m] * bhor
dthds = betaj[n-m] * bhor
if kleft:
d2px = d2pxdldr[k, m]
d2py = d2pydldr[k, m]
else:
d2px = d2pxdrdr[k, m]
d2py = d2pydrdr[k, m]
hess[i, j] = (
deltapxn * d2px + dpxdu * dpxds +
deltapyn * d2py + dpydu * dpyds +
dvxdu * dvxds + dvydu * dvyds + domdu * domds + dthdu * dthds
)
# values that require k == m
for i in range(0, 2*n):
k = i % n + 1
kleft = (i < n)
# max term
# TODO: I need factor of 9 here?
hess[i, i] += 9. * (Wmax / mmax**2) * (lefts[k]**8 if kleft else rights[k]**8)
# back term
if (lefts[k] + rights[k]) < 0.0:
hess[i, i] += 9. * Wback * (lefts[k] + rights[k])**8
# motor target value
if k == n:
hess[i, i] += 1.0
# jerk term
hess[i, i] += 2 *Wjerk
if k > 1:
hess[i, i-1] -= Wjerk
if k == n:
hess[i, i] -= Wjerk
else:
hess[i, i+1] -= Wjerk
self.hess = hess
return hess
def dloss_dleft(self, j, eps=1.e-3):
# numerical estimate of loss derivative at left[j]
base_als = self.als.copy()
lefts = base_als.copy()
lefts[j] += eps
nr.poses(lefts, self.ars)
loss_plus = nr.reloss()
lefts = base_als.copy()
lefts[j] -= eps
nr.poses(lefts, self.ars)
loss_minus = nr.reloss()
self.als = base_als
dloss = 0.5 * (loss_plus - loss_minus) / eps
return dloss
def d2loss_dl_dl(self, k, eps=0.0001):
# numerical estimate of second derivative of loss dl dl
base_als = self.als.copy()
n = len(self.als)
d2lossj = [0.0]
for j in range(1, n):
lefts = base_als.copy()
lefts[k] += eps
self.als = lefts
#dlossp = self.dloss_dleft(j, eps)
nr.poses(lefts, self.ars)
nr.gradients()
nr.jacobian()
dlossp = self.dlefts[j]
pxp = self.pxj[-1]
lefts = base_als.copy()
lefts[k] -= eps
self.als = lefts
#dlossm = self.dloss_dleft(j, eps)
nr.poses(lefts, self.ars)
nr.gradients()
nr.jacobian()
dlossm = self.dlefts[j]
pxm = self.pxj[-1]
d2lossj.append(0.5 * (dlossp - dlossm) / eps)
#print(estr({'pxp': pxp, 'pxm': pxm, 'pxp - pxm': pxp - pxm}))
print(estr(({'dlossp': dlossp, 'dlossm': dlossm, 'dlossp-dlossm': dlossp-dlossm, 'wjerk': self.Wjerk})))
self.als = base_als
return d2lossj
def dloss_dright(self, j, eps=0.0001):
# numerical estimate of loss derivative at right[j]
base_ars = self.ars.copy()
rights = base_ars.copy()
rights[j] += eps
nr.poses(self.als, rights)
loss_plus = nr.reloss()
rights = base_ars.copy()
rights[j] -= eps
nr.poses(self.als, rights)
loss_minus = nr.reloss()
self.ars = base_ars
dloss = 0.5 * (loss_plus - loss_minus) / eps
return dloss
if __name__ == '__main__':
from bdbd_common.pathPlan2 import PathPlan
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(12,4))
dt = 0.05
lr_model = default_lr_model()
#lr_model = ((1.0, 1.0, 10.0), (-1.0, 1.0, 10.0), (-1.0, 10.0, 10.0))
start_pose = [0.0, 0.0, 0.0]
start_twist = [0.0, 0.0, 0.0]
target_pose = [0.2, .1, D_TO_R * 180]
target_twist = [0.0, 0.0, 0.0]
approach_rho = 0.05
min_rho = 0.02
cruise_v = 0.25
lr_start = (0.0, 0.0)
gauss_iters = 0
nr_iters = 20
Wmax = dt * 1.e-3
#Wmax = 0.0
Wjerk = dt * 1.e-3
Wback = 1.0
#Wback = 0.0
NRstart = 1.0
NRfact = 2
maxSlew = 1.00
testNR = False
pp = PathPlan(approach_rho=approach_rho, min_rho=min_rho)
pathPlan = pp.start2(start_pose, target_pose)
print('path_plan:')
for segment in pathPlan:
print(fstr(segment, fmat='10.7f'))
# estimate left, right to achieve the path
speedPlan = pp.speedPlan(start_twist[0], cruise_v, target_twist[0], u=0.10)
print('speed_plan:')
for segment in speedPlan:
print(fstr(segment, fmat='10.7f'))
vxr0 = start_twist[0] * math.cos(start_pose[2]) + start_twist[1] * math.sin(start_pose[2])
vyr0 = -start_twist[0] * math.sin(start_pose[2]) + start_twist[1] * math.cos(start_pose[2])
last_vx = vxr0
last_omega = start_twist[2]
vxres = [vxr0]
vyres = [vyr0]
omegas = [start_twist[2]]
vvs = [pp.v(0.0)]
vvs[0]['left'] = lr_start[0]
vvs[0]['right'] = lr_start[1]
lefts = [lr_start[0]]
rights = [lr_start[1]]
tt = 0.0
tees = [tt]
while True:
tt += dt
vv = pp.v(tt)
vvs.append(vv)
# vv gives vhat is in wheel frame. We need to convert to robot frame.
vxres.append(vv['v'])
vyres.append(vv['omega'] * pp.dwheel)
omegas.append(vv['omega'])
(left, right, last_vx, last_omega) = lr_est(vv['v'], vv['omega'], last_vx, last_omega, dt)
lefts.append(left)
rights.append(right)
tees.append(tt)
vv['left'] = left
vv['right'] = right
if vv['fraction'] > 0.9999:
break
for seg in vvs:
print(estr(seg))
# send to C++ node for processing
rospy.init_node('NewRaph')
lrPub = rospy.Publisher('rawLR', LeftRights, queue_size=10)
lrMsg = LeftRights()
lrMsg.dt = dt
lrMsg.lefts = lefts
lrMsg.rights = rights
start_lefts = lefts.copy()
start_rights = rights.copy()
while not rospy.is_shutdown():
lefts = start_lefts.copy()
rights = start_rights.copy()
lrPub.publish(lrMsg)
print('\n***** publishing rawLR *****')
n = len(lefts)
nr = NewRaph(n, dt
,lr_model=lr_model
,start_pose=start_pose
,start_twist=start_twist
)
eps = 1.0
nhess = len(lefts) - 1
axis3 = None
gauss_count = 0
nr_count = 0
while True:
if rospy.is_shutdown():
break
base_lefts = lefts.copy()
base_rights = rights.copy()
rospy.sleep(0.01)
(pxj, pyj, thetaj, vxj, vyj, omegaj) = nr.poses(lefts, rights)
loss = nr.loss(mmax=1.0, target_pose=target_pose, Wmax=Wmax, Wjerk=Wjerk, Wback=Wback, details=True)
print('loss: ' + estr(loss))
(dpxdl, dpxdr, dpydl, dpydr) = nr.gradients()
(dlefts, drights) = nr.jacobian()
#print(gstr({'dlefts': dlefts, '\ndrights': drights}))
if gauss_count < gauss_iters:
#eps = 1.0
gauss_count += 1
slew = 0.0
for i in range(1, n):
if abs(dlefts[i]) > slew:
slew = abs(dlefts[i])
if abs(drights[i]) > slew:
slew = abs(drights[i])
# line search over deltax looking for best eps
best_eps = 0.0
best_loss = loss
worst_eps = maxSlew / slew
print('eps limited to ', worst_eps)
eps = min(eps, worst_eps)
for lcount in range(4):
last_eps = eps
for i in range(1, n):
lefts[i] = base_lefts[i] - eps*dlefts[i]
rights[i] = base_rights[i] - eps*drights[i]
nr.poses(lefts, rights)
loss = nr.reloss()
if loss > best_loss:
worst_eps = eps
else:
best_eps = eps
best_loss = loss
if eps * 2 < worst_eps:
eps *= 2
else:
eps = 0.5 * (best_eps + worst_eps)
print(estr({'(G)eps': last_eps, 'loss': loss, 'best_eps': best_eps, 'worst_eps': worst_eps, 'new_eps': eps}))
eps = best_eps
for i in range(1, n):
lefts[i] = base_lefts[i] - eps*dlefts[i]
rights[i] = base_rights[i] - eps*drights[i]
else:
if nr_count >= nr_iters:
break
nr_count += 1
nr.seconds()
hess = nr.hessian()
b = np.concatenate([-nr.dlefts[1:], -nr.drights[1:]])
deltax = np.linalg.solve(nr.hess, b)
slew = np.amax(np.absolute(deltax))
# line search over deltax looking for best eps
best_eps = 0.0
best_loss = loss
worst_eps = maxSlew / slew
eps = min(eps, worst_eps)
for lcount in range(4):
last_eps = eps
lefts[1:] = base_lefts[1:] + eps * deltax[:nhess]
rights[1:] = base_rights[1:] + eps * deltax[nhess:]
nr.poses(lefts, rights)
loss = nr.reloss()
if loss > best_loss:
worst_eps = eps
else:
best_eps = eps
best_loss = loss
if eps * 2 < worst_eps:
eps *= 2
else:
eps = 0.5 * (best_eps + worst_eps)
print(estr({'(N)eps': last_eps, 'loss': loss, 'best_eps': best_eps, 'worst_eps': worst_eps, 'new_eps': eps}))
eps = best_eps
#eps = min(best_eps, 1.0)
print('using eps: ', eps)
lefts[1:] = base_lefts[1:] + eps * deltax[:nhess]
rights[1:] = base_rights[1:] + eps * deltax[nhess:]
fig.clf()
plt1 = fig.add_subplot(131)
#plt1.axis([0.0, tfPath.lrs[-1]['t'], -1.5, 1.5])
plt2 = fig.add_subplot(132)
plt3 = fig.add_subplot(133)
if axis3 is not None:
plt3.axis(axis3)
plt2.axis('equal')
plt1.plot(tees, lefts)
plt1.plot(tees, rights)
plt1.plot(tees, omegaj)
plt2.plot(pxj, pyj)
plt3.plot(tees, pxj)
plt3.plot(tees, pyj)
if gauss_count == 1:
plt.pause(1.0)
else:
plt.pause(1.0)
if axis3 is None:
axis3 = plt3.axis()
plt.waitforbuttonpress() | 33.996471 | 129 | 0.462193 |
import numpy as np
import rospy
import math
import time
from bdbd_common.utils import fstr, gstr
from bdbd_common.msg import LeftRights
from bdbd_common.geometry import lr_est, default_lr_model, D_TO_R
def estr(a):
return fstr(a, fmat='10.7g', n_per_line=10)
class NewRaph():
def __init__(self, n, dt
,lr_model=default_lr_model()
,start_pose=(0.0, 0.0, 0.0)
,start_twist=(0.0, 0.0, 0.0)
):
self.lr_model = lr_model
self.n = n
self.dt = dt
self.start_pose = start_pose
self.start_twist = start_twist
alr_model = np.array(self.lr_model)
self.bhes = (dt * alr_model[0], dt * alr_model[1], dt * alr_model[2])
(_, _, qhx) = self.bhes[0]
(_, _, qhy) = self.bhes[1]
(_, _, qho) = self.bhes[2]
(alphax, alphay, alphao) = 1.0 - np.array((qhx, qhy, qho))
alphaxj = [1.0]
alphayj = [1.0]
alphaoj = [1.0]
betaj = [dt]
for i in range(1, n):
alphaxj.append(alphaxj[i-1] * alphax)
alphayj.append(alphayj[i-1] * alphay)
alphaoj.append(alphaoj[i-1] * alphao)
betaj.append(betaj[i-1] + dt * alphaoj[i])
self.alphaxj = np.array(alphaxj)
self.alphayj = np.array(alphayj)
self.alphaoj = np.array(alphaoj)
self.betaj = np.array(betaj)
def poses(self, ls, rs,
details=False
):
als = np.asarray(ls)
ars = np.asarray(rs)
self.als = als
self.ars = ars
(px0, py0, theta0) = self.start_pose
(bhxl, bhxr, _) = self.bhes[0]
(bhyl, bhyr, _) = self.bhes[1]
(bhol, bhor, _) = self.bhes[2]
(vxw0, vyw0, omega0) = self.start_twist
n = self.n
dt = self.dt
alphaxj = self.alphaxj
alphayj = self.alphayj
alphaoj = self.alphaoj
vx0 = vxw0 * math.cos(theta0) + vyw0 * math.cos(theta0)
vy0 = -vxw0 * math.sin(theta0) + vyw0 * math.cos(theta0)
vxj = np.empty(n)
vyj = np.empty(n)
omegaj = np.empty(n)
vxj[0] = vx0
vyj[0] = vy0
omegaj[0] = omega0
bmotorxj = bhxl * als + bhxr * ars
bmotoryj = bhyl * als + bhyr * ars
bmotoroj = bhol * als + bhor * ars
for i in range(1, n):
vxj[i] = vx0 * alphaxj[i] + np.dot(alphaxj[i-1::-1], bmotorxj[1:i+1])
vyj[i] = vy0 * alphayj[i] + np.dot(alphayj[i-1::-1], bmotoryj[1:i+1])
omegaj[i] = omega0 * alphaoj[i] + np.dot(alphaoj[i-1::-1], bmotoroj[1:i+1])
if details:
print(estr({'alphaoj[n-2::-1]': alphaoj[n-2::-1]}))
print(estr({'bmotoroj[1:n]': bmotoroj[1:n]}))
pxj = np.empty(n)
pyj = np.empty(n)
thetaj = np.empty(n)
pxj[0] = px0
pyj[0] = py0
thetaj[0] = theta0
for i in range(1, n):
thetaj[i] = theta0 + omega0 * (self.betaj[i] - dt) \
+ np.dot(self.betaj[i-1::-1], bmotoroj[1:i+1])
cosj = np.cos(thetaj)
sinj = np.sin(thetaj)
vxcj = vxj * cosj
vxsj = vxj * sinj
vycj = vyj * cosj
vysj = vyj * sinj
vxwj = vxcj - vysj
vywj = vxsj + vycj
pxj[1:] = px0 + dt * np.cumsum(vxwj[1:])
pyj[1:] = py0 + dt * np.cumsum(vywj[1:])
self.cosj = cosj
self.sinj = sinj
self.vxcj = vxcj
self.vxsj = vxsj
self.vycj = vycj
self.vysj = vysj
self.vxwj = vxwj
self.vywj = vywj
self.vxj = vxj
self.vyj = vyj
self.omegaj = omegaj
self.pxj = pxj
self.pyj = pyj
self.thetaj = thetaj
return (pxj, pyj, thetaj, vxj, vyj, omegaj)
def gradients(self):
(bhxl, bhxr, _) = self.bhes[0]
(bhyl, bhyr, _) = self.bhes[1]
(bhol, bhor, _) = self.bhes[2]
n = self.n
dt = self.dt
alphaxj = self.alphaxj
alphayj = self.alphayj
betaj = self.betaj
cosj = self.cosj
sinj = self.sinj
vxcj = self.vxcj
vxsj = self.vxsj
vycj = self.vycj
vysj = self.vysj
dpxdl = np.zeros((n,n))
dpydl = np.zeros((n,n))
dpxdr = np.zeros((n,n))
dpydr = np.zeros((n,n))
for i in range(1, n):
for k in range(1, i+1):
doto = np.dot((-vxsj[k:i+1] - vycj[k:i+1]), betaj[:i+1-k])
dotx = np.dot(cosj[k:i+1], alphaxj[:i+1-k])
doty = np.dot(-sinj[k:i+1], alphayj[:i+1-k])
dpxdl[i,k] = dt * (
+bhol * doto
+bhxl * dotx
+bhyl * doty
)
dpxdr[i,k] = dt * (
+bhor * doto
+bhxr * dotx
+bhyr * doty
)
doto = np.dot((vxcj[k:i+1] - vysj[k:i+1]), betaj[:i+1-k])
dotx = np.dot(sinj[k:i+1], alphaxj[:i+1-k])
doty = np.dot(cosj[k:i+1], alphayj[:i+1-k])
dpydl[i,k] = dt * (
+bhol * doto
+bhxl * dotx
+bhyl * doty
)
dpydr[i,k] = dt * (
+bhor * doto
+bhxr * dotx
+bhyr * doty
)
self.dpxdl = dpxdl
self.dpydl = dpydl
self.dpxdr = dpxdr
self.dpydr = dpydr
return (dpxdl, dpxdr, dpydl, dpydr)
def seconds(self):
(bhxl, bhxr, _) = self.bhes[0]
(bhyl, bhyr, _) = self.bhes[1]
(bhol, bhor, _) = self.bhes[2]
n = self.n
dt = self.dt
alphaxj = self.alphaxj
alphayj = self.alphayj
betaj = self.betaj
cosj = self.cosj
sinj = self.sinj
vxwj = self.vxwj
vywj = self.vywj
d2pxdldl = np.zeros((n, n))
d2pxdldr = np.zeros((n, n))
d2pxdrdr = np.zeros((n, n))
d2pydldl = np.zeros((n, n))
d2pydldr = np.zeros((n, n))
d2pydrdr = np.zeros((n, n))
for j in range(1, n):
vxwdt = vxwj[j] * dt
vywdt = vywj[j] * dt
sdt = sinj[j] * dt
cdt = cosj[j] * dt
for k in range(1, j + 1):
betaljk = betaj[j-k] * bhol
betarjk = betaj[j-k] * bhor
alphaxljk = alphaxj[j-k] * bhxl
alphaxrjk = alphaxj[j-k] * bhxr
alphayljk = alphayj[j-k] * bhyl
alphayrjk = alphayj[j-k] * bhyr
for m in range(1, j + 1):
betaljm = betaj[j-m] * bhol
betarjm = betaj[j-m] * bhor
alphaxljm = alphaxj[j-m] * bhxl
alphaxrjm = alphaxj[j-m] * bhxr
alphayljm = alphaxj[j-m] * bhyl
alphayrjm = alphaxj[j-m] * bhyr
sumxll = (
-vxwdt * betaljk * betaljm
+sdt * (-betaljk * alphaxljm -alphaxljk * betaljm)
+cdt * (-betaljk * alphayljm -alphayljk * betaljm)
)
sumxlr = (
-vxwdt * betaljk * betarjm
+sdt * (-betaljk * alphaxrjm -alphaxljk * betarjm)
+cdt * (-betaljk * alphayrjm -alphayljk * betarjm)
)
sumxrr = (
-vxwdt * betarjk * betarjm
+sdt * (-betarjk * alphaxrjm -alphaxrjk * betarjm)
+cdt * (-betarjk * alphayrjm -alphayrjk * betarjm)
)
sumyll = (
-vywdt * betaljk * betaljm
+sdt * (-betaljk * alphayljm -alphayljk * betaljm)
+cdt * (betaljk * alphayljm +alphayljk * betaljm)
)
sumylr = (
-vywdt * betaljk * betarjm
+sdt * (-betaljk * alphayrjm -alphayljk * betarjm)
+cdt * (betaljk * alphayrjm +alphayljk * betarjm)
)
sumyrr = (
-vywdt * betarjk * betarjm
+sdt * (-betarjk * alphayrjm -alphayrjk * betarjm)
+cdt * (betarjk * alphayrjm +alphayrjk * betarjm)
)
d2pxdldl[k, m] += sumxll
d2pxdldr[k, m] += sumxlr
d2pxdrdr[k, m] += sumxrr
d2pydldl[k, m] += sumyll
d2pydldr[k, m] += sumylr
d2pydrdr[k, m] += sumyrr
self.d2pxdldl = d2pxdldl
self.d2pxdldr = d2pxdldr
self.d2pxdrdr = d2pxdrdr
self.d2pydldl = d2pydldl
self.d2pydldr = d2pydldr
self.d2pydrdr = d2pydrdr
return (d2pxdldl, d2pxdldr, d2pxdrdr, d2pydldl, d2pydldr, d2pydrdr)
def loss(self,
         target_pose=(0.0, 0.0, 0.0),
         target_twist=(0.0, 0.0, 0.0),
         target_lr=(0.0, 0.0),
         Wmax=1.0e-4,
         Wjerk=1.0e-4,
         Wback=1.0e-4,
         mmax=1.0,
         details=False):
    """Store the loss configuration on the instance, then evaluate it.

    The targets and penalty weights are cached as attributes so that
    subsequent calls to ``reloss`` (e.g. inside a line search) can
    re-evaluate the loss without re-passing the configuration.

    Returns the scalar loss value from ``reloss``.
    """
    # Cache every knob on self; reloss() reads them back from there.
    for attr_name, attr_value in (
            ('target_pose', target_pose),
            ('target_twist', target_twist),
            ('target_lr', target_lr),
            ('Wmax', Wmax),
            ('Wjerk', Wjerk),
            ('Wback', Wback),
            ('mmax', mmax)):
        setattr(self, attr_name, attr_value)
    return self.reloss(details=details)
def reloss(self, details=False):
    """Evaluate the scalar loss for the currently stored motor plan.

    Combines four terms: (a) a 10th-power soft saturation penalty on
    the motor commands relative to ``mmax``, (b) a penalty for driving
    both wheels backwards, (c) a jerk penalty on command differences,
    and (d) half the squared error of the final pose/twist/motor state
    against the stored targets.  All configuration is read from
    attributes previously set by ``loss``.  Caches the result on
    ``self.lossValue`` and returns it.
    """
    lefts = self.als
    rights = self.ars
    # (a) saturation penalty
    sum_max = 0.1 * self.Wmax * (
        np.power(lefts, 10.0).sum() + np.power(rights, 10.0).sum()
    ) / self.mmax ** 10
    # (b) backward-drive penalty: only where lefts + rights < 0
    sum_back = 0.1 * self.Wback * np.power(
        (lefts + rights).clip(max=0.0), 10).sum()
    # (c) jerk penalty on first differences of the commands
    sum_jerk = 0.5 * self.Wjerk * (
        np.square(np.diff(lefts)).sum() + np.square(np.diff(rights)).sum())
    # (d) terminal error against targets
    vals = np.asarray([
        self.pxj[-1], self.pyj[-1], self.thetaj[-1],
        self.vxj[-1], self.vyj[-1], self.omegaj[-1],
        lefts[-1], rights[-1],
    ])
    targets = np.concatenate(
        [self.target_pose, self.target_twist, self.target_lr])
    diffs = vals - targets
    # wrap the heading error into (-pi, pi]
    diffs[2] = (diffs[2] + math.pi) % (2 * math.pi) - math.pi
    sum_targets = 0.5 * np.square(diffs).sum()
    loss = sum_max + sum_jerk + sum_targets + sum_back
    if details:
        print('target losses: ' + estr(0.5 * np.square(vals - targets)))
        print(estr({'loss': loss, 'sumMax': sum_max, 'sumJerk': sum_jerk, 'sumTargets': sum_targets, 'sumBack': sum_back}))
        print(fstr({'vals': vals}, fmat='15.12g'))
        print(fstr({'targets': targets}))
        print(fstr({'diffs': diffs}))
    self.lossValue = loss
    return loss
def jacobian(self):
    """Analytic gradient of the loss w.r.t. the motor commands.

    Returns:
        (dlefts, drights): arrays of length n with d(loss)/d(left[k])
        and d(loss)/d(right[k]).  Element 0 corresponds to the fixed
        initial command and stays zero.  The arrays are also cached on
        ``self.dlefts`` / ``self.drights``.
    """
    vxj = self.vxj
    vyj = self.vyj
    omegaj = self.omegaj
    pxj = self.pxj
    pyj = self.pyj
    thetaj = self.thetaj
    (pxt, pyt, thetat) = self.target_pose
    (vxt, vyt, omegat) = self.target_twist
    (leftt, rightt) = self.target_lr
    dpxdl = self.dpxdl
    dpydl = self.dpydl
    dpxdr = self.dpxdr
    dpydr = self.dpydr
    (bhxl, bhxr, _) = self.bhes[0]
    (bhyl, bhyr, _) = self.bhes[1]
    (bhol, bhor, _) = self.bhes[2]
    alphaxj = self.alphaxj
    alphayj = self.alphayj
    alphaoj = self.alphaoj
    betaj = self.betaj
    Wmax = self.Wmax
    Wjerk = self.Wjerk
    Wback = self.Wback
    mmax = self.mmax
    lefts = self.als
    rights = self.ars
    # 9th powers are the derivatives of the 10th-power penalties in reloss().
    leftsp9 = np.power(lefts / mmax, 9)
    rightsp9 = np.power(rights / mmax, 9)
    lprsp9 = np.power((lefts + rights).clip(max=0.0), 9)
    n = len(lefts)
    dlefts = np.zeros([n])
    drights = np.zeros([n])
    # heading error wrapped into (-pi, pi]
    difft = (thetaj[-1] - thetat + math.pi) % (2 * math.pi) - math.pi
    for k in range(1, n):
        dlefts[k] = (
            +(vxj[-1] - vxt) * bhxl * alphaxj[n-1-k]
            +(vyj[-1] - vyt) * bhyl * alphayj[n-1-k]
            +(omegaj[-1] - omegat) * bhol * alphaoj[n-1-k]
            +(difft) * bhol * betaj[n-1-k]
            +(pxj[-1] - pxt) * dpxdl[-1, k]
            +(pyj[-1] - pyt) * dpydl[-1, k]
            +Wmax * leftsp9[k] / mmax
            +Wback * lprsp9[k]
            +Wjerk * (2 * lefts[k] -lefts[k-1] -lefts[min(k+1, n-1)])
        )
        drights[k] = (
            +(vxj[-1] - vxt) * bhxr * alphaxj[n-1-k]
            +(vyj[-1] - vyt) * bhyr * alphayj[n-1-k]
            +(omegaj[-1] - omegat) * bhor * alphaoj[n-1-k]
            +(difft) * bhor * betaj[n-1-k]
            +(pxj[-1] - pxt) * dpxdr[-1, k]
            +(pyj[-1] - pyt) * dpydr[-1, k]
            # Bug fix: the right-wheel saturation term was missing the
            # 1/mmax factor present in the left-wheel term above (the
            # saturation loss 0.1*Wmax*(r/mmax)**10 differentiates to
            # Wmax*(r/mmax)**9 / mmax).  Harmless only when mmax == 1.
            +Wmax * rightsp9[k] / mmax
            +Wback * lprsp9[k]
            +Wjerk * (2 * rights[k] -rights[k-1] -rights[min(k+1, n-1)])
        )
    # terminal motor-command target terms apply only to the last command
    dlefts[-1] += (lefts[-1] - leftt)
    drights[-1] += (rights[-1] - rightt)
    self.dlefts = dlefts
    self.drights = drights
    return (dlefts, drights)
def hessian(self):
    """Analytic Hessian of the loss w.r.t. the free motor commands.

    Builds the (2n x 2n) matrix over the flattened variable vector
    [left[1..n], right[1..n]] (index 0 is the fixed initial command and
    is excluded).  The dense part combines second derivatives of the
    terminal-position terms with products of first derivatives of the
    terminal pose/twist/heading terms; the saturation, backward,
    motor-target and jerk penalties then contribute diagonal/banded
    terms.  The result is cached on ``self.hess`` and returned.
    """
    pxj = self.pxj
    pyj = self.pyj
    (pxt, pyt, _) = self.target_pose
    dpxdl = self.dpxdl
    dpydl = self.dpydl
    dpxdr = self.dpxdr
    dpydr = self.dpydr
    (bhxl, bhxr, _) = self.bhes[0]
    (bhyl, bhyr, _) = self.bhes[1]
    (bhol, bhor, _) = self.bhes[2]
    alphaxj = self.alphaxj
    alphayj = self.alphayj
    alphaoj = self.alphaoj
    betaj = self.betaj
    Wmax = self.Wmax
    Wjerk = self.Wjerk
    Wback = self.Wback
    mmax = self.mmax
    lefts = self.als
    rights = self.ars
    d2pxdldl = self.d2pxdldl
    d2pxdldr = self.d2pxdldr
    d2pxdrdr = self.d2pxdrdr
    d2pydldl = self.d2pydldl
    d2pydldr = self.d2pydldr
    d2pydrdr = self.d2pydrdr
    # n here is the number of FREE commands (excludes the fixed index 0)
    n = len(lefts) - 1
    hess = np.empty([2*n, 2*n])
    # values that vary with each k, m value
    deltapxn = pxj[-1] - pxt
    deltapyn = pyj[-1] - pyt
    for i in range(0, 2*n):
        # row index: first n rows are left commands, last n are right
        k = i % n + 1
        kleft = (i < n)
        if kleft:
            dpxdu = dpxdl[n, k]
            dpydu = dpydl[n, k]
            dvxdu = alphaxj[n-k] * bhxl
            dvydu = alphayj[n-k] * bhyl
            domdu = alphaoj[n-k] * bhol
            dthdu = betaj[n-k] * bhol
        else:
            dpxdu = dpxdr[n, k]
            dpydu = dpydr[n, k]
            dvxdu = alphaxj[n-k] * bhxr
            dvydu = alphayj[n-k] * bhyr
            domdu = alphaoj[n-k] * bhor
            dthdu = betaj[n-k] * bhor
        for j in range(0, 2*n):
            # column index: same left/right split as the rows
            m = j % n + 1
            mleft = (j < n)
            if mleft:
                dpxds = dpxdl[n, m]
                dpyds = dpydl[n, m]
                dvxds = alphaxj[n-m] * bhxl
                dvyds = alphayj[n-m] * bhyl
                domds = alphaoj[n-m] * bhol
                dthds = betaj[n-m] * bhol
                if kleft:
                    d2px = d2pxdldl[k, m]
                    d2py = d2pydldl[k, m]
                else:
                    # note d2pxdrdl[i,j] = d2pxdldr[j,i]
                    d2px = d2pxdldr[m, k]
                    d2py = d2pydldr[m, k]
            else:
                dpxds = dpxdr[n, m]
                dpyds = dpydr[n, m]
                dvxds = alphaxj[n-m] * bhxr
                dvyds = alphayj[n-m] * bhyr
                domds = alphaoj[n-m] * bhor
                dthds = betaj[n-m] * bhor
                if kleft:
                    d2px = d2pxdldr[k, m]
                    d2py = d2pydldr[k, m]
                else:
                    d2px = d2pxdrdr[k, m]
                    d2py = d2pydrdr[k, m]
            hess[i, j] = (
                deltapxn * d2px + dpxdu * dpxds +
                deltapyn * d2py + dpydu * dpyds +
                dvxdu * dvxds + dvydu * dvyds + domdu * domds + dthdu * dthds
            )
    # values that require k == m
    for i in range(0, 2*n):
        k = i % n + 1
        kleft = (i < n)
        # max term
        # TODO: I need factor of 9 here?
        # NOTE(review): jacobian scales this term by 1/mmax**10 overall;
        # Wmax / mmax**2 here matches only when mmax == 1 — confirm.
        hess[i, i] += 9. * (Wmax / mmax**2) * (lefts[k]**8 if kleft else rights[k]**8)
        # back term
        if (lefts[k] + rights[k]) < 0.0:
            hess[i, i] += 9. * Wback * (lefts[k] + rights[k])**8
        # motor target value
        if k == n:
            hess[i, i] += 1.0
        # jerk term
        hess[i, i] += 2 *Wjerk
        if k > 1:
            hess[i, i-1] -= Wjerk
        if k == n:
            hess[i, i] -= Wjerk
        else:
            hess[i, i+1] -= Wjerk
    self.hess = hess
    return hess
def dloss_dleft(self, j, eps=1.e-3):
    """Central-difference estimate of d(loss)/d(left[j]).

    Perturbs the j-th left command by +/-eps, re-evaluates the loss via
    poses()/reloss(), and restores ``self.als`` before returning.
    Intended as a numerical check on ``jacobian``.
    """
    base_als = self.als.copy()
    lefts = base_als.copy()
    lefts[j] += eps
    # Bug fix: previously called the module-level `nr` instance instead
    # of `self`, which evaluated the wrong object when this method was
    # called on any other instance.
    self.poses(lefts, self.ars)
    loss_plus = self.reloss()
    lefts = base_als.copy()
    lefts[j] -= eps
    self.poses(lefts, self.ars)
    loss_minus = self.reloss()
    self.als = base_als
    dloss = 0.5 * (loss_plus - loss_minus) / eps
    return dloss
def d2loss_dl_dl(self, k, eps=0.0001):
    """Central-difference estimate of the k-th column of the left/left
    Hessian block: d2(loss)/d(left[j])d(left[k]) for j = 1..n-1.

    Differences the analytic jacobian at left[k] +/- eps, printing a
    debug line per sample.  Restores ``self.als`` before returning.
    Intended as a numerical check on ``hessian``.  Element 0 of the
    returned list is a 0.0 placeholder for the fixed initial command.
    """
    base_als = self.als.copy()
    n = len(self.als)
    d2lossj = [0.0]
    for j in range(1, n):
        # Bug fix: previously used the module-level `nr` instance
        # instead of `self` for poses/gradients/jacobian.
        lefts = base_als.copy()
        lefts[k] += eps
        self.als = lefts
        self.poses(lefts, self.ars)
        self.gradients()
        self.jacobian()
        dlossp = self.dlefts[j]
        lefts = base_als.copy()
        lefts[k] -= eps
        self.als = lefts
        self.poses(lefts, self.ars)
        self.gradients()
        self.jacobian()
        dlossm = self.dlefts[j]
        d2lossj.append(0.5 * (dlossp - dlossm) / eps)
        print(estr(({'dlossp': dlossp, 'dlossm': dlossm, 'dlossp-dlossm': dlossp-dlossm, 'wjerk': self.Wjerk})))
    self.als = base_als
    return d2lossj
def dloss_dright(self, j, eps=0.0001):
    """Central-difference estimate of d(loss)/d(right[j]).

    Mirror of ``dloss_dleft`` for the right-wheel commands; restores
    ``self.ars`` before returning.
    """
    base_ars = self.ars.copy()
    rights = base_ars.copy()
    rights[j] += eps
    # Bug fix: previously called the module-level `nr` instance instead
    # of `self`.
    self.poses(self.als, rights)
    loss_plus = self.reloss()
    rights = base_ars.copy()
    rights[j] -= eps
    self.poses(self.als, rights)
    loss_minus = self.reloss()
    self.ars = base_ars
    dloss = 0.5 * (loss_plus - loss_minus) / eps
    return dloss
# Script driver: plans a path, seeds motor commands from the plan, publishes
# them to a ROS topic, then iteratively refines them with gradient descent
# and/or Newton-Raphson steps while plotting progress.
# NOTE(review): indentation below is reconstructed — the dump this file came
# from stripped all leading whitespace; verify nesting against the original.
if __name__ == '__main__':
    from bdbd_common.pathPlan2 import PathPlan
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=(12,4))
    # --- configuration ---
    dt = 0.05
    lr_model = default_lr_model()
    #lr_model = ((1.0, 1.0, 10.0), (-1.0, 1.0, 10.0), (-1.0, 10.0, 10.0))
    start_pose = [0.0, 0.0, 0.0]
    start_twist = [0.0, 0.0, 0.0]
    target_pose = [0.2, .1, D_TO_R * 180]
    target_twist = [0.0, 0.0, 0.0]
    approach_rho = 0.05
    min_rho = 0.02
    cruise_v = 0.25
    lr_start = (0.0, 0.0)
    gauss_iters = 0
    nr_iters = 20
    Wmax = dt * 1.e-3
    #Wmax = 0.0
    Wjerk = dt * 1.e-3
    Wback = 1.0
    #Wback = 0.0
    NRstart = 1.0
    NRfact = 2
    maxSlew = 1.00
    testNR = False
    # --- path and speed plans ---
    pp = PathPlan(approach_rho=approach_rho, min_rho=min_rho)
    pathPlan = pp.start2(start_pose, target_pose)
    print('path_plan:')
    for segment in pathPlan:
        print(fstr(segment, fmat='10.7f'))
    # estimate left, right to achieve the path
    speedPlan = pp.speedPlan(start_twist[0], cruise_v, target_twist[0], u=0.10)
    print('speed_plan:')
    for segment in speedPlan:
        print(fstr(segment, fmat='10.7f'))
    # initial robot-frame velocities from the world-frame start twist
    vxr0 = start_twist[0] * math.cos(start_pose[2]) + start_twist[1] * math.sin(start_pose[2])
    vyr0 = -start_twist[0] * math.sin(start_pose[2]) + start_twist[1] * math.cos(start_pose[2])
    last_vx = vxr0
    last_omega = start_twist[2]
    vxres = [vxr0]
    vyres = [vyr0]
    omegas = [start_twist[2]]
    vvs = [pp.v(0.0)]
    vvs[0]['left'] = lr_start[0]
    vvs[0]['right'] = lr_start[1]
    lefts = [lr_start[0]]
    rights = [lr_start[1]]
    tt = 0.0
    tees = [tt]
    # sample the plan every dt, estimating left/right commands at each step
    while True:
        tt += dt
        vv = pp.v(tt)
        vvs.append(vv)
        # vv gives vhat is in wheel frame. We need to convert to robot frame.
        vxres.append(vv['v'])
        vyres.append(vv['omega'] * pp.dwheel)
        omegas.append(vv['omega'])
        (left, right, last_vx, last_omega) = lr_est(vv['v'], vv['omega'], last_vx, last_omega, dt)
        lefts.append(left)
        rights.append(right)
        tees.append(tt)
        vv['left'] = left
        vv['right'] = right
        if vv['fraction'] > 0.9999:
            break
    for seg in vvs:
        print(estr(seg))
    # send to C++ node for processing
    rospy.init_node('NewRaph')
    lrPub = rospy.Publisher('rawLR', LeftRights, queue_size=10)
    lrMsg = LeftRights()
    lrMsg.dt = dt
    lrMsg.lefts = lefts
    lrMsg.rights = rights
    start_lefts = lefts.copy()
    start_rights = rights.copy()
    # outer loop: republish the raw plan and re-run the optimization
    while not rospy.is_shutdown():
        lefts = start_lefts.copy()
        rights = start_rights.copy()
        lrPub.publish(lrMsg)
        print('\n***** publishing rawLR *****')
        n = len(lefts)
        nr = NewRaph(n, dt
            ,lr_model=lr_model
            ,start_pose=start_pose
            ,start_twist=start_twist
        )
        eps = 1.0
        nhess = len(lefts) - 1
        axis3 = None
        gauss_count = 0
        nr_count = 0
        # inner loop: gauss_iters gradient-descent steps, then Newton steps
        while True:
            if rospy.is_shutdown():
                break
            base_lefts = lefts.copy()
            base_rights = rights.copy()
            rospy.sleep(0.01)
            (pxj, pyj, thetaj, vxj, vyj, omegaj) = nr.poses(lefts, rights)
            loss = nr.loss(mmax=1.0, target_pose=target_pose, Wmax=Wmax, Wjerk=Wjerk, Wback=Wback, details=True)
            print('loss: ' + estr(loss))
            (dpxdl, dpxdr, dpydl, dpydr) = nr.gradients()
            (dlefts, drights) = nr.jacobian()
            #print(gstr({'dlefts': dlefts, '\ndrights': drights}))
            if gauss_count < gauss_iters:
                # --- gradient-descent step with a crude eps line search ---
                #eps = 1.0
                gauss_count += 1
                slew = 0.0
                for i in range(1, n):
                    if abs(dlefts[i]) > slew:
                        slew = abs(dlefts[i])
                    if abs(drights[i]) > slew:
                        slew = abs(drights[i])
                # line search over deltax looking for best eps
                best_eps = 0.0
                best_loss = loss
                worst_eps = maxSlew / slew
                print('eps limited to ', worst_eps)
                eps = min(eps, worst_eps)
                for lcount in range(4):
                    last_eps = eps
                    for i in range(1, n):
                        lefts[i] = base_lefts[i] - eps*dlefts[i]
                        rights[i] = base_rights[i] - eps*drights[i]
                    nr.poses(lefts, rights)
                    loss = nr.reloss()
                    if loss > best_loss:
                        worst_eps = eps
                    else:
                        best_eps = eps
                        best_loss = loss
                    if eps * 2 < worst_eps:
                        eps *= 2
                    else:
                        eps = 0.5 * (best_eps + worst_eps)
                    print(estr({'(G)eps': last_eps, 'loss': loss, 'best_eps': best_eps, 'worst_eps': worst_eps, 'new_eps': eps}))
                eps = best_eps
                for i in range(1, n):
                    lefts[i] = base_lefts[i] - eps*dlefts[i]
                    rights[i] = base_rights[i] - eps*drights[i]
            else:
                # --- Newton-Raphson step using the analytic Hessian ---
                if nr_count >= nr_iters:
                    break
                nr_count += 1
                nr.seconds()
                hess = nr.hessian()
                b = np.concatenate([-nr.dlefts[1:], -nr.drights[1:]])
                deltax = np.linalg.solve(nr.hess, b)
                slew = np.amax(np.absolute(deltax))
                # line search over deltax looking for best eps
                best_eps = 0.0
                best_loss = loss
                worst_eps = maxSlew / slew
                eps = min(eps, worst_eps)
                for lcount in range(4):
                    last_eps = eps
                    lefts[1:] = base_lefts[1:] + eps * deltax[:nhess]
                    rights[1:] = base_rights[1:] + eps * deltax[nhess:]
                    nr.poses(lefts, rights)
                    loss = nr.reloss()
                    if loss > best_loss:
                        worst_eps = eps
                    else:
                        best_eps = eps
                        best_loss = loss
                    if eps * 2 < worst_eps:
                        eps *= 2
                    else:
                        eps = 0.5 * (best_eps + worst_eps)
                    print(estr({'(N)eps': last_eps, 'loss': loss, 'best_eps': best_eps, 'worst_eps': worst_eps, 'new_eps': eps}))
                eps = best_eps
                #eps = min(best_eps, 1.0)
                print('using eps: ', eps)
                lefts[1:] = base_lefts[1:] + eps * deltax[:nhess]
                rights[1:] = base_rights[1:] + eps * deltax[nhess:]
            # --- refresh the three progress plots each iteration ---
            fig.clf()
            plt1 = fig.add_subplot(131)
            #plt1.axis([0.0, tfPath.lrs[-1]['t'], -1.5, 1.5])
            plt2 = fig.add_subplot(132)
            plt3 = fig.add_subplot(133)
            if axis3 is not None:
                plt3.axis(axis3)
            plt2.axis('equal')
            plt1.plot(tees, lefts)
            plt1.plot(tees, rights)
            plt1.plot(tees, omegaj)
            plt2.plot(pxj, pyj)
            plt3.plot(tees, pxj)
            plt3.plot(tees, pyj)
            if gauss_count == 1:
                plt.pause(1.0)
            else:
                plt.pause(1.0)
            if axis3 is None:
                axis3 = plt3.axis()
        plt.waitforbuttonpress()
f7f8eeaabedd066a839f4a52e7e5bf44b79aadf0 | 833 | py | Python | king_of_the_hill/segment_timer.py | andrewzwicky/KingOfTheHill | e0077d9fc9ca4ebb1d0dfa556589d83b4a52bf45 | [
"MIT"
] | null | null | null | king_of_the_hill/segment_timer.py | andrewzwicky/KingOfTheHill | e0077d9fc9ca4ebb1d0dfa556589d83b4a52bf45 | [
"MIT"
] | null | null | null | king_of_the_hill/segment_timer.py | andrewzwicky/KingOfTheHill | e0077d9fc9ca4ebb1d0dfa556589d83b4a52bf45 | [
"MIT"
] | null | null | null | import RPi.GPIO as GPIO
from Adafruit_LED_Backpack import SevenSegment
from time import sleep
def setup():
    """Configure BCM pin numbering and initialize both 7-segment displays.

    Returns the (red, green) SevenSegment display pair, already begun.
    """
    GPIO.setmode(GPIO.BCM)
    displays = tuple(SevenSegment.SevenSegment(address=addr)
                     for addr in (0x70, 0x72))
    for display in displays:
        display.begin()
    return displays
def teardown():
    """Release all GPIO resources claimed by this process."""
    GPIO.cleanup()
def test_all_digits(segment):
    """Cycle the display through every digit, 0 through 9, showing each
    digit on all four positions for half a second.

    Args:
        segment: an initialized SevenSegment display.
    """
    segment.clear()
    # Bug fix: range(9) stopped at 8, so digit 9 was never tested even
    # though the function claims to test all digits.
    for num in range(10):
        segment.set_digit(0, num)
        segment.set_digit(1, num)
        segment.set_digit(2, num)
        segment.set_digit(3, num)
        sleep(0.5)
def main():
    """Initialize the displays, run the digit test on each, and always clean up."""
    try:
        displays = setup()
        for display in displays:
            test_all_digits(display)
    finally:
        # cleanup must run even if the display test raises
        teardown()
# Run the hardware self-test when executed as a script.
if __name__ == "__main__":
    main()
| 18.108696 | 59 | 0.666267 | import RPi.GPIO as GPIO
from Adafruit_LED_Backpack import SevenSegment
from time import sleep
def setup():
GPIO.setmode(GPIO.BCM)
red_segment = SevenSegment.SevenSegment(address=0x70)
green_segment = SevenSegment.SevenSegment(address=0x72)
red_segment.begin()
green_segment.begin()
return red_segment, green_segment
def teardown():
GPIO.cleanup()
def test_all_digits(segment):
segment.clear()
for num in range(9):
segment.set_digit(0, num)
segment.set_digit(1, num)
segment.set_digit(2, num)
segment.set_digit(3, num)
sleep(0.5)
def main():
try:
red_segment, green_segment = setup()
test_all_digits(red_segment)
test_all_digits(green_segment)
finally:
teardown()
if __name__ == "__main__":
main()
| true | true |
f7f8efaa23034e9a4cbd289bc8a5783292e56cee | 500 | py | Python | plotly/validators/layout/scene/yaxis/_spikecolor.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z | plotly/validators/layout/scene/yaxis/_spikecolor.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 27 | 2020-04-28T21:23:12.000Z | 2021-06-25T15:36:38.000Z | plotly/validators/layout/scene/yaxis/_spikecolor.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z | import _plotly_utils.basevalidators
class SpikecolorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for the ``layout.scene.yaxis.spikecolor`` color property."""

    def __init__(
        self,
        plotly_name='spikecolor',
        parent_name='layout.scene.yaxis',
        **kwargs
    ):
        """Construct the validator; ``edit_type`` defaults to 'plot' and
        ``role`` to 'style' unless overridden via kwargs."""
        super(SpikecolorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop('edit_type', 'plot'),
            role=kwargs.pop('role', 'style'),
            **kwargs
        )
| 26.315789 | 71 | 0.612 | import _plotly_utils.basevalidators
class SpikecolorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self,
plotly_name='spikecolor',
parent_name='layout.scene.yaxis',
**kwargs
):
super(SpikecolorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'plot'),
role=kwargs.pop('role', 'style'),
**kwargs
)
| true | true |
f7f8f06b9799a1cd00fe1cb57ff8e5c82fad6f8e | 808 | py | Python | zoom-meetings/Zoom.py | ye-song/automation | ae9e987234a40e139f8f9bd84fea7e68e1e9e9df | [
"MIT"
] | null | null | null | zoom-meetings/Zoom.py | ye-song/automation | ae9e987234a40e139f8f9bd84fea7e68e1e9e9df | [
"MIT"
] | null | null | null | zoom-meetings/Zoom.py | ye-song/automation | ae9e987234a40e139f8f9bd84fea7e68e1e9e9df | [
"MIT"
] | null | null | null | #! python3
# Zoom.py - opens up zoom and login the meeting session.
import subprocess
import pyautogui
import time
'''
This is a sample code. To use this sample code:
1. Replace the path of zoom application on your machine
2. Replace the coordinates of where each mouse click will be
** Note that this depends on the screen resolution of your machine.
3. Replace the 'meeting ID' placeholder
4. Replace the 'password' placeholder
The times are added because pyautogui runs too fast before the
GUI is loaded.
'''
# Launch the Zoom client (path is machine-specific — see the notes above).
subprocess.Popen('C:\\Users\\AppData\\Roaming\\Zoom\\bin\\Zoom.exe')
time.sleep(1)
# Presumably the 'Join a Meeting' control; coordinates depend on resolution.
pyautogui.click((1300, 713))
time.sleep(1)
# Placeholder: replace with the actual meeting ID before use.
pyautogui.typewrite('meeting ID')
time.sleep(1)
# Presumably the 'Join' confirmation button.
pyautogui.click((1305, 908))
time.sleep(2)
# Placeholder: replace with the actual meeting passcode before use.
pyautogui.typewrite('password')
time.sleep(1)
# Presumably the final 'Join Meeting' button.
pyautogui.click((1295, 906))
| 26.933333 | 68 | 0.757426 |
import subprocess
import pyautogui
import time
subprocess.Popen('C:\\Users\\AppData\\Roaming\\Zoom\\bin\\Zoom.exe')
time.sleep(1)
pyautogui.click((1300, 713))
time.sleep(1)
pyautogui.typewrite('meeting ID')
time.sleep(1)
pyautogui.click((1305, 908))
time.sleep(2)
pyautogui.typewrite('password')
time.sleep(1)
pyautogui.click((1295, 906))
| true | true |
f7f8f11d956cc0b98b114859aa0373559038bb23 | 4,846 | py | Python | stanCode_projects/boggle_game_solver/anagram.py | ShihYesWei/stanCode-projects | 69104b7be3d8c3fbd34935c1d4e15e40961e4556 | [
"MIT"
] | null | null | null | stanCode_projects/boggle_game_solver/anagram.py | ShihYesWei/stanCode-projects | 69104b7be3d8c3fbd34935c1d4e15e40961e4556 | [
"MIT"
] | null | null | null | stanCode_projects/boggle_game_solver/anagram.py | ShihYesWei/stanCode-projects | 69104b7be3d8c3fbd34935c1d4e15e40961e4556 | [
"MIT"
] | null | null | null | """
File: anagram.py
Name: Alan Chen
----------------------------------
This program recursively finds all the anagram(s)
for the word input by user and terminates when the
input string matches the EXIT constant defined
at line 21
If you correctly implement this program, you should see the
number of anagrams for each word listed below:
* arm -> 3 anagrams
* contains -> 5 anagrams
* stop -> 6 anagrams
* tesla -> 10 anagrams
* spear -> 12 anagrams
"""
# Constants
FILE = 'dictionary.txt' # This is the filename of an English dictionary
EXIT = '-1' # Controls when to stop the loop
# Global variable
dictionary = [] # A python list of vocabulary from FILE = 'dictionary.txt'
alpha_dict = {} # A python dict for saving the input words of [order : alphabet]
num_list = [] # A python list for saving integers with the length of the input vocabulary
printed = [] # A python list for saving the printed anagrams
def main():
    """Load the dictionary, then repeatedly prompt the user for words and
    list their anagrams until the EXIT sentinel is entered."""
    global num_list
    global alpha_dict
    global printed
    read_dictionary()
    print('Welcome to stanCode "Anagram Generator" (or ' + str(EXIT) + ' to quit) ')
    while True:
        vocabulary = input('Find anagrams for: ')
        if vocabulary == EXIT:
            break
        # reset all per-word search state before each new query
        num_list = []
        alpha_dict = {}
        printed = []
        partial = []
        make_dict(vocabulary)
        make_list(vocabulary)
        print('Searching...')
        find_anagrams(vocabulary, num_list, partial)
        print(f'{len(printed)} anagrams: {printed}')
def find_anagrams(s, l, new_l):
    """Depth-first search over orderings of the letter indices in ``l``,
    printing and recording every dictionary word found.

    :param s: (str) the word the user typed (fixes the anagram length)
    :param l: (list) letter indices 0..len(s)-1 available to permute
    :param new_l: (list) the partial permutation built so far
    """
    if len(new_l) == len(s):
        # Base case: a complete permutation — check it against the dictionary.
        candidate = translation(new_l)
        if candidate in dictionary and candidate not in printed:
            print('Found: ', end='')
            print(candidate)
            print('Searching...')
            printed.append(candidate)
        return
    # Prune any branch whose letters so far cannot start a dictionary word.
    if not has_prefix(new_l):
        return
    for index in l:
        if index in new_l:
            continue
        new_l.append(index)
        find_anagrams(s, l, new_l)
        new_l.pop()
def read_dictionary():
    """Populate the global ``dictionary`` list with one stripped word per
    line of FILE."""
    global dictionary
    with open(FILE, 'r') as f:
        dictionary.extend(line.strip() for line in f)
def make_dict(s):
    """Record each letter of the input word in the global ``alpha_dict``,
    keyed by its position.

    For example, "stop" yields {0: 's', 1: 't', 2: 'o', 3: 'p'}.

    :param s: (str) the input vocabulary
    """
    for position, letter in enumerate(s):
        alpha_dict[position] = letter
def make_list(s):
    """Append the index positions 0..len(s)-1 to the global ``num_list``.

    Combined with ``alpha_dict``, this lets the anagram search operate as
    a permutation over integer positions.

    :param s: (str) the input vocabulary
    """
    global num_list
    num_list.extend(range(len(s)))
def translation(number_list):
    """Map index positions back to letters via ``alpha_dict`` and join
    them into a word.

    For input "stop" (alpha_dict {0:'s', 1:'t', 2:'o', 3:'p'}),
    [3, 2, 1, 0] translates to "pots".

    :param number_list: (list) list of integer positions
    :return: (str) the translated word
    """
    return ''.join(alpha_dict[position] for position in number_list)
def has_prefix(n_lst):
    """Return True if any dictionary word starts with the letters chosen
    so far; used to prune dead search branches early.

    :param n_lst: (list) partial permutation of letter positions
    """
    prefix = translation(n_lst)
    return any(word.startswith(prefix) for word in dictionary)
# Script entry point.
if __name__ == '__main__':
    main()
| 30.670886 | 119 | 0.62175 |
FILE = 'dictionary.txt'
EXIT = '-1'
dictionary = []
alpha_dict = {}
num_list = []
printed = []
def main():
read_dictionary()
print('Welcome to stanCode "Anagram Generator" (or ' + str(EXIT) + ' to quit) ')
global num_list
global alpha_dict
global printed
while True:
vocabulary = input('Find anagrams for: ')
if vocabulary == EXIT:
break
else:
sub_s = []
num_list = []
alpha_dict = {}
printed = []
make_dict(vocabulary)
make_list(vocabulary)
print('Searching...')
find_anagrams(vocabulary, num_list, sub_s)
print(f'{len(printed)} anagrams: {printed}')
def find_anagrams(s, l, new_l):
if len(new_l) == len(s):
word = translation(new_l)
if word in dictionary and word not in printed:
print('Found: ', end='')
print(word)
print('Searching...')
printed.append(word)
else:
if not has_prefix(new_l):
return
for i in range(len(l)):
order = l[i]
if order not in new_l:
new_l.append(order)
find_anagrams(s, l, new_l)
new_l.pop()
def read_dictionary():
global dictionary
with open(FILE, 'r') as f:
for line in f:
line = line.strip()
dictionary.append(line)
def make_dict(s):
for i in range(len(s)):
alpha_dict[i] = s[i]
def make_list(s):
global num_list
for i in range(len(s)):
num_list.append(i)
def translation(number_list):
word = ''
for number in number_list:
word += alpha_dict[number]
return word
def has_prefix(n_lst):
s = translation(n_lst)
for word in dictionary:
if word.startswith(s):
return True
return False
if __name__ == '__main__':
main()
| true | true |
f7f8f14baa26e0bd197444f4daedee8f28d48ce8 | 9,085 | py | Python | research/ptn/train_ptn.py | Zhangxu0501/models | 7c8ca1647926226556e05fdd6535a35abe3100eb | [
"Apache-2.0"
] | 310 | 2017-01-23T15:04:12.000Z | 2022-01-11T05:50:41.000Z | research/ptn/train_ptn.py | wzy1510300a28/models | 42a3da72313b8814ef0ced8f425af90b57313b9f | [
"Apache-2.0"
] | 12 | 2017-05-23T17:42:59.000Z | 2020-01-10T05:16:22.000Z | research/ptn/train_ptn.py | wzy1510300a28/models | 42a3da72313b8814ef0ced8f425af90b57313b9f | [
"Apache-2.0"
] | 73 | 2017-01-24T13:57:45.000Z | 2021-09-29T18:52:57.000Z | # Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains training plan for the Im2vox model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import tensorflow as tf
from tensorflow import app
import model_ptn
flags = tf.app.flags
slim = tf.contrib.slim
flags.DEFINE_string('inp_dir',
'',
'Directory path containing the input data (tfrecords).')
flags.DEFINE_string(
'dataset_name', 'shapenet_chair',
'Dataset name that is to be used for training and evaluation.')
flags.DEFINE_integer('z_dim', 512, '')
flags.DEFINE_integer('f_dim', 64, '')
flags.DEFINE_integer('fc_dim', 1024, '')
flags.DEFINE_integer('num_views', 24, 'Num of viewpoints in the input data.')
flags.DEFINE_integer('image_size', 64,
'Input images dimension (pixels) - width & height.')
flags.DEFINE_integer('vox_size', 32, 'Voxel prediction dimension.')
flags.DEFINE_integer('step_size', 24, 'Steps to take in rotation to fetch viewpoints.')
flags.DEFINE_integer('batch_size', 1, 'Batch size while training.')
flags.DEFINE_float('focal_length', 0.866, 'Focal length parameter used in perspective projection.')
flags.DEFINE_float('focal_range', 1.732, 'Focal length parameter used in perspective projection.')
flags.DEFINE_string('encoder_name', 'ptn_encoder',
'Name of the encoder network being used.')
flags.DEFINE_string('decoder_name', 'ptn_vox_decoder',
'Name of the decoder network being used.')
flags.DEFINE_string('projector_name', 'perspective_projector',
'Name of the projector network being used.')
# Save options
flags.DEFINE_string('checkpoint_dir', '/tmp/ptn_train/',
'Directory path for saving trained models and other data.')
flags.DEFINE_string('model_name', 'ptn_finetune',
'Name of the model used in naming the TF job. Must be different for each run.')
flags.DEFINE_string('init_model', None,
'Checkpoint path of the model to initialize with.')
flags.DEFINE_integer('save_every', 1000,
'Average period of steps after which we save a model.')
# Optimization
flags.DEFINE_float('proj_weight', 10, 'Weighting factor for projection loss.')
flags.DEFINE_float('volume_weight', 0, 'Weighting factor for volume loss.')
flags.DEFINE_float('viewpoint_weight', 1, 'Weighting factor for viewpoint loss.')
flags.DEFINE_float('learning_rate', 0.0001, 'Learning rate.')
flags.DEFINE_float('weight_decay', 0.001, 'Weight decay parameter while training.')
flags.DEFINE_float('clip_gradient_norm', 0, 'Gradient clim norm, leave 0 if no gradient clipping.')
flags.DEFINE_integer('max_number_of_steps', 10000, 'Maximum number of steps for training.')
# Summary
flags.DEFINE_integer('save_summaries_secs', 15, 'Seconds interval for dumping TF summaries.')
flags.DEFINE_integer('save_interval_secs', 60 * 5, 'Seconds interval to save models.')
# Scheduling
flags.DEFINE_string('master', '', 'The address of the tensorflow master')
flags.DEFINE_bool('sync_replicas', False, 'Whether to sync gradients between replicas for optimizer.')
flags.DEFINE_integer('worker_replicas', 1, 'Number of worker replicas (train tasks).')
flags.DEFINE_integer('backup_workers', 0, 'Number of backup workers.')
flags.DEFINE_integer('ps_tasks', 0, 'Number of ps tasks.')
flags.DEFINE_integer('task', 0,
'Task identifier flag to be set for each task running in distributed manner. Task number 0 '
'will be chosen as the chief.')
FLAGS = flags.FLAGS
def main(_):
  """Builds the PTN training graph and runs slim training.

  Creates the checkpoint/image directories, the training input pipeline,
  the model and its losses, an (optionally replica-synchronized) Adam
  optimizer, and -- on the chief task only -- a validation visualization
  pipeline whose image grids are written to disk during training.
  NOTE(review): indentation below is reconstructed; the dump this file
  came from stripped all leading whitespace.
  """
  train_dir = os.path.join(FLAGS.checkpoint_dir, FLAGS.model_name, 'train')
  save_image_dir = os.path.join(train_dir, 'images')
  if not os.path.exists(train_dir):
    os.makedirs(train_dir)
  if not os.path.exists(save_image_dir):
    os.makedirs(save_image_dir)
  g = tf.Graph()
  with g.as_default():
    with tf.device(tf.train.replica_device_setter(FLAGS.ps_tasks)):
      global_step = slim.get_or_create_global_step()
      ###########
      ## model ##
      ###########
      model = model_ptn.model_PTN(FLAGS)
      ##########
      ## data ##
      ##########
      train_data = model.get_inputs(
          FLAGS.inp_dir,
          FLAGS.dataset_name,
          'train',
          FLAGS.batch_size,
          FLAGS.image_size,
          FLAGS.vox_size,
          is_training=True)
      inputs = model.preprocess(train_data, FLAGS.step_size)
      ##############
      ## model_fn ##
      ##############
      model_fn = model.get_model_fn(
          is_training=True, reuse=False, run_projection=True)
      outputs = model_fn(inputs)
      ##################
      ## train_scopes ##
      ##################
      # When fine-tuning from init_model, only the decoder is trained and
      # the encoder is initialized from the checkpoint.
      if FLAGS.init_model:
        train_scopes = ['decoder']
        init_scopes = ['encoder']
      else:
        train_scopes = ['encoder', 'decoder']
      ##########
      ## loss ##
      ##########
      task_loss = model.get_loss(inputs, outputs)
      regularization_loss = model.get_regularization_loss(train_scopes)
      loss = task_loss + regularization_loss
      ###############
      ## optimizer ##
      ###############
      optimizer = tf.train.AdamOptimizer(FLAGS.learning_rate)
      if FLAGS.sync_replicas:
        # NOTE(review): 'FLAGS.workers_replicas' looks like a typo for
        # 'FLAGS.worker_replicas' (the defined flag) — this would raise
        # at runtime when sync_replicas=True; confirm.
        optimizer = tf.train.SyncReplicasOptimizer(
            optimizer,
            replicas_to_aggregate=FLAGS.workers_replicas - FLAGS.backup_workers,
            total_num_replicas=FLAGS.worker_replicas)
      ##############
      ## train_op ##
      ##############
      train_op = model.get_train_op_for_scope(loss, optimizer, train_scopes)
      ###########
      ## saver ##
      ###########
      saver = tf.train.Saver(max_to_keep=np.minimum(5,
                                                    FLAGS.worker_replicas + 1))
      # Chief task only: build a validation/visualization branch.
      if FLAGS.task == 0:
        params = FLAGS
        params.batch_size = params.num_views
        params.step_size = 1
        model.set_params(params)
        val_data = model.get_inputs(
            params.inp_dir,
            params.dataset_name,
            'val',
            params.batch_size,
            params.image_size,
            params.vox_size,
            is_training=False)
        val_inputs = model.preprocess(val_data, params.step_size)
        # Note: don't compute loss here
        reused_model_fn = model.get_model_fn(is_training=False, reuse=True)
        val_outputs = reused_model_fn(val_inputs)
        with tf.device(tf.DeviceSpec(device_type='CPU')):
          vis_input_images = val_inputs['images_1'] * 255.0
          vis_gt_projs = (val_outputs['masks_1'] * (-1) + 1) * 255.0
          vis_pred_projs = (val_outputs['projs_1'] * (-1) + 1) * 255.0
          vis_gt_projs = tf.concat([vis_gt_projs] * 3, axis=3)
          vis_pred_projs = tf.concat([vis_pred_projs] * 3, axis=3)
          # rescale
          new_size = [FLAGS.image_size] * 2
          vis_gt_projs = tf.image.resize_nearest_neighbor(
              vis_gt_projs, new_size)
          vis_pred_projs = tf.image.resize_nearest_neighbor(
              vis_pred_projs, new_size)
          # flip
          # vis_gt_projs = utils.image_flipud(vis_gt_projs)
          # vis_pred_projs = utils.image_flipud(vis_pred_projs)
          # vis_gt_projs is of shape [batch, height, width, channels]
          write_disk_op = model.write_disk_grid(
              global_step=global_step,
              log_dir=save_image_dir,
              input_images=vis_input_images,
              gt_projs=vis_gt_projs,
              pred_projs=vis_pred_projs,
              input_voxels=val_inputs['voxels'],
              output_voxels=val_outputs['voxels_1'])
        # Piggyback the image dump on every training step.
        with tf.control_dependencies([write_disk_op]):
          train_op = tf.identity(train_op)
      #############
      ## init_fn ##
      #############
      if FLAGS.init_model:
        init_fn = model.get_init_fn(init_scopes)
      else:
        init_fn = None
      ##############
      ## training ##
      ##############
      slim.learning.train(
          train_op=train_op,
          logdir=train_dir,
          init_fn=init_fn,
          master=FLAGS.master,
          is_chief=(FLAGS.task == 0),
          number_of_steps=FLAGS.max_number_of_steps,
          saver=saver,
          save_summaries_secs=FLAGS.save_summaries_secs,
          save_interval_secs=FLAGS.save_interval_secs)
# Parse flags and launch training when run as a script.
if __name__ == '__main__':
  app.run()
| 39.329004 | 113 | 0.634342 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import tensorflow as tf
from tensorflow import app
import model_ptn
flags = tf.app.flags
slim = tf.contrib.slim
flags.DEFINE_string('inp_dir',
'',
'Directory path containing the input data (tfrecords).')
flags.DEFINE_string(
'dataset_name', 'shapenet_chair',
'Dataset name that is to be used for training and evaluation.')
flags.DEFINE_integer('z_dim', 512, '')
flags.DEFINE_integer('f_dim', 64, '')
flags.DEFINE_integer('fc_dim', 1024, '')
flags.DEFINE_integer('num_views', 24, 'Num of viewpoints in the input data.')
flags.DEFINE_integer('image_size', 64,
'Input images dimension (pixels) - width & height.')
flags.DEFINE_integer('vox_size', 32, 'Voxel prediction dimension.')
flags.DEFINE_integer('step_size', 24, 'Steps to take in rotation to fetch viewpoints.')
flags.DEFINE_integer('batch_size', 1, 'Batch size while training.')
flags.DEFINE_float('focal_length', 0.866, 'Focal length parameter used in perspective projection.')
flags.DEFINE_float('focal_range', 1.732, 'Focal length parameter used in perspective projection.')
flags.DEFINE_string('encoder_name', 'ptn_encoder',
'Name of the encoder network being used.')
flags.DEFINE_string('decoder_name', 'ptn_vox_decoder',
'Name of the decoder network being used.')
flags.DEFINE_string('projector_name', 'perspective_projector',
'Name of the projector network being used.')
flags.DEFINE_string('checkpoint_dir', '/tmp/ptn_train/',
'Directory path for saving trained models and other data.')
flags.DEFINE_string('model_name', 'ptn_finetune',
'Name of the model used in naming the TF job. Must be different for each run.')
flags.DEFINE_string('init_model', None,
'Checkpoint path of the model to initialize with.')
flags.DEFINE_integer('save_every', 1000,
'Average period of steps after which we save a model.')
flags.DEFINE_float('proj_weight', 10, 'Weighting factor for projection loss.')
flags.DEFINE_float('volume_weight', 0, 'Weighting factor for volume loss.')
flags.DEFINE_float('viewpoint_weight', 1, 'Weighting factor for viewpoint loss.')
flags.DEFINE_float('learning_rate', 0.0001, 'Learning rate.')
flags.DEFINE_float('weight_decay', 0.001, 'Weight decay parameter while training.')
flags.DEFINE_float('clip_gradient_norm', 0, 'Gradient clim norm, leave 0 if no gradient clipping.')
flags.DEFINE_integer('max_number_of_steps', 10000, 'Maximum number of steps for training.')
flags.DEFINE_integer('save_summaries_secs', 15, 'Seconds interval for dumping TF summaries.')
flags.DEFINE_integer('save_interval_secs', 60 * 5, 'Seconds interval to save models.')
flags.DEFINE_string('master', '', 'The address of the tensorflow master')
flags.DEFINE_bool('sync_replicas', False, 'Whether to sync gradients between replicas for optimizer.')
flags.DEFINE_integer('worker_replicas', 1, 'Number of worker replicas (train tasks).')
flags.DEFINE_integer('backup_workers', 0, 'Number of backup workers.')
flags.DEFINE_integer('ps_tasks', 0, 'Number of ps tasks.')
flags.DEFINE_integer('task', 0,
'Task identifier flag to be set for each task running in distributed manner. Task number 0 '
'will be chosen as the chief.')
FLAGS = flags.FLAGS
def main(_):
train_dir = os.path.join(FLAGS.checkpoint_dir, FLAGS.model_name, 'train')
save_image_dir = os.path.join(train_dir, 'images')
if not os.path.exists(train_dir):
os.makedirs(train_dir)
if not os.path.exists(save_image_dir):
os.makedirs(save_image_dir)
g = tf.Graph()
with g.as_default():
with tf.device(tf.train.replica_device_setter(FLAGS.ps_tasks)):
global_step = slim.get_or_create_global_step()
FLAGS.image_size,
FLAGS.vox_size,
is_training=True)
inputs = model.preprocess(train_data, FLAGS.step_size)
.num_views
params.step_size = 1
model.set_params(params)
val_data = model.get_inputs(
params.inp_dir,
params.dataset_name,
'val',
params.batch_size,
params.image_size,
params.vox_size,
is_training=False)
val_inputs = model.preprocess(val_data, params.step_size)
reused_model_fn = model.get_model_fn(is_training=False, reuse=True)
val_outputs = reused_model_fn(val_inputs)
with tf.device(tf.DeviceSpec(device_type='CPU')):
vis_input_images = val_inputs['images_1'] * 255.0
vis_gt_projs = (val_outputs['masks_1'] * (-1) + 1) * 255.0
vis_pred_projs = (val_outputs['projs_1'] * (-1) + 1) * 255.0
vis_gt_projs = tf.concat([vis_gt_projs] * 3, axis=3)
vis_pred_projs = tf.concat([vis_pred_projs] * 3, axis=3)
# rescale
new_size = [FLAGS.image_size] * 2
vis_gt_projs = tf.image.resize_nearest_neighbor(
vis_gt_projs, new_size)
vis_pred_projs = tf.image.resize_nearest_neighbor(
vis_pred_projs, new_size)
# flip
# vis_gt_projs = utils.image_flipud(vis_gt_projs)
# vis_pred_projs = utils.image_flipud(vis_pred_projs)
# vis_gt_projs is of shape [batch, height, width, channels]
write_disk_op = model.write_disk_grid(
global_step=global_step,
log_dir=save_image_dir,
input_images=vis_input_images,
gt_projs=vis_gt_projs,
pred_projs=vis_pred_projs,
input_voxels=val_inputs['voxels'],
output_voxels=val_outputs['voxels_1'])
with tf.control_dependencies([write_disk_op]):
train_op = tf.identity(train_op)
#############
## init_fn ##
#############
if FLAGS.init_model:
init_fn = model.get_init_fn(init_scopes)
else:
init_fn = None
##############
## training ##
##############
slim.learning.train(
train_op=train_op,
logdir=train_dir,
init_fn=init_fn,
master=FLAGS.master,
is_chief=(FLAGS.task == 0),
number_of_steps=FLAGS.max_number_of_steps,
saver=saver,
save_summaries_secs=FLAGS.save_summaries_secs,
save_interval_secs=FLAGS.save_interval_secs)
if __name__ == '__main__':
app.run()
| true | true |
f7f8f22719b9a83585d52f10a2d0966474c96d29 | 13,861 | py | Python | vize/130401078/server/ftpserver/myftpserver.py | hasan-se/blm304 | 893d15282497a426ff96b0c8b6c77d57c406742e | [
"Unlicense"
] | 1 | 2021-05-04T21:46:08.000Z | 2021-05-04T21:46:08.000Z | vize/130401078/server/ftpserver/myftpserver.py | hasan-se/blm304 | 893d15282497a426ff96b0c8b6c77d57c406742e | [
"Unlicense"
] | null | null | null | vize/130401078/server/ftpserver/myftpserver.py | hasan-se/blm304 | 893d15282497a426ff96b0c8b6c77d57c406742e | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@date Sat May 16 12:05:20 2020
@author Abdoul Karim TOURE
@contact contact@trabdlkarim.com
@copyright Copyright 2020, Abdoul Karim TOURE
@license GPL v3.0 or Later
@version 1.0.1
@status Development
"""
import os
import sys
import socket
import threading
import time
import datetime
import argparse as arg
from socketserver import UDPServer
from socketserver import ThreadingMixIn
from socketserver import DatagramRequestHandler
PORT = 42
DPORT = 9999
HOST = "127.0.0.1"
GREEN = '\033[92m'
END = '\033[0m'
class MyFTPServer(ThreadingMixIn, UDPServer):
def init_setup(self, host=HOST, port=PORT):
print("Server starting...")
self.control_channel = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
print("Binding to address {}:{}...".format(host,port))
self.control_channel.bind((host,port))
time.sleep(2)
print("Initializing env variables...")
self.cwd = os.getcwd() + "/uploads/"
self.success = "-Data connection already open; transfer starting"
self.ac_success = "226-Data connection closed; requested file action was successful."
self.cli_data = None
time.sleep(2)
print("Server is running on address {}:{}".format(host,port))
print("Active: service "+GREEN+"running"+END+" since %s" % datetime.datetime.now().isoformat(timespec='seconds') )
print("Waiting for request.")
def send_response(self,retcode, message):
print("Sending over control channel {} return code to ".format(retcode) + str(self.client_address))
self.control_channel.sendto(bytes(str(retcode) + message ,"utf-8"),self.client_address)
print("Return code successfully sent.")
def handle_client_request(self,host=HOST, port=PORT):
self.client_request, self.client_address = self.control_channel.recvfrom(1024)
cmd_parts = self.client_request.decode().split()
command = cmd_parts[0]
print(datetime.datetime.now().isoformat(timespec='seconds') + ': ',end='')
print("{} request from {}".format(command, self.client_address))
if len(cmd_parts) == 1:
if command == "QUIT":
self.send_response(231,"-User logged out; service terminated.")
elif command == "LIST":
dir_contents = self.listdir()
if type(dir_contents) == tuple:
self.send_response(125,self.success)
elif dir_contents == "EMPTY_DIR":
self.send_response(125,self.success)
else:
self.send_response(550,"-Requested action not taken. File unavailable or not found.")
elif command == "ADAT":
self.send_response(220,"-Service ready for new user.")
print("New client connected.")
elif command == "STOR":
self.send_response(501,"-Syntax error in parameters or arguments.")
elif command == "RETR":
self.send_response(501,"-Syntax error in parameters or arguments.")
else:
self.send_response(502,"-Command not implemented.")
else:
cmd_args = " ".join([ i for i in cmd_parts[1:]]).strip()
if command == "RETR":
index = cmd_args.find('/')
if index != 0:
cmd_args = self.cwd + cmd_args
isFile = os.path.isfile(cmd_args)
if isFile:
self.send_response(125,self.success)
self.filename = cmd_args
else:
self.send_response(550,"-Requested action not taken. File unavailable or not found.")
elif command == "LIST":
dir_contents = self.listdir(cmd_args)
if type(dir_contents) == tuple:
self.send_response(125,self.success)
elif dir_contents == "PERMISSION_ERROR":
self.send_response(553,"-Requested action not taken. Permission denied.")
elif dir_contents == "EMPTY_DIR":
self.send_response(125,self.success)
else:
self.send_response(550,"-Requested action not taken. File unavailable or not found.")
elif command == "STOR":
index = cmd_args.rfind('/')
if index < 0:
cmd_args = self.cwd + cmd_args
else:
cmd_args = self.cwd + cmd_args[index+1:]
try:
self.send_response(125,self.success)
self.filename = cmd_args
except Exception as e:
print(e)
self.send_response(550,"-Requested action not taken. File unavailable or not found.")
elif command == "QUIT":
self.send_response(501,"-Syntax error in parameters or arguments.")
else:
self.send_response(502,"-Command not implemented.")
def listdir(self, path="uploads/"):
try:
paths = os.listdir(path)
except FileNotFoundError:
return "PATH_ERROR"
except PermissionError:
return "PERMISSION_ERROR"
except Exception:
return "UNKNOWN_ERROR"
if len(paths)== 0:
return "EMPTY_DIR"
else:
count = len(max(paths,key=len))
header = "| %*s | %9s | %10s | %20s | %10s | %10s |"
header = header % (count,"Name","Filetype","Filesize","Last Modified","Permission","User/Group")
table = '%s\n%s\n%s\n' % ('-' * len(header), header, '-' * len(header))
footer = '%s\n' % ('-' * len(header))
body = ""
for p in paths:
fpath = os.path.join(path,p)
stat = os.stat(path)
filetype = None
if os.path.isdir(fpath):
filetype= "Directory"
else:
filetype = "File"
body += "| %*s | %9s | %10s | %20s | %10s | %10s |\n" % (count, p,filetype,
str(stat.st_size) + 'B',time.strftime('%b %d, %Y %H:%M',
time.localtime(stat.st_mtime)), oct(stat.st_mode)[-4:], str(stat.st_uid) + '/' + str(stat.st_gid))
return table,body,footer
class FTPRequestHandler(DatagramRequestHandler):
def data_channel_open(self):
print("Starting data transfer...")
print("Opening data channel...")
print("Data connection ready for transfer.")
print("Transfering data...")
def transfer_data(self, data):
self.wfile.write(data)
def data_channel_close(self):
print("Requested file action okay, completed.")
print("Closing data channel...")
print("Data connection closed.")
print("\nWaiting for new request.")
def handle(self):
#cli_data = self.rfile.readline(4096).strip()
cmd_parts = self.server.client_request.decode().split()
command = cmd_parts[0]
print(datetime.datetime.now().__str__() + ': ',end='')
print("{} request from {}".format(command, self.client_address))
if len(cmd_parts) == 1:
if command == "QUIT":
self.data_channel_open()
self.transfer_data("231-User logged out; service terminated.".encode('utf-8'))
self.server.shutdown_request(self.request)
self.data_channel_close()
elif command == "LIST":
dir_contents = self.server.listdir()
self.data_channel_open()
if type(dir_contents) == tuple:
self.wfile.write(bytes(str("Server PWD: " + self.server.cwd)+"\n","utf-8"))
self.transfer_data(bytes(str(dir_contents[0]+dir_contents[1]+dir_contents[2]),"utf-8"))
self.wfile.write(bytes(str(self.server.ac_success),"utf-8"))
elif dir_contents == "EMPTY_DIR":
self.tranfer_data(bytes(str("Directory is empty.\n"),"utf-8"))
self.wfile.write(bytes(str(self.server.ac_success),"utf-8"))
else:
self.transfer_data("550-Requested action not taken. File unavailable or not found.".encode('utf-8'))
self.data_channel_close()
elif command == "ADAT":
self.data_channel_open()
self.transfer_data("220-Service ready for new user.".encode('utf-8'))
self.data_channel_close()
elif command == "STOR":
self.data_channel_open()
self.transfer_data("501-Syntax error in parameters or arguments.".encode('utf-8'))
self.data_channel_close()
elif command == "RETR":
self.data_channel_open()
self.transfer_data("501-Syntax error in parameters or arguments.".encode('utf-8'))
self.data_channel_close()
else:
self.data_channel_open()
self.transfer_data("502-Command not implemented.".encode('utf-8'))
self.data_channel_close()
else:
cmd_args = " ".join([ i for i in cmd_parts[1:]]).strip()
if command == "RETR":
try:
with open(self.server.filename,'rb') as file:
self.data_channel_open()
fdata = file.readline(4096)
while fdata:
self.transfer_data(fdata)
fdata = file.readline(4096)
self.data_channel_close()
except Exception as e:
print(e)
self.transfer_data("550-Requested action not taken. File unavailable or not found.".encode('utf-8'))
elif command == "LIST":
dir_contents = self.server.listdir(cmd_args)
self.data_channel_open()
if type(dir_contents) == tuple:
self.wfile.write(bytes(str("Directory: "+cmd_args+"\n"),"utf-8"))
self.transfer_data(bytes(str(dir_contents[0]+dir_contents[1]+dir_contents[2]),"utf-8"))
self.wfile.write(bytes(str(self.server.ac_success),"utf-8"))
elif dir_contents == "PERMISSION_ERROR":
self.transfer_data("553-Requested action not taken. Permission denied.".encode('utf-8'))
elif dir_contents == "EMPTY_DIR":
self.transfer_data(bytes(str("Directory: "+cmd_args+" is empty\n"),"utf-8"))
self.wfile.write(bytes(str(self.server.ac_success),"utf-8"))
else:
self.transfer_data("500-Requested action not taken. File unavailable or not found.".encode('utf-8'))
self.data_channel_close()
elif command == "STOR":
fdata = self.rfile.readline(1024).strip()
self.data_channel_open()
self.server.socket.settimeout(6.5)
with open(self.server.filename,'wb') as file:
while fdata:
file.write(fdata)
fdata = self.rfile.readline(1024)
self.transfer_data(bytes(str("File has been successfully uploaded in server uploads directory."),"utf-8"))
self.data_channel_close()
elif command == "QUIT":
self.data_channel_open()
self.transfer_data("501-Syntax error in parameters or arguments.".encode('utf-8'))
self.data_channel_close()
else:
self.data_channel_open()
self.transfer_data("502-Command not implemented.".encode('utf-8'))
self.data_channel_close()
def parse_args(argv):
parser = arg.ArgumentParser(prog="myftserver",description="description: simple FTP server using UDP protocol")
group = parser.add_mutually_exclusive_group()
global PORT
parser.add_argument("-p","--port",type=int, default=PORT,
help="define the server's listening port")
group.add_argument("--start", action='store_true', help="start the ftp server")
group.add_argument("--stop",action="store_true", help="stop the ftp server ")
parser.add_argument('-v','--version', action='version', version='%(prog)s 1.0.1')
args = parser.parse_args(argv)
args = vars(args)
PORT = args['port']
return parser, args
def start_server():
server = MyFTPServer((HOST,DPORT), FTPRequestHandler)
pid = os.getpid()
with open("pid.txt","w") as pfile:
pfile.write(str(pid)+"\n")
with server:
server_thread = threading.Thread(target = server.serve_forever)
server_thread.daemon = True
server.init_setup(HOST, PORT)
server_thread.start()
while True:
server.handle_client_request()
server.shutdown()
server.socket.close()
def stop_server():
with open("pid.txt",'r') as pfile:
pid = pfile.readline()
pid = int(str(pid).strip())
try:
os.kill(pid,2)
print("Server has been stopped.")
except ProcessLookupError:
print("Can't stop server, because it not running.")
def main(argv):
parser, args = parse_args(argv)
if args["start"] == True:
try:
start_server()
except (KeyboardInterrupt,SystemExit):
print("\nServer stopped.")
elif args["stop"] == True:
stop_server()
else:
parser.print_help()
if __name__=="__main__":
print("You shouldn't be running this module, rather run server.py") | 37.160858 | 131 | 0.561215 |
import os
import sys
import socket
import threading
import time
import datetime
import argparse as arg
from socketserver import UDPServer
from socketserver import ThreadingMixIn
from socketserver import DatagramRequestHandler
PORT = 42
DPORT = 9999
HOST = "127.0.0.1"
GREEN = '\033[92m'
END = '\033[0m'
class MyFTPServer(ThreadingMixIn, UDPServer):
def init_setup(self, host=HOST, port=PORT):
print("Server starting...")
self.control_channel = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
print("Binding to address {}:{}...".format(host,port))
self.control_channel.bind((host,port))
time.sleep(2)
print("Initializing env variables...")
self.cwd = os.getcwd() + "/uploads/"
self.success = "-Data connection already open; transfer starting"
self.ac_success = "226-Data connection closed; requested file action was successful."
self.cli_data = None
time.sleep(2)
print("Server is running on address {}:{}".format(host,port))
print("Active: service "+GREEN+"running"+END+" since %s" % datetime.datetime.now().isoformat(timespec='seconds') )
print("Waiting for request.")
def send_response(self,retcode, message):
print("Sending over control channel {} return code to ".format(retcode) + str(self.client_address))
self.control_channel.sendto(bytes(str(retcode) + message ,"utf-8"),self.client_address)
print("Return code successfully sent.")
def handle_client_request(self,host=HOST, port=PORT):
self.client_request, self.client_address = self.control_channel.recvfrom(1024)
cmd_parts = self.client_request.decode().split()
command = cmd_parts[0]
print(datetime.datetime.now().isoformat(timespec='seconds') + ': ',end='')
print("{} request from {}".format(command, self.client_address))
if len(cmd_parts) == 1:
if command == "QUIT":
self.send_response(231,"-User logged out; service terminated.")
elif command == "LIST":
dir_contents = self.listdir()
if type(dir_contents) == tuple:
self.send_response(125,self.success)
elif dir_contents == "EMPTY_DIR":
self.send_response(125,self.success)
else:
self.send_response(550,"-Requested action not taken. File unavailable or not found.")
elif command == "ADAT":
self.send_response(220,"-Service ready for new user.")
print("New client connected.")
elif command == "STOR":
self.send_response(501,"-Syntax error in parameters or arguments.")
elif command == "RETR":
self.send_response(501,"-Syntax error in parameters or arguments.")
else:
self.send_response(502,"-Command not implemented.")
else:
cmd_args = " ".join([ i for i in cmd_parts[1:]]).strip()
if command == "RETR":
index = cmd_args.find('/')
if index != 0:
cmd_args = self.cwd + cmd_args
isFile = os.path.isfile(cmd_args)
if isFile:
self.send_response(125,self.success)
self.filename = cmd_args
else:
self.send_response(550,"-Requested action not taken. File unavailable or not found.")
elif command == "LIST":
dir_contents = self.listdir(cmd_args)
if type(dir_contents) == tuple:
self.send_response(125,self.success)
elif dir_contents == "PERMISSION_ERROR":
self.send_response(553,"-Requested action not taken. Permission denied.")
elif dir_contents == "EMPTY_DIR":
self.send_response(125,self.success)
else:
self.send_response(550,"-Requested action not taken. File unavailable or not found.")
elif command == "STOR":
index = cmd_args.rfind('/')
if index < 0:
cmd_args = self.cwd + cmd_args
else:
cmd_args = self.cwd + cmd_args[index+1:]
try:
self.send_response(125,self.success)
self.filename = cmd_args
except Exception as e:
print(e)
self.send_response(550,"-Requested action not taken. File unavailable or not found.")
elif command == "QUIT":
self.send_response(501,"-Syntax error in parameters or arguments.")
else:
self.send_response(502,"-Command not implemented.")
def listdir(self, path="uploads/"):
try:
paths = os.listdir(path)
except FileNotFoundError:
return "PATH_ERROR"
except PermissionError:
return "PERMISSION_ERROR"
except Exception:
return "UNKNOWN_ERROR"
if len(paths)== 0:
return "EMPTY_DIR"
else:
count = len(max(paths,key=len))
header = "| %*s | %9s | %10s | %20s | %10s | %10s |"
header = header % (count,"Name","Filetype","Filesize","Last Modified","Permission","User/Group")
table = '%s\n%s\n%s\n' % ('-' * len(header), header, '-' * len(header))
footer = '%s\n' % ('-' * len(header))
body = ""
for p in paths:
fpath = os.path.join(path,p)
stat = os.stat(path)
filetype = None
if os.path.isdir(fpath):
filetype= "Directory"
else:
filetype = "File"
body += "| %*s | %9s | %10s | %20s | %10s | %10s |\n" % (count, p,filetype,
str(stat.st_size) + 'B',time.strftime('%b %d, %Y %H:%M',
time.localtime(stat.st_mtime)), oct(stat.st_mode)[-4:], str(stat.st_uid) + '/' + str(stat.st_gid))
return table,body,footer
class FTPRequestHandler(DatagramRequestHandler):
def data_channel_open(self):
print("Starting data transfer...")
print("Opening data channel...")
print("Data connection ready for transfer.")
print("Transfering data...")
def transfer_data(self, data):
self.wfile.write(data)
def data_channel_close(self):
print("Requested file action okay, completed.")
print("Closing data channel...")
print("Data connection closed.")
print("\nWaiting for new request.")
def handle(self):
cmd_parts = self.server.client_request.decode().split()
command = cmd_parts[0]
print(datetime.datetime.now().__str__() + ': ',end='')
print("{} request from {}".format(command, self.client_address))
if len(cmd_parts) == 1:
if command == "QUIT":
self.data_channel_open()
self.transfer_data("231-User logged out; service terminated.".encode('utf-8'))
self.server.shutdown_request(self.request)
self.data_channel_close()
elif command == "LIST":
dir_contents = self.server.listdir()
self.data_channel_open()
if type(dir_contents) == tuple:
self.wfile.write(bytes(str("Server PWD: " + self.server.cwd)+"\n","utf-8"))
self.transfer_data(bytes(str(dir_contents[0]+dir_contents[1]+dir_contents[2]),"utf-8"))
self.wfile.write(bytes(str(self.server.ac_success),"utf-8"))
elif dir_contents == "EMPTY_DIR":
self.tranfer_data(bytes(str("Directory is empty.\n"),"utf-8"))
self.wfile.write(bytes(str(self.server.ac_success),"utf-8"))
else:
self.transfer_data("550-Requested action not taken. File unavailable or not found.".encode('utf-8'))
self.data_channel_close()
elif command == "ADAT":
self.data_channel_open()
self.transfer_data("220-Service ready for new user.".encode('utf-8'))
self.data_channel_close()
elif command == "STOR":
self.data_channel_open()
self.transfer_data("501-Syntax error in parameters or arguments.".encode('utf-8'))
self.data_channel_close()
elif command == "RETR":
self.data_channel_open()
self.transfer_data("501-Syntax error in parameters or arguments.".encode('utf-8'))
self.data_channel_close()
else:
self.data_channel_open()
self.transfer_data("502-Command not implemented.".encode('utf-8'))
self.data_channel_close()
else:
cmd_args = " ".join([ i for i in cmd_parts[1:]]).strip()
if command == "RETR":
try:
with open(self.server.filename,'rb') as file:
self.data_channel_open()
fdata = file.readline(4096)
while fdata:
self.transfer_data(fdata)
fdata = file.readline(4096)
self.data_channel_close()
except Exception as e:
print(e)
self.transfer_data("550-Requested action not taken. File unavailable or not found.".encode('utf-8'))
elif command == "LIST":
dir_contents = self.server.listdir(cmd_args)
self.data_channel_open()
if type(dir_contents) == tuple:
self.wfile.write(bytes(str("Directory: "+cmd_args+"\n"),"utf-8"))
self.transfer_data(bytes(str(dir_contents[0]+dir_contents[1]+dir_contents[2]),"utf-8"))
self.wfile.write(bytes(str(self.server.ac_success),"utf-8"))
elif dir_contents == "PERMISSION_ERROR":
self.transfer_data("553-Requested action not taken. Permission denied.".encode('utf-8'))
elif dir_contents == "EMPTY_DIR":
self.transfer_data(bytes(str("Directory: "+cmd_args+" is empty\n"),"utf-8"))
self.wfile.write(bytes(str(self.server.ac_success),"utf-8"))
else:
self.transfer_data("500-Requested action not taken. File unavailable or not found.".encode('utf-8'))
self.data_channel_close()
elif command == "STOR":
fdata = self.rfile.readline(1024).strip()
self.data_channel_open()
self.server.socket.settimeout(6.5)
with open(self.server.filename,'wb') as file:
while fdata:
file.write(fdata)
fdata = self.rfile.readline(1024)
self.transfer_data(bytes(str("File has been successfully uploaded in server uploads directory."),"utf-8"))
self.data_channel_close()
elif command == "QUIT":
self.data_channel_open()
self.transfer_data("501-Syntax error in parameters or arguments.".encode('utf-8'))
self.data_channel_close()
else:
self.data_channel_open()
self.transfer_data("502-Command not implemented.".encode('utf-8'))
self.data_channel_close()
def parse_args(argv):
parser = arg.ArgumentParser(prog="myftserver",description="description: simple FTP server using UDP protocol")
group = parser.add_mutually_exclusive_group()
global PORT
parser.add_argument("-p","--port",type=int, default=PORT,
help="define the server's listening port")
group.add_argument("--start", action='store_true', help="start the ftp server")
group.add_argument("--stop",action="store_true", help="stop the ftp server ")
parser.add_argument('-v','--version', action='version', version='%(prog)s 1.0.1')
args = parser.parse_args(argv)
args = vars(args)
PORT = args['port']
return parser, args
def start_server():
server = MyFTPServer((HOST,DPORT), FTPRequestHandler)
pid = os.getpid()
with open("pid.txt","w") as pfile:
pfile.write(str(pid)+"\n")
with server:
server_thread = threading.Thread(target = server.serve_forever)
server_thread.daemon = True
server.init_setup(HOST, PORT)
server_thread.start()
while True:
server.handle_client_request()
server.shutdown()
server.socket.close()
def stop_server():
with open("pid.txt",'r') as pfile:
pid = pfile.readline()
pid = int(str(pid).strip())
try:
os.kill(pid,2)
print("Server has been stopped.")
except ProcessLookupError:
print("Can't stop server, because it not running.")
def main(argv):
parser, args = parse_args(argv)
if args["start"] == True:
try:
start_server()
except (KeyboardInterrupt,SystemExit):
print("\nServer stopped.")
elif args["stop"] == True:
stop_server()
else:
parser.print_help()
if __name__=="__main__":
print("You shouldn't be running this module, rather run server.py") | true | true |
f7f8f3bfd951108aa64a2bda82e8cf60541c50a4 | 3,942 | py | Python | FER_osc.py | jaegonlee/fer | 628651d88568103f5b2b3e081d5c6b5dab39267d | [
"MIT"
] | null | null | null | FER_osc.py | jaegonlee/fer | 628651d88568103f5b2b3e081d5c6b5dab39267d | [
"MIT"
] | null | null | null | FER_osc.py | jaegonlee/fer | 628651d88568103f5b2b3e081d5c6b5dab39267d | [
"MIT"
] | null | null | null | #!/usr/local/bin/python3
from keras.models import model_from_json
import numpy as np
import cv2
import argparse
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import math
from pythonosc import udp_client
class FacialExpressionModel(object):
EMOTIONS_LIST = ["Angry", "Disgust", "Fear", "Happy", "Sad", "Surprise", "Neutral"]
def __init__(self, model_json_file, model_weights_file):
with open(model_json_file, "r") as json_file:
loaded_model_json = json_file.read()
self.loaded_model = model_from_json(loaded_model_json)
self.loaded_model.load_weights(model_weights_file)
# print("Model loaded from disk")
# self.loaded_model.summary()
def predict_emotion(self, img):
self.preds = self.loaded_model.predict(img)
return self.preds#FacialExpressionModel.EMOTIONS_LIST[np.argmax(self.preds)]
#parser = argparse.ArgumentParser()
#parser.add_argument("source")
#parser.add_argument("fps")
#args = parser.parse_args()
cap = cv2.VideoCapture(0)#os.path.abspath(args.source) if not args.source == 'webcam' else 0)
faceCascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
font = cv2.FONT_HERSHEY_SIMPLEX
cap.set(cv2.CAP_PROP_FPS, 30)#int(args.fps))
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
DNN = "TF"
if DNN == "CAFFE":
modelFile = "res10_300x300_ssd_iter_140000_fp16.caffemodel"
configFile = "deploy.prototxt"
net = cv2.dnn.readNetFromCaffe(configFile, modelFile)
else:
modelFile = "opencv_face_detector_uint8.pb"
configFile = "opencv_face_detector.pbtxt"
net = cv2.dnn.readNetFromTensorflow(modelFile, configFile)
def getdata():
_, fr = cap.read()
fr = cv2.flip(fr, 1)
gray = cv2.cvtColor(fr, cv2.COLOR_BGR2GRAY)
# faces = faceCascade.detectMultiScale(gray, 1.3, 5)
frameOpencvDnn = fr.copy()
frameHeight = frameOpencvDnn.shape[0]
frameWidth = frameOpencvDnn.shape[1]
blob = cv2.dnn.blobFromImage(frameOpencvDnn, 1.0, (300, 300), [104, 117, 123], False, False)
net.setInput(blob)
detections = net.forward()
bboxes = []
for i in range(detections.shape[2]):
confidence = detections[0, 0, i, 2]
if confidence > 0.7:
x1 = int(detections[0, 0, i, 3] * frameWidth)
y1 = int(detections[0, 0, i, 4] * frameHeight)
x2 = int(detections[0, 0, i, 5] * frameWidth)
y2 = int(detections[0, 0, i, 6] * frameHeight)
# print("has face!",x1,y1,x2,y2)
bboxes.append([x1, y1, x2, y2])
return bboxes, fr, gray
def start_app(cnn):
while cap.isOpened():
faces, fr, gray_fr = getdata()
for (x, y, x2, y2) in faces:
if y<0 or x<0:
break
fc = gray_fr[y:y2, x:x2]
roi = cv2.resize(fc, (48, 48))
pred = cnn.predict_emotion(roi[np.newaxis, :, :, np.newaxis])
emotion = FacialExpressionModel.EMOTIONS_LIST[np.argmax(pred)]
for idx,i in enumerate(FacialExpressionModel.EMOTIONS_LIST):
color = (211, 211, 211) if pred[0][idx] < 0.01 else (0, 255, 0)
emotion_score = "{}: {}".format(i, "{:.2f}".format(pred[0][idx]) if pred[0][idx] > 0.01 else "")
cv2.putText(fr, emotion_score, (x2 + 5, y + 15 + idx*18), font, 0.5, color, 1, cv2.LINE_AA)
cv2.rectangle(fr, (x, y), (x2, y2), (255, 0, 0), 2)
client.send_message("/found",1)
client.send_message("/face",[x,y,x2-x,y2-y])
client.send_message("/emotion", emotion)
if cv2.waitKey(1) == 27:
break
cv2.imshow('Facial Emotion Recognition', fr)
cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
ip = "127.0.0.1"
port = 12345
client = udp_client.SimpleUDPClient(ip, port)
model = FacialExpressionModel("model.json", "weights.h5")
start_app(model)
| 36.841121 | 112 | 0.637494 |
from keras.models import model_from_json
import numpy as np
import cv2
import argparse
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import math
from pythonosc import udp_client
class FacialExpressionModel(object):
EMOTIONS_LIST = ["Angry", "Disgust", "Fear", "Happy", "Sad", "Surprise", "Neutral"]
def __init__(self, model_json_file, model_weights_file):
with open(model_json_file, "r") as json_file:
loaded_model_json = json_file.read()
self.loaded_model = model_from_json(loaded_model_json)
self.loaded_model.load_weights(model_weights_file)
def predict_emotion(self, img):
self.preds = self.loaded_model.predict(img)
return self.preds
cap = cv2.VideoCapture(0)
faceCascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
font = cv2.FONT_HERSHEY_SIMPLEX
cap.set(cv2.CAP_PROP_FPS, 30)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
DNN = "TF"
if DNN == "CAFFE":
modelFile = "res10_300x300_ssd_iter_140000_fp16.caffemodel"
configFile = "deploy.prototxt"
net = cv2.dnn.readNetFromCaffe(configFile, modelFile)
else:
modelFile = "opencv_face_detector_uint8.pb"
configFile = "opencv_face_detector.pbtxt"
net = cv2.dnn.readNetFromTensorflow(modelFile, configFile)
def getdata():
    """Grabs one mirrored webcam frame and runs DNN face detection.

    Returns (bboxes, frame_bgr, frame_gray); bboxes is a list of
    [x1, y1, x2, y2] pixel boxes whose detector confidence exceeds 0.7.
    """
    _, frame = cap.read()
    frame = cv2.flip(frame, 1)  # mirror so the preview behaves like a mirror
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    height = frame.shape[0]
    width = frame.shape[1]
    # 300x300 mean-subtracted blob: the input the SSD face detector expects.
    blob = cv2.dnn.blobFromImage(frame.copy(), 1.0, (300, 300),
                                 [104, 117, 123], False, False)
    net.setInput(blob)
    detections = net.forward()
    boxes = []
    scales = (width, height, width, height)
    for det_index in range(detections.shape[2]):
        confidence = detections[0, 0, det_index, 2]
        if confidence <= 0.7:
            continue
        # Detector outputs normalized [0, 1] corner coords; scale to pixels.
        corners = [int(detections[0, 0, det_index, 3 + axis] * scales[axis])
                   for axis in range(4)]
        boxes.append(corners)
    return boxes, frame, gray
def start_app(cnn):
    """Main capture loop: detect faces, classify emotion, draw, and send OSC.

    Args:
        cnn: FacialExpressionModel used to score 48x48 grayscale face crops.

    Reads module globals `cap`, `net` (via getdata), `font`, and `client`.
    Exits when ESC is pressed, then releases the camera and windows.
    """
    while cap.isOpened():
        faces, fr, gray_fr = getdata()
        for (x, y, x2, y2) in faces:
            # The detector can report boxes partially outside the frame; skip
            # just that box. (Was `break`, which also dropped every remaining
            # valid face detected in the same frame.)
            if y < 0 or x < 0:
                continue
            fc = gray_fr[y:y2, x:x2]
            roi = cv2.resize(fc, (48, 48))
            # Model expects a batch of shape (1, 48, 48, 1).
            pred = cnn.predict_emotion(roi[np.newaxis, :, :, np.newaxis])
            emotion = FacialExpressionModel.EMOTIONS_LIST[np.argmax(pred)]
            # Draw a per-emotion score column next to the face box; scores
            # below 1% are grey with no number.
            for idx, name in enumerate(FacialExpressionModel.EMOTIONS_LIST):
                color = (211, 211, 211) if pred[0][idx] < 0.01 else (0, 255, 0)
                emotion_score = "{}: {}".format(
                    name,
                    "{:.2f}".format(pred[0][idx]) if pred[0][idx] > 0.01 else "")
                cv2.putText(fr, emotion_score, (x2 + 5, y + 15 + idx*18),
                            font, 0.5, color, 1, cv2.LINE_AA)
            cv2.rectangle(fr, (x, y), (x2, y2), (255, 0, 0), 2)
            # Report the detection over OSC: found flag, box (x, y, w, h),
            # and the winning emotion label.
            client.send_message("/found", 1)
            client.send_message("/face", [x, y, x2-x, y2-y])
            client.send_message("/emotion", emotion)
        if cv2.waitKey(1) == 27:  # ESC quits
            break
        cv2.imshow('Facial Emotion Recognition', fr)
    cap.release()
    cv2.destroyAllWindows()
if __name__ == '__main__':
    # start_app() reads `client` as a module-level global, so the OSC client
    # must exist before the capture loop starts.
    client = udp_client.SimpleUDPClient("127.0.0.1", 12345)
    model = FacialExpressionModel("model.json", "weights.h5")
    start_app(model)
| true | true |
f7f8f3e21df8c6750dc12a89f42a1ee05324bdc1 | 21,739 | py | Python | compiler/expr_translate.py | esay2077/logica | 2d5038f8d1d2c64e4c53ee4e3e1e7cad575ecaf4 | [
"Apache-2.0"
] | null | null | null | compiler/expr_translate.py | esay2077/logica | 2d5038f8d1d2c64e4c53ee4e3e1e7cad575ecaf4 | [
"Apache-2.0"
] | null | null | null | compiler/expr_translate.py | esay2077/logica | 2d5038f8d1d2c64e4c53ee4e3e1e7cad575ecaf4 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compiler of a Logica expression to SQL."""
import copy
import json
if '.' not in __package__:
from common import color
from common.data import processed_functions
from compiler import dialects
else:
from ..common import color
from ..common.data import processed_functions
from ..compiler import dialects
class QL(object):
  """Class translating Logica expressions into SQL.

  Function tables below use two placeholder styles: '%s' templates are
  filled with a comma-joined argument list, '{0}'/'{1}' templates are
  filled positionally (see Function()).
  """
  BUILT_IN_FUNCTIONS = { # Please keep sorted alphabetically.
      # Casting.
      'ToFloat64': 'CAST(%s AS FLOAT64)',
      'ToInt64': 'CAST(%s AS INT64)',
      'ToUInt64': 'CAST(%s AS UINT64)',
      'ToString': 'CAST(%s AS STRING)',
      # Aggregation.
      'Aggr': '%s', # Placeholder to use formulas for aggregation.
      'Agg+': 'SUM(%s)',
      'Agg++': 'ARRAY_CONCAT_AGG(%s)',
      'Container': '%s',
      'Count': 'APPROX_COUNT_DISTINCT(%s)',
      'ExactCount': 'COUNT(DISTINCT %s)',
      'List': 'ARRAY_AGG(%s)',
      'Median': 'APPROX_QUANTILES(%s, 2)[OFFSET(1)]',
      'SomeValue': 'ARRAY_AGG(%s IGNORE NULLS LIMIT 1)[OFFSET(0)]',
      # Other functions.
      '!': 'NOT %s',
      '-': '- %s',
      'Concat': 'ARRAY_CONCAT({0}, {1})',
      'Constraint': '%s',
      'DateAddDay': 'DATE_ADD({0}, INTERVAL {1} DAY)',
      'DateDiffDay': 'DATE_DIFF({0}, {1}, DAY)',
      'Element': '{0}[OFFSET({1})]',
      'Enumerate': ('ARRAY(SELECT STRUCT('
                    'ROW_NUMBER() OVER () AS n, x AS element) '
                    'FROM UNNEST(%s) as x)'),
      'IsNull': '(%s IS NULL)',
      'Join': 'ARRAY_TO_STRING(%s)',
      'Like': '({0} LIKE {1})',
      'Range': 'GENERATE_ARRAY(0, %s - 1)',
      'RangeOf': 'GENERATE_ARRAY(0, ARRAY_LENGTH(%s) - 1)',
      'Set': 'ARRAY_AGG(DISTINCT %s)',
      'Size': 'ARRAY_LENGTH(%s)',
      'Sort': 'ARRAY(SELECT x FROM UNNEST(%s) as x ORDER BY x)',
      'TimestampAddDays': 'TIMESTAMP_ADD({0}, INTERVAL {1} DAY)',
      'Unique': 'ARRAY(SELECT DISTINCT x FROM UNNEST(%s) as x ORDER BY x)',
      'ValueOfUnnested': '%s',
      # These functions are treated specially (dedicated branches in
      # ConvertToSql); the 'UNUSED' templates are never rendered.
      'FlagValue': 'UNUSED',
      'Cast': 'UNUSED',
      'SqlExpr': 'UNUSED'
  }
  BUILT_IN_INFIX_OPERATORS = {
      '==': '%s = %s',
      '<=': '%s <= %s',
      '<': '%s < %s',
      '>=': '%s >= %s',
      '>': '%s > %s',
      '/': '(%s) / (%s)',
      '+': '(%s) + (%s)',
      '-': '(%s) - (%s)',
      '*': '(%s) * (%s)',
      '^': 'POW(%s, %s)',
      '!=': '%s != %s',
      '++': 'CONCAT(%s, %s)',
      'in': '%s IN UNNEST(%s)',
      '||': '%s OR %s',
      '&&': '%s AND %s',
      '%': 'MOD(%s, %s)'
  }
  # Lazily populated from processed_functions.csv by
  # InstallBulkFunctionsOfStandardSQL().
  BULK_FUNCTIONS = {}
  BULK_FUNCTIONS_ARITY_RANGE = {}
  # When adding any analytic functions please check that ConvertAnalytic
  # function handles them correctly.
  ANALYTIC_FUNCTIONS = {
      'CumulativeSum':
          'SUM({0}) OVER (PARTITION BY {1} ORDER BY {2} '
          'ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)',
      'CumulativeMax':
          'MAX({0}) OVER (PARTITION BY {1} ORDER BY {2} '
          'ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)',
      'CumulativeMin':
          'MIN({0}) OVER (PARTITION BY {1} ORDER BY {2} '
          'ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)',
      'WindowSum':
          'SUM({0}) OVER (PARTITION BY {1} ORDER BY {2} '
          'ROWS BETWEEN {3} PRECEDING AND CURRENT ROW)',
      'WindowMax':
          'MAX({0}) OVER (PARTITION BY {1} ORDER BY {2} '
          'ROWS BETWEEN {3} PRECEDING AND CURRENT ROW)',
      'WindowMin':
          'MIN({0}) OVER (PARTITION BY {1} ORDER BY {2} '
          'ROWS BETWEEN {3} PRECEDING AND CURRENT ROW)',
  }
  def __init__(self, vars_vocabulary, subquery_translator, exception_maker,
               flag_values, custom_udfs=None, dialect=None):
    """Initializes the instance.
    Args:
      vars_vocabulary: Dictionary mapping Logica variable to an SQL expression.
      subquery_translator: SubqueryTranslator to translate 'combine'
        expressions.
      exception_maker: Exception to raise if expression compilation error
        occurs.
      flag_values: Values of program flags.
      custom_udfs: A map from udf name to udf application string template.
      dialect: SQL dialect object.
    """
    self.dialect = dialect or dialects.BigQueryDialect()
    self.vocabulary = vars_vocabulary
    self.subquery_translator = subquery_translator
    # Using instance variables for test purposes.
    self.InstallBulkFunctionsOfStandardSQL()
    self.bulk_functions = self.BULK_FUNCTIONS
    self.bulk_function_arity_range = self.BULK_FUNCTIONS_ARITY_RANGE
    # Layering order matters: bulk CSV functions < built-ins < dialect
    # overrides (a dialect may override with None to mark "unsupported").
    self.built_in_functions = copy.deepcopy(self.bulk_functions)
    self.built_in_functions.update(self.BUILT_IN_FUNCTIONS)
    self.built_in_functions.update(self.dialect.BuiltInFunctions())
    self.built_in_infix_operators = copy.deepcopy(
        self.BUILT_IN_INFIX_OPERATORS)
    self.built_in_infix_operators.update(self.dialect.InfixOperators())
    self.CleanOperatorsAndFunctions()
    self.exception_maker = exception_maker
    self.debug_undefined_variables = False
    # We set convert_to_json to convert arguments of annotations to Python
    # objects. This is hack-y. In the long run we plan to run them in
    # StandardSQL reference implementation and record the output.
    self.convert_to_json = False
    self.flag_values = flag_values
    self.custom_udfs = custom_udfs or {}
  def CleanOperatorsAndFunctions(self):
    """Removes None-valued (dialect-unsupported) infix operators in place."""
    def CleanDictionary(d):
      keys = list(d.keys())
      for k in keys:
        if d[k] is None:
          del d[k]
    # NOTE(review): the loop variable `d` is never used -- both iterations
    # clean self.built_in_infix_operators, and self.built_in_functions is
    # never cleaned. Looks like a bug (`CleanDictionary(d)` was presumably
    # intended), but ConvertToSql currently relies on None entries remaining
    # in built_in_functions to emit the "not supported by X dialect" error.
    # Confirm intent before changing.
    for d in [self.built_in_infix_operators,
              self.built_in_functions]:
      CleanDictionary(self.built_in_infix_operators)
@classmethod
def BasisFunctions(cls):
cls.InstallBulkFunctionsOfStandardSQL()
return (
set(cls.BUILT_IN_FUNCTIONS) |
set(cls.BUILT_IN_INFIX_OPERATORS) |
set(cls.BULK_FUNCTIONS) |
set(cls.ANALYTIC_FUNCTIONS))
  @classmethod
  def InstallBulkFunctionsOfStandardSQL(cls):
    """Populates fields from processed_functions.csv."""
    # Idempotent: a non-empty table means installation already happened.
    if cls.BULK_FUNCTIONS:
      return
    def CamelCase(s):
      s = s.replace('.', '_')
      return ''.join(p[0].upper() + p[1:] for p in s.split('_'))
    reader = processed_functions.GetCsv()
    header = next(reader)
    for row in reader:
      row = dict(list(zip(header, row)))
      if row['function'][0] == '$':
        # TODO: Process operators.
        continue
      function_name = CamelCase(row['function'])
      cls.BULK_FUNCTIONS[function_name] = (
          '%s(%s)' % (row['sql_function'], '%s'))
      # Max arity is unbounded for variadic SQL functions.
      cls.BULK_FUNCTIONS_ARITY_RANGE[function_name] = (
          int(row['min_args']),
          float('inf')
          if row['has_repeated_args'] == '1' else int(row['max_args']))
def BuiltInFunctionArityRange(self, f):
"""Returns arity of the built-in function."""
assert f in self.built_in_functions
if f in self.BUILT_IN_FUNCTIONS:
if f == 'If':
return (3, 3)
arity_2_functions = ['RegexpExtract', 'Like',
'ParseTimestamp', 'FormatTimestamp',
'TimestampAddDays', 'Split', 'Element',
'Concat', 'DateAddDay', 'DateDiffDay',
'Join']
if f in arity_2_functions:
return (2, 2)
return (1, 1)
else:
assert f in self.bulk_functions
return self.bulk_function_arity_range[f]
def If(self, args):
assert len(args) == 3
return 'IF(%s, %s, %s)' % tuple(args)
def Function(self, f, args):
args_list = [None] * len(args)
for k, v in args.items():
args_list[k] = str(v)
if '%s' in f:
return f % ', '.join(args_list)
else:
return f.format(*args_list)
def Infix(self, op, args):
return op % (args['left'], args['right'])
def Subscript(self, record, subscript):
if isinstance(subscript, int):
subscript = 'col%d' % subscript
return self.dialect.Subscript(record, subscript)
def IntLiteral(self, literal):
return str(literal['number'])
def StrLiteral(self, literal):
if self.dialect.Name() in ["PostgreSQL", "Presto", "Trino", "SqLite"]:
# TODO: Do this safely.
return '\'%s\'' % literal['the_string']
return json.dumps(literal['the_string'], ensure_ascii=False)
def ListLiteralInternals(self, literal):
return ', '.join([self.ConvertToSql(e)
for e in literal['element']])
def ListLiteral(self, literal):
return self.dialect.ArrayPhrase() % self.ListLiteralInternals(literal)
  def BoolLiteral(self, literal):
    """Renders a boolean literal (parser already supplies SQL-ready text)."""
    return literal['the_bool']
  def NullLiteral(self, literal):
    """Renders a NULL literal (parser already supplies SQL-ready text)."""
    return literal['the_null']
  # Might be used for automatic program analysis.
  def PredicateLiteral(self, literal):
    """Renders a predicate literal as a one-field struct (or JSON object)."""
    if self.convert_to_json:
      return '{"predicate_name": "%s"}' % (literal['predicate_name'])
    return 'STRUCT("%s" AS predicate_name)' % literal['predicate_name']
def Variable(self, variable):
if variable['var_name'] in self.vocabulary:
return self.vocabulary[variable['var_name']]
else:
if self.debug_undefined_variables:
return 'UNDEFINED_%s' % variable['var_name']
assert False, 'Found no interpretation for %s in %s' % (
variable['var_name'], self.vocabulary)
def ConvertRecord(self, args):
result = {}
for f_v in args['field_value']:
assert 'expression' in f_v['value'], (
'Bad record: %s' % args)
result[f_v['field']] = self.ConvertToSql(f_v['value']['expression'])
return result
def RecordAsJson(self, record):
json_field_values = []
for f_v in record['field_value']:
json_field_values.append('"{field}": {value}'.format(
field=f_v['field'],
value=self.ConvertToSql(f_v['value']['expression'])))
return '{%s}' % ', '.join(json_field_values)
def Record(self, record):
if self.convert_to_json:
return self.RecordAsJson(record)
# TODO: Move this to dialects.py.
if self.dialect.Name() == 'SqLite':
arguments_str = ', '.join(
"'%s', %s" % (f_v['field'],
self.ConvertToSql(f_v['value']['expression']) )
for f_v in record['field_value'])
return 'JSON_OBJECT(%s)' % arguments_str
arguments_str = ', '.join(
'%s AS %s' % (self.ConvertToSql(f_v['value']['expression']),
f_v['field'])
for f_v in record['field_value'])
return 'STRUCT(%s)' % arguments_str
def GenericSqlExpression(self, record):
"""Converting SqlExpr to SQL."""
top_record = self.ConvertRecord(record)
if set(top_record) != set([0, 1]):
raise self.exception_maker(
'SqlExpr must have 2 positional arguments, got: %s' % top_record)
if ('literal' not in record['field_value'][0]
['value']['expression'] or
'the_string' not in
record['field_value'][0]['value']['expression']['literal']):
raise self.exception_maker(
'SqlExpr must have first argument be string, got: %s' %
top_record[0])
template = (
record['field_value'][0]['value']['expression']['literal']
['the_string']['the_string'])
if 'record' not in record['field_value'][1]['value']['expression']:
raise self.exception_maker(
'Sectond argument of SqlExpr must be record literal. Got: %s' %
top_record[1])
args = self.ConvertRecord(
record['field_value'][1]['value']['expression']['record'])
return template.format(**args)
def Implication(self, implication):
when_then_clauses = []
for cond_cons in implication['if_then']:
when_then_clauses.append(
'WHEN {cond} THEN {cons}'.format(
cond=self.ConvertToSql(cond_cons['condition']),
cons=self.ConvertToSql(cond_cons['consequence'])))
when_then_clauses_str = ' '.join(when_then_clauses)
otherwise = self.ConvertToSql(implication['otherwise'])
return 'CASE %s ELSE %s END' % (when_then_clauses_str, otherwise)
def ConvertAnalyticListArgument(self, expression):
if ('literal' not in expression or
'the_list' not in expression['literal']):
raise self.exception_maker(
'Analytic list argument must resolve to list literal, got: %s' %
self.ConvertToSql(expression))
return self.ListLiteralInternals(expression['literal']['the_list'])
def ConvertAnalytic(self, call):
"""Converting analytic function call to SQL."""
is_window = call['predicate_name'].startswith('Window')
if len(call['record']['field_value']) != 3 + is_window:
raise self.exception_maker(
'Function %s must have %d arguments.' % (call['predicate_name'],
3 + is_window))
aggregant = self.ConvertToSql(
call['record']['field_value'][0]['value']['expression'])
group_by = self.ConvertAnalyticListArgument(
call['record']['field_value'][1]['value']['expression'])
order_by = self.ConvertAnalyticListArgument(
call['record']['field_value'][2]['value']['expression'])
if is_window:
window_size = self.ConvertToSql(
call['record']['field_value'][3]['value']['expression'])
if not is_window:
return self.ANALYTIC_FUNCTIONS[call['predicate_name']].format(
aggregant, group_by, order_by)
else:
return self.ANALYTIC_FUNCTIONS[call['predicate_name']].format(
aggregant, group_by, order_by, window_size)
  def SubIfStruct(self, implication, subscript):
    """Optimizing SQL for subscript of an 'if' statement.
    Args:
      implication: Implication syntax tree.
      subscript: Subscript string.
    Returns:
      optimized SQL if all consequences are syntactic records, or None
      otherwise.
    """
    def GetValueOfField(field_values, field):
      # Returns the expression stored under `field`, or raises a user error
      # if some branch's record lacks it.
      for field_value in field_values:
        if field_value['field'] == field:
          return field_value['value']['expression']
      raise self.exception_maker(
          'Expected field %s missing in a record inside %s statement.' % (
              color.Warn(subscript), color.Warn('if')))
      assert False
    # Only optimize when every branch (including 'otherwise') is a syntactic
    # record literal, so the subscripted field can be pulled out directly.
    all_records = all(
        ('record' in if_then['consequence'])
        for if_then in implication['if_then'])
    if not (all_records and 'record' in implication['otherwise']):
      return None
    new_if_thens = []
    for if_then in implication['if_then']:
      # Deep copy keeps the original syntax tree intact; only the consequence
      # is replaced with the selected field's expression.
      new_if_then = copy.deepcopy(if_then)
      consequence = GetValueOfField(
          if_then['consequence']['record']['field_value'], subscript)
      new_if_then['consequence'] = consequence
      new_if_thens.append(new_if_then)
    new_otherwise = GetValueOfField(
        implication['otherwise']['record']['field_value'], subscript)
    new_expr = {
        'implication': {'if_then': new_if_thens, 'otherwise': new_otherwise}}
    return self.ConvertToSql(new_expr)
  def ConvertToSql(self, expression):
    """Converting Logica expression into SQL.

    Dispatches on which key the expression syntax-tree node carries:
    variable, literal, call, subscript, record, combine, or implication.
    Order of the 'call' sub-branches matters: special forms first, then
    built-in functions, then custom UDFs, then infix operators.
    """
    # Variables.
    if 'variable' in expression:
      return self.Variable(expression['variable'])
    # Literals.
    if 'literal' in expression:
      literal = expression['literal']
      if 'the_number' in literal:
        return self.IntLiteral(literal['the_number'])
      if 'the_string' in literal:
        return self.StrLiteral(literal['the_string'])
      if 'the_list' in literal:
        return self.ListLiteral(literal['the_list'])
      if 'the_bool' in literal:
        return self.BoolLiteral(literal['the_bool'])
      if 'the_null' in literal:
        return self.NullLiteral(literal['the_null'])
      if 'the_predicate' in literal:
        return self.PredicateLiteral(literal['the_predicate'])
      assert False, 'Logica bug: unsupported literal parsed: %s' % literal
    if 'call' in expression:
      call = expression['call']
      arguments = self.ConvertRecord(call['record'])
      if call['predicate_name'] in self.ANALYTIC_FUNCTIONS:
        return self.ConvertAnalytic(call)
      if call['predicate_name'] == 'SqlExpr':
        return self.GenericSqlExpression(call['record'])
      # Cast's target type must be a syntactic string literal, read from the
      # parse tree rather than from the converted argument.
      if call['predicate_name'] == 'Cast':
        if (len(arguments) != 2 or
            'literal' not in
            call['record']['field_value'][1]['value']['expression'] or
            'the_string' not in
            call['record']['field_value'][1]['value']['expression']['literal']):
          raise self.exception_maker(
              'Cast must have 2 arguments and the second argument must be a '
              'string literal.')
        cast_to = (
            call['record']['field_value'][1]['value']['expression']['literal']
            ['the_string']['the_string'])
        return 'CAST(%s AS %s)' % (
            self.ConvertToSql(
                call['record']['field_value'][0]['value']['expression']),
            cast_to)
      # FlagValue is resolved at compile time from the program's flag values.
      if call['predicate_name'] == 'FlagValue':
        if (len(arguments) != 1 or
            'literal' not in
            call['record']['field_value'][0]['value']['expression'] or
            'the_string' not in
            call['record']['field_value'][0]['value']['expression']['literal']):
          raise self.exception_maker(
              'FlagValue argument must be a string literal.')
        flag = (
            call['record']['field_value'][0]['value']['expression']['literal']
            ['the_string']['the_string'])
        if flag not in self.flag_values:
          raise self.exception_maker(
              'Unspecified flag: %s' % flag)
        return self.StrLiteral(
            {'the_string': self.flag_values[flag]})
      for ydg_f, sql_f in self.built_in_functions.items():
        if call['predicate_name'] == ydg_f:
          # None template means the current dialect declared the function
          # unsupported (see CleanOperatorsAndFunctions).
          if not sql_f:
            raise self.exception_maker(
                'Function %s is not supported by %s dialect.' % (
                    color.Warn(ydg_f), color.Warn(self.dialect.Name())))
          if len(arguments) == 2 and ydg_f == '-':
            continue # '-' is the only operator with variable arity.
          arity_range = self.BuiltInFunctionArityRange(ydg_f)
          if not arity_range[0] <= len(arguments) <= arity_range[1]:
            raise self.exception_maker(
                color.Format(
                    'Built-in function {warning}{ydg_f}{end} takes {a} '
                    'arguments, but {b} arguments were given.',
                    dict(ydg_f=ydg_f, a=arity_range,
                         b=len(arguments))))
          return self.Function(sql_f, arguments)
      for udf, udf_sql in self.custom_udfs.items():
        if call['predicate_name'] == udf:
          # TODO: Treatment of positional arguments should be
          # simplified everywhere.
          arguments = dict(
              (k, v) if isinstance(k, str) else ('col%d' % k, v)
              for k, v in arguments.items())
          try:
            result = udf_sql.format(**arguments)
          except KeyError:
            raise self.exception_maker(
                'Function %s call is inconsistent with its signature %s.' %
                (color.Warn(udf), udf_sql))
          return result
      for ydg_op, sql_op in self.built_in_infix_operators.items():
        if call['predicate_name'] == ydg_op:
          result = self.Infix(sql_op, arguments)
          result = '(' + result + ')'
          return result
    if 'subscript' in expression:
      sub = expression['subscript']
      subscript = sub['subscript']['literal']['the_symbol']['symbol']
      # TODO: Record literal and record of subscript should have
      # different keys.
      # Try to optimize and return the field from a record.
      if 'record' in sub['record']:
        for f_v in sub['record']['record']['field_value']:
          if f_v['field'] == subscript:
            # Optimizing and returning the field directly.
            return self.ConvertToSql(f_v['value']['expression'])
      # Trying to optimize subscript of implication.
      if 'implication' in sub['record']:
        simplified_sub = self.SubIfStruct(sub['record']['implication'],
                                          subscript)
        if simplified_sub:
          return simplified_sub
      # Couldn't optimize, just return the '.' expression.
      record = self.ConvertToSql(sub['record'])
      return self.Subscript(record, subscript)
    if 'record' in expression:
      record = expression['record']
      return self.Record(record)
    # 'combine' is a nested aggregating subquery, translated recursively.
    if 'combine' in expression:
      return '(%s)' % (
          self.subquery_translator.TranslateRule(expression['combine'],
                                                 self.vocabulary))
    if 'implication' in expression:
      implication = expression['implication']
      return self.Implication(implication)
    # A 'call' that fell through every table above is an unknown function.
    if 'call' in expression and 'predicate_name' in expression['call']:
      raise self.exception_maker(color.Format(
          'Unsupported supposedly built-in function: '
          '{warning}{predicate}{end}.', dict(
              predicate=expression['call']['predicate_name'])))
    assert False, (
        'Logica bug: expression %s failed to compile for unknown reason.' %
        str(expression))
| 38.819643 | 80 | 0.611068 |
import copy
import json
if '.' not in __package__:
from common import color
from common.data import processed_functions
from compiler import dialects
else:
from ..common import color
from ..common.data import processed_functions
from ..compiler import dialects
class QL(object):
BUILT_IN_FUNCTIONS = {
'ToFloat64': 'CAST(%s AS FLOAT64)',
'ToInt64': 'CAST(%s AS INT64)',
'ToUInt64': 'CAST(%s AS UINT64)',
'ToString': 'CAST(%s AS STRING)',
'Aggr': '%s',
'Agg+': 'SUM(%s)',
'Agg++': 'ARRAY_CONCAT_AGG(%s)',
'Container': '%s',
'Count': 'APPROX_COUNT_DISTINCT(%s)',
'ExactCount': 'COUNT(DISTINCT %s)',
'List': 'ARRAY_AGG(%s)',
'Median': 'APPROX_QUANTILES(%s, 2)[OFFSET(1)]',
'SomeValue': 'ARRAY_AGG(%s IGNORE NULLS LIMIT 1)[OFFSET(0)]',
'!': 'NOT %s',
'-': '- %s',
'Concat': 'ARRAY_CONCAT({0}, {1})',
'Constraint': '%s',
'DateAddDay': 'DATE_ADD({0}, INTERVAL {1} DAY)',
'DateDiffDay': 'DATE_DIFF({0}, {1}, DAY)',
'Element': '{0}[OFFSET({1})]',
'Enumerate': ('ARRAY(SELECT STRUCT('
'ROW_NUMBER() OVER () AS n, x AS element) '
'FROM UNNEST(%s) as x)'),
'IsNull': '(%s IS NULL)',
'Join': 'ARRAY_TO_STRING(%s)',
'Like': '({0} LIKE {1})',
'Range': 'GENERATE_ARRAY(0, %s - 1)',
'RangeOf': 'GENERATE_ARRAY(0, ARRAY_LENGTH(%s) - 1)',
'Set': 'ARRAY_AGG(DISTINCT %s)',
'Size': 'ARRAY_LENGTH(%s)',
'Sort': 'ARRAY(SELECT x FROM UNNEST(%s) as x ORDER BY x)',
'TimestampAddDays': 'TIMESTAMP_ADD({0}, INTERVAL {1} DAY)',
'Unique': 'ARRAY(SELECT DISTINCT x FROM UNNEST(%s) as x ORDER BY x)',
'ValueOfUnnested': '%s',
'FlagValue': 'UNUSED',
'Cast': 'UNUSED',
'SqlExpr': 'UNUSED'
}
BUILT_IN_INFIX_OPERATORS = {
'==': '%s = %s',
'<=': '%s <= %s',
'<': '%s < %s',
'>=': '%s >= %s',
'>': '%s > %s',
'/': '(%s) / (%s)',
'+': '(%s) + (%s)',
'-': '(%s) - (%s)',
'*': '(%s) * (%s)',
'^': 'POW(%s, %s)',
'!=': '%s != %s',
'++': 'CONCAT(%s, %s)',
'in': '%s IN UNNEST(%s)',
'||': '%s OR %s',
'&&': '%s AND %s',
'%': 'MOD(%s, %s)'
}
BULK_FUNCTIONS = {}
BULK_FUNCTIONS_ARITY_RANGE = {}
ANALYTIC_FUNCTIONS = {
'CumulativeSum':
'SUM({0}) OVER (PARTITION BY {1} ORDER BY {2} '
'ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)',
'CumulativeMax':
'MAX({0}) OVER (PARTITION BY {1} ORDER BY {2} '
'ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)',
'CumulativeMin':
'MIN({0}) OVER (PARTITION BY {1} ORDER BY {2} '
'ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)',
'WindowSum':
'SUM({0}) OVER (PARTITION BY {1} ORDER BY {2} '
'ROWS BETWEEN {3} PRECEDING AND CURRENT ROW)',
'WindowMax':
'MAX({0}) OVER (PARTITION BY {1} ORDER BY {2} '
'ROWS BETWEEN {3} PRECEDING AND CURRENT ROW)',
'WindowMin':
'MIN({0}) OVER (PARTITION BY {1} ORDER BY {2} '
'ROWS BETWEEN {3} PRECEDING AND CURRENT ROW)',
}
def __init__(self, vars_vocabulary, subquery_translator, exception_maker,
flag_values, custom_udfs=None, dialect=None):
self.dialect = dialect or dialects.BigQueryDialect()
self.vocabulary = vars_vocabulary
self.subquery_translator = subquery_translator
self.InstallBulkFunctionsOfStandardSQL()
self.bulk_functions = self.BULK_FUNCTIONS
self.bulk_function_arity_range = self.BULK_FUNCTIONS_ARITY_RANGE
self.built_in_functions = copy.deepcopy(self.bulk_functions)
self.built_in_functions.update(self.BUILT_IN_FUNCTIONS)
self.built_in_functions.update(self.dialect.BuiltInFunctions())
self.built_in_infix_operators = copy.deepcopy(
self.BUILT_IN_INFIX_OPERATORS)
self.built_in_infix_operators.update(self.dialect.InfixOperators())
self.CleanOperatorsAndFunctions()
self.exception_maker = exception_maker
self.debug_undefined_variables = False
self.convert_to_json = False
self.flag_values = flag_values
self.custom_udfs = custom_udfs or {}
def CleanOperatorsAndFunctions(self):
def CleanDictionary(d):
keys = list(d.keys())
for k in keys:
if d[k] is None:
del d[k]
for d in [self.built_in_infix_operators,
self.built_in_functions]:
CleanDictionary(self.built_in_infix_operators)
@classmethod
def BasisFunctions(cls):
cls.InstallBulkFunctionsOfStandardSQL()
return (
set(cls.BUILT_IN_FUNCTIONS) |
set(cls.BUILT_IN_INFIX_OPERATORS) |
set(cls.BULK_FUNCTIONS) |
set(cls.ANALYTIC_FUNCTIONS))
@classmethod
def InstallBulkFunctionsOfStandardSQL(cls):
if cls.BULK_FUNCTIONS:
return
def CamelCase(s):
s = s.replace('.', '_')
return ''.join(p[0].upper() + p[1:] for p in s.split('_'))
reader = processed_functions.GetCsv()
header = next(reader)
for row in reader:
row = dict(list(zip(header, row)))
if row['function'][0] == '$':
continue
function_name = CamelCase(row['function'])
cls.BULK_FUNCTIONS[function_name] = (
'%s(%s)' % (row['sql_function'], '%s'))
cls.BULK_FUNCTIONS_ARITY_RANGE[function_name] = (
int(row['min_args']),
float('inf')
if row['has_repeated_args'] == '1' else int(row['max_args']))
def BuiltInFunctionArityRange(self, f):
assert f in self.built_in_functions
if f in self.BUILT_IN_FUNCTIONS:
if f == 'If':
return (3, 3)
arity_2_functions = ['RegexpExtract', 'Like',
'ParseTimestamp', 'FormatTimestamp',
'TimestampAddDays', 'Split', 'Element',
'Concat', 'DateAddDay', 'DateDiffDay',
'Join']
if f in arity_2_functions:
return (2, 2)
return (1, 1)
else:
assert f in self.bulk_functions
return self.bulk_function_arity_range[f]
def If(self, args):
assert len(args) == 3
return 'IF(%s, %s, %s)' % tuple(args)
def Function(self, f, args):
args_list = [None] * len(args)
for k, v in args.items():
args_list[k] = str(v)
if '%s' in f:
return f % ', '.join(args_list)
else:
return f.format(*args_list)
def Infix(self, op, args):
return op % (args['left'], args['right'])
def Subscript(self, record, subscript):
if isinstance(subscript, int):
subscript = 'col%d' % subscript
return self.dialect.Subscript(record, subscript)
def IntLiteral(self, literal):
return str(literal['number'])
def StrLiteral(self, literal):
if self.dialect.Name() in ["PostgreSQL", "Presto", "Trino", "SqLite"]:
return '\'%s\'' % literal['the_string']
return json.dumps(literal['the_string'], ensure_ascii=False)
def ListLiteralInternals(self, literal):
return ', '.join([self.ConvertToSql(e)
for e in literal['element']])
def ListLiteral(self, literal):
return self.dialect.ArrayPhrase() % self.ListLiteralInternals(literal)
def BoolLiteral(self, literal):
return literal['the_bool']
def NullLiteral(self, literal):
return literal['the_null']
def PredicateLiteral(self, literal):
if self.convert_to_json:
return '{"predicate_name": "%s"}' % (literal['predicate_name'])
return 'STRUCT("%s" AS predicate_name)' % literal['predicate_name']
def Variable(self, variable):
if variable['var_name'] in self.vocabulary:
return self.vocabulary[variable['var_name']]
else:
if self.debug_undefined_variables:
return 'UNDEFINED_%s' % variable['var_name']
assert False, 'Found no interpretation for %s in %s' % (
variable['var_name'], self.vocabulary)
def ConvertRecord(self, args):
result = {}
for f_v in args['field_value']:
assert 'expression' in f_v['value'], (
'Bad record: %s' % args)
result[f_v['field']] = self.ConvertToSql(f_v['value']['expression'])
return result
def RecordAsJson(self, record):
json_field_values = []
for f_v in record['field_value']:
json_field_values.append('"{field}": {value}'.format(
field=f_v['field'],
value=self.ConvertToSql(f_v['value']['expression'])))
return '{%s}' % ', '.join(json_field_values)
def Record(self, record):
if self.convert_to_json:
return self.RecordAsJson(record)
if self.dialect.Name() == 'SqLite':
arguments_str = ', '.join(
"'%s', %s" % (f_v['field'],
self.ConvertToSql(f_v['value']['expression']) )
for f_v in record['field_value'])
return 'JSON_OBJECT(%s)' % arguments_str
arguments_str = ', '.join(
'%s AS %s' % (self.ConvertToSql(f_v['value']['expression']),
f_v['field'])
for f_v in record['field_value'])
return 'STRUCT(%s)' % arguments_str
def GenericSqlExpression(self, record):
top_record = self.ConvertRecord(record)
if set(top_record) != set([0, 1]):
raise self.exception_maker(
'SqlExpr must have 2 positional arguments, got: %s' % top_record)
if ('literal' not in record['field_value'][0]
['value']['expression'] or
'the_string' not in
record['field_value'][0]['value']['expression']['literal']):
raise self.exception_maker(
'SqlExpr must have first argument be string, got: %s' %
top_record[0])
template = (
record['field_value'][0]['value']['expression']['literal']
['the_string']['the_string'])
if 'record' not in record['field_value'][1]['value']['expression']:
raise self.exception_maker(
'Sectond argument of SqlExpr must be record literal. Got: %s' %
top_record[1])
args = self.ConvertRecord(
record['field_value'][1]['value']['expression']['record'])
return template.format(**args)
def Implication(self, implication):
when_then_clauses = []
for cond_cons in implication['if_then']:
when_then_clauses.append(
'WHEN {cond} THEN {cons}'.format(
cond=self.ConvertToSql(cond_cons['condition']),
cons=self.ConvertToSql(cond_cons['consequence'])))
when_then_clauses_str = ' '.join(when_then_clauses)
otherwise = self.ConvertToSql(implication['otherwise'])
return 'CASE %s ELSE %s END' % (when_then_clauses_str, otherwise)
def ConvertAnalyticListArgument(self, expression):
if ('literal' not in expression or
'the_list' not in expression['literal']):
raise self.exception_maker(
'Analytic list argument must resolve to list literal, got: %s' %
self.ConvertToSql(expression))
return self.ListLiteralInternals(expression['literal']['the_list'])
def ConvertAnalytic(self, call):
is_window = call['predicate_name'].startswith('Window')
if len(call['record']['field_value']) != 3 + is_window:
raise self.exception_maker(
'Function %s must have %d arguments.' % (call['predicate_name'],
3 + is_window))
aggregant = self.ConvertToSql(
call['record']['field_value'][0]['value']['expression'])
group_by = self.ConvertAnalyticListArgument(
call['record']['field_value'][1]['value']['expression'])
order_by = self.ConvertAnalyticListArgument(
call['record']['field_value'][2]['value']['expression'])
if is_window:
window_size = self.ConvertToSql(
call['record']['field_value'][3]['value']['expression'])
if not is_window:
return self.ANALYTIC_FUNCTIONS[call['predicate_name']].format(
aggregant, group_by, order_by)
else:
return self.ANALYTIC_FUNCTIONS[call['predicate_name']].format(
aggregant, group_by, order_by, window_size)
def SubIfStruct(self, implication, subscript):
def GetValueOfField(field_values, field):
for field_value in field_values:
if field_value['field'] == field:
return field_value['value']['expression']
raise self.exception_maker(
'Expected field %s missing in a record inside %s statement.' % (
color.Warn(subscript), color.Warn('if')))
assert False
all_records = all(
('record' in if_then['consequence'])
for if_then in implication['if_then'])
if not (all_records and 'record' in implication['otherwise']):
return None
new_if_thens = []
for if_then in implication['if_then']:
new_if_then = copy.deepcopy(if_then)
consequence = GetValueOfField(
if_then['consequence']['record']['field_value'], subscript)
new_if_then['consequence'] = consequence
new_if_thens.append(new_if_then)
new_otherwise = GetValueOfField(
implication['otherwise']['record']['field_value'], subscript)
new_expr = {
'implication': {'if_then': new_if_thens, 'otherwise': new_otherwise}}
return self.ConvertToSql(new_expr)
  def ConvertToSql(self, expression):
    """Converts a Logica expression AST node into a SQL string.

    Dispatches on the node's top-level key: 'variable', 'literal', 'call',
    'subscript', 'record', 'combine' or 'implication'. Raises via
    self.exception_maker on user errors; asserts on compiler bugs.
    """
    # Variable reference.
    if 'variable' in expression:
      return self.Variable(expression['variable'])

    # Literals of every supported kind.
    if 'literal' in expression:
      literal = expression['literal']
      if 'the_number' in literal:
        return self.IntLiteral(literal['the_number'])
      if 'the_string' in literal:
        return self.StrLiteral(literal['the_string'])
      if 'the_list' in literal:
        return self.ListLiteral(literal['the_list'])
      if 'the_bool' in literal:
        return self.BoolLiteral(literal['the_bool'])
      if 'the_null' in literal:
        return self.NullLiteral(literal['the_null'])
      if 'the_predicate' in literal:
        return self.PredicateLiteral(literal['the_predicate'])
      assert False, 'Logica bug: unsupported literal parsed: %s' % literal

    # Function calls: analytic functions, special forms, built-ins,
    # custom UDFs and infix operators -- checked in that order.
    if 'call' in expression:
      call = expression['call']
      arguments = self.ConvertRecord(call['record'])
      if call['predicate_name'] in self.ANALYTIC_FUNCTIONS:
        return self.ConvertAnalytic(call)
      # Escape hatch: raw SQL expression provided by the user.
      if call['predicate_name'] == 'SqlExpr':
        return self.GenericSqlExpression(call['record'])
      if call['predicate_name'] == 'Cast':
        # The second argument must be a string literal naming the SQL type.
        if (len(arguments) != 2 or
            'literal' not in
            call['record']['field_value'][1]['value']['expression'] or
            'the_string' not in
            call['record']['field_value'][1]['value']['expression']['literal']):
          raise self.exception_maker(
              'Cast must have 2 arguments and the second argument must be a '
              'string literal.')
        cast_to = (
            call['record']['field_value'][1]['value']['expression']['literal']
            ['the_string']['the_string'])
        return 'CAST(%s AS %s)' % (
            self.ConvertToSql(
                call['record']['field_value'][0]['value']['expression']),
            cast_to)
      if call['predicate_name'] == 'FlagValue':
        # The flag name must be a string literal and the flag must have been
        # supplied; its value is inlined as a string literal.
        if (len(arguments) != 1 or
            'literal' not in
            call['record']['field_value'][0]['value']['expression'] or
            'the_string' not in
            call['record']['field_value'][0]['value']['expression']['literal']):
          raise self.exception_maker(
              'FlagValue argument must be a string literal.')
        flag = (
            call['record']['field_value'][0]['value']['expression']['literal']
            ['the_string']['the_string'])
        if flag not in self.flag_values:
          raise self.exception_maker(
              'Unspecified flag: %s' % flag)
        return self.StrLiteral(
            {'the_string': self.flag_values[flag]})
      # Built-in functions mapped to dialect-specific SQL.
      for ydg_f, sql_f in self.built_in_functions.items():
        if call['predicate_name'] == ydg_f:
          if not sql_f:
            # An empty mapping means the dialect lacks this function.
            raise self.exception_maker(
                'Function %s is not supported by %s dialect.' % (
                    color.Warn(ydg_f), color.Warn(self.dialect.Name())))
          if len(arguments) == 2 and ydg_f == '-':
            # Two-argument '-' is subtraction: fall through to the infix
            # operator handling below instead of the unary mapping.
            continue
          arity_range = self.BuiltInFunctionArityRange(ydg_f)
          if not arity_range[0] <= len(arguments) <= arity_range[1]:
            raise self.exception_maker(
                color.Format(
                    'Built-in function {warning}{ydg_f}{end} takes {a} '
                    'arguments, but {b} arguments were given.',
                    dict(ydg_f=ydg_f, a=arity_range,
                         b=len(arguments))))
          return self.Function(sql_f, arguments)
      # User-provided UDFs given as SQL format strings.
      for udf, udf_sql in self.custom_udfs.items():
        if call['predicate_name'] == udf:
          # Positional arguments are exposed to the format string as
          # col0, col1, ...; named arguments keep their names.
          arguments = dict(
              (k, v) if isinstance(k, str) else ('col%d' % k, v)
              for k, v in arguments.items())
          try:
            result = udf_sql.format(**arguments)
          except KeyError:
            raise self.exception_maker(
                'Function %s call is inconsistent with its signature %s.' %
                (color.Warn(udf), udf_sql))
          return result
      for ydg_op, sql_op in self.built_in_infix_operators.items():
        if call['predicate_name'] == ydg_op:
          result = self.Infix(sql_op, arguments)
          # Parenthesize to preserve precedence in the enclosing expression.
          result = '(' + result + ')'
          return result

    if 'subscript' in expression:
      sub = expression['subscript']
      subscript = sub['subscript']['literal']['the_symbol']['symbol']
      # Subscript of a record literal simplifies to that field's expression.
      if 'record' in sub['record']:
        for f_v in sub['record']['record']['field_value']:
          if f_v['field'] == subscript:
            return self.ConvertToSql(f_v['value']['expression'])
      # Subscript of an 'if' over records: try pushing the subscript into
      # the branches (see SubIfStruct).
      if 'implication' in sub['record']:
        simplified_sub = self.SubIfStruct(sub['record']['implication'],
                                          subscript)
        if simplified_sub:
          return simplified_sub
      # Generic case: subscript the converted record expression.
      record = self.ConvertToSql(sub['record'])
      return self.Subscript(record, subscript)

    if 'record' in expression:
      record = expression['record']
      return self.Record(record)

    # Combine expressions become subqueries.
    if 'combine' in expression:
      return '(%s)' % (
          self.subquery_translator.TranslateRule(expression['combine'],
                                                 self.vocabulary))

    if 'implication' in expression:
      implication = expression['implication']
      return self.Implication(implication)

    # A call that matched none of the tables above is an unknown built-in.
    if 'call' in expression and 'predicate_name' in expression['call']:
      raise self.exception_maker(color.Format(
          'Unsupported supposedly built-in function: '
          '{warning}{predicate}{end}.', dict(
              predicate=expression['call']['predicate_name'])))
    assert False, (
        'Logica bug: expression %s failed to compile for unknown reason.' %
        str(expression))
| true | true |
f7f8f4055005760b328adc7f17e7344bb07e40e6 | 491 | py | Python | app/decorators.py | rudGess/flasky_python | 9584e289b6773d45a8c005563e5335cf2c050b16 | [
"MIT"
] | null | null | null | app/decorators.py | rudGess/flasky_python | 9584e289b6773d45a8c005563e5335cf2c050b16 | [
"MIT"
] | null | null | null | app/decorators.py | rudGess/flasky_python | 9584e289b6773d45a8c005563e5335cf2c050b16 | [
"MIT"
] | null | null | null | from functools import wraps
from flask import abort
from flask_login import current_user
from .models import Permission
def permission_required(permission):
    """Decorator factory: aborts with HTTP 403 unless the current user
    holds ``permission``."""
    def decorator(view_func):
        @wraps(view_func)
        def wrapped_view(*args, **kwargs):
            # Reject the request before the view body ever runs.
            if current_user.can(permission):
                return view_func(*args, **kwargs)
            abort(403)
        return wrapped_view
    return decorator
def admin_required(f):
    """Shortcut decorator requiring the ADMIN permission."""
    decorator = permission_required(Permission.ADMIN)
    return decorator(f)
from flask import abort
from flask_login import current_user
from .models import Permission
def permission_required(permission):
def decorator(f):
@wraps(f)
def decorator_function(*args, **kwargs):
if not current_user.can(permission):
abort(403)
return f(*args, **kwargs)
return decorator_function
return decorator
def admin_required(f):
return permission_required(Permission.ADMIN)(f) | true | true |
f7f8f43cb6af0c9b6fa95fe9b49b4abc6faad1cf | 2,579 | py | Python | django_kms/fields.py | skruger/django-kms-field | 31ecec0920f2cfc7bd65a51d141992939a638870 | [
"MIT"
] | 4 | 2020-12-22T07:01:21.000Z | 2021-11-02T05:13:46.000Z | django_kms/fields.py | skruger/django-kms-field | 31ecec0920f2cfc7bd65a51d141992939a638870 | [
"MIT"
] | null | null | null | django_kms/fields.py | skruger/django-kms-field | 31ecec0920f2cfc7bd65a51d141992939a638870 | [
"MIT"
] | 1 | 2022-02-15T19:07:12.000Z | 2022-02-15T19:07:12.000Z | import hashlib
import boto3
from django.db import models
from django.conf import settings
from django.core import checks
from .cache import SimpleCache
def get_kms_client():
    """Creates a boto3 KMS client for the configured region.

    Region comes from settings.KMS_FIELD_REGION, defaulting to us-east-1.
    """
    region = getattr(settings, 'KMS_FIELD_REGION', 'us-east-1')
    return boto3.client('kms', region)
class KMSEncryptedCharField(models.BinaryField):
    """Django field that stores a string encrypted with AWS KMS.

    Plaintext strings are encrypted on write (``get_db_prep_value``) and the
    stored ciphertext is decrypted on read (``from_db_value``); decrypted
    values are cached in-process keyed by a SHA-1 of the ciphertext.
    """

    def __init__(self, key_id=None, *args, **kwargs):
        # key_id falls back to settings.KMS_FIELD_KEY; check() reports a
        # configuration error when neither is provided.
        kwargs.setdefault('editable', True)
        self.key_id = key_id or getattr(settings, "KMS_FIELD_KEY", None)
        # Cache of ciphertext-hash -> plaintext to avoid repeated KMS calls.
        self._ciphertext_cache = SimpleCache(max_size=getattr(settings, "KMS_FIELD_CACHE_SIZE", 250))
        super().__init__(*args, **kwargs)

    def deconstruct(self):
        """Include key_id so migrations can reconstruct the field."""
        name, path, args, kwargs = super().deconstruct()
        kwargs['key_id'] = self.key_id
        return name, path, args, kwargs

    def check(self, **kwargs):
        """Django system check: a KMS key must be configured."""
        extra_checks = list()
        if self.key_id is None:
            extra_checks.append(
                checks.Error(
                    "KMSEncryptedCharField must define a key_id attribute or "
                    "settings.KMS_FIELD_KEY must be set.",
                    obj=self,
                )
            )
        return [
            *super().check(**kwargs),
            *extra_checks,
        ]

    @property
    def _kms(self):
        """Lazily-created, memoized KMS client."""
        if not hasattr(self, '_kms_client') or getattr(settings, 'UNIT_TEST', False):
            # Always get_kms_client() in unit tests so mocks work
            client = get_kms_client()
            setattr(self, '_kms_client', client)
        return getattr(self, '_kms_client')

    def from_db_value(self, value, expression, connection):
        """Decrypt the stored ciphertext, consulting the in-process cache."""
        if value is None:
            return value
        # Cache key is the SHA-1 digest of the ciphertext bytes.
        ciphertext = bytes(value)
        ciphertext_hash = hashlib.sha1()
        ciphertext_hash.update(ciphertext)
        cache_key = ciphertext_hash.hexdigest()
        try:
            return self._ciphertext_cache.get(cache_key)
        except self._ciphertext_cache.CacheMiss:
            result = self._kms.decrypt(CiphertextBlob=bytes(value))
            new_value = result.get('Plaintext').decode()
            self._ciphertext_cache.set(cache_key, new_value)
            return new_value

    def get_db_prep_value(self, value, connection, prepared=False):
        """Encrypt str values with KMS before storing as binary.

        Non-str values (e.g. None or already-encrypted bytes) pass through
        to BinaryField unchanged.
        """
        if isinstance(value, str):
            result = self._kms.encrypt(KeyId=self.key_id, Plaintext=value.encode())
            return super().get_db_prep_value(result['CiphertextBlob'], connection, prepared)
        return super().get_db_prep_value(value, connection, prepared)

    def to_python(self, value):
        # Decryption happens in from_db_value; nothing to convert here.
        return value
| 33.493506 | 101 | 0.629314 | import hashlib
import boto3
from django.db import models
from django.conf import settings
from django.core import checks
from .cache import SimpleCache
def get_kms_client():
return boto3.client('kms', getattr(settings, 'KMS_FIELD_REGION', 'us-east-1'))
class KMSEncryptedCharField(models.BinaryField):
def __init__(self, key_id=None, *args, **kwargs):
kwargs.setdefault('editable', True)
self.key_id = key_id or getattr(settings, "KMS_FIELD_KEY", None)
self._ciphertext_cache = SimpleCache(max_size=getattr(settings, "KMS_FIELD_CACHE_SIZE", 250))
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
kwargs['key_id'] = self.key_id
return name, path, args, kwargs
def check(self, **kwargs):
extra_checks = list()
if self.key_id is None:
extra_checks.append(
checks.Error(
"KMSEncryptedCharField must define a key_id attribute or "
"settings.KMS_FIELD_KEY must be set.",
obj=self,
)
)
return [
*super().check(**kwargs),
*extra_checks,
]
@property
def _kms(self):
if not hasattr(self, '_kms_client') or getattr(settings, 'UNIT_TEST', False):
client = get_kms_client()
setattr(self, '_kms_client', client)
return getattr(self, '_kms_client')
def from_db_value(self, value, expression, connection):
if value is None:
return value
ciphertext = bytes(value)
ciphertext_hash = hashlib.sha1()
ciphertext_hash.update(ciphertext)
cache_key = ciphertext_hash.hexdigest()
try:
return self._ciphertext_cache.get(cache_key)
except self._ciphertext_cache.CacheMiss:
result = self._kms.decrypt(CiphertextBlob=bytes(value))
new_value = result.get('Plaintext').decode()
self._ciphertext_cache.set(cache_key, new_value)
return new_value
def get_db_prep_value(self, value, connection, prepared=False):
if isinstance(value, str):
result = self._kms.encrypt(KeyId=self.key_id, Plaintext=value.encode())
return super().get_db_prep_value(result['CiphertextBlob'], connection, prepared)
return super().get_db_prep_value(value, connection, prepared)
def to_python(self, value):
return value
| true | true |
f7f8f5dc93766d76651477266dea32ca815d388b | 1,442 | py | Python | prep_timit.py | toxidol/MLSP-FA2021 | 3109e62f00d6f16e410f4eae341df2e027ee9cc3 | [
"MIT"
] | null | null | null | prep_timit.py | toxidol/MLSP-FA2021 | 3109e62f00d6f16e410f4eae341df2e027ee9cc3 | [
"MIT"
] | null | null | null | prep_timit.py | toxidol/MLSP-FA2021 | 3109e62f00d6f16e410f4eae341df2e027ee9cc3 | [
"MIT"
] | null | null | null | '''
Add noise to audio files in TIMIT.
'''
import glob
import os
import random
import shutil

from tqdm import tqdm

from create_mixed_audio_file_with_soundfile import mix_clean_with_noise
if __name__ == '__main__':
    snr = 0  # target signal-to-noise ratio, in dB
    # Adjust these paths for the local directory layout.
    timit_root = '/Users/goree/Desktop/cmu/datasets/timit/data'
    output_root = f'/Users/goree/Desktop/cmu/datasets/timit_snr{snr}'
    noise_files = [
        '/Users/goree/Desktop/cmu/datasets/noise/white_noise.wav'
    ]

    wav_files = glob.glob(os.path.join(timit_root, '*', '*', '*', '*.WAV.wav'))
    for cur_wav in tqdm(wav_files):
        # Mirror the last three directory levels under output_root.
        # Splitting on os.sep works on both Windows and POSIX, unlike the
        # previous hard-coded '\\' split.
        rel_parts = os.path.normpath(os.path.dirname(cur_wav)).split(os.sep)[-3:]
        cur_dir = os.path.join(output_root, *rel_parts)
        cur_id = os.path.basename(cur_wav)[:-8]  # strip trailing '.WAV.wav'
        os.makedirs(cur_dir, exist_ok=True)
        mix_clean_with_noise(
            cur_wav,
            random.choice(noise_files),
            snr,
            os.path.join(cur_dir, f"{cur_id}.wav")
        )
        # Copy the transcript next to the noisy audio. shutil.copyfile is
        # portable and avoids shelling out (the previous os.system copy/cp
        # call was platform-specific and fragile with unusual paths).
        src_txt = os.path.join(os.path.dirname(cur_wav), f"{cur_id}.TXT")
        shutil.copyfile(src_txt, os.path.join(cur_dir, f"{cur_id}.txt"))
| 36.05 | 132 | 0.63939 |
import os
import glob
import random
from tqdm import tqdm
from create_mixed_audio_file_with_soundfile import mix_clean_with_noise
if __name__ == '__main__':
snr = 0
timit_root = '/Users/goree/Desktop/cmu/datasets/timit/data'
output_root = f'/Users/goree/Desktop/cmu/datasets/timit_snr{snr}'
noise_files = [
'/Users/goree/Desktop/cmu/datasets/noise/white_noise.wav'
]
wav_files = glob.glob(os.path.join(timit_root, '*', '*', '*', '*.WAV.wav'))
for cur_wav in tqdm(wav_files):
cur_dir = os.path.join(output_root, *os.path.dirname(cur_wav).split('\\')[-3:])
cur_id = os.path.basename(cur_wav)[:-8]
os.makedirs(cur_dir, exist_ok=True)
mix_clean_with_noise(
cur_wav,
random.choice(noise_files),
snr,
os.path.join(cur_dir, f"{cur_id}.wav")
)
cp_cmd = f'copy "{os.path.join(os.path.dirname(cur_wav), f"{cur_id}.TXT")}" "{os.path.join(cur_dir, f"{cur_id}.txt")}" >NUL'
os.system(cp_cmd)
| true | true |
f7f8f7c3573646a35c91abe428ebbe00f9f0d7de | 7,154 | py | Python | awschecks/awstagscheck.py | pvbouwel/nagios-awschecks | 80acd7f31281176542a9b8ea6609093890d24cbd | [
"Apache-2.0"
] | null | null | null | awschecks/awstagscheck.py | pvbouwel/nagios-awschecks | 80acd7f31281176542a9b8ea6609093890d24cbd | [
"Apache-2.0"
] | 2 | 2021-05-01T05:24:48.000Z | 2021-05-01T06:07:42.000Z | awschecks/awstagscheck.py | pvbouwel/nagios-awschecks | 80acd7f31281176542a9b8ea6609093890d24cbd | [
"Apache-2.0"
] | null | null | null | __author__ = 'pvbouwel'
from nagioscheck import NagiosCheck
from nagioscheck import NagiosCheckThresholdError
from nagioscheck import NagiosCheckOptionError
class AWSTagCheck(NagiosCheck):
    """Nagios check reporting AWS EC2 resources that miss mandatory tags.

    Resources missing a tag listed in ``warning`` produce WARNING results;
    resources missing a tag listed in ``critical`` produce CRITICAL results.
    """

    # Resource types this check knows how to inspect.
    possible_resources = ['instance', 'volume', 'snapshot']
    warning_tags = []
    critical_tags = []
    resources_to_check = []

    def __init__(self, connection, warning, critical, options):
        """
        The constructor for an AWSTagCheck
        :param connection: An EC2_connection
        :param warning: A list of mandatory tags that will result in a warning if not present
        :param critical: A list of mandatory tags that will result in a critical if not present
        :param options: An optional list with options (e.g. can contain credentials)
        :return: void
        """
        super(AWSTagCheck, self).__init__(connection, warning, critical, options)

    def run(self):
        """Fetch the selected resource types and check their tags."""
        self.process_passed_thresholds()
        self.check_options()
        region_name = self.connection.region.name

        if 'volume' in self.resources_to_check:
            for volume in self.connection.get_all_volumes():
                self.check_tags(volume.tags, volume.id, "Volume", region_name)

        if 'snapshot' in self.resources_to_check:
            for snapshot in self.connection.get_all_snapshots(owner='self'):
                self.check_tags(snapshot.tags, snapshot.id, "Snapshot", region_name)

        if 'instance' in self.resources_to_check:
            # get_only_instances() returns instances directly; get_all_instances
            # currently returns reservations and its return type may change in
            # future boto releases
            # (http://boto.readthedocs.org/en/latest/ref/ec2.html).
            for instance in self.connection.get_only_instances():
                self.check_tags(instance.tags, instance.id, "Instance", region_name)

    def check_tags(self, present_tags, resource_id, resource_type, region):
        """
        Verifies whether the tags present on a resource are good enough and
        records the check results (warnings, criticals and OKs).
        :param present_tags: The tags that are present on the resource
        :param resource_id: The resource ID of which the tags are checked
        :param resource_type: The resource type of which the tags are checked
        :param region: The region the resource lives in
        :return: void
        """
        resource_ok = True
        for critical_tag in self.critical_tags:
            if critical_tag not in present_tags:
                self.criticals.append(
                    "CRITICAL: %s %s(%s) is missing tag %s"
                    % (resource_type, resource_id, region, critical_tag))
                resource_ok = False
        for warning_tag in self.warning_tags:
            if warning_tag not in present_tags:
                self.warnings.append(
                    "WARNING: %s %s(%s) is missing tag %s"
                    % (resource_type, resource_id, region, warning_tag))
                resource_ok = False
        if resource_ok:
            self.OKs.append("OK: " + resource_type + " " + resource_id)

    def process_passed_thresholds(self):
        """
        Normalize the warning/critical thresholds into tag lists: None becomes
        an empty list, a comma-separated string becomes a list of tags.
        :return: void
        :raises NagiosCheckThresholdError: if a threshold is not a string
        """
        if self.warning is None:
            self.warning_tags = []
        else:
            self.check_is_string(self.warning)
            self.warning_tags = self.warning.split(',')
        if self.critical is None:
            self.critical_tags = []
        else:
            self.check_is_string(self.critical)
            self.critical_tags = self.critical.split(',')

    def check_options(self):
        """
        Check all options that can be passed
        :return: void
        """
        self.resources_to_check = self.get_validated_resources_to_check()

    def get_validated_resources_to_check(self):
        """
        Validate the comma-separated 'resource' option and return the list of
        resource types to check. Defaults to all supported types when the
        option is absent or set to 'ALL' (case-insensitive).
        :return: a list of resources that are to be checked
        :raises NagiosCheckOptionError: for a non-string option value or an
            unknown resource type
        """
        if 'resource' in self.options.keys():
            input_resource = self.options['resource']
        else:
            self.log.debug("No resource passed as argument so all resources will be validated.")
            return self.possible_resources
        if not isinstance(input_resource, str):
            # str(type(...)) is required here: concatenating the type object
            # itself would raise a TypeError and mask the real problem.
            err_message = "If a resource is passed as option, it should be a comma-separated string! Received: " \
                          + str(type(input_resource))
            raise NagiosCheckOptionError(err_message)
        if input_resource.upper() == "ALL":
            return self.possible_resources
        possible_resources_list = []
        for ir in input_resource.split(','):
            if ir in self.possible_resources:
                possible_resources_list.append(ir)
            else:
                raise NagiosCheckOptionError("Unknown resource type passed as resource " + ir)
        return possible_resources_list

    @staticmethod
    def check_is_string(received_object):
        """Return True if received_object is a str, else raise
        NagiosCheckThresholdError."""
        if isinstance(received_object, str):
            return True
        err_message = "Warning and critical threshold should be of type string. Received " + \
                      str(type(received_object))
        raise NagiosCheckThresholdError(err_message)

    def print_usage(self):
        """Print human-readable usage information for this check."""
        usage_info = "-- Usage information for AWSTagCheck --\n"
        usage_info += "This check is initiated with --check=awstagscheck and will report missing tags\n"
        usage_info += "--- arguments --- \n\n"
        usage_info += "warning is a list containing comma-separated tags that will result in a warning when missing.\n"
        usage_info += "critical is a list containing comma-separated tags that will result in a critical when missing."\
                      "\n"
        usage_info += "resource is a AWS resource-type that needs to be checked, by default all will be checked.\n "
        usage_info += "         If this option is used only the resource of the mentioned type will be checked."
        usage_info += "         A commaseparated list can be used to take multiple. ALL can be passed to check all "
        usage_info += "         supported resource types (default behavior)."
        usage_info += "         Possible values:"
        for resource in self.possible_resources:
            usage_info += " - " + resource
        usage_info += "\n"
        print(usage_info)
| 44.434783 | 120 | 0.632653 | __author__ = 'pvbouwel'
from nagioscheck import NagiosCheck
from nagioscheck import NagiosCheckThresholdError
from nagioscheck import NagiosCheckOptionError
class AWSTagCheck(NagiosCheck):
possible_resources = ['instance', 'volume', 'snapshot']
warning_tags = []
critical_tags = []
resources_to_check = []
def __init__(self, connection, warning, critical, options):
super(AWSTagCheck, self).__init__(connection, warning, critical, options)
def run(self):
self.process_passed_thresholds()
self.check_options()
region_name = self.connection.region.name
if 'volume' in self.resources_to_check:
all_volumes = self.connection.get_all_volumes()
for volume in all_volumes:
self.check_tags(volume.tags, volume.id, "Volume", region_name)
if 'snapshot' in self.resources_to_check:
all_snapshots = self.connection.get_all_snapshots(owner='self')
for snapshot in all_snapshots:
self.check_tags(snapshot.tags, snapshot.id, "Snapshot", region_name)
if 'instance' in self.resources_to_check:
= self.connection.get_only_instances()
for instance in all_instances:
self.check_tags(instance.tags, instance.id, "Instance", region_name)
def check_tags(self, present_tags, resource_id, resource_type, region):
resource_ok = True
for critical_tag in self.critical_tags:
if not critical_tag in present_tags:
self.criticals.append("CRITICAL: " + resource_type + " " + resource_id + "(" + region + ") is missing "
"tag " + critical_tag)
resource_ok = False
for warning_tag in self.warning_tags:
if not warning_tag in present_tags:
self.warnings.append("WARNING: " + resource_type + " " + resource_id + "(" + region + ") is missing "
"tag " + warning_tag)
resource_ok = False
if resource_ok:
self.OKs.append("OK: " + resource_type + " " + resource_id)
def process_passed_thresholds(self):
if self.warning is None:
self.warning_tags = []
else:
self.check_is_string(self.warning)
self.warning_tags = self.warning.split(',')
if self.critical is None:
self.critical_tags = []
else:
self.check_is_string(self.critical)
self.critical_tags = self.critical.split(',')
def check_options(self):
self.resources_to_check = self.get_validated_resources_to_check()
def get_validated_resources_to_check(self):
if 'resource' in self.options.keys():
input_resource = self.options['resource']
else:
self.log.debug("No resource passed as argument so all resources will be validated.")
return self.possible_resources
if not isinstance(input_resource, str):
err_message = "If a resource is passed as option, it should be a comma-separated string! Received: " \
+ type(input_resource)
raise NagiosCheckOptionError(err_message)
if input_resource.upper() == "ALL":
return self.possible_resources
possible_resources_list = []
input_resources = input_resource.split(',')
for ir in input_resources:
if ir in self.possible_resources:
possible_resources_list.append(ir)
else:
raise NagiosCheckOptionError("Unknown resource type passed as resource " + ir)
return possible_resources_list
@staticmethod
def check_is_string(received_object):
if isinstance(received_object, str):
return True
else:
err_message = "Warning and critical threshold should be of type string. Received " + \
str(type(received_object))
raise NagiosCheckThresholdError(err_message)
def print_usage(self):
usage_info = "-- Usage information for AWSTagCheck --\n"
usage_info += "This check is initiated with --check=awstagscheck and will report missing tags\n"
usage_info += "--- arguments --- \n\n"
usage_info += "warning is a list containing comma-separated tags that will result in a warning when missing.\n"
usage_info += "critical is a list containing comma-separated tags that will result in a critical when missing."\
"\n"
usage_info += "resource is a AWS resource-type that needs to be checked, by default all will be checked.\n "
usage_info += " If this option is used only the resource of the mentioned type will be checked."
usage_info += " A commaseparated list can be used to take multiple. ALL can be passed to check all "
usage_info += " supported resource types (default behavior)."
usage_info += " Possible values:"
for resource in self.possible_resources:
usage_info += " - " + resource
usage_info += "\n"
print(usage_info)
| true | true |
f7f8f86dbe8447ac259cdbc99f1185e6db2a2623 | 10,763 | py | Python | superset/models/sql_lab.py | isabella232/incubator-superset-internal | 81f77fd28e8b4320b2b7e829312cefa8d326235a | [
"Apache-2.0"
] | null | null | null | superset/models/sql_lab.py | isabella232/incubator-superset-internal | 81f77fd28e8b4320b2b7e829312cefa8d326235a | [
"Apache-2.0"
] | 1 | 2021-02-23T16:45:47.000Z | 2021-04-24T23:15:03.000Z | superset/models/sql_lab.py | isabella232/incubator-superset-internal | 81f77fd28e8b4320b2b7e829312cefa8d326235a | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""A collection of ORM sqlalchemy models for SQL Lab"""
import re
from datetime import datetime
from typing import Any, Dict, List
import simplejson as json
import sqlalchemy as sqla
from flask import Markup
from flask_appbuilder import Model
from flask_appbuilder.models.decorators import renders
from humanize import naturaltime
from sqlalchemy import (
Boolean,
Column,
DateTime,
ForeignKey,
Integer,
Numeric,
String,
Text,
)
from sqlalchemy.engine.url import URL
from sqlalchemy.orm import backref, relationship
from superset import security_manager
from superset.models.helpers import AuditMixinNullable, ExtraJSONMixin
from superset.models.tags import QueryUpdater
from superset.sql_parse import CtasMethod, ParsedQuery, Table
from superset.utils.core import QueryStatus, user_label
class Query(Model, ExtraJSONMixin):
    """ORM model for a SQL Lab query.

    Now that SQL Lab supports multi-statement execution, an entry in this
    table may represent multiple SQL statements executed sequentially."""

    __tablename__ = "query"
    id = Column(Integer, primary_key=True)
    # Client-generated id the frontend uses to track the query.
    client_id = Column(String(11), unique=True, nullable=False)
    database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)
    # Store the tmp table into the DB only if the user asks for it.
    tmp_table_name = Column(String(256))
    tmp_schema_name = Column(String(256))
    user_id = Column(Integer, ForeignKey("ab_user.id"), nullable=True)
    status = Column(String(16), default=QueryStatus.PENDING)
    tab_name = Column(String(256))
    sql_editor_id = Column(String(256))
    schema = Column(String(256))
    sql = Column(Text)
    # Query to retrieve the results,
    # used only in case of select_as_cta_used is true.
    select_sql = Column(Text)
    executed_sql = Column(Text)
    # Could be configured in the superset config.
    limit = Column(Integer)
    select_as_cta = Column(Boolean)
    select_as_cta_used = Column(Boolean, default=False)
    ctas_method = Column(String(16), default=CtasMethod.TABLE)
    progress = Column(Integer, default=0)  # 1..100
    # # of rows in the result set or rows modified.
    rows = Column(Integer)
    error_message = Column(Text)
    # key used to store the results in the results backend
    results_key = Column(String(64), index=True)
    # Using Numeric in place of DateTime for sub-second precision
    # stored as seconds since epoch, allowing for milliseconds
    start_time = Column(Numeric(precision=20, scale=6))
    start_running_time = Column(Numeric(precision=20, scale=6))
    end_time = Column(Numeric(precision=20, scale=6))
    end_result_backend_time = Column(Numeric(precision=20, scale=6))
    tracking_url = Column(Text)
    changed_on = Column(
        DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=True
    )
    database = relationship(
        "Database",
        foreign_keys=[database_id],
        backref=backref("queries", cascade="all, delete-orphan"),
    )
    user = relationship(security_manager.user_model, foreign_keys=[user_id])
    __table_args__ = (sqla.Index("ti_user_id_changed_on", user_id, changed_on),)

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable dict of this query's state
        (camelCase keys)."""
        return {
            "changedOn": self.changed_on,
            "changed_on": self.changed_on.isoformat(),
            "dbId": self.database_id,
            "db": self.database.database_name,
            "endDttm": self.end_time,
            "errorMessage": self.error_message,
            "executedSql": self.executed_sql,
            "id": self.client_id,
            "queryId": self.id,
            "limit": self.limit,
            "progress": self.progress,
            "rows": self.rows,
            "schema": self.schema,
            "ctas": self.select_as_cta,
            "serverId": self.id,
            "sql": self.sql,
            "sqlEditorId": self.sql_editor_id,
            "startDttm": self.start_time,
            "state": self.status.lower(),
            "tab": self.tab_name,
            "tempSchema": self.tmp_schema_name,
            "tempTable": self.tmp_table_name,
            "userId": self.user_id,
            "user": user_label(self.user),
            "resultsKey": self.results_key,
            "trackingUrl": self.tracking_url,
            "extra": self.extra,
        }

    @property
    def name(self) -> str:
        """A sanitized name of the form ``sqllab_<tab>_<timestamp>``."""
        ts = datetime.now().isoformat()
        ts = ts.replace("-", "").replace(":", "").split(".")[0]
        tab = self.tab_name.replace(" ", "_").lower() if self.tab_name else "notab"
        # Strip any remaining non-word characters from the tab name.
        tab = re.sub(r"\W+", "", tab)
        return f"sqllab_{tab}_{ts}"

    @property
    def database_name(self) -> str:
        """Name of the database this query runs against."""
        return self.database.name

    @property
    def username(self) -> str:
        """Username of the query's owner."""
        return self.user.username

    def raise_for_access(self) -> None:
        """
        Raise an exception if the user cannot access the resource.

        :raises SupersetSecurityException: If the user cannot access the resource
        """
        security_manager.raise_for_access(query=self)
class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin):
    """ORM model for a saved SQL Lab query."""

    __tablename__ = "saved_query"
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey("ab_user.id"), nullable=True)
    db_id = Column(Integer, ForeignKey("dbs.id"), nullable=True)
    schema = Column(String(128))
    label = Column(String(256))
    description = Column(Text)
    sql = Column(Text)
    user = relationship(
        security_manager.user_model,
        backref=backref("saved_queries", cascade="all, delete-orphan"),
        foreign_keys=[user_id],
    )
    database = relationship(
        "Database",
        foreign_keys=[db_id],
        backref=backref("saved_queries", cascade="all, delete-orphan"),
    )
    rows = Column(Integer, nullable=True)
    last_run = Column(DateTime, nullable=True)

    def __repr__(self) -> str:
        return str(self.label)

    @property
    def pop_tab_link(self) -> Markup:
        # Link that opens this saved query in a SQL Lab tab.
        return Markup(
            f"""
            <a href="/superset/sqllab?savedQueryId={self.id}">
                <i class="fa fa-link"></i>
            </a>
            """
        )

    @property
    def user_email(self) -> str:
        return self.user.email

    @property
    def sqlalchemy_uri(self) -> URL:
        return self.database.sqlalchemy_uri

    def url(self) -> str:
        """URL that opens this saved query in SQL Lab."""
        return "/superset/sqllab?savedQueryId={0}".format(self.id)

    @property
    def sql_tables(self) -> List[Table]:
        # Tables referenced by the saved SQL, as extracted by ParsedQuery.
        return list(ParsedQuery(self.sql).tables)

    @property
    def last_run_humanized(self) -> str:
        # NOTE(review): computed from `changed_on`, not the `last_run`
        # column -- presumably intentional, but confirm before relying on it.
        return naturaltime(datetime.now() - self.changed_on)

    @property
    def _last_run_delta_humanized(self) -> str:
        return naturaltime(datetime.now() - self.changed_on)

    @renders("changed_on")
    def last_run_delta_humanized(self) -> str:
        return self._last_run_delta_humanized
class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
    """Persisted state of a single SQL Lab editor tab."""

    __tablename__ = "tab_state"

    # basic info
    id = Column(Integer, primary_key=True, autoincrement=True)
    user_id = Column(Integer, ForeignKey("ab_user.id"))
    label = Column(String(256))
    active = Column(Boolean, default=False)

    # selected DB and schema
    database_id = Column(Integer, ForeignKey("dbs.id"))
    database = relationship("Database", foreign_keys=[database_id])
    schema = Column(String(256))

    # tables that are open in the schema browser and their data previews
    table_schemas = relationship(
        "TableSchema",
        cascade="all, delete-orphan",
        backref="tab_state",
        passive_deletes=True,
    )

    # the query in the textarea, and results (if any)
    sql = Column(Text)
    query_limit = Column(Integer)

    # latest query that was run
    latest_query_id = Column(Integer, ForeignKey("query.client_id"))
    latest_query = relationship("Query")

    # other properties
    autorun = Column(Boolean, default=False)
    template_params = Column(Text)

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable dict of the tab state for the
        frontend."""
        return {
            "id": self.id,
            "user_id": self.user_id,
            "label": self.label,
            "active": self.active,
            "database_id": self.database_id,
            "schema": self.schema,
            "table_schemas": [ts.to_dict() for ts in self.table_schemas],
            "sql": self.sql,
            "query_limit": self.query_limit,
            "latest_query": self.latest_query.to_dict() if self.latest_query else None,
            "autorun": self.autorun,
            "template_params": self.template_params,
        }
class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
    """A table open in the SQL Lab schema browser, tied to a tab state.

    Caches a JSON ``description`` (columns, partitions, latest partition,
    etc.) used to render the table preview in the frontend.
    """

    __tablename__ = "table_schema"

    id = Column(Integer, primary_key=True, autoincrement=True)
    tab_state_id = Column(Integer, ForeignKey("tab_state.id", ondelete="CASCADE"))

    database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)
    database = relationship("Database", foreign_keys=[database_id])
    schema = Column(String(256))
    table = Column(String(256))

    # JSON describing the schema, partitions, latest partition, etc.
    description = Column(Text)
    expanded = Column(Boolean, default=False)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize for the frontend; unparseable/missing JSON becomes None."""
        try:
            description = json.loads(self.description)
        # TypeError covers a NULL description (json.loads(None));
        # JSONDecodeError covers malformed JSON text. Previously only
        # JSONDecodeError was caught, so a NULL column crashed this method.
        except (TypeError, json.JSONDecodeError):
            description = None
        return {
            "id": self.id,
            "tab_state_id": self.tab_state_id,
            "database_id": self.database_id,
            "schema": self.schema,
            "table": self.table,
            "description": description,
            "expanded": self.expanded,
        }
# Keep the tagging system in sync with SavedQuery create/update/delete events.
sqla.event.listen(SavedQuery, "after_insert", QueryUpdater.after_insert)
sqla.event.listen(SavedQuery, "after_update", QueryUpdater.after_update)
sqla.event.listen(SavedQuery, "after_delete", QueryUpdater.after_delete)
| 33.634375 | 87 | 0.658181 |
import re
from datetime import datetime
from typing import Any, Dict, List
import simplejson as json
import sqlalchemy as sqla
from flask import Markup
from flask_appbuilder import Model
from flask_appbuilder.models.decorators import renders
from humanize import naturaltime
from sqlalchemy import (
Boolean,
Column,
DateTime,
ForeignKey,
Integer,
Numeric,
String,
Text,
)
from sqlalchemy.engine.url import URL
from sqlalchemy.orm import backref, relationship
from superset import security_manager
from superset.models.helpers import AuditMixinNullable, ExtraJSONMixin
from superset.models.tags import QueryUpdater
from superset.sql_parse import CtasMethod, ParsedQuery, Table
from superset.utils.core import QueryStatus, user_label
class Query(Model, ExtraJSONMixin):
    """ORM model tracking a single SQL Lab query execution.

    Records the SQL that was run, where it ran, its lifecycle state,
    timing information, and how to locate the result set.
    """

    __tablename__ = "query"

    id = Column(Integer, primary_key=True)
    # Client-generated short id used by the SQL Lab frontend.
    client_id = Column(String(11), unique=True, nullable=False)
    database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)
    # Target table/schema when running as CREATE TABLE AS (CTAS).
    tmp_table_name = Column(String(256))
    tmp_schema_name = Column(String(256))
    user_id = Column(Integer, ForeignKey("ab_user.id"), nullable=True)
    status = Column(String(16), default=QueryStatus.PENDING)
    tab_name = Column(String(256))
    sql_editor_id = Column(String(256))
    schema = Column(String(256))
    sql = Column(Text)
    select_sql = Column(Text)
    executed_sql = Column(Text)
    limit = Column(Integer)
    select_as_cta = Column(Boolean)
    select_as_cta_used = Column(Boolean, default=False)
    ctas_method = Column(String(16), default=CtasMethod.TABLE)
    progress = Column(Integer, default=0)
    # Restored attribute names: the source contained a dangling
    # "= Column(Text)" with no target (a syntax error), while to_dict()
    # below reads both self.rows and self.error_message.
    rows = Column(Integer)
    error_message = Column(Text)
    # Key identifying the stored result set (exposed as "resultsKey").
    results_key = Column(String(64), index=True)
    # Timing columns, Numeric(20, 6); presumably epoch seconds -- confirm.
    start_time = Column(Numeric(precision=20, scale=6))
    start_running_time = Column(Numeric(precision=20, scale=6))
    end_time = Column(Numeric(precision=20, scale=6))
    end_result_backend_time = Column(Numeric(precision=20, scale=6))
    tracking_url = Column(Text)

    changed_on = Column(
        DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=True
    )

    database = relationship(
        "Database",
        foreign_keys=[database_id],
        backref=backref("queries", cascade="all, delete-orphan"),
    )
    user = relationship(security_manager.user_model, foreign_keys=[user_id])

    __table_args__ = (sqla.Index("ti_user_id_changed_on", user_id, changed_on),)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the query for the SQL Lab frontend (camelCase keys)."""
        return {
            "changedOn": self.changed_on,
            "changed_on": self.changed_on.isoformat(),
            "dbId": self.database_id,
            "db": self.database.database_name,
            "endDttm": self.end_time,
            "errorMessage": self.error_message,
            "executedSql": self.executed_sql,
            "id": self.client_id,
            "queryId": self.id,
            "limit": self.limit,
            "progress": self.progress,
            "rows": self.rows,
            "schema": self.schema,
            "ctas": self.select_as_cta,
            "serverId": self.id,
            "sql": self.sql,
            "sqlEditorId": self.sql_editor_id,
            "startDttm": self.start_time,
            "state": self.status.lower(),
            "tab": self.tab_name,
            "tempSchema": self.tmp_schema_name,
            "tempTable": self.tmp_table_name,
            "userId": self.user_id,
            "user": user_label(self.user),
            "resultsKey": self.results_key,
            "trackingUrl": self.tracking_url,
            "extra": self.extra,
        }

    @property
    def name(self) -> str:
        """Auto-generated CTAS table name: sqllab_<tab>_<timestamp>."""
        ts = datetime.now().isoformat()
        ts = ts.replace("-", "").replace(":", "").split(".")[0]
        tab = self.tab_name.replace(" ", "_").lower() if self.tab_name else "notab"
        tab = re.sub(r"\W+", "", tab)
        return f"sqllab_{tab}_{ts}"

    @property
    def database_name(self) -> str:
        """Name of the database this query ran against."""
        return self.database.name

    @property
    def username(self) -> str:
        """Username of the query owner."""
        return self.user.username

    def raise_for_access(self) -> None:
        """Raise if the current user is not allowed to access this query."""
        security_manager.raise_for_access(query=self)
class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin):
    """A user-saved SQL Lab query (label, description and the SQL text)."""

    __tablename__ = "saved_query"

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey("ab_user.id"), nullable=True)
    db_id = Column(Integer, ForeignKey("dbs.id"), nullable=True)
    schema = Column(String(128))
    label = Column(String(256))
    description = Column(Text)
    sql = Column(Text)
    user = relationship(
        security_manager.user_model,
        backref=backref("saved_queries", cascade="all, delete-orphan"),
        foreign_keys=[user_id],
    )
    database = relationship(
        "Database",
        foreign_keys=[db_id],
        backref=backref("saved_queries", cascade="all, delete-orphan"),
    )
    # Row count and timestamp from the most recent execution, if any.
    rows = Column(Integer, nullable=True)
    last_run = Column(DateTime, nullable=True)

    def __repr__(self) -> str:
        """Represent the saved query by its user-facing label."""
        return str(self.label)

    @property
    def pop_tab_link(self) -> Markup:
        """HTML link that opens this saved query in SQL Lab."""
        return Markup(
            f"""
            <a href="/superset/sqllab?savedQueryId={self.id}">
                <i class="fa fa-link"></i>
            </a>
            """
        )

    @property
    def user_email(self) -> str:
        """Email address of the owning user."""
        return self.user.email

    @property
    def sqlalchemy_uri(self) -> URL:
        """SQLAlchemy URI of the target database."""
        return self.database.sqlalchemy_uri

    def url(self) -> str:
        """Relative SQL Lab URL that opens this saved query."""
        return "/superset/sqllab?savedQueryId={0}".format(self.id)

    @property
    def sql_tables(self) -> List[Table]:
        """Tables referenced by the saved SQL, as extracted by ParsedQuery."""
        return list(ParsedQuery(self.sql).tables)

    @property
    def last_run_humanized(self) -> str:
        """Humanized elapsed time since the last change (from changed_on)."""
        return naturaltime(datetime.now() - self.changed_on)

    @property
    def _last_run_delta_humanized(self) -> str:
        """Humanized delta since changed_on; backs the rendered property."""
        return naturaltime(datetime.now() - self.changed_on)

    @renders("changed_on")
    def last_run_delta_humanized(self) -> str:
        """Rendered (list-view) form of the humanized delta."""
        return self._last_run_delta_humanized
class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
    """Persisted state of a single SQL Lab editor tab for a user."""

    __tablename__ = "tab_state"

    id = Column(Integer, primary_key=True, autoincrement=True)
    user_id = Column(Integer, ForeignKey("ab_user.id"))
    label = Column(String(256))
    # Whether this tab is the currently selected one for the user.
    active = Column(Boolean, default=False)

    # Currently selected database and schema.
    database_id = Column(Integer, ForeignKey("dbs.id"))
    database = relationship("Database", foreign_keys=[database_id])
    schema = Column(String(256))

    # Tables open in the schema browser for this tab.
    table_schemas = relationship(
        "TableSchema",
        cascade="all, delete-orphan",
        backref="tab_state",
        passive_deletes=True,
    )

    # Editor contents and row limit.
    sql = Column(Text)
    query_limit = Column(Integer)

    # Most recent query executed from this tab.
    latest_query_id = Column(Integer, ForeignKey("query.client_id"))
    latest_query = relationship("Query")

    autorun = Column(Boolean, default=False)
    template_params = Column(Text)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the tab (and its open table schemas) for the frontend."""
        return {
            "id": self.id,
            "user_id": self.user_id,
            "label": self.label,
            "active": self.active,
            "database_id": self.database_id,
            "schema": self.schema,
            "table_schemas": [ts.to_dict() for ts in self.table_schemas],
            "sql": self.sql,
            "query_limit": self.query_limit,
            "latest_query": self.latest_query.to_dict() if self.latest_query else None,
            "autorun": self.autorun,
            "template_params": self.template_params,
        }
class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
    """A table open in the SQL Lab schema browser, tied to a tab state."""

    __tablename__ = "table_schema"

    id = Column(Integer, primary_key=True, autoincrement=True)
    tab_state_id = Column(Integer, ForeignKey("tab_state.id", ondelete="CASCADE"))

    database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)
    database = relationship("Database", foreign_keys=[database_id])
    schema = Column(String(256))
    table = Column(String(256))

    # Cached JSON description of the table (columns, partitions, etc.).
    description = Column(Text)
    expanded = Column(Boolean, default=False)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize for the frontend; unparseable/missing JSON becomes None."""
        try:
            description = json.loads(self.description)
        # TypeError covers a NULL description (json.loads(None));
        # JSONDecodeError covers malformed JSON text. Previously only
        # JSONDecodeError was caught, so a NULL column crashed this method.
        except (TypeError, json.JSONDecodeError):
            description = None
        return {
            "id": self.id,
            "tab_state_id": self.tab_state_id,
            "database_id": self.database_id,
            "schema": self.schema,
            "table": self.table,
            "description": description,
            "expanded": self.expanded,
        }
# Keep the tagging system in sync with SavedQuery create/update/delete events.
sqla.event.listen(SavedQuery, "after_insert", QueryUpdater.after_insert)
sqla.event.listen(SavedQuery, "after_update", QueryUpdater.after_update)
sqla.event.listen(SavedQuery, "after_delete", QueryUpdater.after_delete)
| true | true |
f7f8f91f3b4c784eb51e7f9535f53fbad1535a11 | 5,471 | py | Python | lte/gateway/python/magma/mobilityd/tests/test_dhcp_client.py | silveryfu/magma | ba490fb13d054b7085bbaca474aa831c4e7bcb94 | [
"BSD-3-Clause"
] | null | null | null | lte/gateway/python/magma/mobilityd/tests/test_dhcp_client.py | silveryfu/magma | ba490fb13d054b7085bbaca474aa831c4e7bcb94 | [
"BSD-3-Clause"
] | 85 | 2020-08-11T06:27:29.000Z | 2022-03-22T10:15:09.000Z | lte/gateway/python/magma/mobilityd/tests/test_dhcp_client.py | rckclmbr/magma | 6e537f7274ec3b7827743c546e2926defcc63103 | [
"BSD-3-Clause"
] | null | null | null | """
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import datetime
import logging
import os
import subprocess
import sys
import threading
import time
import unittest
from freezegun import freeze_time
from magma.pipelined.bridge_util import BridgeTools
from magma.mobilityd.dhcp_client import DHCPClient
from magma.mobilityd.mac import MacAddress
from magma.mobilityd.dhcp_desc import DHCPState, DHCPDescriptor
from magma.mobilityd.uplink_gw import UplinkGatewayInfo
from scapy.layers.dhcp import DHCP
from scapy.layers.l2 import Ether
from scapy.sendrecv import AsyncSniffer
LOG = logging.getLogger('mobilityd.dhcp.test')
LOG.isEnabledFor(logging.DEBUG)
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
SCRIPT_PATH = "/home/vagrant/magma/lte/gateway/python/magma/mobilityd/"
DHCP_IFACE = "t0uplink_p0"
PKT_CAPTURE_WAIT = 2
"""
Test dhclient class independent of IP allocator.
"""
class DhcpClient(unittest.TestCase):
    """Exercises DHCPClient lease acquisition, renewal and release against a
    local dnsmasq server on a test bridge. Requires root."""

    def setUp(self):
        """Start a test DHCP server and uplink bridge, then launch the client."""
        self._br = "t_up_br0"
        # Kill any stale dnsmasq from a previous run; absence is fine.
        try:
            subprocess.check_call(["pkill", "dnsmasq"])
        except subprocess.CalledProcessError:
            pass
        setup_dhcp_server = SCRIPT_PATH + "scripts/setup-test-dhcp-srv.sh"
        subprocess.check_call([setup_dhcp_server, "t0"])
        setup_uplink_br = [SCRIPT_PATH + "scripts/setup-uplink-br.sh",
                           self._br,
                           DHCP_IFACE,
                           "8A:00:00:00:00:01"]
        subprocess.check_call(setup_uplink_br)
        self.dhcp_wait = threading.Condition()
        self.dhcp_store = {}
        self.gw_info_map = {}
        self.gw_info = UplinkGatewayInfo(self.gw_info_map)
        # lease_renew_wait_min=1 keeps renewal timers short for the test.
        self._dhcp_client = DHCPClient(dhcp_wait=self.dhcp_wait,
                                       dhcp_store=self.dhcp_store,
                                       gw_info=self.gw_info,
                                       iface="t_dhcp0",
                                       lease_renew_wait_min=1)
        self._dhcp_client.run()

    def tearDown(self):
        """Stop the DHCP client and tear down the test bridge."""
        self._dhcp_client.stop()
        BridgeTools.destroy_bridge(self._br)

    @unittest.skipIf(os.getuid(), reason="needs root user")
    def test_dhcp_lease1(self):
        """Allocate a lease, verify renewal (REQUEST) before the deadline and
        re-discovery (DISCOVER) after it, then release the address."""
        self._setup_sniffer()
        mac1 = MacAddress("11:22:33:44:55:66")
        dhcp1 = self._alloc_ip_address_from_dhcp(mac1)
        self.assertEqual(dhcp1.state_requested, DHCPState.REQUEST)
        assert (dhcp1.state == DHCPState.OFFER or dhcp1.state == DHCPState.ACK)

        # Trigger lease renewal before the deadline.
        time1 = datetime.datetime.now() + datetime.timedelta(seconds=100)
        self._start_sniffer()
        with freeze_time(time1):
            time.sleep(PKT_CAPTURE_WAIT)
            self._stop_sniffer_and_check(DHCPState.REQUEST, mac1)
        self.assertEqual(dhcp1.state_requested, DHCPState.REQUEST)
        assert (dhcp1.state == DHCPState.OFFER or dhcp1.state == DHCPState.ACK)

        # Trigger full re-discovery after the lease deadline.
        time2 = datetime.datetime.now() + datetime.timedelta(seconds=200)
        self._start_sniffer()
        with freeze_time(time2):
            time.sleep(PKT_CAPTURE_WAIT)
            self._stop_sniffer_and_check(DHCPState.DISCOVER, mac1)
        self.assertEqual(dhcp1.state_requested, DHCPState.REQUEST)
        assert (dhcp1.state == DHCPState.OFFER or dhcp1.state == DHCPState.ACK)

        self._dhcp_client.release_ip_address(mac1)
        dhcp1 = self.dhcp_store.get(mac1.as_redis_key())
        self.assertEqual(dhcp1.state_requested, DHCPState.RELEASE)

    def _alloc_ip_address_from_dhcp(self, mac: MacAddress) -> DHCPDescriptor:
        """Poll until *mac* holds an allocated IP or ~60 retries elapse.

        Re-sends DISCOVER every 5th retry in case packets were lost.
        """
        retry_count = 0
        with self.dhcp_wait:
            dhcp_desc = None
            while (retry_count < 60 and (dhcp_desc is None or
                                         dhcp_desc.ip_is_allocated() is not True)):
                if retry_count % 5 == 0:
                    self._dhcp_client.send_dhcp_packet(mac, DHCPState.DISCOVER)
                self.dhcp_wait.wait(timeout=1)
                dhcp_desc = self._dhcp_client.get_dhcp_desc(mac)
                retry_count = retry_count + 1
            return dhcp_desc

    def _handle_dhcp_req_packet(self, packet):
        """Sniffer callback: collect only DHCP packets for later inspection."""
        if DHCP not in packet:
            return
        self.pkt_list.append(packet)

    def _setup_sniffer(self):
        """Create (but do not start) an async sniffer on the DHCP ports."""
        self._sniffer = AsyncSniffer(iface=DHCP_IFACE,
                                     filter="udp and (port 67 or 68)",
                                     prn=self._handle_dhcp_req_packet)

    def _start_sniffer(self):
        """Reset the capture buffer and start sniffing."""
        self.pkt_list = []
        self._sniffer.start()
        time.sleep(.5)  # give the sniffer time to attach

    def _stop_sniffer_and_check(self, state: DHCPState, mac: MacAddress):
        """Stop sniffing and assert a DHCP packet of *state* from *mac* was
        captured; otherwise fail the test."""
        self._sniffer.stop()
        for pkt in self.pkt_list:
            logging.debug("pkt: %s " % pkt.summary())
            if DHCP in pkt:
                if pkt[DHCP].options[0][1] == int(state) and \
                        pkt[Ether].src == str(mac):
                    return
        assert 0  # no matching packet captured
| 36.966216 | 87 | 0.638457 | import datetime
import logging
import os
import subprocess
import sys
import threading
import time
import unittest
from freezegun import freeze_time
from magma.pipelined.bridge_util import BridgeTools
from magma.mobilityd.dhcp_client import DHCPClient
from magma.mobilityd.mac import MacAddress
from magma.mobilityd.dhcp_desc import DHCPState, DHCPDescriptor
from magma.mobilityd.uplink_gw import UplinkGatewayInfo
from scapy.layers.dhcp import DHCP
from scapy.layers.l2 import Ether
from scapy.sendrecv import AsyncSniffer
LOG = logging.getLogger('mobilityd.dhcp.test')
LOG.isEnabledFor(logging.DEBUG)
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
SCRIPT_PATH = "/home/vagrant/magma/lte/gateway/python/magma/mobilityd/"
DHCP_IFACE = "t0uplink_p0"
PKT_CAPTURE_WAIT = 2
class DhcpClient(unittest.TestCase):
    """Exercises DHCPClient lease acquisition, renewal and release against a
    local dnsmasq server on a test bridge. Requires root."""

    def setUp(self):
        """Start a test DHCP server and uplink bridge, then launch the client."""
        self._br = "t_up_br0"
        # Kill any stale dnsmasq from a previous run; absence is fine.
        try:
            subprocess.check_call(["pkill", "dnsmasq"])
        except subprocess.CalledProcessError:
            pass
        setup_dhcp_server = SCRIPT_PATH + "scripts/setup-test-dhcp-srv.sh"
        subprocess.check_call([setup_dhcp_server, "t0"])
        setup_uplink_br = [SCRIPT_PATH + "scripts/setup-uplink-br.sh",
                           self._br,
                           DHCP_IFACE,
                           "8A:00:00:00:00:01"]
        subprocess.check_call(setup_uplink_br)
        self.dhcp_wait = threading.Condition()
        self.dhcp_store = {}
        self.gw_info_map = {}
        self.gw_info = UplinkGatewayInfo(self.gw_info_map)
        # lease_renew_wait_min=1 keeps renewal timers short for the test.
        self._dhcp_client = DHCPClient(dhcp_wait=self.dhcp_wait,
                                       dhcp_store=self.dhcp_store,
                                       gw_info=self.gw_info,
                                       iface="t_dhcp0",
                                       lease_renew_wait_min=1)
        self._dhcp_client.run()

    def tearDown(self):
        """Stop the DHCP client and tear down the test bridge."""
        self._dhcp_client.stop()
        BridgeTools.destroy_bridge(self._br)

    @unittest.skipIf(os.getuid(), reason="needs root user")
    def test_dhcp_lease1(self):
        """Allocate a lease, verify renewal (REQUEST) before the deadline and
        re-discovery (DISCOVER) after it, then release the address."""
        self._setup_sniffer()
        mac1 = MacAddress("11:22:33:44:55:66")
        dhcp1 = self._alloc_ip_address_from_dhcp(mac1)
        self.assertEqual(dhcp1.state_requested, DHCPState.REQUEST)
        assert (dhcp1.state == DHCPState.OFFER or dhcp1.state == DHCPState.ACK)
        # Advance time past the renewal point but before lease expiry.
        time1 = datetime.datetime.now() + datetime.timedelta(seconds=100)
        self._start_sniffer()
        with freeze_time(time1):
            time.sleep(PKT_CAPTURE_WAIT)
            self._stop_sniffer_and_check(DHCPState.REQUEST, mac1)
        self.assertEqual(dhcp1.state_requested, DHCPState.REQUEST)
        assert (dhcp1.state == DHCPState.OFFER or dhcp1.state == DHCPState.ACK)
        # Advance time past lease expiry: expect a fresh DISCOVER.
        time2 = datetime.datetime.now() + datetime.timedelta(seconds=200)
        self._start_sniffer()
        with freeze_time(time2):
            time.sleep(PKT_CAPTURE_WAIT)
            self._stop_sniffer_and_check(DHCPState.DISCOVER, mac1)
        self.assertEqual(dhcp1.state_requested, DHCPState.REQUEST)
        assert (dhcp1.state == DHCPState.OFFER or dhcp1.state == DHCPState.ACK)
        self._dhcp_client.release_ip_address(mac1)
        dhcp1 = self.dhcp_store.get(mac1.as_redis_key())
        self.assertEqual(dhcp1.state_requested, DHCPState.RELEASE)

    def _alloc_ip_address_from_dhcp(self, mac: MacAddress) -> DHCPDescriptor:
        """Poll until *mac* holds an allocated IP or ~60 retries elapse.

        Re-sends DISCOVER every 5th retry in case packets were lost.
        """
        retry_count = 0
        with self.dhcp_wait:
            dhcp_desc = None
            while (retry_count < 60 and (dhcp_desc is None or
                                         dhcp_desc.ip_is_allocated() is not True)):
                if retry_count % 5 == 0:
                    self._dhcp_client.send_dhcp_packet(mac, DHCPState.DISCOVER)
                self.dhcp_wait.wait(timeout=1)
                dhcp_desc = self._dhcp_client.get_dhcp_desc(mac)
                retry_count = retry_count + 1
            return dhcp_desc

    def _handle_dhcp_req_packet(self, packet):
        """Sniffer callback: collect only DHCP packets for later inspection."""
        if DHCP not in packet:
            return
        self.pkt_list.append(packet)

    def _setup_sniffer(self):
        """Create (but do not start) an async sniffer on the DHCP ports."""
        self._sniffer = AsyncSniffer(iface=DHCP_IFACE,
                                     filter="udp and (port 67 or 68)",
                                     prn=self._handle_dhcp_req_packet)

    def _start_sniffer(self):
        """Reset the capture buffer and start sniffing."""
        self.pkt_list = []
        self._sniffer.start()
        time.sleep(.5)  # give the sniffer time to attach

    def _stop_sniffer_and_check(self, state: DHCPState, mac: MacAddress):
        """Stop sniffing and assert a DHCP packet of *state* from *mac* was
        captured; otherwise fail the test."""
        self._sniffer.stop()
        for pkt in self.pkt_list:
            logging.debug("pkt: %s " % pkt.summary())
            if DHCP in pkt:
                if pkt[DHCP].options[0][1] == int(state) and \
                        pkt[Ether].src == str(mac):
                    return
        assert 0  # no matching packet captured
| true | true |
f7f8fa647fba78c1ce9c223ce3f3d69b64ab9abd | 3,970 | py | Python | cirq/testing/lin_alg_utils.py | kunalq/Cirq | e73c9bef672e83143ab04e7f169988149055d630 | [
"Apache-2.0"
] | 1 | 2019-09-04T16:55:30.000Z | 2019-09-04T16:55:30.000Z | cirq/testing/lin_alg_utils.py | rajeshkumarkarra/Cirq | 5ad06cc7a487ca94436715a3c51b6a50dfd10513 | [
"Apache-2.0"
] | null | null | null | cirq/testing/lin_alg_utils.py | rajeshkumarkarra/Cirq | 5ad06cc7a487ca94436715a3c51b6a50dfd10513 | [
"Apache-2.0"
] | 1 | 2018-10-25T19:36:50.000Z | 2018-10-25T19:36:50.000Z | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A testing class with utilities for checking linear algebra."""
from typing import Optional
import numpy as np
from cirq import linalg
def random_superposition(dim: int) -> np.ndarray:
    """Samples a uniformly random unit-length complex vector.

    Args:
        dim: Number of complex amplitudes in the vector.

    Returns:
        A complex numpy array of shape (dim,) with unit 2-norm.
    """
    real_part = np.random.randn(dim).astype(complex)
    imag_part = 1j * np.random.randn(dim)
    unnormalized = real_part + imag_part
    return unnormalized / np.linalg.norm(unnormalized)
def random_unitary(dim: int) -> np.ndarray:
    """Samples a dim x dim unitary matrix from the Haar distribution.

    QR-decomposes a complex Ginibre matrix and normalizes the phases of
    R's diagonal, per Mezzadri (arXiv:math-ph/0609050).

    Args:
        dim: The width and height of the matrix.

    Returns:
        A dim x dim Haar-random unitary numpy array.
    """
    real_part = np.random.randn(dim, dim)
    imag_part = np.random.randn(dim, dim)
    q, r = np.linalg.qr(real_part + 1j * imag_part)
    phases = np.diag(r)
    return q * (phases / abs(phases))
def random_orthogonal(dim: int) -> np.ndarray:
    """Samples a dim x dim orthogonal matrix from the Haar distribution.

    QR-decomposes a real Gaussian matrix and normalizes the signs of R's
    diagonal, per Mezzadri (arXiv:math-ph/0609050).

    Args:
        dim: The width and height of the matrix.

    Returns:
        A dim x dim Haar-random orthogonal numpy array.
    """
    q, r = np.linalg.qr(np.random.randn(dim, dim))
    signs = np.diag(r)
    return q * (signs / abs(signs))
def random_special_unitary(dim: int) -> np.ndarray:
    """Samples a Haar-random unitary with determinant fixed to 1.

    Divides the first row of a Haar-random unitary by its determinant (a
    unit-modulus scalar, so unitarity is preserved).

    Args:
        dim: The width and height of the matrix.

    Returns:
        A dim x dim special unitary numpy array.
    """
    result = random_unitary(dim)
    determinant = np.linalg.det(result)
    result[0, :] /= determinant
    return result
def random_special_orthogonal(dim: int) -> np.ndarray:
    """Samples a Haar-random orthogonal matrix with determinant +1.

    Flips the sign of the first row whenever the sampled matrix has
    determinant -1.

    Args:
        dim: The width and height of the matrix.

    Returns:
        A dim x dim special orthogonal numpy array.
    """
    result = random_orthogonal(dim)
    if np.linalg.det(result) < 0:
        result[0, :] = -result[0, :]
    return result
def assert_allclose_up_to_global_phase(
        actual: np.ndarray,
        desired: np.ndarray,
        *,  # Forces keyword args.
        rtol: float = 1e-7,
        atol: float,  # Require atol to be specified
        equal_nan: bool = True,
        err_msg: Optional[str] = '',
        verbose: bool = True) -> None:
    """Asserts actual ~= desired * exp(i t) for some phase t.

    Aligns the global phases of the two arrays and then delegates the
    comparison to np.testing.assert_allclose.

    Args:
        actual: A numpy array.
        desired: Another numpy array.
        rtol: Relative error tolerance.
        atol: Absolute error tolerance.
        equal_nan: Whether NaN entries compare equal to other NaNs.
        err_msg: Error message printed on failure.
        verbose: If True, conflicting values are appended to the message.

    Raises:
        AssertionError: No global phase makes the arrays nearly equal.
    """
    phased_actual, phased_desired = linalg.match_global_phase(actual, desired)
    np.testing.assert_allclose(phased_actual,
                               phased_desired,
                               rtol=rtol,
                               atol=atol,
                               equal_nan=equal_nan,
                               err_msg=err_msg,
                               verbose=verbose)
| 28.357143 | 80 | 0.653904 |
from typing import Optional
import numpy as np
from cirq import linalg
def random_superposition(dim: int) -> np.ndarray:
    """Returns a random unit-length complex vector of dimension *dim*."""
    state_vector = np.random.randn(dim).astype(complex)
    state_vector += 1j * np.random.randn(dim)
    state_vector /= np.linalg.norm(state_vector)
    return state_vector
def random_unitary(dim: int) -> np.ndarray:
    """Returns a dim x dim Haar-random unitary (QR of a complex Gaussian,
    with the phases of R's diagonal normalized)."""
    z = (np.random.randn(dim, dim) + 1j * np.random.randn(dim, dim))
    q, r = np.linalg.qr(z)
    d = np.diag(r)
    return q * (d / abs(d))
def random_orthogonal(dim: int) -> np.ndarray:
    """Returns a dim x dim Haar-random orthogonal matrix (QR of a real
    Gaussian, with the signs of R's diagonal normalized)."""
    m = np.random.randn(dim, dim)
    q, r = np.linalg.qr(m)
    d = np.diag(r)
    return q * (d / abs(d))
def random_special_unitary(dim: int) -> np.ndarray:
    """Returns a Haar-random unitary with determinant 1 (first row divided
    by the determinant, a unit-modulus scalar)."""
    r = random_unitary(dim)
    r[0, :] /= np.linalg.det(r)
    return r
def random_special_orthogonal(dim: int) -> np.ndarray:
    """Returns a Haar-random orthogonal matrix with determinant +1 (first
    row negated when the sample has determinant -1)."""
    m = random_orthogonal(dim)
    if np.linalg.det(m) < 0:
        m[0, :] *= -1
    return m
def assert_allclose_up_to_global_phase(
        actual: np.ndarray,
        desired: np.ndarray,
        *,  # keyword-only from here on
        rtol: float = 1e-7,
        atol: float,  # required: no default
        equal_nan: bool = True,
        err_msg: Optional[str] = '',
        verbose: bool = True) -> None:
    """Asserts actual ~= desired * exp(i t) for some phase t.

    Aligns the global phases of the two arrays, then delegates to
    np.testing.assert_allclose with the given tolerances.

    Raises:
        AssertionError: no global phase makes the arrays nearly equal.
    """
    actual, desired = linalg.match_global_phase(actual, desired)
    np.testing.assert_allclose(
        actual=actual,
        desired=desired,
        rtol=rtol,
        atol=atol,
        equal_nan=equal_nan,
        err_msg=err_msg,
        verbose=verbose)
| true | true |
f7f8fae07784bbb86adba0fefb6eda74a718187e | 360 | py | Python | app/handlers/emails.py | Stanford-PERTS/triton | 5a4f401fc7019d59ce4c41eafa6c5bda822fae0a | [
"CC0-1.0"
] | null | null | null | app/handlers/emails.py | Stanford-PERTS/triton | 5a4f401fc7019d59ce4c41eafa6c5bda822fae0a | [
"CC0-1.0"
] | null | null | null | app/handlers/emails.py | Stanford-PERTS/triton | 5a4f401fc7019d59ce4c41eafa6c5bda822fae0a | [
"CC0-1.0"
] | null | null | null | from gae_handlers import RestHandler
from model import Email
class Emails(RestHandler):
    """REST handler for Email records that rejects every HTTP verb.

    Emails are presumably created/managed elsewhere; this endpoint only
    exists to respond 405 -- confirm against the routing table.
    """

    requires_auth = True
    model = Email

    def get(self, **kwargs):
        """GET is not permitted on this resource."""
        self.not_allowed()

    def put(self):
        """PUT is not permitted on this resource."""
        self.not_allowed()

    def delete(self):
        """DELETE is not permitted on this resource."""
        self.not_allowed()

    def not_allowed(self):
        # NOTE(review): 'POST' is presumably the allowed-methods value
        # reported in the 405 response -- confirm RestHandler's contract.
        return self.http_not_allowed('POST')
| 18 | 44 | 0.65 | from gae_handlers import RestHandler
from model import Email
class Emails(RestHandler):
    """REST handler for Email records that rejects every HTTP verb.

    Emails are presumably created/managed elsewhere; this endpoint only
    exists to respond 405 -- confirm against the routing table.
    """

    requires_auth = True
    model = Email

    def get(self, **kwargs):
        """GET is not permitted on this resource."""
        self.not_allowed()

    def put(self):
        """PUT is not permitted on this resource."""
        self.not_allowed()

    def delete(self):
        """DELETE is not permitted on this resource."""
        self.not_allowed()

    def not_allowed(self):
        # NOTE(review): 'POST' is presumably the allowed-methods value
        # reported in the 405 response -- confirm RestHandler's contract.
        return self.http_not_allowed('POST')
| true | true |
f7f8fb22ebff466907fc03b928d0b7c0bb52885f | 2,475 | py | Python | src/engine/SCons/Tool/ipkg.py | EmanueleCannizzaro/scons | 6baa4e65cdf4df6951473545b69435711864e509 | [
"MIT"
] | 1 | 2019-09-18T06:37:02.000Z | 2019-09-18T06:37:02.000Z | src/engine/SCons/Tool/ipkg.py | EmanueleCannizzaro/scons | 6baa4e65cdf4df6951473545b69435711864e509 | [
"MIT"
] | null | null | null | src/engine/SCons/Tool/ipkg.py | EmanueleCannizzaro/scons | 6baa4e65cdf4df6951473545b69435711864e509 | [
"MIT"
] | null | null | null | """SCons.Tool.ipkg
Tool-specific initialization for ipkg.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
The ipkg tool calls the ipkg-build. Its only argument should be the
packages fake_root.
"""
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/ipkg.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
import os
import SCons.Builder
def generate(env):
    """Add Builders and construction variables for ipkg to an Environment."""
    try:
        bld = env['BUILDERS']['Ipkg']
    except KeyError:
        # No Ipkg builder registered yet: create one that invokes $IPKGCOM,
        # producing $IPKGSUFFIX files, with no implicit dependency scanning.
        bld = SCons.Builder.Builder( action = '$IPKGCOM',
                                     suffix = '$IPKGSUFFIX',
                                     source_scanner = None,
                                     target_scanner = None)
        env['BUILDERS']['Ipkg'] = bld

    env['IPKG'] = 'ipkg-build'
    env['IPKGCOM'] = '$IPKG $IPKGFLAGS ${SOURCE}'
    # Package ownership defaults to the invoking user/group via the POSIX
    # `id` command. NOTE(review): os.popen is legacy and this relies on
    # `id` being on PATH -- consider subprocess in a modernization pass.
    env['IPKGUSER'] = os.popen('id -un').read().strip()
    env['IPKGGROUP'] = os.popen('id -gn').read().strip()
    # NOTE(review): SCons.Util is used but only SCons.Builder is imported
    # above; this works only if SCons.Builder imports SCons.Util
    # transitively -- confirm or import SCons.Util explicitly.
    env['IPKGFLAGS'] = SCons.Util.CLVar('-o $IPKGUSER -g $IPKGGROUP')
    env['IPKGSUFFIX'] = '.ipk'
def exists(env):
    """Return a truthy path if the ipkg-build tool can be found."""
    return env.Detect('ipkg-build')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 36.397059 | 103 | 0.688081 |
__revision__ = "src/engine/SCons/Tool/ipkg.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
import os
import SCons.Builder
def generate(env):
    """Add Builders and construction variables for ipkg to an Environment."""
    try:
        bld = env['BUILDERS']['Ipkg']
    except KeyError:
        # No Ipkg builder registered yet: create one that invokes $IPKGCOM,
        # producing $IPKGSUFFIX files, with no implicit dependency scanning.
        bld = SCons.Builder.Builder( action = '$IPKGCOM',
                                     suffix = '$IPKGSUFFIX',
                                     source_scanner = None,
                                     target_scanner = None)
        env['BUILDERS']['Ipkg'] = bld
    env['IPKG'] = 'ipkg-build'
    env['IPKGCOM'] = '$IPKG $IPKGFLAGS ${SOURCE}'
    # Package ownership defaults to the invoking user/group via the POSIX
    # `id` command. NOTE(review): os.popen is legacy and this relies on
    # `id` being on PATH -- consider subprocess in a modernization pass.
    env['IPKGUSER'] = os.popen('id -un').read().strip()
    env['IPKGGROUP'] = os.popen('id -gn').read().strip()
    # NOTE(review): SCons.Util is used but only SCons.Builder is imported
    # above; this works only if SCons.Builder imports SCons.Util
    # transitively -- confirm or import SCons.Util explicitly.
    env['IPKGFLAGS'] = SCons.Util.CLVar('-o $IPKGUSER -g $IPKGGROUP')
    env['IPKGSUFFIX'] = '.ipk'
def exists(env):
    """Return a truthy path if the ipkg-build tool can be found."""
    return env.Detect('ipkg-build')
| true | true |
f7f8fc22209ed29922a9fded6cbf07758909bd88 | 5,186 | py | Python | Views/Creation/transitionAddUI.py | yvesjordan06/automata-brains | 1c34dd9315fcee7ce1807a2b94a0ec48421d03b1 | [
"MIT"
] | 3 | 2020-01-31T15:54:48.000Z | 2020-02-01T10:01:35.000Z | Views/Creation/transitionAddUI.py | yvesjordan06/automata-brains | 1c34dd9315fcee7ce1807a2b94a0ec48421d03b1 | [
"MIT"
] | null | null | null | Views/Creation/transitionAddUI.py | yvesjordan06/automata-brains | 1c34dd9315fcee7ce1807a2b94a0ec48421d03b1 | [
"MIT"
] | 2 | 2020-02-01T09:59:51.000Z | 2020-02-01T10:02:12.000Z | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'UI/transition.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from Models.Automate import Automate
from Models.Etat import Etat
from Models.Transition import Transition
class Ui_Form(object):
    """Qt form for adding a transition to an Automate.

    Generated from UI/transition.ui and hand-extended with model bindings
    (combo boxes populated from the automaton's states and alphabet).
    """

    def __init__(self, automate: Automate):
        # Automaton whose states/alphabet populate the combo boxes.
        self.automate = automate

    def setupUi(self, Form):
        """Build the widget tree, wire signals, and do the initial fill."""
        Form.setObjectName("Form")
        Form.resize(332, 207)
        self.verticalLayout = QtWidgets.QVBoxLayout(Form)
        self.verticalLayout.setObjectName("verticalLayout")
        self.groupBox = QtWidgets.QGroupBox(Form)
        self.groupBox.setObjectName("groupBox")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.groupBox)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.label_2 = QtWidgets.QLabel(self.groupBox)
        self.label_2.setObjectName("label_2")
        self.horizontalLayout.addWidget(self.label_2)
        self.label_3 = QtWidgets.QLabel(self.groupBox)
        self.label_3.setObjectName("label_3")
        self.horizontalLayout.addWidget(self.label_3)
        self.label = QtWidgets.QLabel(self.groupBox)
        self.label.setObjectName("label")
        self.horizontalLayout.addWidget(self.label)
        self.verticalLayout_2.addLayout(self.horizontalLayout)
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        # NOTE(review): objectNames "comboBox_3"/"comboBox" predate the
        # renames to etatArriveBox/etatDepartBox -- cosmetic mismatch.
        self.etatArriveBox = QtWidgets.QComboBox(self.groupBox)
        self.etatArriveBox.setStyleSheet("padding:8px")
        self.etatArriveBox.setObjectName("comboBox_3")
        self.horizontalLayout_2.addWidget(self.etatArriveBox)
        self.symbolBox = QtWidgets.QComboBox(self.groupBox)
        self.symbolBox.setStyleSheet("padding:8px")
        self.symbolBox.setObjectName("symbolBox")
        self.horizontalLayout_2.addWidget(self.symbolBox)
        self.etatDepartBox = QtWidgets.QComboBox(self.groupBox)
        self.etatDepartBox.setStyleSheet("padding:8px")
        self.etatDepartBox.setObjectName("comboBox")
        self.horizontalLayout_2.addWidget(self.etatDepartBox)
        self.verticalLayout_2.addLayout(self.horizontalLayout_2)
        self.epsilonCheck = QtWidgets.QCheckBox(self.groupBox)
        self.epsilonCheck.setStyleSheet("padding:8px")
        self.epsilonCheck.setObjectName("epsilonCheck")
        self.verticalLayout_2.addWidget(self.epsilonCheck)
        self.addButton = QtWidgets.QPushButton(self.groupBox)
        self.addButton.setStyleSheet("padding:8px")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("icons/new.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.addButton.setIcon(icon)
        self.addButton.setObjectName("addButton")
        self.verticalLayout_2.addWidget(self.addButton)
        self.verticalLayout.addWidget(self.groupBox)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
        self.etatArriveBox.setMaximumWidth(60)
        self.etatDepartBox.setMaximumWidth(60)
        self.symbolBox.setMaximumWidth(60)

        # Binding: UI events -> handlers, and model changes -> refresh.
        self.epsilonCheck.clicked.connect(self.epsilon_checker)
        self.addButton.clicked.connect(self.action_create_transition)
        self.automate.automate_modifier.connect(self.action_set_state)

        # First fill of the combo boxes from the current automaton state.
        self.action_set_state()

    def retranslateUi(self, Form):
        """Set all user-visible strings (auto-generated translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Form"))
        self.groupBox.setTitle(_translate("Form", "Transition"))
        self.label_2.setText(_translate("Form", "Depart"))
        self.label_3.setText(_translate("Form", "Symbol"))
        self.label.setText(_translate("Form", "Arrive"))
        self.epsilonCheck.setText(_translate("Form", "Epsilon"))
        self.addButton.setText(_translate("Form", "Ajouter"))

    def action_set_state(self):
        """Repopulate the combo boxes from the automaton's states/alphabet."""
        self.etatDepartBox.clear()
        self.symbolBox.clear()
        self.etatArriveBox.clear()
        self.etatDepartBox.addItems([str(etat) for etat in self.automate.etats])
        self.symbolBox.addItems(self.automate.alphabet.list)
        self.etatArriveBox.addItems([str(etat) for etat in self.automate.etats])

    def action_create_transition(self):
        """Create a Transition from the current selections and add it.

        NOTE(review): the variable names swap depart/arrive relative to
        the combo boxes they read from (etatDepartBox -> 'arrive',
        etatArriveBox -> 'depart') -- confirm against the UI layout.
        """
        arrive = self.etatDepartBox.currentText()
        symbole = self.symbolBox.currentText()
        depart = self.etatArriveBox.currentText()
        depart = Etat(depart)
        arrive = Etat(arrive)
        # Epsilon transitions are encoded as the empty-string symbol.
        symbole = '' if self.epsilonCheck.isChecked() else symbole
        transition = Transition(depart, symbole, arrive)
        self.automate.ajoute_transition(transition)

    def epsilon_checker(self, checked: bool):
        """Disable the symbol selector while 'Epsilon' is checked."""
        self.symbolBox.setDisabled(checked)
# Standalone preview: build the form inside a bare QWidget.
# NOTE(review): Ui_Form.__init__ requires an Automate argument, so this
# demo call `Ui_Form()` will raise TypeError as written -- it needs an
# Automate instance to run.
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Form = QtWidgets.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
| 39.587786 | 91 | 0.703047 |
from PyQt5 import QtCore, QtGui, QtWidgets
from Models.Automate import Automate
from Models.Etat import Etat
from Models.Transition import Transition
class Ui_Form(object):
    """pyuic5-style widget that lets the user add a Transition to an Automate.

    Shows three combo boxes (state / symbol / state), an "Epsilon" checkbox
    and an "Ajouter" button inside a "Transition" group box.
    """
    def __init__(self, automate: Automate):
        # Model object; its `automate_modifier` signal refreshes the combos.
        self.automate = automate
    def setupUi(self, Form):
        # Build the widget tree on *Form* (generated-style layout code).
        Form.setObjectName("Form")
        Form.resize(332, 207)
        self.verticalLayout = QtWidgets.QVBoxLayout(Form)
        self.verticalLayout.setObjectName("verticalLayout")
        self.groupBox = QtWidgets.QGroupBox(Form)
        self.groupBox.setObjectName("groupBox")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.groupBox)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        # Row of labels above the combo boxes.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.label_2 = QtWidgets.QLabel(self.groupBox)
        self.label_2.setObjectName("label_2")
        self.horizontalLayout.addWidget(self.label_2)
        self.label_3 = QtWidgets.QLabel(self.groupBox)
        self.label_3.setObjectName("label_3")
        self.horizontalLayout.addWidget(self.label_3)
        self.label = QtWidgets.QLabel(self.groupBox)
        self.label.setObjectName("label")
        self.horizontalLayout.addWidget(self.label)
        self.verticalLayout_2.addLayout(self.horizontalLayout)
        # Row of combo boxes.
        # NOTE(review): the boxes are inserted in the order arrive / symbol /
        # depart, while the labels above read Depart / Symbol / Arrive — the
        # widget names are reversed relative to their on-screen columns; see
        # action_create_transition, which swaps them back. Confirm intent.
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.etatArriveBox = QtWidgets.QComboBox(self.groupBox)
        self.etatArriveBox.setStyleSheet("padding:8px")
        self.etatArriveBox.setObjectName("comboBox_3")
        self.horizontalLayout_2.addWidget(self.etatArriveBox)
        self.symbolBox = QtWidgets.QComboBox(self.groupBox)
        self.symbolBox.setStyleSheet("padding:8px")
        self.symbolBox.setObjectName("symbolBox")
        self.horizontalLayout_2.addWidget(self.symbolBox)
        self.etatDepartBox = QtWidgets.QComboBox(self.groupBox)
        self.etatDepartBox.setStyleSheet("padding:8px")
        self.etatDepartBox.setObjectName("comboBox")
        self.horizontalLayout_2.addWidget(self.etatDepartBox)
        self.verticalLayout_2.addLayout(self.horizontalLayout_2)
        self.epsilonCheck = QtWidgets.QCheckBox(self.groupBox)
        self.epsilonCheck.setStyleSheet("padding:8px")
        self.epsilonCheck.setObjectName("epsilonCheck")
        self.verticalLayout_2.addWidget(self.epsilonCheck)
        self.addButton = QtWidgets.QPushButton(self.groupBox)
        self.addButton.setStyleSheet("padding:8px")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("icons/new.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.addButton.setIcon(icon)
        self.addButton.setObjectName("addButton")
        self.verticalLayout_2.addWidget(self.addButton)
        self.verticalLayout.addWidget(self.groupBox)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
        self.etatArriveBox.setMaximumWidth(60)
        self.etatDepartBox.setMaximumWidth(60)
        self.symbolBox.setMaximumWidth(60)
        # Binding: hook UI signals and the model's change signal to slots.
        self.epsilonCheck.clicked.connect(self.epsilon_checker)
        self.addButton.clicked.connect(self.action_create_transition)
        self.automate.automate_modifier.connect(self.action_set_state)
        # First population of the combo boxes from the model.
        self.action_set_state()
    def retranslateUi(self, Form):
        # Apply the translatable captions to the widgets of *Form*.
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Form"))
        self.groupBox.setTitle(_translate("Form", "Transition"))
        self.label_2.setText(_translate("Form", "Depart"))
        self.label_3.setText(_translate("Form", "Symbol"))
        self.label.setText(_translate("Form", "Arrive"))
        self.epsilonCheck.setText(_translate("Form", "Epsilon"))
        self.addButton.setText(_translate("Form", "Ajouter"))
    def action_set_state(self):
        # Repopulate the three combo boxes from the current automaton.
        self.etatDepartBox.clear()
        self.symbolBox.clear()
        self.etatArriveBox.clear()
        self.etatDepartBox.addItems([str(etat) for etat in self.automate.etats])
        self.symbolBox.addItems(self.automate.alphabet.list)
        self.etatArriveBox.addItems([str(etat) for etat in self.automate.etats])
    def action_create_transition(self):
        # Build a Transition from the current selections and add it to the
        # automaton.  NOTE(review): variable names are crossed with the widget
        # names (`arrive` reads etatDepartBox); the widgets are laid out in
        # reversed order (see setupUi), so this appears to compensate —
        # confirm before changing either side.
        arrive = self.etatDepartBox.currentText()
        symbole = self.symbolBox.currentText()
        depart = self.etatArriveBox.currentText()
        depart = Etat(depart)
        arrive = Etat(arrive)
        # An epsilon transition is represented by the empty-string symbol.
        symbole = '' if self.epsilonCheck.isChecked() else symbole
        transition = Transition(depart, symbole, arrive)
        self.automate.ajoute_transition(transition)
    def epsilon_checker(self, checked: bool):
        # Disable the symbol selector while "Epsilon" is ticked.
        self.symbolBox.setDisabled(checked)
if __name__ == "__main__":
    # Stand-alone preview of the widget (pyuic-style stub).
    # NOTE(review): Ui_Form.__init__ above requires an `automate` argument,
    # so `Ui_Form()` below raises TypeError — this stub was not updated when
    # the constructor gained the parameter; confirm and pass an Automate.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Form = QtWidgets.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
| true | true |
f7f8fcc3f8ac166f01bc66ee014bf273fbb9bbce | 38,459 | py | Python | tests/urlpatterns_reverse/tests.py | kux/django | d61ebc8fed212366340b1ed6f5d7722613801459 | [
"BSD-3-Clause"
] | null | null | null | tests/urlpatterns_reverse/tests.py | kux/django | d61ebc8fed212366340b1ed6f5d7722613801459 | [
"BSD-3-Clause"
] | null | null | null | tests/urlpatterns_reverse/tests.py | kux/django | d61ebc8fed212366340b1ed6f5d7722613801459 | [
"BSD-3-Clause"
] | 1 | 2020-04-24T21:08:18.000Z | 2020-04-24T21:08:18.000Z | # -*- coding: utf-8 -*-
"""
Unit tests for reverse URL lookups.
"""
from __future__ import unicode_literals
import sys
import unittest
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.conf.urls import include
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.core.urlresolvers import (
NoReverseMatch, RegexURLPattern, RegexURLResolver, Resolver404,
ResolverMatch, get_callable, get_resolver, resolve, reverse, reverse_lazy,
)
from django.http import (
HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.shortcuts import redirect
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, override_settings,
)
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from . import middleware, urlconf_outer, views
from .views import empty_view
# Each tuple pairs a request path with the attributes expected on the
# ResolverMatch that resolve() returns for it.
resolve_test_data = (
    # These entries are in the format: (path, url_name, app_name, namespace, view_name, func, args, kwargs)
    # Simple case
    ('/normal/42/37/', 'normal-view', None, '', 'normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/view_class/42/37/', 'view-class', None, '', 'view-class', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/included/normal/42/37/', 'inc-normal-view', None, '', 'inc-normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/included/view_class/42/37/', 'inc-view-class', None, '', 'inc-view-class', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
    # Unnamed args are dropped if you have *any* kwargs in a pattern
    ('/mixed_args/42/37/', 'mixed-args', None, '', 'mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
    ('/included/mixed_args/42/37/', 'inc-mixed-args', None, '', 'inc-mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
    # Unnamed views should have None as the url_name. Regression data for #21157.
    ('/unnamed/normal/42/37/', None, None, '', 'urlpatterns_reverse.views.empty_view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/unnamed/view_class/42/37/', None, None, '', 'urlpatterns_reverse.views.ViewClass', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
    # If you have no kwargs, you get an args list.
    ('/no_kwargs/42/37/', 'no-kwargs', None, '', 'no-kwargs', views.empty_view, ('42', '37'), {}),
    ('/included/no_kwargs/42/37/', 'inc-no-kwargs', None, '', 'inc-no-kwargs', views.empty_view, ('42', '37'), {}),
    # Namespaces
    ('/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'test-ns1:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/ns-included1/normal/42/37/', 'inc-normal-view', None, 'inc-ns1', 'inc-ns1:inc-normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'testapp:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'other-ns2:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'other-ns1:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    # Nested namespaces
    ('/ns-included1/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:test-ns3', 'inc-ns1:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    ('/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:inc-ns4:inc-ns2:test-ns3', 'inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
    # Namespaces capturing variables
    ('/inc70/', 'inner-nothing', None, 'inc-ns5', 'inc-ns5:inner-nothing', views.empty_view, tuple(), {'outer': '70'}),
    ('/inc78/extra/foobar/', 'inner-extra', None, 'inc-ns5', 'inc-ns5:inner-extra', views.empty_view, tuple(), {'outer': '78', 'extra': 'foobar'}),
)
# Each entry: (url_name, expected URL — or the NoReverseMatch class when the
# lookup is expected to fail — positional args, keyword args).
test_data = (
    ('places', '/places/3/', [3], {}),
    ('places', '/places/3/', ['3'], {}),
    ('places', NoReverseMatch, ['a'], {}),
    ('places', NoReverseMatch, [], {}),
    ('places?', '/place/', [], {}),
    ('places+', '/places/', [], {}),
    ('places*', '/place/', [], {}),
    ('places2?', '/', [], {}),
    ('places2+', '/places/', [], {}),
    ('places2*', '/', [], {}),
    ('places3', '/places/4/', [4], {}),
    ('places3', '/places/harlem/', ['harlem'], {}),
    ('places3', NoReverseMatch, ['harlem64'], {}),
    ('places4', '/places/3/', [], {'id': 3}),
    ('people', NoReverseMatch, [], {}),
    ('people', '/people/adrian/', ['adrian'], {}),
    ('people', '/people/adrian/', [], {'name': 'adrian'}),
    ('people', NoReverseMatch, ['name with spaces'], {}),
    ('people', NoReverseMatch, [], {'name': 'name with spaces'}),
    ('people2', '/people/name/', [], {}),
    ('people2a', '/people/name/fred/', ['fred'], {}),
    ('people_backref', '/people/nate-nate/', ['nate'], {}),
    ('people_backref', '/people/nate-nate/', [], {'name': 'nate'}),
    ('optional', '/optional/fred/', [], {'name': 'fred'}),
    ('optional', '/optional/fred/', ['fred'], {}),
    ('hardcoded', '/hardcoded/', [], {}),
    ('hardcoded2', '/hardcoded/doc.pdf', [], {}),
    ('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
    ('people3', NoReverseMatch, [], {'state': 'il'}),
    ('people3', NoReverseMatch, [], {'name': 'adrian'}),
    ('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
    ('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}),
    ('people6', '/people//adrian/', ['adrian'], {}),
    ('range', '/character_set/a/', [], {}),
    ('range2', '/character_set/x/', [], {}),
    ('price', '/price/$10/', ['10'], {}),
    ('price2', '/price/$10/', ['10'], {}),
    ('price3', '/price/$10/', ['10'], {}),
    ('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}),
    ('headlines', '/headlines/2007.5.21/', [], dict(year=2007, month=5, day=21)),
    ('windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [], dict(drive_name='C', path=r'Documents and Settings\spam')),
    ('special', r'/special_chars/~@+%5C$*%7C/', [r'~@+\$*|'], {}),
    ('special', r'/special_chars/some%20resource/', [r'some resource'], {}),
    ('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}),
    ('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}),
    ('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}),
    ('special', NoReverseMatch, [''], {}),
    ('mixed', '/john/0/', [], {'name': 'john'}),
    ('repeats', '/repeats/a/', [], {}),
    ('repeats2', '/repeats/aa/', [], {}),
    ('repeats3', '/repeats/aa/', [], {}),
    ('insensitive', '/CaseInsensitive/fred', ['fred'], {}),
    ('test', '/test/1', [], {}),
    ('test2', '/test/2', [], {}),
    ('inner-nothing', '/outer/42/', [], {'outer': '42'}),
    ('inner-nothing', '/outer/42/', ['42'], {}),
    ('inner-nothing', NoReverseMatch, ['foo'], {}),
    ('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}),
    ('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}),
    ('inner-extra', NoReverseMatch, ['fred', 'inner'], {}),
    ('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}),
    ('disjunction', NoReverseMatch, ['foo'], {}),
    ('inner-disjunction', NoReverseMatch, ['10', '11'], {}),
    ('extra-places', '/e-places/10/', ['10'], {}),
    ('extra-people', '/e-people/fred/', ['fred'], {}),
    ('extra-people', '/e-people/fred/', [], {'name': 'fred'}),
    ('part', '/part/one/', [], {'value': 'one'}),
    ('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}),
    ('part2', '/part2/one/', [], {'value': 'one'}),
    ('part2', '/part2/', [], {}),
    ('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}),
    ('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}),
    # Regression for #9038
    # These views are resolved by method name. Each method is deployed twice -
    # once with an explicit argument, and once using the default value on
    # the method. This is potentially ambiguous, as you have to pick the
    # correct view for the arguments provided.
    ('urlpatterns_reverse.views.absolute_kwargs_view', '/absolute_arg_view/', [], {}),
    ('urlpatterns_reverse.views.absolute_kwargs_view', '/absolute_arg_view/10/', [], {'arg1': 10}),
    ('non_path_include', '/includes/non_path_include/', [], {}),
    # Tests for #13154
    ('defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}),
    ('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}),
    ('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}),
    ('defaults', NoReverseMatch, [], {'arg2': 1}),
    # Security tests
    ('security', '/%2Fexample.com/security/', ['/example.com'], {}),
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls')
class NoURLPatternsTests(TestCase):
    def test_no_urls_exception(self):
        """
        RegexURLResolver should raise an exception when no urlpatterns exist.
        """
        resolver = RegexURLResolver(r'^$', settings.ROOT_URLCONF)
        expected_message = (
            "The included urlconf 'urlpatterns_reverse.no_urls' does not "
            "appear to have any patterns in it. If you see valid patterns in "
            "the file then the issue is probably caused by a circular import."
        )
        # Accessing the lazy url_patterns property triggers the error.
        with self.assertRaisesMessage(ImproperlyConfigured, expected_message):
            resolver.url_patterns
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class URLPatternReverse(TestCase):
    """reverse() lookups, driven by the module-level ``test_data`` table."""
    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_urlpattern_reverse(self):
        # An expected value of NoReverseMatch means the lookup must fail.
        for name, expected, args, kwargs in test_data:
            try:
                got = reverse(name, args=args, kwargs=kwargs)
            except NoReverseMatch:
                self.assertEqual(expected, NoReverseMatch)
            else:
                self.assertEqual(got, expected)
    def test_reverse_none(self):
        # Reversing None should raise an error, not return the last un-named view.
        self.assertRaises(NoReverseMatch, reverse, None)
    def test_prefix_braces(self):
        # Braces in the prefix must be percent-encoded, not treated as format chars.
        self.assertEqual('/%7B%7Binvalid%7D%7D/includes/non_path_include/',
            reverse('non_path_include', prefix='/{{invalid}}/'))
    def test_prefix_parenthesis(self):
        self.assertEqual('/bogus%29/includes/non_path_include/',
            reverse('non_path_include', prefix='/bogus)/'))
    def test_prefix_format_char(self):
        self.assertEqual('/bump%2520map/includes/non_path_include/',
            reverse('non_path_include', prefix='/bump%20map/'))
    def test_non_urlsafe_prefix_with_args(self):
        # Regression for #20022
        self.assertEqual('/%7Eme/places/1/',
            reverse('places', args=[1], prefix='/~me/'))
    def test_patterns_reported(self):
        # Regression for #17076
        try:
            # this url exists, but requires an argument
            reverse("people", args=[])
        except NoReverseMatch as e:
            pattern_description = r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"
            self.assertIn(pattern_description, str(e))
        else:
            # we can't use .assertRaises, since we want to inspect the
            # exception
            self.fail("Expected a NoReverseMatch, but none occurred.")
    def test_reverse_returns_unicode(self):
        name, expected, args, kwargs = test_data[0]
        self.assertIsInstance(
            reverse(name, args=args, kwargs=kwargs),
            six.text_type
        )
class ResolverTests(unittest.TestCase):
    """RegexURLResolver behavior: repr, lazy-URL coercion, bad paths, 404 data."""

    def test_resolver_repr(self):
        """
        Test repr of RegexURLResolver, especially when urlconf_name is a list
        (#17892).
        """
        # Pick a resolver from a namespaced urlconf
        resolver = get_resolver('urlpatterns_reverse.namespace_urls')
        sub_resolver = resolver.namespace_dict['test-ns1'][1]
        self.assertIn('<RegexURLPattern list>', repr(sub_resolver))

    def test_reverse_lazy_object_coercion_by_resolve(self):
        """
        Verifies lazy object returned by reverse_lazy is coerced to
        text by resolve(). Previous to #21043, this would raise a TypeError.
        """
        urls = 'urlpatterns_reverse.named_urls'
        proxy_url = reverse_lazy('named-url1', urlconf=urls)
        resolver = get_resolver(urls)
        try:
            resolver.resolve(proxy_url)
        except TypeError:
            self.fail('Failed to coerce lazy object to text')

    def test_non_regex(self):
        """
        Verifies that we raise a Resolver404 if what we are resolving doesn't
        meet the basic requirements of a path to match - i.e., at the very
        least, it matches the root pattern '^/'. We must never return None
        from resolve, or we will get a TypeError further down the line.

        Regression for #10834.
        """
        self.assertRaises(Resolver404, resolve, '')
        self.assertRaises(Resolver404, resolve, 'a')
        self.assertRaises(Resolver404, resolve, '\\')
        self.assertRaises(Resolver404, resolve, '.')

    def test_404_tried_urls_have_names(self):
        """
        Verifies that the list of URLs that come back from a Resolver404
        exception contains a list in the right format for printing out in
        the DEBUG 404 page with both the patterns and URL names, if available.
        """
        urls = 'urlpatterns_reverse.named_urls'
        # this list matches the expected URL types and names returned when
        # you try to resolve a non-existent URL in the first level of included
        # URLs in named_urls.py (e.g., '/included/non-existent-url')
        url_types_names = [
            [{'type': RegexURLPattern, 'name': 'named-url1'}],
            [{'type': RegexURLPattern, 'name': 'named-url2'}],
            [{'type': RegexURLPattern, 'name': None}],
            [{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url3'}],
            [{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url4'}],
            [{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': None}],
            [{'type': RegexURLResolver}, {'type': RegexURLResolver}],
        ]
        try:
            resolve('/included/non-existent-url', urlconf=urls)
            self.fail('resolve did not raise a 404')
        except Resolver404 as e:
            # make sure we at least matched the root ('/') url resolver:
            self.assertIn('tried', e.args[0])
            tried = e.args[0]['tried']
            self.assertEqual(
                len(tried), len(url_types_names),
                'Wrong number of tried URLs returned. Expected %s, got %s.'
                % (len(url_types_names), len(tried)))
            # Bug fix: the original wrote
            #   self.assertIsInstance(t, e['type']), str('...') % (...)
            # which made the failure message a dead expression in a tuple
            # (never passed to the assertion), and the inner loop shadowed
            # both `tried` and the exception variable `e`. The message is now
            # passed as the msg argument and the loop variables are distinct.
            for tried_urls, expected_urls in zip(tried, url_types_names):
                for t, e_info in zip(tried_urls, expected_urls):
                    self.assertIsInstance(
                        t, e_info['type'],
                        '%s is not an instance of %s' % (t, e_info['type']))
                    if 'name' in e_info:
                        if not e_info['name']:
                            self.assertIsNone(t.name, 'Expected no URL name but found %s.' % t.name)
                        else:
                            self.assertEqual(
                                t.name, e_info['name'],
                                'Wrong URL name. Expected "%s", got "%s".' % (e_info['name'], t.name))
@override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls')
class ReverseLazyTest(TestCase):
    """reverse_lazy() used in redirects, login_required targets and strings."""
    def test_redirect_with_lazy_reverse(self):
        response = self.client.get('/redirect/')
        self.assertRedirects(response, "/redirected_to/", status_code=302)
    def test_user_permission_with_lazy_reverse(self):
        # Anonymous request is bounced to the login page; after login the
        # view is reachable.
        User.objects.create_user('alfred', 'alfred@example.com', password='testpw')
        response = self.client.get('/login_required_view/')
        self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302)
        self.client.login(username='alfred', password='testpw')
        response = self.client.get('/login_required_view/')
        self.assertEqual(response.status_code, 200)
    def test_inserting_reverse_lazy_into_string(self):
        self.assertEqual(
            'Some URL: %s' % reverse_lazy('some-login-page'),
            'Some URL: /login/'
        )
        if six.PY2:
            # On Python 2, bytestring interpolation must also coerce the lazy URL.
            self.assertEqual(
                b'Some URL: %s' % reverse_lazy('some-login-page'),
                'Some URL: /login/'
            )
class ReverseLazySettingsTest(AdminScriptTestCase):
    """
    Test that reverse_lazy can be used in settings without causing a circular
    import error.
    """
    def setUp(self):
        # Write a settings module whose LOGIN_URL is a reverse_lazy() result.
        self.write_settings('settings.py', extra="""
from django.core.urlresolvers import reverse_lazy
LOGIN_URL = reverse_lazy('login')""")
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_lazy_in_settings(self):
        # `manage.py check` must run cleanly against those settings.
        out, err = self.run_manage(['check'])
        self.assertNoOutput(err)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseShortcutTests(TestCase):
    """The redirect() shortcut: objects, view names, raw URLs, dotted paths."""
    def test_redirect_to_object(self):
        # We don't really need a model; just something with a get_absolute_url
        class FakeObj(object):
            def get_absolute_url(self):
                return "/hi-there/"
        res = redirect(FakeObj())
        self.assertIsInstance(res, HttpResponseRedirect)
        self.assertEqual(res.url, '/hi-there/')
        res = redirect(FakeObj(), permanent=True)
        self.assertIsInstance(res, HttpResponsePermanentRedirect)
        self.assertEqual(res.url, '/hi-there/')
    def test_redirect_to_view_name(self):
        res = redirect('hardcoded2')
        self.assertEqual(res.url, '/hardcoded/doc.pdf')
        res = redirect('places', 1)
        self.assertEqual(res.url, '/places/1/')
        res = redirect('headlines', year='2008', month='02', day='17')
        self.assertEqual(res.url, '/headlines/2008.02.17/')
        self.assertRaises(NoReverseMatch, redirect, 'not-a-view')
    def test_redirect_to_url(self):
        res = redirect('/foo/')
        self.assertEqual(res.url, '/foo/')
        res = redirect('http://example.com/')
        self.assertEqual(res.url, 'http://example.com/')
        # Assert that we can redirect using UTF-8 strings
        res = redirect('/æøå/abc/')
        self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5/abc/')
        # Assert that no imports are attempted when dealing with a relative path
        # (previously, the below would resolve in a UnicodeEncodeError from __import__ )
        res = redirect('/æøå.abc/')
        self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5.abc/')
        res = redirect('os.path')
        self.assertEqual(res.url, 'os.path')
    def test_no_illegal_imports(self):
        # modules that are not listed in urlpatterns should not be importable
        redirect("urlpatterns_reverse.nonimported_module.view")
        self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_reverse_by_path_nested(self):
        # Views that are added to urlpatterns using include() should be
        # reversible by dotted path.
        self.assertEqual(reverse('urlpatterns_reverse.views.nested_view'), '/includes/nested_path/')
    def test_redirect_view_object(self):
        from .views import absolute_kwargs_view
        res = redirect(absolute_kwargs_view)
        self.assertEqual(res.url, '/absolute_arg_view/')
        self.assertRaises(NoReverseMatch, redirect, absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class NamespaceTests(TestCase):
    """reverse() lookups through URL namespaces: simple, nested, app-default,
    current_app-sensitive, and namespaces whose prefix captures variables."""
    def test_ambiguous_object(self):
        "Names deployed via dynamic URL objects that require namespaces can't be resolved"
        self.assertRaises(NoReverseMatch, reverse, 'urlobject-view')
        self.assertRaises(NoReverseMatch, reverse, 'urlobject-view', args=[37, 42])
        self.assertRaises(NoReverseMatch, reverse, 'urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
    def test_ambiguous_urlpattern(self):
        "Names deployed via dynamic URL objects that require namespaces can't be resolved"
        self.assertRaises(NoReverseMatch, reverse, 'inner-nothing')
        self.assertRaises(NoReverseMatch, reverse, 'inner-nothing', args=[37, 42])
        self.assertRaises(NoReverseMatch, reverse, 'inner-nothing', kwargs={'arg1': 42, 'arg2': 37})
    def test_non_existent_namespace(self):
        "Non-existent namespaces raise errors"
        self.assertRaises(NoReverseMatch, reverse, 'blahblah:urlobject-view')
        self.assertRaises(NoReverseMatch, reverse, 'test-ns1:blahblah:urlobject-view')
    def test_normal_name(self):
        "Normal lookups work as expected"
        self.assertEqual('/normal/', reverse('normal-view'))
        self.assertEqual('/normal/37/42/', reverse('normal-view', args=[37, 42]))
        self.assertEqual('/normal/42/37/', reverse('normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/+%5C$*/', reverse('special-view'))
    def test_simple_included_name(self):
        "Normal lookups work on names included from other patterns"
        self.assertEqual('/included/normal/', reverse('inc-normal-view'))
        self.assertEqual('/included/normal/37/42/', reverse('inc-normal-view', args=[37, 42]))
        self.assertEqual('/included/normal/42/37/', reverse('inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/included/+%5C$*/', reverse('inc-special-view'))
    def test_namespace_object(self):
        "Dynamic URL objects can be found using a namespace"
        self.assertEqual('/test1/inner/', reverse('test-ns1:urlobject-view'))
        self.assertEqual('/test1/inner/37/42/', reverse('test-ns1:urlobject-view', args=[37, 42]))
        self.assertEqual('/test1/inner/42/37/', reverse('test-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/test1/inner/+%5C$*/', reverse('test-ns1:urlobject-special-view'))
    def test_embedded_namespace_object(self):
        "Namespaces can be installed anywhere in the URL pattern tree"
        self.assertEqual('/included/test3/inner/', reverse('test-ns3:urlobject-view'))
        self.assertEqual('/included/test3/inner/37/42/', reverse('test-ns3:urlobject-view', args=[37, 42]))
        self.assertEqual('/included/test3/inner/42/37/', reverse('test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/included/test3/inner/+%5C$*/', reverse('test-ns3:urlobject-special-view'))
    def test_namespace_pattern(self):
        "Namespaces can be applied to include()'d urlpatterns"
        self.assertEqual('/ns-included1/normal/', reverse('inc-ns1:inc-normal-view'))
        self.assertEqual('/ns-included1/normal/37/42/', reverse('inc-ns1:inc-normal-view', args=[37, 42]))
        self.assertEqual('/ns-included1/normal/42/37/', reverse('inc-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/ns-included1/+%5C$*/', reverse('inc-ns1:inc-special-view'))
    def test_namespace_pattern_with_variable_prefix(self):
        "When using an include with namespaces when there is a regex variable in front of it"
        self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42}))
        self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', args=[42]))
        self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42, 'arg1': 37, 'arg2': 4}))
        self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', args=[42, 37, 4]))
        self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', kwargs={'outer': 42}))
        self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', args=[42]))
    def test_multiple_namespace_pattern(self):
        "Namespaces can be embedded"
        self.assertEqual('/ns-included1/test3/inner/', reverse('inc-ns1:test-ns3:urlobject-view'))
        self.assertEqual('/ns-included1/test3/inner/37/42/', reverse('inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
        self.assertEqual('/ns-included1/test3/inner/42/37/', reverse('inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:test-ns3:urlobject-special-view'))
    def test_nested_namespace_pattern(self):
        "Namespaces can be nested"
        self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view'))
        self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/37/42/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
        self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/42/37/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view'))
    def test_app_lookup_object(self):
        "A default application namespace can be used for lookup"
        self.assertEqual('/default/inner/', reverse('testapp:urlobject-view'))
        self.assertEqual('/default/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42]))
        self.assertEqual('/default/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/default/inner/+%5C$*/', reverse('testapp:urlobject-special-view'))
    def test_app_lookup_object_with_default(self):
        "A default application namespace is sensitive to the 'current' app can be used for lookup"
        self.assertEqual('/included/test3/inner/', reverse('testapp:urlobject-view', current_app='test-ns3'))
        self.assertEqual('/included/test3/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42], current_app='test-ns3'))
        self.assertEqual('/included/test3/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='test-ns3'))
        self.assertEqual('/included/test3/inner/+%5C$*/', reverse('testapp:urlobject-special-view', current_app='test-ns3'))
    def test_app_lookup_object_without_default(self):
        "An application namespace without a default is sensitive to the 'current' app can be used for lookup"
        self.assertEqual('/other2/inner/', reverse('nodefault:urlobject-view'))
        self.assertEqual('/other2/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42]))
        self.assertEqual('/other2/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/other2/inner/+%5C$*/', reverse('nodefault:urlobject-special-view'))
        self.assertEqual('/other1/inner/', reverse('nodefault:urlobject-view', current_app='other-ns1'))
        self.assertEqual('/other1/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42], current_app='other-ns1'))
        self.assertEqual('/other1/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='other-ns1'))
        self.assertEqual('/other1/inner/+%5C$*/', reverse('nodefault:urlobject-special-view', current_app='other-ns1'))
    def test_special_chars_namespace(self):
        self.assertEqual('/+%5C$*/included/normal/', reverse('special:inc-normal-view'))
        self.assertEqual('/+%5C$*/included/normal/37/42/', reverse('special:inc-normal-view', args=[37, 42]))
        self.assertEqual('/+%5C$*/included/normal/42/37/', reverse('special:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
        self.assertEqual('/+%5C$*/included/+%5C$*/', reverse('special:inc-special-view'))
    def test_namespaces_with_variables(self):
        "Namespace prefixes can capture variables: see #15900"
        self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', kwargs={'outer': '70'}))
        self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', kwargs={'outer': '78', 'extra': 'foobar'}))
        self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', args=['70']))
        self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', args=['78', 'foobar']))
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(TestCase):
    """Per-request URLconf overrides set by middleware, and how reverse()
    behaves inside response / streaming middleware under those overrides."""
    def test_urlconf(self):
        # Default (outer) URLconf is in effect.
        response = self.client.get('/test/me/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'outer:/test/me/,'
                                           b'inner:/inner_urlconf/second_test/')
        response = self.client.get('/inner_urlconf/second_test/')
        self.assertEqual(response.status_code, 200)
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 404)
    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
        ]
    )
    def test_urlconf_overridden(self):
        # Middleware swaps in the inner URLconf for every request.
        response = self.client.get('/test/me/')
        self.assertEqual(response.status_code, 404)
        response = self.client.get('/inner_urlconf/second_test/')
        self.assertEqual(response.status_code, 404)
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'outer:,inner:/second_test/')
    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.NullChangeURLconfMiddleware' % middleware.__name__,
        ]
    )
    def test_urlconf_overridden_with_null(self):
        # Setting request.urlconf to None is a configuration error.
        self.assertRaises(ImproperlyConfigured, self.client.get, '/test/me/')
    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseInnerInResponseMiddleware' % middleware.__name__,
        ]
    )
    def test_reverse_inner_in_response_middleware(self):
        """
        Test reversing an URL from the *overridden* URLconf from inside
        a response middleware.
        """
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'/second_test/')
    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseOuterInResponseMiddleware' % middleware.__name__,
        ]
    )
    def test_reverse_outer_in_response_middleware(self):
        """
        Test reversing an URL from the *default* URLconf from inside
        a response middleware.
        """
        message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
        with self.assertRaisesMessage(NoReverseMatch, message):
            self.client.get('/second_test/')
    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseInnerInStreaming' % middleware.__name__,
        ]
    )
    def test_reverse_inner_in_streaming(self):
        """
        Test reversing an URL from the *overridden* URLconf from inside
        a streaming response.
        """
        response = self.client.get('/second_test/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(b''.join(response), b'/second_test/')
    @override_settings(
        MIDDLEWARE_CLASSES=[
            '%s.ChangeURLconfMiddleware' % middleware.__name__,
            '%s.ReverseOuterInStreaming' % middleware.__name__,
        ]
    )
    def test_reverse_outer_in_streaming(self):
        """
        Test reversing an URL from the *default* URLconf from inside
        a streaming response.
        """
        message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
        with self.assertRaisesMessage(NoReverseMatch, message):
            self.client.get('/second_test/')
            # Consuming the stream is what triggers the reverse() call.
            b''.join(self.client.get('/second_test/'))
class ErrorHandlerResolutionTests(TestCase):
"""Tests for handler400, handler404 and handler500"""
def setUp(self):
urlconf = 'urlpatterns_reverse.urls_error_handlers'
urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables'
self.resolver = RegexURLResolver(r'^$', urlconf)
self.callable_resolver = RegexURLResolver(r'^$', urlconf_callables)
def test_named_handlers(self):
handler = (empty_view, {})
self.assertEqual(self.resolver.resolve_error_handler(400), handler)
self.assertEqual(self.resolver.resolve_error_handler(404), handler)
self.assertEqual(self.resolver.resolve_error_handler(500), handler)
def test_callable_handers(self):
handler = (empty_view, {})
self.assertEqual(self.callable_resolver.resolve_error_handler(400), handler)
self.assertEqual(self.callable_resolver.resolve_error_handler(404), handler)
self.assertEqual(self.callable_resolver.resolve_error_handler(500), handler)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_full_import')
class DefaultErrorHandlerTests(TestCase):
def test_default_handler(self):
"If the urls.py doesn't specify handlers, the defaults are used"
try:
response = self.client.get('/test/')
self.assertEqual(response.status_code, 404)
except AttributeError:
self.fail("Shouldn't get an AttributeError due to undefined 404 handler")
try:
self.assertRaises(ValueError, self.client.get, '/bad_view/')
except AttributeError:
self.fail("Shouldn't get an AttributeError due to undefined 500 handler")
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(TestCase):
"""Tests for handler404 and handler500 if urlconf is None"""
def test_no_handler_exception(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/test/me/')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class ResolverMatchTests(TestCase):
def test_urlpattern_resolve(self):
for path, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data:
# Test legacy support for extracting "function, args, kwargs"
match_func, match_args, match_kwargs = resolve(path)
self.assertEqual(match_func, func)
self.assertEqual(match_args, args)
self.assertEqual(match_kwargs, kwargs)
# Test ResolverMatch capabilities.
match = resolve(path)
self.assertEqual(match.__class__, ResolverMatch)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.app_name, app_name)
self.assertEqual(match.namespace, namespace)
self.assertEqual(match.view_name, view_name)
self.assertEqual(match.func, func)
self.assertEqual(match.args, args)
self.assertEqual(match.kwargs, kwargs)
# ... and for legacy purposes:
self.assertEqual(match[0], func)
self.assertEqual(match[1], args)
self.assertEqual(match[2], kwargs)
def test_resolver_match_on_request(self):
response = self.client.get('/resolver_match/')
resolver_match = response.resolver_match
self.assertEqual(resolver_match.url_name, 'test-resolver-match')
def test_resolver_match_on_request_before_resolution(self):
request = HttpRequest()
self.assertIsNone(request.resolver_match)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls')
class ErroneousViewTests(TestCase):
def test_erroneous_resolve(self):
self.assertRaises(ImportError, self.client.get, '/erroneous_inner/')
self.assertRaises(ImportError, self.client.get, '/erroneous_outer/')
self.assertRaises(ViewDoesNotExist, self.client.get, '/missing_inner/')
self.assertRaises(ViewDoesNotExist, self.client.get, '/missing_outer/')
self.assertRaises(ViewDoesNotExist, self.client.get, '/uncallable/')
# Regression test for #21157
self.assertRaises(ImportError, self.client.get, '/erroneous_unqualified/')
def test_erroneous_reverse(self):
"""
Ensure that a useful exception is raised when a regex is invalid in the
URLConf.
Refs #6170.
"""
# The regex error will be hit before NoReverseMatch can be raised
self.assertRaises(ImproperlyConfigured, reverse, 'whatever blah blah')
class ViewLoadingTests(TestCase):
def test_view_loading(self):
self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'),
empty_view)
# passing a callable should return the callable
self.assertEqual(get_callable(empty_view), empty_view)
def test_exceptions(self):
# A missing view (identified by an AttributeError) should raise
# ViewDoesNotExist, ...
six.assertRaisesRegex(self, ViewDoesNotExist,
".*View does not exist in.*",
get_callable,
'urlpatterns_reverse.views.i_should_not_exist')
# ... but if the AttributeError is caused by something else don't
# swallow it.
self.assertRaises(AttributeError, get_callable,
'urlpatterns_reverse.views_broken.i_am_broken')
class IncludeTests(SimpleTestCase):
def test_include_app_name_but_no_namespace(self):
msg = "Must specify a namespace if specifying app_name."
with self.assertRaisesMessage(ValueError, msg):
include('urls', app_name='bar')
| 50.471129 | 240 | 0.640838 |
from __future__ import unicode_literals
import sys
import unittest
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.conf.urls import include
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.core.urlresolvers import (
NoReverseMatch, RegexURLPattern, RegexURLResolver, Resolver404,
ResolverMatch, get_callable, get_resolver, resolve, reverse, reverse_lazy,
)
from django.http import (
HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.shortcuts import redirect
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, override_settings,
)
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from . import middleware, urlconf_outer, views
from .views import empty_view
resolve_test_data = (
('/normal/42/37/', 'normal-view', None, '', 'normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/view_class/42/37/', 'view-class', None, '', 'view-class', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
('/included/normal/42/37/', 'inc-normal-view', None, '', 'inc-normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/included/view_class/42/37/', 'inc-view-class', None, '', 'inc-view-class', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
('/mixed_args/42/37/', 'mixed-args', None, '', 'mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
('/included/mixed_args/42/37/', 'inc-mixed-args', None, '', 'inc-mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
/unnamed/normal/42/37/', None, None, '', 'urlpatterns_reverse.views.empty_view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/unnamed/view_class/42/37/', None, None, '', 'urlpatterns_reverse.views.ViewClass', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
('/no_kwargs/42/37/', 'no-kwargs', None, '', 'no-kwargs', views.empty_view, ('42', '37'), {}),
('/included/no_kwargs/42/37/', 'inc-no-kwargs', None, '', 'inc-no-kwargs', views.empty_view, ('42', '37'), {}),
('/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'test-ns1:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/ns-included1/normal/42/37/', 'inc-normal-view', None, 'inc-ns1', 'inc-ns1:inc-normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'testapp:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'other-ns2:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'other-ns1:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/ns-included1/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:test-ns3', 'inc-ns1:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:inc-ns4:inc-ns2:test-ns3', 'inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/inc70/', 'inner-nothing', None, 'inc-ns5', 'inc-ns5:inner-nothing', views.empty_view, tuple(), {'outer': '70'}),
('/inc78/extra/foobar/', 'inner-extra', None, 'inc-ns5', 'inc-ns5:inner-extra', views.empty_view, tuple(), {'outer': '78', 'extra': 'foobar'}),
)
test_data = (
('places', '/places/3/', [3], {}),
('places', '/places/3/', ['3'], {}),
('places', NoReverseMatch, ['a'], {}),
('places', NoReverseMatch, [], {}),
('places?', '/place/', [], {}),
('places+', '/places/', [], {}),
('places*', '/place/', [], {}),
('places2?', '/', [], {}),
('places2+', '/places/', [], {}),
('places2*', '/', [], {}),
('places3', '/places/4/', [4], {}),
('places3', '/places/harlem/', ['harlem'], {}),
('places3', NoReverseMatch, ['harlem64'], {}),
('places4', '/places/3/', [], {'id': 3}),
('people', NoReverseMatch, [], {}),
('people', '/people/adrian/', ['adrian'], {}),
('people', '/people/adrian/', [], {'name': 'adrian'}),
('people', NoReverseMatch, ['name with spaces'], {}),
('people', NoReverseMatch, [], {'name': 'name with spaces'}),
('people2', '/people/name/', [], {}),
('people2a', '/people/name/fred/', ['fred'], {}),
('people_backref', '/people/nate-nate/', ['nate'], {}),
('people_backref', '/people/nate-nate/', [], {'name': 'nate'}),
('optional', '/optional/fred/', [], {'name': 'fred'}),
('optional', '/optional/fred/', ['fred'], {}),
('hardcoded', '/hardcoded/', [], {}),
('hardcoded2', '/hardcoded/doc.pdf', [], {}),
('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
('people3', NoReverseMatch, [], {'state': 'il'}),
('people3', NoReverseMatch, [], {'name': 'adrian'}),
('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}),
('people6', '/people//adrian/', ['adrian'], {}),
('range', '/character_set/a/', [], {}),
('range2', '/character_set/x/', [], {}),
('price', '/price/$10/', ['10'], {}),
('price2', '/price/$10/', ['10'], {}),
('price3', '/price/$10/', ['10'], {}),
('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}),
('headlines', '/headlines/2007.5.21/', [], dict(year=2007, month=5, day=21)),
('windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [], dict(drive_name='C', path=r'Documents and Settings\spam')),
('special', r'/special_chars/~@+%5C$*%7C/', [r'~@+\$*|'], {}),
('special', r'/special_chars/some%20resource/', [r'some resource'], {}),
('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}),
('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}),
('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}),
('special', NoReverseMatch, [''], {}),
('mixed', '/john/0/', [], {'name': 'john'}),
('repeats', '/repeats/a/', [], {}),
('repeats2', '/repeats/aa/', [], {}),
('repeats3', '/repeats/aa/', [], {}),
('insensitive', '/CaseInsensitive/fred', ['fred'], {}),
('test', '/test/1', [], {}),
('test2', '/test/2', [], {}),
('inner-nothing', '/outer/42/', [], {'outer': '42'}),
('inner-nothing', '/outer/42/', ['42'], {}),
('inner-nothing', NoReverseMatch, ['foo'], {}),
('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}),
('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}),
('inner-extra', NoReverseMatch, ['fred', 'inner'], {}),
('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}),
('disjunction', NoReverseMatch, ['foo'], {}),
('inner-disjunction', NoReverseMatch, ['10', '11'], {}),
('extra-places', '/e-places/10/', ['10'], {}),
('extra-people', '/e-people/fred/', ['fred'], {}),
('extra-people', '/e-people/fred/', [], {'name': 'fred'}),
('part', '/part/one/', [], {'value': 'one'}),
('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/part2/one/', [], {'value': 'one'}),
('part2', '/part2/', [], {}),
('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}),
('urlpatterns_reverse.views.absolute_kwargs_view', '/absolute_arg_view/', [], {}),
('urlpatterns_reverse.views.absolute_kwargs_view', '/absolute_arg_view/10/', [], {'arg1': 10}),
('non_path_include', '/includes/non_path_include/', [], {}),
'defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}),
('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}),
('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}),
('defaults', NoReverseMatch, [], {'arg2': 1}),
('security', '/%2Fexample.com/security/', ['/example.com'], {}),
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls')
class NoURLPatternsTests(TestCase):
def test_no_urls_exception(self):
resolver = RegexURLResolver(r'^$', settings.ROOT_URLCONF)
self.assertRaisesMessage(ImproperlyConfigured,
"The included urlconf 'urlpatterns_reverse.no_urls' does not "
"appear to have any patterns in it. If you see valid patterns in "
"the file then the issue is probably caused by a circular import.",
getattr, resolver, 'url_patterns')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class URLPatternReverse(TestCase):
@ignore_warnings(category=RemovedInDjango20Warning)
def test_urlpattern_reverse(self):
for name, expected, args, kwargs in test_data:
try:
got = reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.assertEqual(expected, NoReverseMatch)
else:
self.assertEqual(got, expected)
def test_reverse_none(self):
self.assertRaises(NoReverseMatch, reverse, None)
def test_prefix_braces(self):
self.assertEqual('/%7B%7Binvalid%7D%7D/includes/non_path_include/',
reverse('non_path_include', prefix='/{{invalid}}/'))
def test_prefix_parenthesis(self):
self.assertEqual('/bogus%29/includes/non_path_include/',
reverse('non_path_include', prefix='/bogus)/'))
def test_prefix_format_char(self):
self.assertEqual('/bump%2520map/includes/non_path_include/',
reverse('non_path_include', prefix='/bump%20map/'))
def test_non_urlsafe_prefix_with_args(self):
self.assertEqual('/%7Eme/places/1/',
reverse('places', args=[1], prefix='/~me/'))
def test_patterns_reported(self):
try:
reverse("people", args=[])
except NoReverseMatch as e:
pattern_description = r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"
self.assertIn(pattern_description, str(e))
else:
# exception
self.fail("Expected a NoReverseMatch, but none occurred.")
def test_reverse_returns_unicode(self):
name, expected, args, kwargs = test_data[0]
self.assertIsInstance(
reverse(name, args=args, kwargs=kwargs),
six.text_type
)
class ResolverTests(unittest.TestCase):
def test_resolver_repr(self):
# Pick a resolver from a namespaced urlconf
resolver = get_resolver('urlpatterns_reverse.namespace_urls')
sub_resolver = resolver.namespace_dict['test-ns1'][1]
self.assertIn('<RegexURLPattern list>', repr(sub_resolver))
def test_reverse_lazy_object_coercion_by_resolve(self):
urls = 'urlpatterns_reverse.named_urls'
proxy_url = reverse_lazy('named-url1', urlconf=urls)
resolver = get_resolver(urls)
try:
resolver.resolve(proxy_url)
except TypeError:
self.fail('Failed to coerce lazy object to text')
def test_non_regex(self):
self.assertRaises(Resolver404, resolve, '')
self.assertRaises(Resolver404, resolve, 'a')
self.assertRaises(Resolver404, resolve, '\\')
self.assertRaises(Resolver404, resolve, '.')
def test_404_tried_urls_have_names(self):
urls = 'urlpatterns_reverse.named_urls'
# this list matches the expected URL types and names returned when
# you try to resolve a non-existent URL in the first level of included
# URLs in named_urls.py (e.g., '/included/non-existent-url')
url_types_names = [
[{'type': RegexURLPattern, 'name': 'named-url1'}],
[{'type': RegexURLPattern, 'name': 'named-url2'}],
[{'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url3'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url4'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLResolver}],
]
try:
resolve('/included/non-existent-url', urlconf=urls)
self.fail('resolve did not raise a 404')
except Resolver404 as e:
# make sure we at least matched the root ('/') url resolver:
self.assertIn('tried', e.args[0])
tried = e.args[0]['tried']
self.assertEqual(len(e.args[0]['tried']), len(url_types_names), 'Wrong number of tried URLs returned. Expected %s, got %s.' % (len(url_types_names), len(e.args[0]['tried'])))
for tried, expected in zip(e.args[0]['tried'], url_types_names):
for t, e in zip(tried, expected):
self.assertIsInstance(t, e['type']), str('%s is not an instance of %s') % (t, e['type'])
if 'name' in e:
if not e['name']:
self.assertIsNone(t.name, 'Expected no URL name but found %s.' % t.name)
else:
self.assertEqual(t.name, e['name'], 'Wrong URL name. Expected "%s", got "%s".' % (e['name'], t.name))
@override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls')
class ReverseLazyTest(TestCase):
def test_redirect_with_lazy_reverse(self):
response = self.client.get('/redirect/')
self.assertRedirects(response, "/redirected_to/", status_code=302)
def test_user_permission_with_lazy_reverse(self):
User.objects.create_user('alfred', 'alfred@example.com', password='testpw')
response = self.client.get('/login_required_view/')
self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302)
self.client.login(username='alfred', password='testpw')
response = self.client.get('/login_required_view/')
self.assertEqual(response.status_code, 200)
def test_inserting_reverse_lazy_into_string(self):
self.assertEqual(
'Some URL: %s' % reverse_lazy('some-login-page'),
'Some URL: /login/'
)
if six.PY2:
self.assertEqual(
b'Some URL: %s' % reverse_lazy('some-login-page'),
'Some URL: /login/'
)
class ReverseLazySettingsTest(AdminScriptTestCase):
def setUp(self):
self.write_settings('settings.py', extra="""
from django.core.urlresolvers import reverse_lazy
LOGIN_URL = reverse_lazy('login')""")
def tearDown(self):
self.remove_settings('settings.py')
def test_lazy_in_settings(self):
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseShortcutTests(TestCase):
def test_redirect_to_object(self):
# We don't really need a model; just something with a get_absolute_url
class FakeObj(object):
def get_absolute_url(self):
return "/hi-there/"
res = redirect(FakeObj())
self.assertIsInstance(res, HttpResponseRedirect)
self.assertEqual(res.url, '/hi-there/')
res = redirect(FakeObj(), permanent=True)
self.assertIsInstance(res, HttpResponsePermanentRedirect)
self.assertEqual(res.url, '/hi-there/')
def test_redirect_to_view_name(self):
res = redirect('hardcoded2')
self.assertEqual(res.url, '/hardcoded/doc.pdf')
res = redirect('places', 1)
self.assertEqual(res.url, '/places/1/')
res = redirect('headlines', year='2008', month='02', day='17')
self.assertEqual(res.url, '/headlines/2008.02.17/')
self.assertRaises(NoReverseMatch, redirect, 'not-a-view')
def test_redirect_to_url(self):
res = redirect('/foo/')
self.assertEqual(res.url, '/foo/')
res = redirect('http://example.com/')
self.assertEqual(res.url, 'http://example.com/')
res = redirect('/æøå/abc/')
self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5/abc/')
res = redirect('/æøå.abc/')
self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5.abc/')
res = redirect('os.path')
self.assertEqual(res.url, 'os.path')
def test_no_illegal_imports(self):
redirect("urlpatterns_reverse.nonimported_module.view")
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_reverse_by_path_nested(self):
self.assertEqual(reverse('urlpatterns_reverse.views.nested_view'), '/includes/nested_path/')
def test_redirect_view_object(self):
from .views import absolute_kwargs_view
res = redirect(absolute_kwargs_view)
self.assertEqual(res.url, '/absolute_arg_view/')
self.assertRaises(NoReverseMatch, redirect, absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class NamespaceTests(TestCase):
def test_ambiguous_object(self):
self.assertRaises(NoReverseMatch, reverse, 'urlobject-view')
self.assertRaises(NoReverseMatch, reverse, 'urlobject-view', args=[37, 42])
self.assertRaises(NoReverseMatch, reverse, 'urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
def test_ambiguous_urlpattern(self):
self.assertRaises(NoReverseMatch, reverse, 'inner-nothing')
self.assertRaises(NoReverseMatch, reverse, 'inner-nothing', args=[37, 42])
self.assertRaises(NoReverseMatch, reverse, 'inner-nothing', kwargs={'arg1': 42, 'arg2': 37})
def test_non_existent_namespace(self):
self.assertRaises(NoReverseMatch, reverse, 'blahblah:urlobject-view')
self.assertRaises(NoReverseMatch, reverse, 'test-ns1:blahblah:urlobject-view')
def test_normal_name(self):
self.assertEqual('/normal/', reverse('normal-view'))
self.assertEqual('/normal/37/42/', reverse('normal-view', args=[37, 42]))
self.assertEqual('/normal/42/37/', reverse('normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/+%5C$*/', reverse('special-view'))
def test_simple_included_name(self):
self.assertEqual('/included/normal/', reverse('inc-normal-view'))
self.assertEqual('/included/normal/37/42/', reverse('inc-normal-view', args=[37, 42]))
self.assertEqual('/included/normal/42/37/', reverse('inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/included/+%5C$*/', reverse('inc-special-view'))
def test_namespace_object(self):
self.assertEqual('/test1/inner/', reverse('test-ns1:urlobject-view'))
self.assertEqual('/test1/inner/37/42/', reverse('test-ns1:urlobject-view', args=[37, 42]))
self.assertEqual('/test1/inner/42/37/', reverse('test-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/test1/inner/+%5C$*/', reverse('test-ns1:urlobject-special-view'))
def test_embedded_namespace_object(self):
self.assertEqual('/included/test3/inner/', reverse('test-ns3:urlobject-view'))
self.assertEqual('/included/test3/inner/37/42/', reverse('test-ns3:urlobject-view', args=[37, 42]))
self.assertEqual('/included/test3/inner/42/37/', reverse('test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/included/test3/inner/+%5C$*/', reverse('test-ns3:urlobject-special-view'))
def test_namespace_pattern(self):
self.assertEqual('/ns-included1/normal/', reverse('inc-ns1:inc-normal-view'))
self.assertEqual('/ns-included1/normal/37/42/', reverse('inc-ns1:inc-normal-view', args=[37, 42]))
self.assertEqual('/ns-included1/normal/42/37/', reverse('inc-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/ns-included1/+%5C$*/', reverse('inc-ns1:inc-special-view'))
def test_namespace_pattern_with_variable_prefix(self):
self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42}))
self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', args=[42]))
self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42, 'arg1': 37, 'arg2': 4}))
self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', args=[42, 37, 4]))
self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', kwargs={'outer': 42}))
self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', args=[42]))
def test_multiple_namespace_pattern(self):
self.assertEqual('/ns-included1/test3/inner/', reverse('inc-ns1:test-ns3:urlobject-view'))
self.assertEqual('/ns-included1/test3/inner/37/42/', reverse('inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
self.assertEqual('/ns-included1/test3/inner/42/37/', reverse('inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:test-ns3:urlobject-special-view'))
def test_nested_namespace_pattern(self):
self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view'))
self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/37/42/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/42/37/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view'))
def test_app_lookup_object(self):
self.assertEqual('/default/inner/', reverse('testapp:urlobject-view'))
self.assertEqual('/default/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42]))
self.assertEqual('/default/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/default/inner/+%5C$*/', reverse('testapp:urlobject-special-view'))
def test_app_lookup_object_with_default(self):
self.assertEqual('/included/test3/inner/', reverse('testapp:urlobject-view', current_app='test-ns3'))
self.assertEqual('/included/test3/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42], current_app='test-ns3'))
self.assertEqual('/included/test3/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='test-ns3'))
self.assertEqual('/included/test3/inner/+%5C$*/', reverse('testapp:urlobject-special-view', current_app='test-ns3'))
def test_app_lookup_object_without_default(self):
self.assertEqual('/other2/inner/', reverse('nodefault:urlobject-view'))
self.assertEqual('/other2/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42]))
self.assertEqual('/other2/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/other2/inner/+%5C$*/', reverse('nodefault:urlobject-special-view'))
self.assertEqual('/other1/inner/', reverse('nodefault:urlobject-view', current_app='other-ns1'))
self.assertEqual('/other1/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42], current_app='other-ns1'))
self.assertEqual('/other1/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='other-ns1'))
self.assertEqual('/other1/inner/+%5C$*/', reverse('nodefault:urlobject-special-view', current_app='other-ns1'))
def test_special_chars_namespace(self):
self.assertEqual('/+%5C$*/included/normal/', reverse('special:inc-normal-view'))
self.assertEqual('/+%5C$*/included/normal/37/42/', reverse('special:inc-normal-view', args=[37, 42]))
self.assertEqual('/+%5C$*/included/normal/42/37/', reverse('special:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/+%5C$*/included/+%5C$*/', reverse('special:inc-special-view'))
def test_namespaces_with_variables(self):
self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', kwargs={'outer': '70'}))
self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', kwargs={'outer': '78', 'extra': 'foobar'}))
self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', args=['70']))
self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', args=['78', 'foobar']))
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(TestCase):
def test_urlconf(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:/test/me/,'
b'inner:/inner_urlconf/second_test/')
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE_CLASSES=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
]
)
def test_urlconf_overridden(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:,inner:/second_test/')
@override_settings(
MIDDLEWARE_CLASSES=[
'%s.NullChangeURLconfMiddleware' % middleware.__name__,
]
)
def test_urlconf_overridden_with_null(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/test/me/')
@override_settings(
MIDDLEWARE_CLASSES=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInResponseMiddleware' % middleware.__name__,
]
)
def test_reverse_inner_in_response_middleware(self):
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'/second_test/')
@override_settings(
MIDDLEWARE_CLASSES=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInResponseMiddleware' % middleware.__name__,
]
)
def test_reverse_outer_in_response_middleware(self):
message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
@override_settings(
MIDDLEWARE_CLASSES=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInStreaming' % middleware.__name__,
]
)
def test_reverse_inner_in_streaming(self):
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(b''.join(response), b'/second_test/')
@override_settings(
MIDDLEWARE_CLASSES=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInStreaming' % middleware.__name__,
]
)
def test_reverse_outer_in_streaming(self):
message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
b''.join(self.client.get('/second_test/'))
class ErrorHandlerResolutionTests(TestCase):
    """Error handlers are resolved whether named by dotted path or given as callables."""

    def setUp(self):
        urlconf = 'urlpatterns_reverse.urls_error_handlers'
        urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables'
        self.resolver = RegexURLResolver(r'^$', urlconf)
        self.callable_resolver = RegexURLResolver(r'^$', urlconf_callables)

    def test_named_handlers(self):
        """String-path handlers resolve to a (view, kwargs) pair for each status."""
        handler = (empty_view, {})
        for status_code in (400, 404, 500):
            self.assertEqual(self.resolver.resolve_error_handler(status_code), handler)

    def test_callable_handers(self):
        """Callable handlers resolve to the same (view, kwargs) pair for each status."""
        handler = (empty_view, {})
        for status_code in (400, 404, 500):
            self.assertEqual(self.callable_resolver.resolve_error_handler(status_code), handler)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_full_import')
class DefaultErrorHandlerTests(TestCase):
    """A urlconf that defines no handler404/handler500 falls back to the defaults."""

    def test_default_handler(self):
        """Missing error handlers must not surface as AttributeError."""
        try:
            # 404 path: the default handler404 should produce a 404 response.
            response = self.client.get('/test/')
            self.assertEqual(response.status_code, 404)
        except AttributeError:
            self.fail("Shouldn't get an AttributeError due to undefined 404 handler")
        try:
            # 500 path: with DEBUG-style propagation the view's ValueError is
            # re-raised rather than crashing on a missing handler500.
            self.assertRaises(ValueError, self.client.get, '/bad_view/')
        except AttributeError:
            self.fail("Shouldn't get an AttributeError due to undefined 500 handler")
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(TestCase):
    """With ROOT_URLCONF explicitly None, any request is a configuration error."""

    def test_no_handler_exception(self):
        with self.assertRaises(ImproperlyConfigured):
            self.client.get('/test/me/')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class ResolverMatchTests(TestCase):
    """resolve() returns a ResolverMatch usable both as object and as 3-tuple."""

    def test_urlpattern_resolve(self):
        """Each fixture row in resolve_test_data matches both access styles."""
        for path, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data:
            # Legacy behavior: the match unpacks like a (func, args, kwargs) tuple.
            match_func, match_args, match_kwargs = resolve(path)
            self.assertEqual(match_func, func)
            self.assertEqual(match_args, args)
            self.assertEqual(match_kwargs, kwargs)
            # New behavior: attribute access on the ResolverMatch object.
            match = resolve(path)
            self.assertEqual(match.__class__, ResolverMatch)
            self.assertEqual(match.url_name, url_name)
            self.assertEqual(match.app_name, app_name)
            self.assertEqual(match.namespace, namespace)
            self.assertEqual(match.view_name, view_name)
            self.assertEqual(match.func, func)
            self.assertEqual(match.args, args)
            self.assertEqual(match.kwargs, kwargs)
            # Indexing mirrors the tuple interface.
            self.assertEqual(match[0], func)
            self.assertEqual(match[1], args)
            self.assertEqual(match[2], kwargs)

    def test_resolver_match_on_request(self):
        """After handling, the request's resolver_match is exposed on the response."""
        response = self.client.get('/resolver_match/')
        resolver_match = response.resolver_match
        self.assertEqual(resolver_match.url_name, 'test-resolver-match')

    def test_resolver_match_on_request_before_resolution(self):
        """A fresh, unrouted HttpRequest has no resolver_match yet."""
        request = HttpRequest()
        self.assertIsNone(request.resolver_match)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls')
class ErroneousViewTests(TestCase):
    """Broken view references surface as ImportError/ViewDoesNotExist at request time."""

    def test_erroneous_resolve(self):
        cases = [
            (ImportError, '/erroneous_inner/'),
            (ImportError, '/erroneous_outer/'),
            (ViewDoesNotExist, '/missing_inner/'),
            (ViewDoesNotExist, '/missing_outer/'),
            (ViewDoesNotExist, '/uncallable/'),
            (ImportError, '/erroneous_unqualified/'),
        ]
        for exception, url in cases:
            with self.assertRaises(exception):
                self.client.get(url)

    def test_erroneous_reverse(self):
        """A name with spaces cannot be interpreted as a dotted view path."""
        with self.assertRaises(ImproperlyConfigured):
            reverse('whatever blah blah')
class ViewLoadingTests(TestCase):
    """get_callable() loads views from dotted strings and passes callables through."""

    def test_view_loading(self):
        # A dotted-path string resolves to the function object itself.
        self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'),
                         empty_view)
        # An already-callable argument is returned unchanged.
        self.assertEqual(get_callable(empty_view), empty_view)

    def test_exceptions(self):
        """Missing attributes and broken modules raise distinct, useful errors."""
        # A module that imports fine but lacks the attribute -> ViewDoesNotExist.
        six.assertRaisesRegex(self, ViewDoesNotExist,
                              ".*View does not exist in.*",
                              get_callable,
                              'urlpatterns_reverse.views.i_should_not_exist')
        # A module whose import itself raises AttributeError must NOT be
        # swallowed and misreported as ViewDoesNotExist -- the original
        # AttributeError propagates.
        self.assertRaises(AttributeError, get_callable,
                          'urlpatterns_reverse.views_broken.i_am_broken')
class IncludeTests(SimpleTestCase):
    """Validation of include() arguments."""

    def test_include_app_name_but_no_namespace(self):
        """Passing app_name without a namespace is rejected with a clear message."""
        msg = "Must specify a namespace if specifying app_name."
        with self.assertRaisesMessage(ValueError, msg):
            include('urls', app_name='bar')
| true | true |
f7f8fcdf0fa75890bcd55e1d8dde8f464567a7d9 | 5,239 | py | Python | setupext/platform.py | bpercy11/jpype | f4d59ee1829bb6060e7d2ed82af4390b59d43309 | [
"Apache-2.0"
] | null | null | null | setupext/platform.py | bpercy11/jpype | f4d59ee1829bb6060e7d2ed82af4390b59d43309 | [
"Apache-2.0"
] | 51 | 2017-06-18T15:45:23.000Z | 2021-02-17T21:24:35.000Z | setupext/platform.py | bpercy11/jpype | f4d59ee1829bb6060e7d2ed82af4390b59d43309 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# *****************************************************************************
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See NOTICE file for details.
#
# *****************************************************************************
import setupext
import os
import sys
import sysconfig
import distutils.log
# This handles all of the work to make our platform specific extension options.
def Platform(include_dirs=None, sources=None, platform=sys.platform):
    """Assemble platform-specific build options for the native extension.

    Args:
        include_dirs: additional include directories to search (list of str).
        sources: source files/globs, expanded via ``setupext.utils.find_sources``.
        platform: platform identifier (defaults to ``sys.platform``); taken as
            a parameter so the per-platform branches can be exercised in tests.

    Returns:
        dict of keyword arguments (include_dirs, sources, libraries,
        define_macros, extra_compile_args, extra_link_args) suitable for
        ``setuptools.Extension``.
    """
    if include_dirs is None:
        include_dirs = []
    if sources is None:
        sources = []
    platform_specific = {
        'include_dirs': include_dirs,
        'sources': setupext.utils.find_sources(sources),
    }
    fallback_jni = os.path.join('native', 'jni_include')

    # Try to include JNI first from an eventually given JAVA_HOME, then fall
    # back to the headers distributed with the source tree.
    java_home = os.getenv('JAVA_HOME', '')
    found_jni = False
    if os.path.exists(java_home):
        platform_specific['include_dirs'] += [os.path.join(java_home, 'include')]

        # Check whether jni.h can actually be found there.
        for d in platform_specific['include_dirs']:
            if os.path.exists(os.path.join(str(d), 'jni.h')):
                distutils.log.info("Found native jni.h at %s", d)
                found_jni = True
                break

        if not found_jni:
            distutils.log.warn('Falling back to provided JNI headers, since your provided'
                               ' JAVA_HOME "%s" does not provide jni.h', java_home)

    if not found_jni:
        platform_specific['include_dirs'] += [fallback_jni]

    platform_specific['extra_link_args'] = []
    # BUG FIX: the original message had no %s placeholder but still passed
    # `platform` as an argument; distutils.log applies printf-style formatting
    # and raised "TypeError: not all arguments converted" when logging.
    distutils.log.info("Configure platform to %s", platform)
    static = True  # reserved by the commented-out BLDLIBRARY logic below
    if platform == 'win32':
        distutils.log.info("Add windows settings")
        # platform_specific['libraries'] = ['Advapi32']
        platform_specific['define_macros'] = [('WIN32', 1)]
        # Robust major-version check; the old string comparison
        # (sys.version > '3') happened to work but is fragile.
        if sys.version_info >= (3,):
            platform_specific['extra_compile_args'] = [
                '/Zi', '/EHsc', '/std:c++14']
        else:
            platform_specific['extra_compile_args'] = ['/Zi', '/EHsc']
        # platform_specific['extra_link_args'] = ['/DEBUG']
        jni_md_platform = 'win32'
    elif platform == 'darwin':
        distutils.log.info("Add darwin settings")
        platform_specific['libraries'] = ['dl']
        platform_specific['define_macros'] = [('MACOSX', 1)]
        platform_specific['extra_compile_args'] = ['-g0', '-std=c++11', '-O2']
        jni_md_platform = 'darwin'
    elif platform.startswith('linux'):
        distutils.log.info("Add linux settings")
        platform_specific['libraries'] = ['dl']
        platform_specific['extra_compile_args'] = ['-g0', '-std=c++11', '-O2']
        jni_md_platform = 'linux'
    elif platform.startswith('aix7'):
        distutils.log.info("Add aix settings")
        platform_specific['libraries'] = ['dl']
        platform_specific['extra_compile_args'] = ['-g3', '-std=c++11', '-O2']
        jni_md_platform = 'aix7'
    elif platform.startswith('freebsd'):
        distutils.log.info("Add freebsd settings")
        jni_md_platform = 'freebsd'
    elif platform.startswith('android'):
        distutils.log.info("Add android settings")
        platform_specific['libraries'] = ['dl', 'c++_shared', 'SDL2']
        platform_specific['extra_compile_args'] = ['-g0', '-std=c++11', '-fexceptions', '-frtti', '-O2']
        print("PLATFORM_SPECIFIC:", platform_specific)
        jni_md_platform = 'linux'  # Android reuses the Linux jni_md.h layout
        static = False
    elif platform == 'zos':
        distutils.log.info("Add zos settings")
        jni_md_platform = 'zos'
    else:
        jni_md_platform = None
        distutils.log.warn("Your platform '%s' is not being handled explicitly."
                           " It may work or not!", platform)

    # This code is used to include the python library in the build when starting
    # Python from within Java. It will be used in the future, but is not
    # currently required.
    # if static and sysconfig.get_config_var('BLDLIBRARY') is not None:
    #    platform_specific['extra_link_args'].append(sysconfig.get_config_var('BLDLIBRARY'))

    if found_jni:
        if jni_md_platform is None:
            # BUG FIX: an unrecognized platform combined with a usable
            # JAVA_HOME previously crashed in os.path.join(..., None).
            distutils.log.warn("No jni_md.h directory known for platform '%s';"
                               " skipping", platform)
        else:
            distutils.log.info("Add JNI directory %s" % os.path.join(java_home, 'include', jni_md_platform))
            platform_specific['include_dirs'] += \
                [os.path.join(java_home, 'include', jni_md_platform)]

    return platform_specific
# include this stolen from FindJNI.cmake
"""
FIND_PATH(JAVA_INCLUDE_PATH2 jni_md.h
${JAVA_INCLUDE_PATH}
${JAVA_INCLUDE_PATH}/win32
${JAVA_INCLUDE_PATH}/linux
${JAVA_INCLUDE_PATH}/freebsd
${JAVA_INCLUDE_PATH}/solaris
${JAVA_INCLUDE_PATH}/hp-ux
${JAVA_INCLUDE_PATH}/alpha
)"""
| 37.421429 | 104 | 0.626074 |
import setupext
import os
import sys
import sysconfig
import distutils.log
def Platform(include_dirs=None, sources=None, platform=sys.platform):
    """Build the dict of platform-specific ``Extension`` keyword arguments.

    ``platform`` defaults to ``sys.platform`` but is a parameter so the
    per-platform branches can be exercised independently.
    """
    if include_dirs is None:
        include_dirs = []
    if sources is None:
        sources = []
    platform_specific = {
        'include_dirs': include_dirs,
        'sources': setupext.utils.find_sources(sources),
    }
    # JNI headers bundled with the source tree; used when JAVA_HOME is unusable.
    fallback_jni = os.path.join('native', 'jni_include')
    java_home = os.getenv('JAVA_HOME', '')
    found_jni = False
    if os.path.exists(java_home):
        platform_specific['include_dirs'] += [os.path.join(java_home, 'include')]
        # Verify jni.h is actually present in one of the include directories.
        for d in platform_specific['include_dirs']:
            if os.path.exists(os.path.join(str(d), 'jni.h')):
                distutils.log.info("Found native jni.h at %s", d)
                found_jni = True
                break
        if not found_jni:
            distutils.log.warn('Falling back to provided JNI headers, since your provided'
                               ' JAVA_HOME "%s" does not provide jni.h', java_home)
    if not found_jni:
        platform_specific['include_dirs'] += [fallback_jni]
    platform_specific['extra_link_args'] = []
    # NOTE(review): this message has no %s placeholder yet passes `platform`
    # as a printf argument; distutils.log will raise TypeError when it
    # formats the record -- confirm and fix.
    distutils.log.info("Configure platform to", platform)
    # `static` is only consulted by logic that is currently disabled;
    # presumably reserved for linking the Python library when embedding.
    static = True
    if platform == 'win32':
        distutils.log.info("Add windows settings")
        platform_specific['define_macros'] = [('WIN32', 1)]
        # String comparison against '3' selects the Python-3 compiler flags.
        if sys.version > '3':
            platform_specific['extra_compile_args'] = [
                '/Zi', '/EHsc', '/std:c++14']
        else:
            platform_specific['extra_compile_args'] = ['/Zi', '/EHsc']
        jni_md_platform = 'win32'
    elif platform == 'darwin':
        distutils.log.info("Add darwin settings")
        platform_specific['libraries'] = ['dl']
        platform_specific['define_macros'] = [('MACOSX', 1)]
        platform_specific['extra_compile_args'] = ['-g0', '-std=c++11', '-O2']
        jni_md_platform = 'darwin'
    elif platform.startswith('linux'):
        distutils.log.info("Add linux settings")
        platform_specific['libraries'] = ['dl']
        platform_specific['extra_compile_args'] = ['-g0', '-std=c++11', '-O2']
        jni_md_platform = 'linux'
    elif platform.startswith('aix7'):
        distutils.log.info("Add aix settings")
        platform_specific['libraries'] = ['dl']
        platform_specific['extra_compile_args'] = ['-g3', '-std=c++11', '-O2']
        jni_md_platform = 'aix7'
    elif platform.startswith('freebsd'):
        distutils.log.info("Add freebsd settings")
        jni_md_platform = 'freebsd'
    elif platform.startswith('android'):
        distutils.log.info("Add android settings")
        platform_specific['libraries'] = ['dl', 'c++_shared', 'SDL2']
        platform_specific['extra_compile_args'] = ['-g0', '-std=c++11', '-fexceptions', '-frtti', '-O2']
        print("PLATFORM_SPECIFIC:", platform_specific)
        # Android reuses the Linux jni_md.h directory layout.
        jni_md_platform = 'linux'
        static = False
    elif platform == 'zos':
        distutils.log.info("Add zos settings")
        jni_md_platform = 'zos'
    else:
        jni_md_platform = None
        distutils.log.warn("Your platform '%s' is not being handled explicitly."
                           " It may work or not!", platform)
    if found_jni:
        # NOTE(review): if the platform was unrecognized, jni_md_platform is
        # None here and os.path.join raises -- confirm whether that path is
        # reachable in practice.
        distutils.log.info("Add JNI directory %s" % os.path.join(java_home, 'include', jni_md_platform))
        platform_specific['include_dirs'] += \
            [os.path.join(java_home, 'include', jni_md_platform)]
    return platform_specific
| true | true |
f7f8fd0149883f695cc5f6113ad072a0bc0a8be3 | 14,786 | py | Python | ta/wrapper.py | amalekji/trading-tech-analysis | b360062d6b2a31f0bf237e42a9a399d3b1f6c306 | [
"MIT"
] | 1 | 2020-07-18T09:05:57.000Z | 2020-07-18T09:05:57.000Z | ta/wrapper.py | amalekji/trading-tech-analysis | b360062d6b2a31f0bf237e42a9a399d3b1f6c306 | [
"MIT"
] | null | null | null | ta/wrapper.py | amalekji/trading-tech-analysis | b360062d6b2a31f0bf237e42a9a399d3b1f6c306 | [
"MIT"
] | 1 | 2020-08-25T18:16:11.000Z | 2020-08-25T18:16:11.000Z | """
.. module:: wrapper
:synopsis: Wrapper of Indicators.
.. moduleauthor:: Dario Lopez Padial (Bukosabino)
"""
import pandas as pd
from ta.momentum import (AwesomeOscillatorIndicator, KAMAIndicator,
ROCIndicator, RSIIndicator, StochasticOscillator,
TSIIndicator, UltimateOscillator, WilliamsRIndicator)
from ta.others import (CumulativeReturnIndicator, DailyLogReturnIndicator,
DailyReturnIndicator)
from ta.trend import (MACD, ADXIndicator, AroonIndicator, CCIIndicator,
DPOIndicator, EMAIndicator, IchimokuIndicator,
KSTIndicator, MassIndex, PSARIndicator, SMAIndicator,
TRIXIndicator, VortexIndicator)
from ta.volatility import (AverageTrueRange, BollingerBands, DonchianChannel,
KeltnerChannel)
from ta.volume import (AccDistIndexIndicator, ChaikinMoneyFlowIndicator,
EaseOfMovementIndicator, ForceIndexIndicator,
MFIIndicator, NegativeVolumeIndexIndicator,
OnBalanceVolumeIndicator, VolumePriceTrendIndicator,
VolumeWeightedAveragePrice)
def add_volume_ta(df: pd.DataFrame, high: str, low: str, close: str, volume: str,
                  fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Append every volume-based technical indicator column to *df*.

    Args:
        df (pandas.core.frame.DataFrame): Dataframe base.
        high (str): Name of 'high' column.
        low (str): Name of 'low' column.
        close (str): Name of 'close' column.
        volume (str): Name of 'volume' column.
        fillna(bool): if True, fill nan values.
        colprefix(str): Prefix column names inserted

    Returns:
        pandas.core.frame.DataFrame: Dataframe with new features.
    """
    adi = AccDistIndexIndicator(
        high=df[high], low=df[low], close=df[close], volume=df[volume], fillna=fillna)
    df[f'{colprefix}volume_adi'] = adi.acc_dist_index()

    obv = OnBalanceVolumeIndicator(close=df[close], volume=df[volume], fillna=fillna)
    df[f'{colprefix}volume_obv'] = obv.on_balance_volume()

    cmf = ChaikinMoneyFlowIndicator(
        high=df[high], low=df[low], close=df[close], volume=df[volume], fillna=fillna)
    df[f'{colprefix}volume_cmf'] = cmf.chaikin_money_flow()

    fi = ForceIndexIndicator(close=df[close], volume=df[volume], n=13, fillna=fillna)
    df[f'{colprefix}volume_fi'] = fi.force_index()

    # Money Flow Index -- historically inserted with a 'momentum_' prefix.
    mfi = MFIIndicator(
        high=df[high], low=df[low], close=df[close], volume=df[volume], n=14, fillna=fillna)
    df[f'{colprefix}momentum_mfi'] = mfi.money_flow_index()

    emv = EaseOfMovementIndicator(high=df[high], low=df[low], volume=df[volume], n=14, fillna=fillna)
    df[f'{colprefix}volume_em'] = emv.ease_of_movement()
    df[f'{colprefix}volume_sma_em'] = emv.sma_ease_of_movement()

    vpt = VolumePriceTrendIndicator(close=df[close], volume=df[volume], fillna=fillna)
    df[f'{colprefix}volume_vpt'] = vpt.volume_price_trend()

    nvi = NegativeVolumeIndexIndicator(close=df[close], volume=df[volume], fillna=fillna)
    df[f'{colprefix}volume_nvi'] = nvi.negative_volume_index()

    vwap = VolumeWeightedAveragePrice(
        high=df[high], low=df[low], close=df[close], volume=df[volume], n=14, fillna=fillna)
    df[f'{colprefix}volume_vwap'] = vwap.volume_weighted_average_price()

    return df
def add_volatility_ta(df: pd.DataFrame, high: str, low: str, close: str,
                      fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Append every volatility-based technical indicator column to *df*.

    Args:
        df (pandas.core.frame.DataFrame): Dataframe base.
        high (str): Name of 'high' column.
        low (str): Name of 'low' column.
        close (str): Name of 'close' column.
        fillna(bool): if True, fill nan values.
        colprefix(str): Prefix column names inserted

    Returns:
        pandas.core.frame.DataFrame: Dataframe with new features.
    """
    atr = AverageTrueRange(close=df[close], high=df[high], low=df[low], n=10, fillna=fillna)
    df[f'{colprefix}volatility_atr'] = atr.average_true_range()

    bb = BollingerBands(close=df[close], n=20, ndev=2, fillna=fillna)
    df[f'{colprefix}volatility_bbm'] = bb.bollinger_mavg()
    df[f'{colprefix}volatility_bbh'] = bb.bollinger_hband()
    df[f'{colprefix}volatility_bbl'] = bb.bollinger_lband()
    df[f'{colprefix}volatility_bbw'] = bb.bollinger_wband()
    df[f'{colprefix}volatility_bbp'] = bb.bollinger_pband()
    df[f'{colprefix}volatility_bbhi'] = bb.bollinger_hband_indicator()
    df[f'{colprefix}volatility_bbli'] = bb.bollinger_lband_indicator()

    kc = KeltnerChannel(close=df[close], high=df[high], low=df[low], n=10, fillna=fillna)
    df[f'{colprefix}volatility_kcc'] = kc.keltner_channel_mband()
    df[f'{colprefix}volatility_kch'] = kc.keltner_channel_hband()
    df[f'{colprefix}volatility_kcl'] = kc.keltner_channel_lband()
    df[f'{colprefix}volatility_kcw'] = kc.keltner_channel_wband()
    df[f'{colprefix}volatility_kcp'] = kc.keltner_channel_pband()
    df[f'{colprefix}volatility_kchi'] = kc.keltner_channel_hband_indicator()
    df[f'{colprefix}volatility_kcli'] = kc.keltner_channel_lband_indicator()

    dc = DonchianChannel(high=df[high], low=df[low], close=df[close], n=20, offset=0, fillna=fillna)
    df[f'{colprefix}volatility_dcl'] = dc.donchian_channel_lband()
    df[f'{colprefix}volatility_dch'] = dc.donchian_channel_hband()
    df[f'{colprefix}volatility_dcm'] = dc.donchian_channel_mband()
    df[f'{colprefix}volatility_dcw'] = dc.donchian_channel_wband()
    df[f'{colprefix}volatility_dcp'] = dc.donchian_channel_pband()

    return df
def add_trend_ta(df: pd.DataFrame, high: str, low: str, close: str, fillna: bool = False,
                 colprefix: str = "") -> pd.DataFrame:
    """Add trend technical analysis features to dataframe.

    Args:
        df (pandas.core.frame.DataFrame): Dataframe base.
        high (str): Name of 'high' column.
        low (str): Name of 'low' column.
        close (str): Name of 'close' column.
        fillna(bool): if True, fill nan values.
        colprefix(str): Prefix column names inserted

    Returns:
        pandas.core.frame.DataFrame: Dataframe with new features.
    """
    # MACD
    indicator_macd = MACD(close=df[close], n_slow=26, n_fast=12, n_sign=9, fillna=fillna)
    df[f'{colprefix}trend_macd'] = indicator_macd.macd()
    df[f'{colprefix}trend_macd_signal'] = indicator_macd.macd_signal()
    df[f'{colprefix}trend_macd_diff'] = indicator_macd.macd_diff()
    # SMAs
    df[f'{colprefix}trend_sma_fast'] = SMAIndicator(
        close=df[close], n=12, fillna=fillna).sma_indicator()
    df[f'{colprefix}trend_sma_slow'] = SMAIndicator(
        close=df[close], n=26, fillna=fillna).sma_indicator()
    # EMAs
    df[f'{colprefix}trend_ema_fast'] = EMAIndicator(
        close=df[close], n=12, fillna=fillna).ema_indicator()
    df[f'{colprefix}trend_ema_slow'] = EMAIndicator(
        close=df[close], n=26, fillna=fillna).ema_indicator()
    # Average Directional Movement Index (ADX)
    indicator = ADXIndicator(high=df[high], low=df[low], close=df[close], n=14, fillna=fillna)
    df[f'{colprefix}trend_adx'] = indicator.adx()
    df[f'{colprefix}trend_adx_pos'] = indicator.adx_pos()
    df[f'{colprefix}trend_adx_neg'] = indicator.adx_neg()
    # Vortex Indicator
    indicator = VortexIndicator(high=df[high], low=df[low], close=df[close], n=14, fillna=fillna)
    df[f'{colprefix}trend_vortex_ind_pos'] = indicator.vortex_indicator_pos()
    df[f'{colprefix}trend_vortex_ind_neg'] = indicator.vortex_indicator_neg()
    df[f'{colprefix}trend_vortex_ind_diff'] = indicator.vortex_indicator_diff()
    # TRIX Indicator
    indicator = TRIXIndicator(close=df[close], n=15, fillna=fillna)
    df[f'{colprefix}trend_trix'] = indicator.trix()
    # Mass Index
    indicator = MassIndex(high=df[high], low=df[low], n=9, n2=25, fillna=fillna)
    df[f'{colprefix}trend_mass_index'] = indicator.mass_index()
    # CCI Indicator
    indicator = CCIIndicator(high=df[high], low=df[low], close=df[close], n=20, c=0.015, fillna=fillna)
    df[f'{colprefix}trend_cci'] = indicator.cci()
    # DPO Indicator
    indicator = DPOIndicator(close=df[close], n=20, fillna=fillna)
    df[f'{colprefix}trend_dpo'] = indicator.dpo()
    # KST Indicator
    indicator = KSTIndicator(close=df[close],
                             r1=10, r2=15, r3=20,
                             r4=30, n1=10, n2=10, n3=10,
                             n4=15, nsig=9, fillna=fillna)
    df[f'{colprefix}trend_kst'] = indicator.kst()
    df[f'{colprefix}trend_kst_sig'] = indicator.kst_sig()
    df[f'{colprefix}trend_kst_diff'] = indicator.kst_diff()
    # Ichimoku Indicator
    indicator = IchimokuIndicator(high=df[high], low=df[low], n1=9, n2=26, n3=52, visual=False, fillna=fillna)
    df[f'{colprefix}trend_ichimoku_conv'] = indicator.ichimoku_conversion_line()
    df[f'{colprefix}trend_ichimoku_base'] = indicator.ichimoku_base_line()
    df[f'{colprefix}trend_ichimoku_a'] = indicator.ichimoku_a()
    df[f'{colprefix}trend_ichimoku_b'] = indicator.ichimoku_b()
    # Second instantiation with visual=True produces the plot-oriented
    # (forward-shifted) cloud lines as separate columns.
    indicator = IchimokuIndicator(high=df[high], low=df[low], n1=9, n2=26, n3=52, visual=True, fillna=fillna)
    df[f'{colprefix}trend_visual_ichimoku_a'] = indicator.ichimoku_a()
    df[f'{colprefix}trend_visual_ichimoku_b'] = indicator.ichimoku_b()
    # Aroon Indicator
    indicator = AroonIndicator(close=df[close], n=25, fillna=fillna)
    df[f'{colprefix}trend_aroon_up'] = indicator.aroon_up()
    df[f'{colprefix}trend_aroon_down'] = indicator.aroon_down()
    df[f'{colprefix}trend_aroon_ind'] = indicator.aroon_indicator()
    # PSAR Indicator -- the combined psar() column is deliberately disabled;
    # only the split up/down series and their boolean indicators are inserted.
    indicator = PSARIndicator(high=df[high], low=df[low], close=df[close], step=0.02, max_step=0.20, fillna=fillna)
    # df[f'{colprefix}trend_psar'] = indicator.psar()
    df[f'{colprefix}trend_psar_up'] = indicator.psar_up()
    df[f'{colprefix}trend_psar_down'] = indicator.psar_down()
    df[f'{colprefix}trend_psar_up_indicator'] = indicator.psar_up_indicator()
    df[f'{colprefix}trend_psar_down_indicator'] = indicator.psar_down_indicator()
    return df
def add_momentum_ta(df: pd.DataFrame, high: str, low: str, close: str, volume: str,
                    fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Add momentum technical analysis features to dataframe.

    BUG FIX (docs): the previous docstring said "trend" features -- a
    copy-paste error -- and omitted the ``volume`` parameter.

    Args:
        df (pandas.core.frame.DataFrame): Dataframe base.
        high (str): Name of 'high' column.
        low (str): Name of 'low' column.
        close (str): Name of 'close' column.
        volume (str): Name of 'volume' column. Accepted for API symmetry;
            no momentum indicator here consumes it (MFI is inserted by
            add_volume_ta).
        fillna(bool): if True, fill nan values.
        colprefix(str): Prefix column names inserted

    Returns:
        pandas.core.frame.DataFrame: Dataframe with new features.
    """
    # Relative Strength Index (RSI)
    df[f'{colprefix}momentum_rsi'] = RSIIndicator(close=df[close], n=14, fillna=fillna).rsi()
    # TSI Indicator
    df[f'{colprefix}momentum_tsi'] = TSIIndicator(close=df[close], r=25, s=13, fillna=fillna).tsi()
    # Ultimate Oscillator
    df[f'{colprefix}momentum_uo'] = UltimateOscillator(
        high=df[high], low=df[low], close=df[close], s=7, m=14, len=28, ws=4.0, wm=2.0, wl=1.0,
        fillna=fillna).uo()
    # Stoch Indicator
    indicator = StochasticOscillator(high=df[high], low=df[low], close=df[close], n=14, d_n=3, fillna=fillna)
    df[f'{colprefix}momentum_stoch'] = indicator.stoch()
    df[f'{colprefix}momentum_stoch_signal'] = indicator.stoch_signal()
    # Williams R Indicator
    df[f'{colprefix}momentum_wr'] = WilliamsRIndicator(
        high=df[high], low=df[low], close=df[close], lbp=14, fillna=fillna).wr()
    # Awesome Oscillator
    df[f'{colprefix}momentum_ao'] = AwesomeOscillatorIndicator(
        high=df[high], low=df[low], s=5, len=34, fillna=fillna).ao()
    # KAMA
    df[f'{colprefix}momentum_kama'] = KAMAIndicator(
        close=df[close], n=10, pow1=2, pow2=30, fillna=fillna).kama()
    # Rate Of Change
    df[f'{colprefix}momentum_roc'] = ROCIndicator(close=df[close], n=12, fillna=fillna).roc()
    return df
def add_others_ta(df: pd.DataFrame, close: str, fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Append the miscellaneous return-based indicator columns to *df*.

    Args:
        df (pandas.core.frame.DataFrame): Dataframe base.
        close (str): Name of 'close' column.
        fillna(bool): if True, fill nan values.
        colprefix(str): Prefix column names inserted

    Returns:
        pandas.core.frame.DataFrame: Dataframe with new features.
    """
    dr = DailyReturnIndicator(close=df[close], fillna=fillna)
    df[f'{colprefix}others_dr'] = dr.daily_return()

    dlr = DailyLogReturnIndicator(close=df[close], fillna=fillna)
    df[f'{colprefix}others_dlr'] = dlr.daily_log_return()

    cr = CumulativeReturnIndicator(close=df[close], fillna=fillna)
    df[f'{colprefix}others_cr'] = cr.cumulative_return()

    return df
def add_all_ta_features(df: pd.DataFrame, open: str, high: str, low: str,
                        close: str, volume: str, fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Add all technical analysis features to dataframe.

    Args:
        df (pandas.core.frame.DataFrame): Dataframe base.
        open (str): Name of 'open' column. Accepted for API symmetry but not
            forwarded to any feature group below (it also shadows the
            builtin ``open`` inside this function).
        high (str): Name of 'high' column.
        low (str): Name of 'low' column.
        close (str): Name of 'close' column.
        volume (str): Name of 'volume' column.
        fillna(bool): if True, fill nan values.
        colprefix(str): Prefix column names inserted

    Returns:
        pandas.core.frame.DataFrame: Dataframe with new features.
    """
    # Each helper mutates/returns the same frame; order determines column order.
    df = add_volume_ta(df=df, high=high, low=low, close=close, volume=volume, fillna=fillna, colprefix=colprefix)
    df = add_volatility_ta(df=df, high=high, low=low, close=close, fillna=fillna, colprefix=colprefix)
    df = add_trend_ta(df=df, high=high, low=low, close=close, fillna=fillna, colprefix=colprefix)
    df = add_momentum_ta(df=df, high=high, low=low, close=close, volume=volume, fillna=fillna, colprefix=colprefix)
    df = add_others_ta(df=df, close=close, fillna=fillna, colprefix=colprefix)
    return df
| 44.536145 | 115 | 0.679562 |
import pandas as pd
from ta.momentum import (AwesomeOscillatorIndicator, KAMAIndicator,
ROCIndicator, RSIIndicator, StochasticOscillator,
TSIIndicator, UltimateOscillator, WilliamsRIndicator)
from ta.others import (CumulativeReturnIndicator, DailyLogReturnIndicator,
DailyReturnIndicator)
from ta.trend import (MACD, ADXIndicator, AroonIndicator, CCIIndicator,
DPOIndicator, EMAIndicator, IchimokuIndicator,
KSTIndicator, MassIndex, PSARIndicator, SMAIndicator,
TRIXIndicator, VortexIndicator)
from ta.volatility import (AverageTrueRange, BollingerBands, DonchianChannel,
KeltnerChannel)
from ta.volume import (AccDistIndexIndicator, ChaikinMoneyFlowIndicator,
EaseOfMovementIndicator, ForceIndexIndicator,
MFIIndicator, NegativeVolumeIndexIndicator,
OnBalanceVolumeIndicator, VolumePriceTrendIndicator,
VolumeWeightedAveragePrice)
def add_volume_ta(df: pd.DataFrame, high: str, low: str, close: str, volume: str,
                  fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Add volume technical analysis features (ADI, OBV, CMF, FI, MFI, EoM,
    VPT, NVI, VWAP) as new ``{colprefix}volume_*`` columns and return *df*."""
    df[f'{colprefix}volume_adi'] = AccDistIndexIndicator(
        high=df[high], low=df[low], close=df[close], volume=df[volume], fillna=fillna).acc_dist_index()
    df[f'{colprefix}volume_obv'] = OnBalanceVolumeIndicator(
        close=df[close], volume=df[volume], fillna=fillna).on_balance_volume()
    df[f'{colprefix}volume_cmf'] = ChaikinMoneyFlowIndicator(
        high=df[high], low=df[low], close=df[close], volume=df[volume], fillna=fillna).chaikin_money_flow()
    df[f'{colprefix}volume_fi'] = ForceIndexIndicator(
        close=df[close], volume=df[volume], n=13, fillna=fillna).force_index()
    # Money Flow Index is inserted with a 'momentum_' prefix here.
    df[f'{colprefix}momentum_mfi'] = MFIIndicator(
        high=df[high], low=df[low], close=df[close], volume=df[volume], n=14, fillna=fillna).money_flow_index()
    # Ease of Movement: one indicator object feeds two columns.
    indicator = EaseOfMovementIndicator(high=df[high], low=df[low], volume=df[volume], n=14, fillna=fillna)
    df[f'{colprefix}volume_em'] = indicator.ease_of_movement()
    df[f'{colprefix}volume_sma_em'] = indicator.sma_ease_of_movement()
    df[f'{colprefix}volume_vpt'] = VolumePriceTrendIndicator(
        close=df[close], volume=df[volume], fillna=fillna).volume_price_trend()
    df[f'{colprefix}volume_nvi'] = NegativeVolumeIndexIndicator(
        close=df[close], volume=df[volume], fillna=fillna).negative_volume_index()
    df[f'{colprefix}volume_vwap'] = VolumeWeightedAveragePrice(
        high=df[high], low=df[low], close=df[close], volume=df[volume], n=14, fillna=fillna
    ).volume_weighted_average_price()
    return df
def add_volatility_ta(df: pd.DataFrame, high: str, low: str, close: str,
                      fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Add volatility technical analysis features (ATR, Bollinger, Keltner,
    Donchian) as new ``{colprefix}volatility_*`` columns and return *df*."""
    df[f'{colprefix}volatility_atr'] = AverageTrueRange(
        close=df[close], high=df[high], low=df[low], n=10, fillna=fillna).average_true_range()
    # Bollinger Bands: middle/high/low bands plus width, percent and hit flags.
    indicator_bb = BollingerBands(close=df[close], n=20, ndev=2, fillna=fillna)
    df[f'{colprefix}volatility_bbm'] = indicator_bb.bollinger_mavg()
    df[f'{colprefix}volatility_bbh'] = indicator_bb.bollinger_hband()
    df[f'{colprefix}volatility_bbl'] = indicator_bb.bollinger_lband()
    df[f'{colprefix}volatility_bbw'] = indicator_bb.bollinger_wband()
    df[f'{colprefix}volatility_bbp'] = indicator_bb.bollinger_pband()
    df[f'{colprefix}volatility_bbhi'] = indicator_bb.bollinger_hband_indicator()
    df[f'{colprefix}volatility_bbli'] = indicator_bb.bollinger_lband_indicator()
    # Keltner Channel: same column family as Bollinger.
    indicator_kc = KeltnerChannel(close=df[close], high=df[high], low=df[low], n=10, fillna=fillna)
    df[f'{colprefix}volatility_kcc'] = indicator_kc.keltner_channel_mband()
    df[f'{colprefix}volatility_kch'] = indicator_kc.keltner_channel_hband()
    df[f'{colprefix}volatility_kcl'] = indicator_kc.keltner_channel_lband()
    df[f'{colprefix}volatility_kcw'] = indicator_kc.keltner_channel_wband()
    df[f'{colprefix}volatility_kcp'] = indicator_kc.keltner_channel_pband()
    df[f'{colprefix}volatility_kchi'] = indicator_kc.keltner_channel_hband_indicator()
    df[f'{colprefix}volatility_kcli'] = indicator_kc.keltner_channel_lband_indicator()
    # Donchian Channel.
    indicator_dc = DonchianChannel(high=df[high], low=df[low], close=df[close], n=20, offset=0, fillna=fillna)
    df[f'{colprefix}volatility_dcl'] = indicator_dc.donchian_channel_lband()
    df[f'{colprefix}volatility_dch'] = indicator_dc.donchian_channel_hband()
    df[f'{colprefix}volatility_dcm'] = indicator_dc.donchian_channel_mband()
    df[f'{colprefix}volatility_dcw'] = indicator_dc.donchian_channel_wband()
    df[f'{colprefix}volatility_dcp'] = indicator_dc.donchian_channel_pband()
    return df
def add_trend_ta(df: pd.DataFrame, high: str, low: str, close: str, fillna: bool = False,
                 colprefix: str = "") -> pd.DataFrame:
    """Add trend technical analysis features (MACD, SMA/EMA, ADX, Vortex, TRIX,
    Mass Index, CCI, DPO, KST, Ichimoku, Aroon, PSAR) as
    ``{colprefix}trend_*`` columns and return *df*."""
    # MACD
    indicator_macd = MACD(close=df[close], n_slow=26, n_fast=12, n_sign=9, fillna=fillna)
    df[f'{colprefix}trend_macd'] = indicator_macd.macd()
    df[f'{colprefix}trend_macd_signal'] = indicator_macd.macd_signal()
    df[f'{colprefix}trend_macd_diff'] = indicator_macd.macd_diff()
    # Simple moving averages (fast/slow)
    df[f'{colprefix}trend_sma_fast'] = SMAIndicator(
        close=df[close], n=12, fillna=fillna).sma_indicator()
    df[f'{colprefix}trend_sma_slow'] = SMAIndicator(
        close=df[close], n=26, fillna=fillna).sma_indicator()
    # Exponential moving averages (fast/slow)
    df[f'{colprefix}trend_ema_fast'] = EMAIndicator(
        close=df[close], n=12, fillna=fillna).ema_indicator()
    df[f'{colprefix}trend_ema_slow'] = EMAIndicator(
        close=df[close], n=26, fillna=fillna).ema_indicator()
    # Average Directional Movement Index
    indicator = ADXIndicator(high=df[high], low=df[low], close=df[close], n=14, fillna=fillna)
    df[f'{colprefix}trend_adx'] = indicator.adx()
    df[f'{colprefix}trend_adx_pos'] = indicator.adx_pos()
    df[f'{colprefix}trend_adx_neg'] = indicator.adx_neg()
    # Vortex
    indicator = VortexIndicator(high=df[high], low=df[low], close=df[close], n=14, fillna=fillna)
    df[f'{colprefix}trend_vortex_ind_pos'] = indicator.vortex_indicator_pos()
    df[f'{colprefix}trend_vortex_ind_neg'] = indicator.vortex_indicator_neg()
    df[f'{colprefix}trend_vortex_ind_diff'] = indicator.vortex_indicator_diff()
    # TRIX
    indicator = TRIXIndicator(close=df[close], n=15, fillna=fillna)
    df[f'{colprefix}trend_trix'] = indicator.trix()
    # Mass Index
    indicator = MassIndex(high=df[high], low=df[low], n=9, n2=25, fillna=fillna)
    df[f'{colprefix}trend_mass_index'] = indicator.mass_index()
    # Commodity Channel Index
    indicator = CCIIndicator(high=df[high], low=df[low], close=df[close], n=20, c=0.015, fillna=fillna)
    df[f'{colprefix}trend_cci'] = indicator.cci()
    # Detrended Price Oscillator
    indicator = DPOIndicator(close=df[close], n=20, fillna=fillna)
    df[f'{colprefix}trend_dpo'] = indicator.dpo()
    # Know Sure Thing
    indicator = KSTIndicator(close=df[close],
                             r1=10, r2=15, r3=20,
                             r4=30, n1=10, n2=10, n3=10,
                             n4=15, nsig=9, fillna=fillna)
    df[f'{colprefix}trend_kst'] = indicator.kst()
    df[f'{colprefix}trend_kst_sig'] = indicator.kst_sig()
    df[f'{colprefix}trend_kst_diff'] = indicator.kst_diff()
    # Ichimoku (visual=False: analytic lines)
    indicator = IchimokuIndicator(high=df[high], low=df[low], n1=9, n2=26, n3=52, visual=False, fillna=fillna)
    df[f'{colprefix}trend_ichimoku_conv'] = indicator.ichimoku_conversion_line()
    df[f'{colprefix}trend_ichimoku_base'] = indicator.ichimoku_base_line()
    df[f'{colprefix}trend_ichimoku_a'] = indicator.ichimoku_a()
    df[f'{colprefix}trend_ichimoku_b'] = indicator.ichimoku_b()
    # Ichimoku (visual=True: plot-oriented cloud lines as separate columns)
    indicator = IchimokuIndicator(high=df[high], low=df[low], n1=9, n2=26, n3=52, visual=True, fillna=fillna)
    df[f'{colprefix}trend_visual_ichimoku_a'] = indicator.ichimoku_a()
    df[f'{colprefix}trend_visual_ichimoku_b'] = indicator.ichimoku_b()
    # Aroon
    indicator = AroonIndicator(close=df[close], n=25, fillna=fillna)
    df[f'{colprefix}trend_aroon_up'] = indicator.aroon_up()
    df[f'{colprefix}trend_aroon_down'] = indicator.aroon_down()
    df[f'{colprefix}trend_aroon_ind'] = indicator.aroon_indicator()
    # Parabolic SAR (only split up/down series and hit flags are inserted)
    indicator = PSARIndicator(high=df[high], low=df[low], close=df[close], step=0.02, max_step=0.20, fillna=fillna)
    df[f'{colprefix}trend_psar_up'] = indicator.psar_up()
    df[f'{colprefix}trend_psar_down'] = indicator.psar_down()
    df[f'{colprefix}trend_psar_up_indicator'] = indicator.psar_up_indicator()
    df[f'{colprefix}trend_psar_down_indicator'] = indicator.psar_down_indicator()
    return df
def add_momentum_ta(df: pd.DataFrame, high: str, low: str, close: str, volume: str,
                    fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Append momentum indicator columns (RSI, TSI, UO, Stoch, WR, AO, KAMA, ROC).

    Args:
        df: input DataFrame; feature columns are added in place and the same
            object is returned.
        high, low, close, volume: names of the price/volume columns
            (``volume`` is accepted for API symmetry but unused here).
        fillna: if True, indicators fill missing values.
        colprefix: prefix prepended to every generated column name.

    Returns:
        The DataFrame with the momentum feature columns appended.
    """
    # Cache the price series once instead of re-indexing df for each indicator.
    close_col = df[close]
    high_col = df[high]
    low_col = df[low]
    df[f'{colprefix}momentum_rsi'] = RSIIndicator(
        close=close_col, n=14, fillna=fillna).rsi()
    df[f'{colprefix}momentum_tsi'] = TSIIndicator(
        close=close_col, r=25, s=13, fillna=fillna).tsi()
    ultimate = UltimateOscillator(high=high_col, low=low_col, close=close_col,
                                  s=7, m=14, len=28, ws=4.0, wm=2.0, wl=1.0,
                                  fillna=fillna)
    df[f'{colprefix}momentum_uo'] = ultimate.uo()
    stoch = StochasticOscillator(high=high_col, low=low_col, close=close_col,
                                 n=14, d_n=3, fillna=fillna)
    df[f'{colprefix}momentum_stoch'] = stoch.stoch()
    df[f'{colprefix}momentum_stoch_signal'] = stoch.stoch_signal()
    williams = WilliamsRIndicator(high=high_col, low=low_col, close=close_col,
                                  lbp=14, fillna=fillna)
    df[f'{colprefix}momentum_wr'] = williams.wr()
    awesome = AwesomeOscillatorIndicator(high=high_col, low=low_col,
                                         s=5, len=34, fillna=fillna)
    df[f'{colprefix}momentum_ao'] = awesome.ao()
    kama = KAMAIndicator(close=close_col, n=10, pow1=2, pow2=30, fillna=fillna)
    df[f'{colprefix}momentum_kama'] = kama.kama()
    df[f'{colprefix}momentum_roc'] = ROCIndicator(
        close=close_col, n=12, fillna=fillna).roc()
    return df
def add_others_ta(df: pd.DataFrame, close: str, fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Append return-based feature columns: daily, daily-log and cumulative return.

    Args:
        df: input DataFrame; columns are added in place and returned.
        close: name of the close-price column.
        fillna: if True, indicators fill missing values.
        colprefix: prefix prepended to every generated column name.

    Returns:
        The DataFrame with the three return columns appended.
    """
    close_col = df[close]
    daily = DailyReturnIndicator(close=close_col, fillna=fillna)
    df[f'{colprefix}others_dr'] = daily.daily_return()
    daily_log = DailyLogReturnIndicator(close=close_col, fillna=fillna)
    df[f'{colprefix}others_dlr'] = daily_log.daily_log_return()
    cumulative = CumulativeReturnIndicator(close=close_col, fillna=fillna)
    df[f'{colprefix}others_cr'] = cumulative.cumulative_return()
    return df
def add_all_ta_features(df: pd.DataFrame, open: str, high: str, low: str,
                        close: str, volume: str, fillna: bool = False, colprefix: str = "") -> pd.DataFrame:
    """Append every feature family (volume, volatility, trend, momentum, returns).

    Args:
        df: input DataFrame; modified in place and returned.
        open, high, low, close, volume: price/volume column names
            (``open`` is accepted for API symmetry but unused by the
            current feature set).
        fillna: if True, indicators fill missing values.
        colprefix: prefix prepended to every generated column name.

    Returns:
        The DataFrame with all technical-analysis columns appended.
    """
    # Families are added in a fixed order so the resulting column layout
    # is deterministic across calls.
    df = add_volume_ta(df=df, high=high, low=low, close=close,
                       volume=volume, fillna=fillna, colprefix=colprefix)
    df = add_volatility_ta(df=df, high=high, low=low, close=close,
                           fillna=fillna, colprefix=colprefix)
    df = add_trend_ta(df=df, high=high, low=low, close=close,
                      fillna=fillna, colprefix=colprefix)
    df = add_momentum_ta(df=df, high=high, low=low, close=close,
                         volume=volume, fillna=fillna, colprefix=colprefix)
    df = add_others_ta(df=df, close=close, fillna=fillna, colprefix=colprefix)
    return df
| true | true |
f7f8fd3989b258dee19eb783744d48f0724395fc | 7,871 | py | Python | Tests/Methods/Machine/test_Magnet_Type_10_meth.py | harshasunder-1/pyleecan | 32ae60f98b314848eb9b385e3652d7fc50a77420 | [
"Apache-2.0"
] | 2 | 2020-08-28T14:54:55.000Z | 2021-03-13T19:34:45.000Z | Tests/Methods/Machine/test_Magnet_Type_10_meth.py | harshasunder-1/pyleecan | 32ae60f98b314848eb9b385e3652d7fc50a77420 | [
"Apache-2.0"
] | null | null | null | Tests/Methods/Machine/test_Magnet_Type_10_meth.py | harshasunder-1/pyleecan | 32ae60f98b314848eb9b385e3652d7fc50a77420 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import pytest
from pyleecan.Classes.LamSlotMag import LamSlotMag
from pyleecan.Classes.SlotMFlat import SlotMFlat
from pyleecan.Classes.MagnetType10 import MagnetType10
from pyleecan.Classes.Segment import Segment
from pyleecan.Methods.Machine.Magnet.comp_surface import comp_surface
from pyleecan.Methods import ParentMissingError
from numpy import exp
# Parametrized fixtures for the tests below.  Each entry pairs a lamination
# under test ("test_obj") with its expected values: magnet surface S_exp [m^2],
# opening angle Ao [rad], magnet height H_exp [m] and mechanical radius Rmec [m].
Mag10_test = list()
# Internal Slot
lam = LamSlotMag(is_internal=True, Rext=0.1325)
lam.slot = SlotMFlat(H0=5e-3, W0=10e-3, Zs=12)
lam.slot.magnet = [MagnetType10(Hmag=5e-3, Wmag=10e-3)]
Mag10_test.append(
    {"test_obj": lam, "S_exp": 5e-5, "Ao": 0.078449, "H_exp": 5e-3, "Rmec": 0.1325}
)
# Outward Slot
lam = LamSlotMag(is_internal=False, Rint=0.1325)
lam.slot = SlotMFlat(H0=5e-3, W0=10e-3, Zs=12)
lam.slot.magnet = [MagnetType10(Hmag=5e-3, Wmag=10e-3)]
Mag10_test.append(
    {
        "test_obj": lam,
        "S_exp": 5e-5,
        "Ao": 0.072745,
        "H_exp": 5e-3,
        "Rmec": 0.1324056630650208,
    }
)
# For AlmostEqual
# Relative tolerance used by all approximate comparisons in this module.
DELTA = 1e-4
@pytest.mark.METHODS
class Test_Magnet_Type_10_meth(object):
    """unittest for MagnetType10 methods"""
    @pytest.mark.parametrize("test_dict", Mag10_test)
    def test_comp_surface(self, test_dict):
        """Check that the computation of the surface is correct"""
        test_obj = test_dict["test_obj"]
        result = test_obj.slot.magnet[0].comp_surface()
        a = result
        b = test_dict["S_exp"]
        msg = "Return " + str(a) + " expected " + str(b)
        # Relative-error comparison against the expected fixture value.
        assert abs((a - b) / a - 0) < DELTA, msg
        # Compare numerical and analytical results
        b = comp_surface(test_obj.slot.magnet[0])
        msg = "Analytical: " + str(a) + " Numerical " + str(b)
        assert abs((a - b) / a - 0) < DELTA, msg
    @pytest.mark.parametrize("test_dict", Mag10_test)
    def test_comp_height(self, test_dict):
        """Check that the computation of the height is correct"""
        test_obj = test_dict["test_obj"]
        result = test_obj.slot.magnet[0].comp_height()
        a = result
        b = test_dict["H_exp"]
        msg = "Return " + str(a) + " expected " + str(b)
        assert abs((a - b) / a - 0) < DELTA, msg
    @pytest.mark.parametrize("test_dict", Mag10_test)
    def test_comp_angle_op(self, test_dict):
        """Check that the computation of the opening angle is correct"""
        test_obj = test_dict["test_obj"]
        result = test_obj.slot.magnet[0].comp_angle_opening()
        a = result
        b = test_dict["Ao"]
        msg = "Return " + str(a) + " expected " + str(b)
        assert abs((a - b) / a - 0) < DELTA, msg
        # Checking the error
        # A magnet with no parent slot must raise ParentMissingError.
        magnet = MagnetType10(Hmag=5e-3, Wmag=10e-3)
        with pytest.raises(ParentMissingError) as context:
            magnet.comp_angle_opening()
    @pytest.mark.parametrize("test_dict", Mag10_test)
    def test_comp_radius_mec(self, test_dict):
        """Check that the computation of the opening angle is correct"""
        test_obj = test_dict["test_obj"]
        result = test_obj.comp_radius_mec()
        assert result == test_dict["Rmec"]
    def test_build_geometry_in(self):
        """check that curve_list is correct (inwards magnet)"""
        lam = LamSlotMag(
            Rint=40e-3,
            Rext=1,
            is_internal=True,
            is_stator=False,
            L1=0.45,
            Nrvd=1,
            Wrvd=0.05,
        )
        lam.slot = SlotMFlat(
            Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.2)]
        )
        test_obj = lam.slot.magnet[0]
        # Expected corner points of the magnet in the complex plane; an
        # internal lamination shifts the corners inward (-0.2 on the real axis).
        alpha = lam.slot.comp_angle_opening_magnet()
        Z1 = 1 * exp(-1j * alpha / 2) - 0.2
        Z2 = 1 * exp(1j * alpha / 2) - 0.2
        Z3 = Z1 + 0.2
        Z4 = Z2 + 0.2
        # Creation of curve
        curve_list = list()
        curve_list.append(Segment(Z1, Z3))
        curve_list.append(Segment(Z3, Z4))
        curve_list.append(Segment(Z4, Z2))
        curve_list.append(Segment(Z2, Z1))
        surface = test_obj.build_geometry()
        result = surface[0].get_lines()
        # Every generated line must match the expected segment endpoints
        # within the relative tolerance DELTA.
        for i in range(0, len(result)):
            a = result[i].begin
            b = curve_list[i].begin
            assert abs((a - b) / a - 0) < DELTA
            a = result[i].end
            b = curve_list[i].end
            assert abs((a - b) / a - 0) < DELTA
        # Checking the error
        magnet = MagnetType10(Hmag=5e-3, Wmag=10e-3)
        with pytest.raises(ParentMissingError) as context:
            magnet.build_geometry()
    def test_build_geometry_out(self):
        """check that curve_list is correct (outwards magnet)"""
        lam = LamSlotMag(
            Rint=1,
            Rext=0.09,
            is_internal=False,
            is_stator=False,
            L1=0.45,
            Nrvd=1,
            Wrvd=0.05,
        )
        lam.slot = SlotMFlat(
            Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.2)]
        )
        test_obj = lam.slot.magnet[0]
        # External lamination: corners shift outward (+0.2 on the real axis).
        alpha = lam.slot.comp_angle_opening_magnet()
        Z1 = 1 * exp(-1j * alpha / 2) + 0.2
        Z2 = 1 * exp(1j * alpha / 2) + 0.2
        Z3 = Z1 - 0.2
        Z4 = Z2 - 0.2
        # Creation of curve
        curve_list = list()
        curve_list.append(Segment(Z1, Z3))
        curve_list.append(Segment(Z3, Z4))
        curve_list.append(Segment(Z4, Z2))
        curve_list.append(Segment(Z2, Z1))
        surface = test_obj.build_geometry()
        result = surface[0].get_lines()
        for i in range(0, len(result)):
            a = result[i].begin
            b = curve_list[i].begin
            assert abs((a - b) / a - 0) < DELTA
            a = result[i].end
            b = curve_list[i].end
            assert abs((a - b) / a - 0) < DELTA
        # Checking the error
        magnet = MagnetType10(Hmag=5e-3, Wmag=10e-3)
        with pytest.raises(ParentMissingError) as context:
            magnet.build_geometry()
        # Is simplified + W0 > Wmag
        # Simplified geometry drops one side line -> 3 lines instead of 4.
        lam = LamSlotMag(
            Rint=1,
            Rext=0.09,
            is_internal=False,
            is_stator=False,
            L1=0.45,
            Nrvd=1,
            Wrvd=0.05,
        )
        lam.slot = SlotMFlat(
            Zs=8, W0=0.8, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.2)]
        )
        test_obj = lam.slot.magnet[0]
        surface = test_obj.build_geometry(is_simplified=True)
        assert len(surface[0].line_list) == 3
        # Is simplified + H0 < Hmag
        lam = LamSlotMag(
            Rint=1,
            Rext=0.09,
            is_internal=False,
            is_stator=False,
            L1=0.45,
            Nrvd=1,
            Wrvd=0.05,
        )
        lam.slot = SlotMFlat(
            Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.8)]
        )
        test_obj = lam.slot.magnet[0]
        surface = test_obj.build_geometry(is_simplified=True)
        assert len(surface[0].line_list) == 3
        # Type Magnet 1
        # With Zs=8 the full lamination geometry yields 9 surfaces
        # (8 magnets + the lamination itself) -- regression value.
        lam = LamSlotMag(
            Rint=1,
            Rext=0.09,
            is_internal=False,
            is_stator=False,
            L1=0.45,
            Nrvd=1,
            Wrvd=0.05,
        )
        lam.slot = SlotMFlat(
            Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.8)]
        )
        lam.slot.magnet[0].type_magnetization = 1
        surface = lam.build_geometry()
        assert len(surface) == 9
        # Type Magnet 2
        lam = LamSlotMag(
            Rint=1,
            Rext=0.09,
            is_internal=False,
            is_stator=False,
            L1=0.45,
            Nrvd=1,
            Wrvd=0.05,
        )
        lam.slot = SlotMFlat(
            Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.8)]
        )
        lam.slot.magnet[0].type_magnetization = 2
        surface = lam.build_geometry()
        assert len(surface) == 9
| 30.041985 | 83 | 0.552026 |
import pytest
from pyleecan.Classes.LamSlotMag import LamSlotMag
from pyleecan.Classes.SlotMFlat import SlotMFlat
from pyleecan.Classes.MagnetType10 import MagnetType10
from pyleecan.Classes.Segment import Segment
from pyleecan.Methods.Machine.Magnet.comp_surface import comp_surface
from pyleecan.Methods import ParentMissingError
from numpy import exp
Mag10_test = list()
lam = LamSlotMag(is_internal=True, Rext=0.1325)
lam.slot = SlotMFlat(H0=5e-3, W0=10e-3, Zs=12)
lam.slot.magnet = [MagnetType10(Hmag=5e-3, Wmag=10e-3)]
Mag10_test.append(
{"test_obj": lam, "S_exp": 5e-5, "Ao": 0.078449, "H_exp": 5e-3, "Rmec": 0.1325}
)
lam = LamSlotMag(is_internal=False, Rint=0.1325)
lam.slot = SlotMFlat(H0=5e-3, W0=10e-3, Zs=12)
lam.slot.magnet = [MagnetType10(Hmag=5e-3, Wmag=10e-3)]
Mag10_test.append(
{
"test_obj": lam,
"S_exp": 5e-5,
"Ao": 0.072745,
"H_exp": 5e-3,
"Rmec": 0.1324056630650208,
}
)
DELTA = 1e-4
@pytest.mark.METHODS
class Test_Magnet_Type_10_meth(object):
@pytest.mark.parametrize("test_dict", Mag10_test)
def test_comp_surface(self, test_dict):
test_obj = test_dict["test_obj"]
result = test_obj.slot.magnet[0].comp_surface()
a = result
b = test_dict["S_exp"]
msg = "Return " + str(a) + " expected " + str(b)
assert abs((a - b) / a - 0) < DELTA, msg
b = comp_surface(test_obj.slot.magnet[0])
msg = "Analytical: " + str(a) + " Numerical " + str(b)
assert abs((a - b) / a - 0) < DELTA, msg
@pytest.mark.parametrize("test_dict", Mag10_test)
def test_comp_height(self, test_dict):
test_obj = test_dict["test_obj"]
result = test_obj.slot.magnet[0].comp_height()
a = result
b = test_dict["H_exp"]
msg = "Return " + str(a) + " expected " + str(b)
assert abs((a - b) / a - 0) < DELTA, msg
@pytest.mark.parametrize("test_dict", Mag10_test)
def test_comp_angle_op(self, test_dict):
test_obj = test_dict["test_obj"]
result = test_obj.slot.magnet[0].comp_angle_opening()
a = result
b = test_dict["Ao"]
msg = "Return " + str(a) + " expected " + str(b)
assert abs((a - b) / a - 0) < DELTA, msg
magnet = MagnetType10(Hmag=5e-3, Wmag=10e-3)
with pytest.raises(ParentMissingError) as context:
magnet.comp_angle_opening()
@pytest.mark.parametrize("test_dict", Mag10_test)
def test_comp_radius_mec(self, test_dict):
test_obj = test_dict["test_obj"]
result = test_obj.comp_radius_mec()
assert result == test_dict["Rmec"]
def test_build_geometry_in(self):
lam = LamSlotMag(
Rint=40e-3,
Rext=1,
is_internal=True,
is_stator=False,
L1=0.45,
Nrvd=1,
Wrvd=0.05,
)
lam.slot = SlotMFlat(
Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.2)]
)
test_obj = lam.slot.magnet[0]
alpha = lam.slot.comp_angle_opening_magnet()
Z1 = 1 * exp(-1j * alpha / 2) - 0.2
Z2 = 1 * exp(1j * alpha / 2) - 0.2
Z3 = Z1 + 0.2
Z4 = Z2 + 0.2
curve_list = list()
curve_list.append(Segment(Z1, Z3))
curve_list.append(Segment(Z3, Z4))
curve_list.append(Segment(Z4, Z2))
curve_list.append(Segment(Z2, Z1))
surface = test_obj.build_geometry()
result = surface[0].get_lines()
for i in range(0, len(result)):
a = result[i].begin
b = curve_list[i].begin
assert abs((a - b) / a - 0) < DELTA
a = result[i].end
b = curve_list[i].end
assert abs((a - b) / a - 0) < DELTA
magnet = MagnetType10(Hmag=5e-3, Wmag=10e-3)
with pytest.raises(ParentMissingError) as context:
magnet.build_geometry()
def test_build_geometry_out(self):
lam = LamSlotMag(
Rint=1,
Rext=0.09,
is_internal=False,
is_stator=False,
L1=0.45,
Nrvd=1,
Wrvd=0.05,
)
lam.slot = SlotMFlat(
Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.2)]
)
test_obj = lam.slot.magnet[0]
alpha = lam.slot.comp_angle_opening_magnet()
Z1 = 1 * exp(-1j * alpha / 2) + 0.2
Z2 = 1 * exp(1j * alpha / 2) + 0.2
Z3 = Z1 - 0.2
Z4 = Z2 - 0.2
curve_list = list()
curve_list.append(Segment(Z1, Z3))
curve_list.append(Segment(Z3, Z4))
curve_list.append(Segment(Z4, Z2))
curve_list.append(Segment(Z2, Z1))
surface = test_obj.build_geometry()
result = surface[0].get_lines()
for i in range(0, len(result)):
a = result[i].begin
b = curve_list[i].begin
assert abs((a - b) / a - 0) < DELTA
a = result[i].end
b = curve_list[i].end
assert abs((a - b) / a - 0) < DELTA
magnet = MagnetType10(Hmag=5e-3, Wmag=10e-3)
with pytest.raises(ParentMissingError) as context:
magnet.build_geometry()
lam = LamSlotMag(
Rint=1,
Rext=0.09,
is_internal=False,
is_stator=False,
L1=0.45,
Nrvd=1,
Wrvd=0.05,
)
lam.slot = SlotMFlat(
Zs=8, W0=0.8, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.2)]
)
test_obj = lam.slot.magnet[0]
surface = test_obj.build_geometry(is_simplified=True)
assert len(surface[0].line_list) == 3
lam = LamSlotMag(
Rint=1,
Rext=0.09,
is_internal=False,
is_stator=False,
L1=0.45,
Nrvd=1,
Wrvd=0.05,
)
lam.slot = SlotMFlat(
Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.8)]
)
test_obj = lam.slot.magnet[0]
surface = test_obj.build_geometry(is_simplified=True)
assert len(surface[0].line_list) == 3
lam = LamSlotMag(
Rint=1,
Rext=0.09,
is_internal=False,
is_stator=False,
L1=0.45,
Nrvd=1,
Wrvd=0.05,
)
lam.slot = SlotMFlat(
Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.8)]
)
lam.slot.magnet[0].type_magnetization = 1
surface = lam.build_geometry()
assert len(surface) == 9
lam = LamSlotMag(
Rint=1,
Rext=0.09,
is_internal=False,
is_stator=False,
L1=0.45,
Nrvd=1,
Wrvd=0.05,
)
lam.slot = SlotMFlat(
Zs=8, W0=0.6, H0=0.2, magnet=[MagnetType10(Wmag=0.6, Hmag=0.8)]
)
lam.slot.magnet[0].type_magnetization = 2
surface = lam.build_geometry()
assert len(surface) == 9
| true | true |
f7f8fd94d0979b4da9c3f98c6a085813e1539c36 | 2,284 | py | Python | util/third_party/tensorflow_extra/tool/tflite/tflite/FakeQuantOptions.py | PascalGuenther/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 82 | 2016-06-29T17:24:43.000Z | 2021-04-16T06:49:17.000Z | util/third_party/tensorflow_extra/tool/tflite/tflite/FakeQuantOptions.py | PascalGuenther/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 6 | 2022-01-12T18:22:08.000Z | 2022-03-25T10:19:27.000Z | util/third_party/tensorflow_extra/tool/tflite/tflite/FakeQuantOptions.py | PascalGuenther/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 56 | 2016-08-02T10:50:50.000Z | 2021-07-19T08:57:34.000Z | # automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class FakeQuantOptions(object):
    # Generated FlatBuffers accessor for the TFLite FakeQuantOptions table
    # (quantization range [min, max], bit width and narrow-range flag).
    # Table layout is fixed by the schema; field vtable offsets are 4/6/8/10.
    __slots__ = ['_tab']
    @classmethod
    def GetRootAsFakeQuantOptions(cls, buf, offset=0):
        """Wrap *buf* (root table at *offset*) as a FakeQuantOptions view."""
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = FakeQuantOptions()
        x.Init(buf, n + offset)
        return x
    @classmethod
    def FakeQuantOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        """Return True if *buf* carries the "TFL3" file identifier."""
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
    # FakeQuantOptions
    def Init(self, buf, pos):
        """Re-point this view at table position *pos* inside *buf* (zero-copy)."""
        self._tab = flatbuffers.table.Table(buf, pos)
    # FakeQuantOptions
    def Min(self):
        """Lower bound of the fake-quant range; 0.0 when absent from the buffer."""
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
        return 0.0
    # FakeQuantOptions
    def Max(self):
        """Upper bound of the fake-quant range; 0.0 when absent from the buffer."""
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
        return 0.0
    # FakeQuantOptions
    def NumBits(self):
        """Quantization bit width; 0 when absent from the buffer."""
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0
    # FakeQuantOptions
    def NarrowRange(self):
        """Narrow-range quantization flag; False when absent from the buffer."""
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False
# Generated builder helpers: serialize a FakeQuantOptions table one field at
# a time.  Slot indices: 0=min, 1=max, 2=num_bits, 3=narrow_range; values equal
# to the declared default are omitted from the buffer by flatbuffers.
def FakeQuantOptionsStart(builder): builder.StartObject(4)
def FakeQuantOptionsAddMin(builder, min): builder.PrependFloat32Slot(0, min, 0.0)
def FakeQuantOptionsAddMax(builder, max): builder.PrependFloat32Slot(1, max, 0.0)
def FakeQuantOptionsAddNumBits(builder, numBits): builder.PrependInt32Slot(2, numBits, 0)
def FakeQuantOptionsAddNarrowRange(builder, narrowRange): builder.PrependBoolSlot(3, narrowRange, 0)
def FakeQuantOptionsEnd(builder): return builder.EndObject()
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class FakeQuantOptions(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsFakeQuantOptions(cls, buf, offset=0):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = FakeQuantOptions()
x.Init(buf, n + offset)
return x
@classmethod
def FakeQuantOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
def Min(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
return 0.0
def Max(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
return 0.0
def NumBits(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
return 0
def NarrowRange(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
if o != 0:
return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
return False
def FakeQuantOptionsStart(builder): builder.StartObject(4)
def FakeQuantOptionsAddMin(builder, min): builder.PrependFloat32Slot(0, min, 0.0)
def FakeQuantOptionsAddMax(builder, max): builder.PrependFloat32Slot(1, max, 0.0)
def FakeQuantOptionsAddNumBits(builder, numBits): builder.PrependInt32Slot(2, numBits, 0)
def FakeQuantOptionsAddNarrowRange(builder, narrowRange): builder.PrependBoolSlot(3, narrowRange, 0)
def FakeQuantOptionsEnd(builder): return builder.EndObject()
| true | true |
f7f8fde234a004edd794854839a327987ec9b813 | 407 | py | Python | ir_sensor.py | xp7551/robot | ba34d0d5ba4c2b497370159a8d0c0b2d6e02d44e | [
"Apache-2.0"
] | null | null | null | ir_sensor.py | xp7551/robot | ba34d0d5ba4c2b497370159a8d0c0b2d6e02d44e | [
"Apache-2.0"
] | null | null | null | ir_sensor.py | xp7551/robot | ba34d0d5ba4c2b497370159a8d0c0b2d6e02d44e | [
"Apache-2.0"
] | null | null | null | import RPi.GPIO as GPIO
import time

# Hardware configuration (BCM pin numbering).
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)

RECEIVER = 12  # IR receiver input pin; reads 0 (low) while a signal is detected
LIGHT = 26     # LED output pin

GPIO.setup(RECEIVER, GPIO.IN)
GPIO.setup(LIGHT, GPIO.OUT)
GPIO.output(LIGHT, False)


def flash(seconds):
    """Turn the LED on for *seconds*, then off, then pause 2 s as a debounce."""
    GPIO.output(LIGHT, True)
    time.sleep(seconds)
    GPIO.output(LIGHT, False)
    time.sleep(2)


try:
    # Poll the receiver ~10 times per second; it is active-low.
    while True:
        if GPIO.input(RECEIVER) == 0:
            flash(.3)
        time.sleep(.1)
except KeyboardInterrupt:
    pass  # allow Ctrl-C to stop the loop without a traceback
finally:
    # Bug fix: GPIO.cleanup() was unreachable after the infinite loop, so the
    # pins were never released on exit; running it in `finally` guarantees it.
    GPIO.cleanup()
| 16.28 | 28 | 0.712531 | import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
RECEIVER = 12
LIGHT = 26
GPIO.setup(RECEIVER,GPIO.IN)
GPIO.setup(LIGHT,GPIO.OUT)
GPIO.output(LIGHT, False)
def flash(seconds):
GPIO.output(LIGHT,True)
time.sleep(seconds)
GPIO.output(LIGHT,False)
time.sleep(2)
while True:
if GPIO.input(RECEIVER)==0:
flash(.3)
time.sleep(.1)
GPIO.cleanup()
| true | true |
f7f8fe2ae62001f14decf23cf96ad9423321072b | 3,748 | py | Python | tests/tests_git/test_git_repository.py | lobdelle/opensearch-build | d2d77fb4282cc3f3c0f938f0bfc83b640621a0f6 | [
"Apache-2.0"
] | null | null | null | tests/tests_git/test_git_repository.py | lobdelle/opensearch-build | d2d77fb4282cc3f3c0f938f0bfc83b640621a0f6 | [
"Apache-2.0"
] | null | null | null | tests/tests_git/test_git_repository.py | lobdelle/opensearch-build | d2d77fb4282cc3f3c0f938f0bfc83b640621a0f6 | [
"Apache-2.0"
] | null | null | null | # SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
import os
import subprocess
import unittest
from unittest.mock import patch
from git.git_repository import GitRepository
from system.temporary_directory import TemporaryDirectory
class TestGitRepository(unittest.TestCase):
    """Integration tests for GitRepository checked out into a temp directory.

    NOTE: these tests clone a real GitHub repository and therefore require
    network access.
    """
    def setUp(self):
        # Pin a specific commit so file-presence assertions stay stable.
        self.repo = GitRepository(
            url="https://github.com/opensearch-project/.github",
            ref="8ac515431bf24caf92fea9d9b0af3b8f10b88453",
        )
    def test_checkout(self):
        """Checkout resolves url/ref/sha and materializes files in a temp dir."""
        self.assertEqual(self.repo.url, "https://github.com/opensearch-project/.github")
        self.assertEqual(self.repo.ref, "8ac515431bf24caf92fea9d9b0af3b8f10b88453")
        self.assertEqual(self.repo.sha, "8ac515431bf24caf92fea9d9b0af3b8f10b88453")
        self.assertIs(type(self.repo.temp_dir), TemporaryDirectory)
        self.assertEqual(self.repo.dir, os.path.realpath(self.repo.temp_dir.name))
        self.assertTrue(os.path.isfile(os.path.join(self.repo.dir, "CODE_OF_CONDUCT.md")))
        # was added in the next commit
        self.assertFalse(os.path.exists(os.path.join(self.repo.dir, "CONTRIBUTING.md")))
    def test_execute(self):
        """execute() runs a shell command with the checkout as working dir."""
        self.repo.execute("echo $PWD > created.txt")
        self.assertTrue(os.path.isfile(os.path.join(self.repo.dir, "created.txt")))
    def test_execute_in_dir(self):
        """execute() honors an explicit working-directory argument."""
        self.repo.execute("echo $PWD > created.txt", os.path.join(self.repo.dir, "ISSUE_TEMPLATE"))
        self.assertFalse(os.path.isfile(os.path.join(self.repo.dir, "created.txt")))
        self.assertTrue(os.path.isfile(os.path.join(self.repo.dir, "ISSUE_TEMPLATE", "created.txt")))
    @patch("subprocess.check_call")
    def test_execute_silent(self, mock_subprocess):
        """execute_silent() discards stdout/stderr via subprocess.DEVNULL."""
        self.repo.execute_silent("echo .")
        subprocess.check_call.assert_called_with(
            "echo .",
            cwd=self.repo.dir,
            shell=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
    @patch("subprocess.check_output")
    def test_output(self, mock_subprocess):
        """output() captures command output via subprocess.check_output."""
        self.repo.output("echo hello")
        subprocess.check_output.assert_called_with("echo hello", cwd=self.repo.dir, shell=True)
class TestGitRepositoryDir(unittest.TestCase):
    """Checkout into a caller-supplied directory (no internal temp dir)."""
    def test_checkout_into_dir(self):
        """When *directory* is given, temp_dir is None and dir is that path."""
        with TemporaryDirectory() as tmpdir:
            subdir = os.path.join(tmpdir.name, ".github")
            repo = GitRepository(
                url="https://github.com/opensearch-project/.github",
                ref="8ac515431bf24caf92fea9d9b0af3b8f10b88453",
                directory=subdir,
            )
            self.assertEqual(repo.url, "https://github.com/opensearch-project/.github")
            self.assertEqual(repo.ref, "8ac515431bf24caf92fea9d9b0af3b8f10b88453")
            self.assertEqual(repo.sha, "8ac515431bf24caf92fea9d9b0af3b8f10b88453")
            # No self-managed temp dir when an explicit directory is used.
            self.assertIsNone(repo.temp_dir)
            self.assertEqual(repo.dir, subdir)
            self.assertTrue(os.path.isfile(os.path.join(repo.dir, "CODE_OF_CONDUCT.md")))
class TestGitRepositoryWithWorkingDir(unittest.TestCase):
    """Use GitRepository as a context manager with a working subdirectory."""
    def test_checkout_into_dir(self):
        """working_subdirectory sets the command working dir inside the checkout."""
        with GitRepository(
            url="https://github.com/opensearch-project/.github",
            ref="163b5acaf6c7d220f800684801bbf2e12f99c797",
            working_subdirectory="ISSUE_TEMPLATE",
        ) as repo:
            working_directory = os.path.join(repo.dir, "ISSUE_TEMPLATE")
            self.assertEqual(repo.working_directory, working_directory)
            self.assertTrue("ISSUE_TEMPLATE" in repo.output("pwd"))
        # Leaving the context deletes the checkout directory.
        self.assertFalse(os.path.exists(repo.dir))
| 42.590909 | 101 | 0.685699 |
import os
import subprocess
import unittest
from unittest.mock import patch
from git.git_repository import GitRepository
from system.temporary_directory import TemporaryDirectory
class TestGitRepository(unittest.TestCase):
def setUp(self):
self.repo = GitRepository(
url="https://github.com/opensearch-project/.github",
ref="8ac515431bf24caf92fea9d9b0af3b8f10b88453",
)
def test_checkout(self):
self.assertEqual(self.repo.url, "https://github.com/opensearch-project/.github")
self.assertEqual(self.repo.ref, "8ac515431bf24caf92fea9d9b0af3b8f10b88453")
self.assertEqual(self.repo.sha, "8ac515431bf24caf92fea9d9b0af3b8f10b88453")
self.assertIs(type(self.repo.temp_dir), TemporaryDirectory)
self.assertEqual(self.repo.dir, os.path.realpath(self.repo.temp_dir.name))
self.assertTrue(os.path.isfile(os.path.join(self.repo.dir, "CODE_OF_CONDUCT.md")))
self.assertFalse(os.path.exists(os.path.join(self.repo.dir, "CONTRIBUTING.md")))
def test_execute(self):
self.repo.execute("echo $PWD > created.txt")
self.assertTrue(os.path.isfile(os.path.join(self.repo.dir, "created.txt")))
def test_execute_in_dir(self):
self.repo.execute("echo $PWD > created.txt", os.path.join(self.repo.dir, "ISSUE_TEMPLATE"))
self.assertFalse(os.path.isfile(os.path.join(self.repo.dir, "created.txt")))
self.assertTrue(os.path.isfile(os.path.join(self.repo.dir, "ISSUE_TEMPLATE", "created.txt")))
@patch("subprocess.check_call")
def test_execute_silent(self, mock_subprocess):
self.repo.execute_silent("echo .")
subprocess.check_call.assert_called_with(
"echo .",
cwd=self.repo.dir,
shell=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
@patch("subprocess.check_output")
def test_output(self, mock_subprocess):
self.repo.output("echo hello")
subprocess.check_output.assert_called_with("echo hello", cwd=self.repo.dir, shell=True)
class TestGitRepositoryDir(unittest.TestCase):
def test_checkout_into_dir(self):
with TemporaryDirectory() as tmpdir:
subdir = os.path.join(tmpdir.name, ".github")
repo = GitRepository(
url="https://github.com/opensearch-project/.github",
ref="8ac515431bf24caf92fea9d9b0af3b8f10b88453",
directory=subdir,
)
self.assertEqual(repo.url, "https://github.com/opensearch-project/.github")
self.assertEqual(repo.ref, "8ac515431bf24caf92fea9d9b0af3b8f10b88453")
self.assertEqual(repo.sha, "8ac515431bf24caf92fea9d9b0af3b8f10b88453")
self.assertIsNone(repo.temp_dir)
self.assertEqual(repo.dir, subdir)
self.assertTrue(os.path.isfile(os.path.join(repo.dir, "CODE_OF_CONDUCT.md")))
class TestGitRepositoryWithWorkingDir(unittest.TestCase):
def test_checkout_into_dir(self):
with GitRepository(
url="https://github.com/opensearch-project/.github",
ref="163b5acaf6c7d220f800684801bbf2e12f99c797",
working_subdirectory="ISSUE_TEMPLATE",
) as repo:
working_directory = os.path.join(repo.dir, "ISSUE_TEMPLATE")
self.assertEqual(repo.working_directory, working_directory)
self.assertTrue("ISSUE_TEMPLATE" in repo.output("pwd"))
self.assertFalse(os.path.exists(repo.dir))
| true | true |
f7f8ff8b0baee3aeee8955d966f0bbbf22772121 | 15,058 | py | Python | simulate_expression_compendia_modules/cca_core.py | ajlee21/Batch_effects_simulation | d707321346de48de5e63cf251280bdf9372be59c | [
"BSD-3-Clause"
] | 6 | 2020-05-04T15:16:32.000Z | 2021-02-28T04:49:21.000Z | simulate_expression_compendia_modules/cca_core.py | ajlee21/Batch_effects_simulation | d707321346de48de5e63cf251280bdf9372be59c | [
"BSD-3-Clause"
] | 12 | 2020-02-27T20:12:36.000Z | 2021-04-07T20:28:35.000Z | simulate_expression_compendia_modules/cca_core.py | ajlee21/Batch_effects_simulation | d707321346de48de5e63cf251280bdf9372be59c | [
"BSD-3-Clause"
] | 2 | 2019-06-02T18:29:17.000Z | 2020-02-13T09:33:37.000Z | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The core code for applying Canonical Correlation Analysis to deep networks.
This module contains the core functions to apply canonical correlation analysis
to deep neural networks. The main function is get_cca_similarity, which takes in
two sets of activations, typically the neurons in two layers and their outputs
on all of the datapoints D = [d_1,...,d_m] that have been passed through.
Inputs have shape (num_neurons1, m), (num_neurons2, m). This can be directly
applied used on fully connected networks. For convolutional layers, the 3d block
of neurons can either be flattened entirely, along channels, or alternatively,
the dft_ccas (Discrete Fourier Transform) module can be used.
See https://arxiv.org/abs/1706.05806 for full details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
# Number of CCA attempts; NOTE(review): not referenced in the code visible
# here -- presumably consumed by retry/robustness logic elsewhere in this
# module, confirm before relying on it.
num_cca_trials = 5
# Small regularizer added to the covariance diagonals before pseudo-inversion
# (see compute_ccas) to keep the matrices numerically well conditioned.
epsilon = 1e-6
def positivedef_matrix_sqrt(array):
    """Compute the matrix square root of a positive definite matrix.

    Supports real symmetric and complex hermitian inputs: the matrix is
    diagonalized with ``eigh``, the (real, nonnegative) eigenvalues are
    square-rooted, and the factors are recombined.

    Args:
      array: A numpy 2d array, can be complex valued that is a positive
             definite symmetric (or hermitian) matrix

    Returns:
      The matrix square root of ``array``.
    """
    eigvals, eigvecs = np.linalg.eigh(array)
    # Scaling the columns of eigvecs by sqrt(eigvals) is equivalent to
    # eigvecs @ diag(sqrt(eigvals)), without forming the diagonal matrix.
    scaled = eigvecs * np.sqrt(eigvals)
    return np.dot(scaled, np.conj(eigvecs).T)
def remove_small(sigma_xx, sigma_xy, sigma_yx, sigma_yy, threshold=1e-6):
    """Prune directions whose variance magnitude falls below *threshold*.

    Args:
      sigma_xx: 2d numpy array, variance matrix for x
      sigma_xy: 2d numpy array, crossvariance matrix for x,y
      sigma_yx: 2d numpy array, crossvariance matrix for x,y,
                (conjugate) transpose of sigma_xy
      sigma_yy: 2d numpy array, variance matrix for y
      threshold: cutoff on the absolute diagonal entries; directions below
                 it are dropped

    Returns:
      Tuple of (cropped sigma_xx, cropped sigma_xy, cropped sigma_yx,
      cropped sigma_yy, keep_x, keep_y), where keep_x / keep_y are boolean
      masks marking which rows/columns of the x and y blocks survived.
    """
    keep_x = np.abs(np.diagonal(sigma_xx)) >= threshold
    keep_y = np.abs(np.diagonal(sigma_yy)) >= threshold

    # Crop rows then columns with the appropriate mask for each block.
    sigma_xx = sigma_xx[keep_x][:, keep_x]
    sigma_xy = sigma_xy[keep_x][:, keep_y]
    sigma_yx = sigma_yx[keep_y][:, keep_x]
    sigma_yy = sigma_yy[keep_y][:, keep_y]

    return (sigma_xx, sigma_xy, sigma_yx, sigma_yy, keep_x, keep_y)
def compute_ccas(sigma_xx, sigma_xy, sigma_yx, sigma_yy, verbose=True):
"""Main cca computation function, takes in variances and crossvariances.
This function takes in the covariances and cross covariances of X, Y,
preprocesses them (removing small magnitudes) and outputs the raw results of
the cca computation, including cca directions in a rotated space, and the
cca correlation coefficient values.
Args:
sigma_xx: 2d numpy array, (num_neurons_x, num_neurons_x)
variance matrix for x
sigma_xy: 2d numpy array, (num_neurons_x, num_neurons_y)
crossvariance matrix for x,y
sigma_yx: 2d numpy array, (num_neurons_y, num_neurons_x)
crossvariance matrix for x,y (conj) transpose of sigma_xy
sigma_yy: 2d numpy array, (num_neurons_y, num_neurons_y)
variance matrix for y
verbose: boolean on whether to print intermediate outputs
Returns:
[ux, sx, vx]: [numpy 2d array, numpy 1d array, numpy 2d array]
ux and vx are (conj) transposes of each other, being
the canonical directions in the X subspace.
sx is the set of canonical correlation coefficients-
how well corresponding directions in vx, Vy correlate
with each other.
[uy, sy, vy]: Same as above, but for Y space
invsqrt_xx: Inverse square root of sigma_xx to transform canonical
directions back to original space
invsqrt_yy: Same as above but for sigma_yy
x_idxs: The indexes of the input sigma_xx that were pruned
by remove_small
y_idxs: Same as above but for sigma_yy
"""
(sigma_xx, sigma_xy, sigma_yx, sigma_yy, x_idxs, y_idxs) = remove_small(
sigma_xx, sigma_xy, sigma_yx, sigma_yy
)
numx = sigma_xx.shape[0]
numy = sigma_yy.shape[0]
if numx == 0 or numy == 0:
return (
[0, 0, 0],
[0, 0, 0],
np.zeros_like(sigma_xx),
np.zeros_like(sigma_yy),
x_idxs,
y_idxs,
)
if verbose:
print("adding eps to diagonal and taking inverse")
sigma_xx += epsilon * np.eye(numx)
sigma_yy += epsilon * np.eye(numy)
inv_xx = np.linalg.pinv(sigma_xx)
inv_yy = np.linalg.pinv(sigma_yy)
if verbose:
print("taking square root")
invsqrt_xx = positivedef_matrix_sqrt(inv_xx)
invsqrt_yy = positivedef_matrix_sqrt(inv_yy)
if verbose:
print("dot products...")
arr_x = np.dot(sigma_yx, invsqrt_xx)
arr_x = np.dot(inv_yy, arr_x)
arr_x = np.dot(invsqrt_xx, np.dot(sigma_xy, arr_x))
arr_y = np.dot(sigma_xy, invsqrt_yy)
arr_y = np.dot(inv_xx, arr_y)
arr_y = np.dot(invsqrt_yy, np.dot(sigma_yx, arr_y))
if verbose:
print("trying to take final svd")
arr_x_stable = arr_x + epsilon * np.eye(arr_x.shape[0])
arr_y_stable = arr_y + epsilon * np.eye(arr_y.shape[0])
try:
ux, sx, vx = np.linalg.svd(arr_x_stable)
uy, sy, vy = np.linalg.svd(arr_y_stable)
except:
return [0, 0, 0], [0, 0, 0], 0, 0, 0, 0
sx = np.sqrt(np.abs(sx))
sy = np.sqrt(np.abs(sy))
if verbose:
print("computed everything!")
return [ux, sx, vx], [uy, sy, vy], invsqrt_xx, invsqrt_yy, x_idxs, y_idxs
def sum_threshold(array, threshold):
"""Computes threshold index of decreasing nonnegative array by summing.
This function takes in a decreasing array nonnegative floats, and a
threshold between 0 and 1. It returns the index i at which the sum of the
array up to i is threshold*total mass of the array.
Args:
array: a 1d numpy array of decreasing, nonnegative floats
threshold: a number between 0 and 1
Returns:
i: index at which np.sum(array[:i]) >= threshold
"""
assert (threshold >= 0) and (threshold <= 1), "print incorrect threshold"
for i in range(len(array)):
if np.sum(array[:i]) / np.sum(array) >= threshold:
return i
def create_zero_dict(compute_dirns, dimension):
"""Outputs a zero dict when neuron activation norms too small.
This function creates a return_dict with appropriately shaped zero entries
when all neuron activations are very small.
Args:
compute_dirns: boolean, whether to have zero vectors for directions
dimension: int, defines shape of directions
Returns:
return_dict: a dict of appropriately shaped zero entries
"""
return_dict = {}
return_dict["mean"] = (np.asarray(0), np.asarray(0))
return_dict["sum"] = (np.asarray(0), np.asarray(0))
return_dict["cca_coef1"] = np.asarray(0)
return_dict["cca_coef2"] = np.asarray(0)
return_dict["idx1"] = 0
return_dict["idx2"] = 0
if compute_dirns:
return_dict["cca_dirns1"] = np.zeros((1, dimension))
return_dict["cca_dirns2"] = np.zeros((1, dimension))
return return_dict
def get_cca_similarity(acts1, acts2, threshold=0.98, compute_dirns=True, verbose=True):
"""The main function for computing cca similarities.
This function computes the cca similarity between two sets of activations,
returning a dict with the cca coefficients, a few statistics of the cca
coefficients, and (optionally) the actual directions.
Args:
acts1: (num_neurons1, data_points) a 2d numpy array of neurons by
datapoints where entry (i,j) is the output of neuron i on
datapoint j.
acts2: (num_neurons2, data_points) same as above, but (potentially)
for a different set of neurons. Note that acts1 and acts2
can have different numbers of neurons, but must agree on the
number of datapoints
threshold: float between 0, 1 used to get rid of trailing zeros in
the cca correlation coefficients to output more accurate
summary statistics of correlations.
compute_dirns: boolean value determining whether actual cca
directions are computed. (For very large neurons and
datasets, may be better to compute these on the fly
instead of store in memory.)
verbose: Boolean, whether info about intermediate outputs printed
Returns:
return_dict: A dictionary with outputs from the cca computations.
Contains neuron coefficients (combinations of neurons
that correspond to cca directions), the cca correlation
coefficients (how well aligned directions correlate),
x and y idxs (for computing cca directions on the fly
if compute_dirns=False), and summary statistics. If
compute_dirns=True, the cca directions are also
computed.
"""
# assert dimensionality equal
assert acts1.shape[1] == acts2.shape[1], "dimensions don't match"
# check that acts1, acts2 are transposition
assert acts1.shape[0] < acts1.shape[1], (
"input must be number of neurons" "by datapoints"
)
return_dict = {}
# compute covariance with numpy function for extra stability
numx = acts1.shape[0]
covariance = np.cov(acts1, acts2)
sigmaxx = covariance[:numx, :numx]
sigmaxy = covariance[:numx, numx:]
sigmayx = covariance[numx:, :numx]
sigmayy = covariance[numx:, numx:]
# rescale covariance to make cca computation more stable
xmax = np.max(np.abs(sigmaxx))
ymax = np.max(np.abs(sigmayy))
sigmaxx /= xmax
sigmayy /= ymax
sigmaxy /= np.sqrt(xmax * ymax)
sigmayx /= np.sqrt(xmax * ymax)
([_, sx, vx], [_, sy, vy], invsqrt_xx, invsqrt_yy, x_idxs, y_idxs) = compute_ccas(
sigmaxx, sigmaxy, sigmayx, sigmayy, verbose
)
# if x_idxs or y_idxs is all false, return_dict has zero entries
if (not np.any(x_idxs)) or (not np.any(y_idxs)):
return create_zero_dict(compute_dirns, acts1.shape[1])
if compute_dirns:
# orthonormal directions that are CCA directions
cca_dirns1 = np.dot(vx, np.dot(invsqrt_xx, acts1[x_idxs]))
cca_dirns2 = np.dot(vy, np.dot(invsqrt_yy, acts2[y_idxs]))
# get rid of trailing zeros in the cca coefficients
idx1 = sum_threshold(sx, threshold)
idx2 = sum_threshold(sy, threshold)
return_dict["neuron_coeffs1"] = np.dot(vx, invsqrt_xx)
return_dict["neuron_coeffs2"] = np.dot(vy, invsqrt_yy)
return_dict["cca_coef1"] = sx
return_dict["cca_coef2"] = sy
return_dict["x_idxs"] = x_idxs
return_dict["y_idxs"] = y_idxs
# summary statistics
return_dict["mean"] = (np.mean(sx[:idx1]), np.mean(sy[:idx2]))
return_dict["sum"] = (np.sum(sx), np.sum(sy))
if compute_dirns:
return_dict["cca_dirns1"] = cca_dirns1
return_dict["cca_dirns2"] = cca_dirns2
return return_dict
def robust_cca_similarity(
acts1, acts2, threshold=0.98, compute_dirns=True, verbose=False
):
"""Calls get_cca_similarity multiple times while adding noise.
This function is very similar to get_cca_similarity, and can be used if
get_cca_similarity doesn't converge for some pair of inputs. This function
adds some noise to the activations to help convergence.
Args:
acts1: (num_neurons1, data_points) a 2d numpy array of neurons by
datapoints where entry (i,j) is the output of neuron i on
datapoint j.
acts2: (num_neurons2, data_points) same as above, but (potentially)
for a different set of neurons. Note that acts1 and acts2
can have different numbers of neurons, but must agree on the
number of datapoints
threshold: float between 0, 1 used to get rid of trailing zeros in
the cca correlation coefficients to output more accurate
summary statistics of correlations.
compute_dirns: boolean value determining whether actual cca
directions are computed. (For very large neurons and
datasets, may be better to compute these on the fly
instead of store in memory.)
Returns:
return_dict: A dictionary with outputs from the cca computations.
Contains neuron coefficients (combinations of neurons
that correspond to cca directions), the cca correlation
coefficients (how well aligned directions correlate),
x and y idxs (for computing cca directions on the fly
if compute_dirns=False), and summary statistics. If
compute_dirns=True, the cca directions are also
computed.
"""
for trial in range(num_cca_trials):
try:
return_dict = get_cca_similarity(
acts1, acts2, threshold, compute_dirns, verbose=verbose
)
except np.LinAlgError:
acts1 = acts1 * 1e-1 + np.random.normal(size=acts1.shape) * epsilon
acts2 = acts2 * 1e-1 + np.random.normal(size=acts1.shape) * epsilon
if trial + 1 == num_cca_trials:
raise
return return_dict
| 42.900285 | 87 | 0.644043 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
num_cca_trials = 5
epsilon = 1e-6
def positivedef_matrix_sqrt(array):
w, v = np.linalg.eigh(array)
wsqrt = np.sqrt(w)
sqrtarray = np.dot(v, np.dot(np.diag(wsqrt), np.conj(v).T))
return sqrtarray
def remove_small(sigma_xx, sigma_xy, sigma_yx, sigma_yy, threshold=1e-6):
x_diag = np.abs(np.diagonal(sigma_xx))
y_diag = np.abs(np.diagonal(sigma_yy))
x_idxs = x_diag >= threshold
y_idxs = y_diag >= threshold
sigma_xx_crop = sigma_xx[x_idxs][:, x_idxs]
sigma_xy_crop = sigma_xy[x_idxs][:, y_idxs]
sigma_yx_crop = sigma_yx[y_idxs][:, x_idxs]
sigma_yy_crop = sigma_yy[y_idxs][:, y_idxs]
return (sigma_xx_crop, sigma_xy_crop, sigma_yx_crop, sigma_yy_crop, x_idxs, y_idxs)
def compute_ccas(sigma_xx, sigma_xy, sigma_yx, sigma_yy, verbose=True):
(sigma_xx, sigma_xy, sigma_yx, sigma_yy, x_idxs, y_idxs) = remove_small(
sigma_xx, sigma_xy, sigma_yx, sigma_yy
)
numx = sigma_xx.shape[0]
numy = sigma_yy.shape[0]
if numx == 0 or numy == 0:
return (
[0, 0, 0],
[0, 0, 0],
np.zeros_like(sigma_xx),
np.zeros_like(sigma_yy),
x_idxs,
y_idxs,
)
if verbose:
print("adding eps to diagonal and taking inverse")
sigma_xx += epsilon * np.eye(numx)
sigma_yy += epsilon * np.eye(numy)
inv_xx = np.linalg.pinv(sigma_xx)
inv_yy = np.linalg.pinv(sigma_yy)
if verbose:
print("taking square root")
invsqrt_xx = positivedef_matrix_sqrt(inv_xx)
invsqrt_yy = positivedef_matrix_sqrt(inv_yy)
if verbose:
print("dot products...")
arr_x = np.dot(sigma_yx, invsqrt_xx)
arr_x = np.dot(inv_yy, arr_x)
arr_x = np.dot(invsqrt_xx, np.dot(sigma_xy, arr_x))
arr_y = np.dot(sigma_xy, invsqrt_yy)
arr_y = np.dot(inv_xx, arr_y)
arr_y = np.dot(invsqrt_yy, np.dot(sigma_yx, arr_y))
if verbose:
print("trying to take final svd")
arr_x_stable = arr_x + epsilon * np.eye(arr_x.shape[0])
arr_y_stable = arr_y + epsilon * np.eye(arr_y.shape[0])
try:
ux, sx, vx = np.linalg.svd(arr_x_stable)
uy, sy, vy = np.linalg.svd(arr_y_stable)
except:
return [0, 0, 0], [0, 0, 0], 0, 0, 0, 0
sx = np.sqrt(np.abs(sx))
sy = np.sqrt(np.abs(sy))
if verbose:
print("computed everything!")
return [ux, sx, vx], [uy, sy, vy], invsqrt_xx, invsqrt_yy, x_idxs, y_idxs
def sum_threshold(array, threshold):
assert (threshold >= 0) and (threshold <= 1), "print incorrect threshold"
for i in range(len(array)):
if np.sum(array[:i]) / np.sum(array) >= threshold:
return i
def create_zero_dict(compute_dirns, dimension):
return_dict = {}
return_dict["mean"] = (np.asarray(0), np.asarray(0))
return_dict["sum"] = (np.asarray(0), np.asarray(0))
return_dict["cca_coef1"] = np.asarray(0)
return_dict["cca_coef2"] = np.asarray(0)
return_dict["idx1"] = 0
return_dict["idx2"] = 0
if compute_dirns:
return_dict["cca_dirns1"] = np.zeros((1, dimension))
return_dict["cca_dirns2"] = np.zeros((1, dimension))
return return_dict
def get_cca_similarity(acts1, acts2, threshold=0.98, compute_dirns=True, verbose=True):
assert acts1.shape[1] == acts2.shape[1], "dimensions don't match"
# check that acts1, acts2 are transposition
assert acts1.shape[0] < acts1.shape[1], (
"input must be number of neurons" "by datapoints"
)
return_dict = {}
# compute covariance with numpy function for extra stability
numx = acts1.shape[0]
covariance = np.cov(acts1, acts2)
sigmaxx = covariance[:numx, :numx]
sigmaxy = covariance[:numx, numx:]
sigmayx = covariance[numx:, :numx]
sigmayy = covariance[numx:, numx:]
# rescale covariance to make cca computation more stable
xmax = np.max(np.abs(sigmaxx))
ymax = np.max(np.abs(sigmayy))
sigmaxx /= xmax
sigmayy /= ymax
sigmaxy /= np.sqrt(xmax * ymax)
sigmayx /= np.sqrt(xmax * ymax)
([_, sx, vx], [_, sy, vy], invsqrt_xx, invsqrt_yy, x_idxs, y_idxs) = compute_ccas(
sigmaxx, sigmaxy, sigmayx, sigmayy, verbose
)
# if x_idxs or y_idxs is all false, return_dict has zero entries
if (not np.any(x_idxs)) or (not np.any(y_idxs)):
return create_zero_dict(compute_dirns, acts1.shape[1])
if compute_dirns:
# orthonormal directions that are CCA directions
cca_dirns1 = np.dot(vx, np.dot(invsqrt_xx, acts1[x_idxs]))
cca_dirns2 = np.dot(vy, np.dot(invsqrt_yy, acts2[y_idxs]))
# get rid of trailing zeros in the cca coefficients
idx1 = sum_threshold(sx, threshold)
idx2 = sum_threshold(sy, threshold)
return_dict["neuron_coeffs1"] = np.dot(vx, invsqrt_xx)
return_dict["neuron_coeffs2"] = np.dot(vy, invsqrt_yy)
return_dict["cca_coef1"] = sx
return_dict["cca_coef2"] = sy
return_dict["x_idxs"] = x_idxs
return_dict["y_idxs"] = y_idxs
# summary statistics
return_dict["mean"] = (np.mean(sx[:idx1]), np.mean(sy[:idx2]))
return_dict["sum"] = (np.sum(sx), np.sum(sy))
if compute_dirns:
return_dict["cca_dirns1"] = cca_dirns1
return_dict["cca_dirns2"] = cca_dirns2
return return_dict
def robust_cca_similarity(
acts1, acts2, threshold=0.98, compute_dirns=True, verbose=False
):
for trial in range(num_cca_trials):
try:
return_dict = get_cca_similarity(
acts1, acts2, threshold, compute_dirns, verbose=verbose
)
except np.LinAlgError:
acts1 = acts1 * 1e-1 + np.random.normal(size=acts1.shape) * epsilon
acts2 = acts2 * 1e-1 + np.random.normal(size=acts1.shape) * epsilon
if trial + 1 == num_cca_trials:
raise
return return_dict
| true | true |
f7f900a004a70ebc269357bed77d212a4a156452 | 14,176 | py | Python | tsai/models/TSTPlus.py | MOREDataset/tsai | 54987a579365ca7722475fff2fc4a24dc054e82c | [
"Apache-2.0"
] | null | null | null | tsai/models/TSTPlus.py | MOREDataset/tsai | 54987a579365ca7722475fff2fc4a24dc054e82c | [
"Apache-2.0"
] | null | null | null | tsai/models/TSTPlus.py | MOREDataset/tsai | 54987a579365ca7722475fff2fc4a24dc054e82c | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/108c_models.TSTPlus.ipynb (unless otherwise specified).
__all__ = ['SinCosPosEncoding', 'Coord2dPosEncoding', 'Coord1dPosEncoding', 'ScaledDotProductAttention',
'MultiHeadAttention', 'TSTEncoderLayer', 'TSTEncoder', 'TSTPlus', 'MultiTST']
# Cell
from ..imports import *
from ..utils import *
from .layers import *
from .utils import *
# Cell
def SinCosPosEncoding(q_len, d_model):
pe = torch.zeros(q_len, d_model, device=default_device())
position = torch.arange(0, q_len).unsqueeze(1)
div_term = torch.exp(torch.arange(0, d_model, 2) * -(math.log(10000.0) / d_model))
pe[:, 0::2] = torch.sin(position * div_term)
pe[:, 1::2] = torch.cos(position * div_term)
return pe.to(device=device)
# Cell
def Coord2dPosEncoding(q_len, d_model, eps=1e-3, verbose=False, device=default_device()):
x = 1
i = 0
for i in range(100):
cpe = 2 * (torch.linspace(0, 1, q_len).reshape(-1, 1) ** x) * (torch.linspace(0, 1, d_model).reshape(1, -1) ** x) - 1
pv(f'{i:4.0f} {x:5.3f} {cpe.mean():+6.3f}', verbose)
if abs(cpe.mean()) <= eps: break
elif cpe.mean() > eps: x += .001
else: x -= .001
i += 1
return cpe.to(device=device)
# Cell
def Coord1dPosEncoding(q_len, exponential=False, normalize=True, device=default_device()):
cpe = (2 * (torch.linspace(0, 1, q_len).reshape(-1, 1)**(.5 if exponential else 1)) - 1)
if normalize:
cpe = cpe - cpe.mean()
cpe = cpe / cpe.std()
return cpe.to(device=device)
# Cell
class ScaledDotProductAttention(Module):
def __init__(self, d_k:int): self.d_k = d_k
def forward(self, q:Tensor, k:Tensor, v:Tensor, mask:Optional[Tensor]=None):
# MatMul (q, k) - similarity scores for all pairs of positions in an input sequence
scores = torch.matmul(q, k) # scores : [bs x n_heads x q_len x q_len]
# Scale
scores = scores / (self.d_k ** 0.5)
# Mask (optional)
if mask is not None: scores.masked_fill_(mask, -1e9)
# SoftMax
attn = F.softmax(scores, dim=-1) # attn : [bs x n_heads x q_len x q_len]
# MatMul (attn, v)
context = torch.matmul(attn, v) # context: [bs x n_heads x q_len x d_v]
return context, attn
# Cell
class MultiHeadAttention(Module):
def __init__(self, d_model:int, n_heads:int, d_k:int, d_v:int):
r"""
Input shape: Q, K, V:[batch_size (bs) x q_len x d_model], mask:[q_len x q_len]
"""
self.n_heads, self.d_k, self.d_v = n_heads, d_k, d_v
self.W_Q = nn.Linear(d_model, d_k * n_heads, bias=False)
self.W_K = nn.Linear(d_model, d_k * n_heads, bias=False)
self.W_V = nn.Linear(d_model, d_v * n_heads, bias=False)
self.W_O = nn.Linear(n_heads * d_v, d_model, bias=False)
def forward(self, Q:Tensor, K:Tensor, V:Tensor, mask:Optional[Tensor]=None):
bs = Q.size(0)
# Linear (+ split in multiple heads)
q_s = self.W_Q(Q).view(bs, -1, self.n_heads, self.d_k).transpose(1,2) # q_s : [bs x n_heads x q_len x d_k]
k_s = self.W_K(K).view(bs, -1, self.n_heads, self.d_k).permute(0,2,3,1) # k_s : [bs x n_heads x d_k x q_len] - transpose(1,2) + transpose(2,3)
v_s = self.W_V(V).view(bs, -1, self.n_heads, self.d_v).transpose(1,2) # v_s : [bs x n_heads x q_len x d_v]
# Scaled Dot-Product Attention (multiple heads)
context, attn = ScaledDotProductAttention(self.d_k)(q_s, k_s, v_s) # context: [bs x n_heads x q_len x d_v], attn: [bs x n_heads x q_len x q_len]
# Concat
context = context.transpose(1, 2).contiguous().view(bs, -1, self.n_heads * self.d_v) # context: [bs x q_len x n_heads * d_v]
# Linear
output = self.W_O(context) # context: [bs x q_len x d_model]
return output, attn
# Cell
class TSTEncoderLayer(Module):
def __init__(self, d_model:int, n_heads:int, d_k:Optional[int]=None, d_v:Optional[int]=None, d_ff:int=256, res_dropout:float=0.1, activation:str="gelu"):
assert d_model // n_heads, f"d_model ({d_model}) must be divisible by n_heads ({n_heads})"
d_k = ifnone(d_k, d_model // n_heads)
d_v = ifnone(d_v, d_model // n_heads)
# Multi-Head attention
self.self_attn = MultiHeadAttention(d_model, n_heads, d_k, d_v)
# Add & Norm
self.dropout_attn = nn.Dropout(res_dropout)
self.batchnorm_attn = nn.BatchNorm1d(d_model)
# Position-wise Feed-Forward
self.ff = nn.Sequential(nn.Linear(d_model, d_ff), self._get_activation_fn(activation), nn.Linear(d_ff, d_model))
# Add & Norm
self.dropout_ffn = nn.Dropout(res_dropout)
self.batchnorm_ffn = nn.BatchNorm1d(d_model)
def forward(self, src:Tensor, mask:Optional[Tensor]=None) -> Tensor:
# Multi-Head attention sublayer
## Multi-Head attention
src2, attn = self.self_attn(src, src, src, mask=mask)
## Add & Norm
src = src + self.dropout_attn(src2) # Add: residual connection with residual dropout
src = self.batchnorm_attn(src.permute(1,2,0)).permute(2,0,1) # Norm: batchnorm (requires d_model features to be in dim 1)
# Feed-forward sublayer
## Position-wise Feed-Forward
src2 = self.ff(src)
## Add & Norm
src = src + self.dropout_ffn(src2) # Add: residual connection with residual dropout
src = self.batchnorm_ffn(src.permute(1,2,0)).permute(2,0,1) # Norm: batchnorm (requires d_model features to be in dim 1)
return src
def _get_activation_fn(self, activation):
if activation == "relu": return nn.ReLU()
elif activation == "gelu": return nn.GELU()
raise ValueError(f'{activation} is not available. You can use "relu" or "gelu"')
# Cell
class TSTEncoder(Module):
def __init__(self, encoder_layer, n_layers):
self.layers = nn.ModuleList([deepcopy(encoder_layer) for i in range(n_layers)])
def forward(self, src:Tensor, mask:Optional[Tensor]=None) -> Tensor:
output = src
for mod in self.layers: output = mod(output, mask=mask)
return output
# Cell
class TSTPlus(Module):
def __init__(self, c_in:int, c_out:int, seq_len:int, max_seq_len:Optional[int]=None,
n_layers:int=3, d_model:int=128, n_heads:int=16, d_k:Optional[int]=None, d_v:Optional[int]=None,
d_ff:int=256, res_dropout:float=0.1, activation:str="gelu", fc_dropout:float=0.,
pe:str='gauss', learn_pe:bool=True, flatten:bool=True, custom_head:Optional=None,
y_range:Optional[tuple]=None, verbose:bool=False, **kwargs):
r"""TST (Time Series Transformer) is a Transformer that takes continuous time series as inputs.
As mentioned in the paper, the input must be standardized by_var based on the entire training set.
Args:
c_in: the number of features (aka variables, dimensions, channels) in the time series dataset.
c_out: the number of target classes.
seq_len: number of time steps in the time series.
max_seq_len: useful to control the temporal resolution in long time series to avoid memory issues.
d_model: total dimension of the model (number of features created by the model)
n_heads: parallel attention heads.
d_k: size of the learned linear projection of queries and keys in the MHA. Usual values: 16-512. Default: None -> (d_model/n_heads) = 32.
d_v: size of the learned linear projection of values in the MHA. Usual values: 16-512. Default: None -> (d_model/n_heads) = 32.
d_ff: the dimension of the feedforward network model.
res_dropout: amount of residual dropout applied in the encoder.
activation: the activation function of intermediate layer, relu or gelu.
num_layers: the number of sub-encoder-layers in the encoder.
fc_dropout: dropout applied to the final fully connected layer.
pe: type of positional encoder. Available types: None, 'gauss' (default), 'lin1d', 'exp1d', '2d', 'sincos', 'zeros'.
learn_pe: learned positional encoder (True, default) or fixed positional encoder.
flatten: this will flattent the encoder output to be able to apply an mlp type of head (default=True)
custom_head: custom head that will be applied to the network. It must contain all kwargs (pass a partial function)
y_range: range of possible y values (used in regression tasks).
kwargs: nn.Conv1d kwargs. If not {}, a nn.Conv1d with those kwargs will be applied to original time series.
Input shape:
bs (batch size) x nvars (aka features, variables, dimensions, channels) x seq_len (aka time steps)
"""
self.c_out, self.seq_len = c_out, seq_len
# Input encoding
q_len = seq_len
self.new_q_len = False
if max_seq_len is not None and seq_len > max_seq_len: # Control temporal resolution
self.new_q_len = True
q_len = max_seq_len
tr_factor = math.ceil(seq_len / q_len)
total_padding = (tr_factor * q_len - seq_len)
padding = (total_padding // 2, total_padding - total_padding // 2)
self.W_P = nn.Sequential(Pad1d(padding), Conv1d(c_in, d_model, kernel_size=tr_factor, stride=tr_factor))
pv(f'temporal resolution modified: {seq_len} --> {q_len} time steps: kernel_size={tr_factor}, stride={tr_factor}, padding={padding}.\n', verbose)
elif kwargs:
self.new_q_len = True
t = torch.rand(1, 1, seq_len)
q_len = nn.Conv1d(1, 1, **kwargs)(t).shape[-1]
self.W_P = nn.Conv1d(c_in, d_model, **kwargs) # Eq 2
pv(f'Conv1d with kwargs={kwargs} applied to input to create input encodings\n', verbose)
else:
self.W_P = nn.Linear(c_in, d_model) # Eq 1: projection of feature vectors onto a d-dim vector space
# Positional encoding
if pe == None:
W_pos = torch.zeros((q_len, d_model), device=default_device()) # pe = None and learn_pe = False can be used to measure impact of pe
learn_pe = False
elif pe == 'zeros': W_pos = torch.zeros((q_len, d_model), device=default_device())
elif pe == 'gauss': W_pos = torch.normal(0, 1, (q_len, d_model), device=default_device())
elif pe == 'lin1d': W_pos = Coord1dPosEncoding(q_len, exponential=False, normalize=True)
elif pe == 'exp1d': W_pos = Coord1dPosEncoding(q_len, exponential=True, normalize=True)
elif pe == '2d': W_pos = Coord2dPosEncoding(q_len, d_model)
elif pe == 'sincos': W_pos = SinCosPosEncoding(q_len, d_model)
else: raise ValueError(f"{pe} is not a valid pe (positional encoder. Available types: 'gauss' (default), 'zeros', lin1d', 'exp1d', '2d', 'sincos'.)")
self.W_pos = nn.Parameter(W_pos, requires_grad=learn_pe)
# Residual dropout
self.res_dropout = nn.Dropout(res_dropout)
# Encoder
encoder_layer = TSTEncoderLayer(d_model, n_heads, d_k=d_k, d_v=d_v, d_ff=d_ff, res_dropout=res_dropout, activation=activation)
self.encoder = TSTEncoder(encoder_layer, n_layers)
self.flatten = Flatten() if flatten else None
# Head
self.head_nf = q_len * d_model if flatten else d_model
if custom_head: self.head = custom_head(self.head_nf, c_out) # custom head passed as a partial func with all its kwargs
else: self.head = self.create_head(self.head_nf, c_out, fc_dropout=fc_dropout, y_range=y_range)
def create_head(self, nf, c_out, fc_dropout=0., y_range=None, **kwargs):
layers = [nn.Dropout(fc_dropout)] if fc_dropout else []
layers += [nn.Linear(nf, c_out)]
if y_range: layers += [SigmoidRange(*y_range)]
return nn.Sequential(*layers)
def forward(self, x:Tensor, mask:Optional[Tensor]=None) -> Tensor: # x: [bs x nvars x q_len]
# Input encoding
if self.new_q_len: u = self.W_P(x).transpose(2,1) # Eq 2 # u: [bs x d_model x q_len] transposed to [bs x q_len x d_model]
else: u = self.W_P(x.transpose(2,1)) # Eq 1 # u: [bs x q_len x d_model] transposed to [bs x q_len x d_model]
# Positional encoding
u = self.res_dropout(u + self.W_pos)
# Encoder
z = self.encoder(u) # z: [bs x q_len x d_model]
if self.flatten is not None: z = self.flatten(z) # z: [bs x q_len * d_model]
else: z = z.transpose(2,1).contiguous() # z: [bs x d_model x q_len]
# Classification/ Regression head
return self.head(z) # output: [bs x c_out]
# Cell
@delegates(TSTPlus.__init__)
class MultiTST(Module):
_arch = TSTPlus
def __init__(self, feats, c_out, seq_len, **kwargs):
r"""
MultiTST is a class that allows you to create a model with multiple branches of TST.
Args:
* feats: list with number of features that will be passed to each body.
"""
self.feats = tuple(L(feats))
self.c_out, self.seq_len, self.kwargs = c_out, seq_len, kwargs
# Body
self.branches = nn.ModuleList()
self.head_nf = 0
for feat in self.feats:
m = create_model(self._arch, c_in=feat, c_out=c_out, seq_len=seq_len, **kwargs)
self.head_nf += m.head_nf
m.head = Noop
self.branches.append(m)
# Head
self.head = self._arch.create_head(self, self.head_nf, c_out, **kwargs)
def forward(self, x):
x = torch.split(x, self.feats, dim=1)
for i, branch in enumerate(self.branches):
out = branch(x[i]) if i == 0 else torch.cat([out, branch(x[i])], dim=1)
return self.head(out) | 49.051903 | 161 | 0.621755 |
__all__ = ['SinCosPosEncoding', 'Coord2dPosEncoding', 'Coord1dPosEncoding', 'ScaledDotProductAttention',
'MultiHeadAttention', 'TSTEncoderLayer', 'TSTEncoder', 'TSTPlus', 'MultiTST']
from ..imports import *
from ..utils import *
from .layers import *
from .utils import *
def SinCosPosEncoding(q_len, d_model):
pe = torch.zeros(q_len, d_model, device=default_device())
position = torch.arange(0, q_len).unsqueeze(1)
div_term = torch.exp(torch.arange(0, d_model, 2) * -(math.log(10000.0) / d_model))
pe[:, 0::2] = torch.sin(position * div_term)
pe[:, 1::2] = torch.cos(position * div_term)
return pe.to(device=device)
def Coord2dPosEncoding(q_len, d_model, eps=1e-3, verbose=False, device=default_device()):
x = 1
i = 0
for i in range(100):
cpe = 2 * (torch.linspace(0, 1, q_len).reshape(-1, 1) ** x) * (torch.linspace(0, 1, d_model).reshape(1, -1) ** x) - 1
pv(f'{i:4.0f} {x:5.3f} {cpe.mean():+6.3f}', verbose)
if abs(cpe.mean()) <= eps: break
elif cpe.mean() > eps: x += .001
else: x -= .001
i += 1
return cpe.to(device=device)
def Coord1dPosEncoding(q_len, exponential=False, normalize=True, device=default_device()):
cpe = (2 * (torch.linspace(0, 1, q_len).reshape(-1, 1)**(.5 if exponential else 1)) - 1)
if normalize:
cpe = cpe - cpe.mean()
cpe = cpe / cpe.std()
return cpe.to(device=device)
class ScaledDotProductAttention(Module):
def __init__(self, d_k:int): self.d_k = d_k
def forward(self, q:Tensor, k:Tensor, v:Tensor, mask:Optional[Tensor]=None):
scores = torch.matmul(q, k)
scores = scores / (self.d_k ** 0.5)
if mask is not None: scores.masked_fill_(mask, -1e9)
attn = F.softmax(scores, dim=-1)
context = torch.matmul(attn, v)
return context, attn
class MultiHeadAttention(Module):
def __init__(self, d_model:int, n_heads:int, d_k:int, d_v:int):
self.n_heads, self.d_k, self.d_v = n_heads, d_k, d_v
self.W_Q = nn.Linear(d_model, d_k * n_heads, bias=False)
self.W_K = nn.Linear(d_model, d_k * n_heads, bias=False)
self.W_V = nn.Linear(d_model, d_v * n_heads, bias=False)
self.W_O = nn.Linear(n_heads * d_v, d_model, bias=False)
def forward(self, Q:Tensor, K:Tensor, V:Tensor, mask:Optional[Tensor]=None):
bs = Q.size(0)
q_s = self.W_Q(Q).view(bs, -1, self.n_heads, self.d_k).transpose(1,2)
k_s = self.W_K(K).view(bs, -1, self.n_heads, self.d_k).permute(0,2,3,1)
v_s = self.W_V(V).view(bs, -1, self.n_heads, self.d_v).transpose(1,2)
context, attn = ScaledDotProductAttention(self.d_k)(q_s, k_s, v_s)
context = context.transpose(1, 2).contiguous().view(bs, -1, self.n_heads * self.d_v)
output = self.W_O(context)
return output, attn
class TSTEncoderLayer(Module):
def __init__(self, d_model:int, n_heads:int, d_k:Optional[int]=None, d_v:Optional[int]=None, d_ff:int=256, res_dropout:float=0.1, activation:str="gelu"):
assert d_model // n_heads, f"d_model ({d_model}) must be divisible by n_heads ({n_heads})"
d_k = ifnone(d_k, d_model // n_heads)
d_v = ifnone(d_v, d_model // n_heads)
self.self_attn = MultiHeadAttention(d_model, n_heads, d_k, d_v)
self.dropout_attn = nn.Dropout(res_dropout)
self.batchnorm_attn = nn.BatchNorm1d(d_model)
self.ff = nn.Sequential(nn.Linear(d_model, d_ff), self._get_activation_fn(activation), nn.Linear(d_ff, d_model))
self.dropout_ffn = nn.Dropout(res_dropout)
self.batchnorm_ffn = nn.BatchNorm1d(d_model)
def forward(self, src:Tensor, mask:Optional[Tensor]=None) -> Tensor:
self.self_attn(src, src, src, mask=mask)
= src + self.dropout_attn(src2)
src = self.batchnorm_attn(src.permute(1,2,0)).permute(2,0,1)
= src + self.dropout_ffn(src2)
src = self.batchnorm_ffn(src.permute(1,2,0)).permute(2,0,1)
return src
def _get_activation_fn(self, activation):
if activation == "relu": return nn.ReLU()
elif activation == "gelu": return nn.GELU()
raise ValueError(f'{activation} is not available. You can use "relu" or "gelu"')
class TSTEncoder(Module):
def __init__(self, encoder_layer, n_layers):
self.layers = nn.ModuleList([deepcopy(encoder_layer) for i in range(n_layers)])
def forward(self, src:Tensor, mask:Optional[Tensor]=None) -> Tensor:
output = src
for mod in self.layers: output = mod(output, mask=mask)
return output
class TSTPlus(Module):
def __init__(self, c_in:int, c_out:int, seq_len:int, max_seq_len:Optional[int]=None,
n_layers:int=3, d_model:int=128, n_heads:int=16, d_k:Optional[int]=None, d_v:Optional[int]=None,
d_ff:int=256, res_dropout:float=0.1, activation:str="gelu", fc_dropout:float=0.,
pe:str='gauss', learn_pe:bool=True, flatten:bool=True, custom_head:Optional=None,
y_range:Optional[tuple]=None, verbose:bool=False, **kwargs):
self.c_out, self.seq_len = c_out, seq_len
q_len = seq_len
self.new_q_len = False
if max_seq_len is not None and seq_len > max_seq_len:
self.new_q_len = True
q_len = max_seq_len
tr_factor = math.ceil(seq_len / q_len)
total_padding = (tr_factor * q_len - seq_len)
padding = (total_padding // 2, total_padding - total_padding // 2)
self.W_P = nn.Sequential(Pad1d(padding), Conv1d(c_in, d_model, kernel_size=tr_factor, stride=tr_factor))
pv(f'temporal resolution modified: {seq_len} --> {q_len} time steps: kernel_size={tr_factor}, stride={tr_factor}, padding={padding}.\n', verbose)
elif kwargs:
self.new_q_len = True
t = torch.rand(1, 1, seq_len)
q_len = nn.Conv1d(1, 1, **kwargs)(t).shape[-1]
self.W_P = nn.Conv1d(c_in, d_model, **kwargs)
pv(f'Conv1d with kwargs={kwargs} applied to input to create input encodings\n', verbose)
else:
self.W_P = nn.Linear(c_in, d_model)
if pe == None:
W_pos = torch.zeros((q_len, d_model), device=default_device())
learn_pe = False
elif pe == 'zeros': W_pos = torch.zeros((q_len, d_model), device=default_device())
elif pe == 'gauss': W_pos = torch.normal(0, 1, (q_len, d_model), device=default_device())
elif pe == 'lin1d': W_pos = Coord1dPosEncoding(q_len, exponential=False, normalize=True)
elif pe == 'exp1d': W_pos = Coord1dPosEncoding(q_len, exponential=True, normalize=True)
elif pe == '2d': W_pos = Coord2dPosEncoding(q_len, d_model)
elif pe == 'sincos': W_pos = SinCosPosEncoding(q_len, d_model)
else: raise ValueError(f"{pe} is not a valid pe (positional encoder. Available types: 'gauss' (default), 'zeros', lin1d', 'exp1d', '2d', 'sincos'.)")
self.W_pos = nn.Parameter(W_pos, requires_grad=learn_pe)
# Residual dropout
self.res_dropout = nn.Dropout(res_dropout)
# Encoder
encoder_layer = TSTEncoderLayer(d_model, n_heads, d_k=d_k, d_v=d_v, d_ff=d_ff, res_dropout=res_dropout, activation=activation)
self.encoder = TSTEncoder(encoder_layer, n_layers)
self.flatten = Flatten() if flatten else None
# Head
self.head_nf = q_len * d_model if flatten else d_model
if custom_head: self.head = custom_head(self.head_nf, c_out) # custom head passed as a partial func with all its kwargs
else: self.head = self.create_head(self.head_nf, c_out, fc_dropout=fc_dropout, y_range=y_range)
def create_head(self, nf, c_out, fc_dropout=0., y_range=None, **kwargs):
layers = [nn.Dropout(fc_dropout)] if fc_dropout else []
layers += [nn.Linear(nf, c_out)]
if y_range: layers += [SigmoidRange(*y_range)]
return nn.Sequential(*layers)
def forward(self, x:Tensor, mask:Optional[Tensor]=None) -> Tensor: # x: [bs x nvars x q_len]
# Input encoding
if self.new_q_len: u = self.W_P(x).transpose(2,1) # Eq 2 # u: [bs x d_model x q_len] transposed to [bs x q_len x d_model]
else: u = self.W_P(x.transpose(2,1)) # Eq 1 # u: [bs x q_len x d_model] transposed to [bs x q_len x d_model]
# Positional encoding
u = self.res_dropout(u + self.W_pos)
# Encoder
z = self.encoder(u) # z: [bs x q_len x d_model]
if self.flatten is not None: z = self.flatten(z) # z: [bs x q_len * d_model]
else: z = z.transpose(2,1).contiguous() # z: [bs x d_model x q_len]
# Classification/ Regression head
return self.head(z) # output: [bs x c_out]
# Cell
@delegates(TSTPlus.__init__)
class MultiTST(Module):
_arch = TSTPlus
def __init__(self, feats, c_out, seq_len, **kwargs):
self.feats = tuple(L(feats))
self.c_out, self.seq_len, self.kwargs = c_out, seq_len, kwargs
# Body
self.branches = nn.ModuleList()
self.head_nf = 0
for feat in self.feats:
m = create_model(self._arch, c_in=feat, c_out=c_out, seq_len=seq_len, **kwargs)
self.head_nf += m.head_nf
m.head = Noop
self.branches.append(m)
# Head
self.head = self._arch.create_head(self, self.head_nf, c_out, **kwargs)
def forward(self, x):
x = torch.split(x, self.feats, dim=1)
for i, branch in enumerate(self.branches):
out = branch(x[i]) if i == 0 else torch.cat([out, branch(x[i])], dim=1)
return self.head(out) | true | true |
f7f9015192572741b93dcf2a76b855f273eaa985 | 1,885 | py | Python | python-midonetclient/src/midonetclient/ip_addr_group.py | obino/midonet | 10cd954bec1290cf0c70aecaa1e13c91f1b008a6 | [
"Apache-2.0"
] | 221 | 2015-01-04T17:49:57.000Z | 2021-12-23T16:15:35.000Z | python-midonetclient/src/midonetclient/ip_addr_group.py | syseleven/midonet | e0b640c96d1a96177e8635112095a8546cdfa37c | [
"Apache-2.0"
] | 8 | 2018-05-24T13:36:03.000Z | 2021-02-19T16:01:43.000Z | python-midonetclient/src/midonetclient/ip_addr_group.py | syseleven/midonet | e0b640c96d1a96177e8635112095a8546cdfa37c | [
"Apache-2.0"
] | 95 | 2015-01-07T02:06:23.000Z | 2022-02-23T22:23:55.000Z |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Midokura PTE LTD.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from midonetclient import ip_addr_group_addr
from midonetclient import resource_base
from midonetclient import vendor_media_type
class IpAddrGroup(resource_base.ResourceBase):
media_type = vendor_media_type.APPLICATION_IP_ADDR_GROUP_JSON
def __init__(self, uri, dto, auth):
super(IpAddrGroup, self).__init__(uri, dto, auth)
def name(self, name):
self.dto['name'] = name
return self
def get_name(self):
return self.dto['name']
def id(self, id):
self.dto['id'] = id
return self
def get_id(self):
return self.dto['id']
def get_addrs(self, query=None):
headers = {'Accept':
vendor_media_type.APPLICATION_IP_ADDR_GROUP_ADDR_COLLECTION_JSON}
return self.get_children(self.dto['addrs'], query, headers,
ip_addr_group_addr.IpAddrGroupAddr)
def add_ipv4_addr(self):
return ip_addr_group_addr.IpAddrGroupAddr(self.dto['addrs'],
{'version': 4}, self.auth)
def add_ipv6_addr(self):
return ip_addr_group_addr.IpAddrGroupAddr(self.dto['addrs'],
{'version': 6}, self.auth)
| 31.949153 | 77 | 0.667374 |
from midonetclient import ip_addr_group_addr
from midonetclient import resource_base
from midonetclient import vendor_media_type
class IpAddrGroup(resource_base.ResourceBase):
media_type = vendor_media_type.APPLICATION_IP_ADDR_GROUP_JSON
def __init__(self, uri, dto, auth):
super(IpAddrGroup, self).__init__(uri, dto, auth)
def name(self, name):
self.dto['name'] = name
return self
def get_name(self):
return self.dto['name']
def id(self, id):
self.dto['id'] = id
return self
def get_id(self):
return self.dto['id']
def get_addrs(self, query=None):
headers = {'Accept':
vendor_media_type.APPLICATION_IP_ADDR_GROUP_ADDR_COLLECTION_JSON}
return self.get_children(self.dto['addrs'], query, headers,
ip_addr_group_addr.IpAddrGroupAddr)
def add_ipv4_addr(self):
return ip_addr_group_addr.IpAddrGroupAddr(self.dto['addrs'],
{'version': 4}, self.auth)
def add_ipv6_addr(self):
return ip_addr_group_addr.IpAddrGroupAddr(self.dto['addrs'],
{'version': 6}, self.auth)
| true | true |
f7f90189faf46bd4ef7740faaa5089554922c925 | 32,105 | py | Python | parser/fase2/team01/Grupo1/Librerias/storageManager/c3dGen.py | Gabriel-15/tytus | fb00718bf3fcc5211a3604fba1a551f44bdc6deb | [
"MIT"
] | 35 | 2020-12-07T03:11:43.000Z | 2021-04-15T17:38:16.000Z | parser/fase2/team01/Grupo1/Librerias/storageManager/c3dGen.py | Gabriel-15/tytus | fb00718bf3fcc5211a3604fba1a551f44bdc6deb | [
"MIT"
] | 47 | 2020-12-09T01:29:09.000Z | 2021-01-13T05:37:50.000Z | parser/fase2/team01/Grupo1/Librerias/storageManager/c3dGen.py | Gabriel-15/tytus | fb00718bf3fcc5211a3604fba1a551f44bdc6deb | [
"MIT"
] | 556 | 2020-12-07T03:13:31.000Z | 2021-06-17T17:41:10.000Z | # Package: C3D Gen
# License: Released under MIT License
# Notice: Copyright (c) 2020 TytusDB Team
# Developer: Team 01
import os
import json
import temporalesp as temposg
tempos = temposg.temporalesp()
path = 'c3d/'
#dataPath = path + 'databases'
##################
# Databases CRUD #
##################
# CREATE a database checking their existence
def createDatabaseC3D(database: str) -> int:
try:
if not database.isidentifier():
raise Exception()
ilabel = tempos.incTemporal()
f = open("./c3d/codigo3Dgenerado.py", "a")
f.write(" t" + str(ilabel) + "='" + database + "'\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= createDB(" + "t" + str(ilabel-1) + "" + ")\n")
f.write(" \n")
f.close()
return 0
except:
return 1
# CREATE a database checking their existence
def useC3D(database: str) -> int:
try:
if not database.isidentifier():
raise Exception()
ilabel = tempos.incTemporal()
f = open("./c3d/codigo3Dgenerado.py", "a")
f.write(" t" + str(ilabel) + "='" + database + "'\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= useDatabase(" + "t" + str(ilabel-1) + "" + ")\n")
f.write(" \n")
f.close()
return 0
except:
return 1
###############
# Tables CRUD #
###############
# CREATE a table checking their existence
def createTableC3D(database: str, table: str, numberColumns: int, cadenaE : str) -> int:
try:
if not database.isidentifier() or not table.isidentifier() or not isinstance(numberColumns, int):
raise Exception()
cadenaCreate = cadenaE.split('TABLE')
cadenaTabla = cadenaCreate[1].split('(',1)
cadenaCampos = cadenaTabla[1].split(',')
ultimoCampo = cadenaCampos[len(cadenaCampos)-1]
ultimoCampo = ultimoCampo.replace(')','')
ultimoCampo = ultimoCampo.replace(';','')
cadenaCampos[len(cadenaCampos)-1] = ultimoCampo
listaTablas = []
cadenaWhere = None
ilabel = tempos.incTemporal()
f = open("./c3d/codigo3Dgenerado.py", "a")
f.write(" t" + str(ilabel) + "='" + database + "'\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "='" + table + "'\n")
contadorCol = 0
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "=[]\n")
for camposel in cadenaCampos:
f.write(" t" + str(ilabel) + ".append('" + camposel.strip() + "')\n")
contadorCol += 1
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= createTbl(" + "t" + str(ilabel-3) + ",t" + str(ilabel-2) + ",t" + str(ilabel-1) + ")\n")
f.write(" \n")
#f.write("main()\n")
f.close()
return 0
except:
return 1
def insertC3D(database: str, table: str, register: list, posIdentado: str) -> int:
try:
if not database.isidentifier() or not table.isidentifier() :
raise Exception()
ilabel = tempos.incTemporal()
f = open("./c3d/codigo3Dgenerado.py", "a")
f.write(posIdentado + "t" + str(ilabel) + "='" + database + "'\n")
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "='" + table + "'\n")
contadorCol = 0
ilabel = tempos.incTemporal()
#f.write(" t" + str(ilabel) + "='" + str(register) + "'\n")
ilabel = tempos.incTemporal()
for valInsert in register :
if contadorCol==0 :
f.write(posIdentado + "t" + str(ilabel) + "=[]" "\n")
if es_numero(valInsert):
f.write(posIdentado + "t" + str(ilabel) + ".append(" + str(valInsert) + ")\n")
else:
f.write(posIdentado + "t" + str(ilabel) + ".append('" + str(valInsert) + "')\n")
contadorCol +=1
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "=t" + str(ilabel-1) + "\n")
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "= existTableC3D(" + "t" + str(ilabel-5) + ",t" + str(ilabel-4) + ")\n")
f.write(posIdentado + "if t" + str(ilabel) + " is False :\n")
f.write(posIdentado + " goto .labelt" + str(ilabel+1) + " \n")
f.write(posIdentado + "else :\n")
f.write(posIdentado + " goto .labelt" + str(ilabel) + " \n")
f.write(posIdentado + "label .labelt" + str(ilabel) + "\n")
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "= insertC3D(" + "t" + str(ilabel-6) + ",t" + str(ilabel-5) + ",t" + str(ilabel-2) + ")\n")
f.write(posIdentado + "label .labelt" + str(ilabel) + "\n")
f#.write(" print(t" + str(ilabel-1) + ") \n")
f.write(" \n")
#f.write("main()\n")
f.close()
return 0
except:
return 1
def updateC3D(database: str, table: str, register: dict, columns: list) -> int:
try:
if not database.isidentifier() or not table.isidentifier() :
raise Exception()
ilabel = tempos.incTemporal()
f = open("./c3d/codigo3Dgenerado.py", "a")
f.write(" t" + str(ilabel) + "='" + database + "'\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "='" + table + "'\n")
contadorCol = 0
ilabel = tempos.incTemporal()
#f.write(" t" + str(ilabel) + "='" + str(register) + "'\n")
ilabel = tempos.incTemporal()
for valInsert in register :
if contadorCol==0 :
f.write(" t" + str(ilabel) + "={}" "\n")
if es_numero(register[valInsert]):
f.write(" t" + str(ilabel) + "[" + str(valInsert) + "] = " + str(register[valInsert]) + "\n")
else:
f.write(" t" + str(ilabel) + "[" + str(valInsert) + "] = '" + str(register[valInsert]) + "'\n")
contadorCol +=1
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "=t" + str(ilabel-1) + "\n")
ilabel = tempos.incTemporal()
contadorCol=0
for valInsert in columns :
if contadorCol==0 :
f.write(" t" + str(ilabel) + "=[]" "\n")
if es_numero(valInsert):
f.write(" t" + str(ilabel) + ".append(" + str(valInsert) + ")\n")
else:
f.write(" t" + str(ilabel) + ".append('" + str(valInsert) + "')\n")
contadorCol +=1
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "=t" + str(ilabel-1) + "\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= existTableC3D(" + "t" + str(ilabel-7) + ",t" + str(ilabel-6) + ")\n")
f.write(" if t" + str(ilabel) + " is False :\n")
f.write(" goto .labelt" + str(ilabel+1) + " \n")
f.write(" else :\n")
f.write(" goto .labelt" + str(ilabel) + " \n")
f.write(" label .labelt" + str(ilabel) + "\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= updateC3D(" + "t" + str(ilabel-8) + ",t" + str(ilabel-7) + ",t" + str(ilabel-4) + ",t" + str(ilabel-2) + ")\n")
f.write(" label .labelt" + str(ilabel) + "\n")
f#.write(" print(t" + str(ilabel-1) + ") \n")
f.write(" \n")
#f.write("main()\n")
f.close()
return 0
except:
return 1
def selectC3D(database: str, table: str, cadenaE: str, posIdentado: str ) -> int:
try:
cadenaSelect = cadenaE.split('SELECT')
listaTablas = []
cadenaWhere = None
cadenaCampos = cadenaSelect[1].split('FROM')
listacampos = cadenaCampos[0].split(',')
if len(cadenaCampos)>1:
cadenaTablas = cadenaCampos[1].split('WHERE')
listaTablas = cadenaTablas[0].split(',')
if len(cadenaTablas)>1:
cadenaWhere = cadenaTablas[1]
f = open("./c3d/codigo3Dgenerado.py", "a")
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "='" + database + "'\n")
contadorCol = 0
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "=[]\n")
for camposel in listacampos:
f.write(posIdentado + "t" + str(ilabel) + ".append('" + camposel.strip() + "')\n")
contadorCol += 1
contadorTablas = 0
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "=[]\n")
for tabsel in listaTablas:
f.write(posIdentado + "t" + str(ilabel) + ".append('" + tabsel.strip() + "')\n")
contadorTablas += 1
ilabel = tempos.incTemporal()
if cadenaWhere == None:
f.write(posIdentado + "t" + str(ilabel) + "=''\n")
else:
fexpar=cadenaWhere.find('STR')
cadenaPar = ''
if fexpar>=0:
cadenaWheres = cadenaWhere.split('STR')
cadenaPar = cadenaWheres[1].replace(')','')
cadenaPar = cadenaWheres[1].replace('(','')
cadenaWhere = cadenaWheres[0].strip()
f.write(posIdentado + "t" + str(ilabel) + "='" + cadenaWhere.strip() + "' + str(" + str(cadenaPar) + "\n")
else:
f.write(posIdentado + "t" + str(ilabel) + "='" + cadenaWhere.strip() + "'\n")
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "= existTableC3D(" + "t" + str(ilabel-4) + ",t" + str(ilabel-2) + "[0])\n")
f.write(posIdentado + "if t" + str(ilabel) + " is False :\n")
f.write(posIdentado + " goto .labelt" + str(ilabel+1) + " \n")
f.write(posIdentado + "else :\n")
f.write(posIdentado + " goto .labelt" + str(ilabel) + " \n")
f.write(posIdentado + "label .labelt" + str(ilabel) + "\n")
ilabel = tempos.incTemporal()
f.write(posIdentado + "t" + str(ilabel) + "= selectC3D(" + "t" + str(ilabel-5) + ",t" + str(ilabel-4) + ",t" + str(ilabel-3) + ",t" + str(ilabel-2) + ")\n")
f.write(posIdentado + "label .labelt" + str(ilabel) + "\n")
#f.write(posIdentado + "print(t" + str(ilabel-1) + ") \n")
f.write(" \n")
#f.write("main()\n")
f.close()
return 0
except:
return 1
# CREACION DE FUNCIONES
def createFunctionC3D(database: str, nombre : str, arg1: any, arg2 : any, arg3 : any, arg4 : any, cuerpo : any) -> int:
try:
if not database.isidentifier():
raise Exception()
listaparamf = []
listaparamVars=[]
contadorParam = 0;
ilabel = tempos.incTemporal()
for parametrof in arg2:
paramf = {}
paramf['nombre'] = parametrof.table.upper()
listaparamVars.append(parametrof.table.upper())
parTipoUp = parametrof.column['ast'].type.upper()
parTipo = parTipoUp.find("TEXT")
if parTipo >=0 :
paramf['tipo'] = 'str'
else :
parTipo = parTipoUp.find("VARCHAR")
if parTipo >=0:
paramf['tipo'] = 'str'
else:
parTipo = parTipoUp.find("INTEGER")
if parTipo >=0:
paramf['tipo'] = 'int'
else:
paramf['tipo'] = parTipoUp
paramf['temporal'] = "pt" + str(ilabel)
listaparamf.append(paramf)
cadenaE = arg3
f = open("./c3d/codigo3Dgenerado.py", "a")
ilabel = tempos.incTemporal()
f.write(" @with_goto \n" )
f.write(" def " + nombre + "(" )
contadorParam = 0
for prm in listaparamf:
if contadorParam==0:
f.write(str(prm['nombre']) + ":" + str(prm['tipo']))
else:
f.write("," + str(prm['nombre']) + ":" + str(prm['tipo']))
contadorParam+=1
f.write("):\n")
# cadenaBloque1 = cadenaE.split('BEGIN')
# cadenaBloque2 = cadenaBloque1[1].rsplit('END')
# cadenabloque= cadenaBloque2[0].upper()
# cadenabloque= cadenabloque.replace('\n','')
# cadenabloque="["+ str(cadenabloque) +"]"
# operaciones = json.loads(cadenabloque)
# for operacion in operaciones:
# if(operacion['BLOQUE']=='RETURN'):
# varReturn = operacion['CADENA'].replace('RETURN','')
# varReturn = varReturn.replace(';','')
# varReturn = varReturn.strip()
# ilabel = tempos.incTemporal()
# f.write(" t" + str(ilabel) + "=" + str(varReturn) + "\n")
# f.write(" return t" + str(ilabel) + "\n")
for operacion in cuerpo.instrucciones:
if hasattr(operacion,'paramReturn'):
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "=" + str(operacion.paramArg.column.upper()) + "\n")
f.write(" return t" + str(ilabel) + "\n")
else:
if hasattr(operacion[0].asignaciones,'operador'):
x = 'peracion[0].variable.column'
cadenaS = operacion[0].asignaciones.operador.arg1.upper()
cadenaS = cadenaS.strip()
cadenaConParams = cadenaS
for cadPar in listaparamVars:
cadenaConParamslist = cadenaConParams.split(cadPar)
#for rcad in cadenaConParamslist
ipar = 0
while ipar<(len(cadenaConParamslist)-1):
cadenaConParams = cadenaConParamslist[ipar] + " str(" + cadPar + ") " + cadenaConParamslist[ipar+1]
ipar+=1
cadenaConParamslist[ipar] = cadenaConParams
f.write("\n")
f.close()
valRetSel = selectC3D(database, 'tabla', cadenaConParams.upper(), ' ')
f = open("./c3d/codigo3Dgenerado.py", "a")
ilabel = tempos.incTemporal()
f.write(" " + str(operacion[0].variable.column.upper()) + "=t" + str(ilabel-1) + "\n")
f.write("\n")
else:
if (hasattr(operacion[0].asignaciones.leftOperator,'column') or hasattr(operacion[0].asignaciones.leftOperator,'val')) and (hasattr(operacion[0].asignaciones.rightOperator,'column') or hasattr(operacion[0].asignaciones.rightOperator,'val')) :
ilabel = tempos.incTemporal()
#if es_numero(operacion[0].asignaciones.leftOperator.column):
if 1==1:
if hasattr(operacion[0].asignaciones.leftOperator,'column'):
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.column.upper()) + "\n")
else:
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.val) + "\n")
else:
if hasattr(operacion[0].asignaciones.leftOperator,'column'):
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.column.upper()) + "\n")
else:
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.val) + "\n")
ilabel = tempos.incTemporal()
#if es_numero(operacion[0].asignaciones.rightOperator.column):
if 1==1:
if hasattr(operacion[0].asignaciones.rightOperator,'column'):
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.column.upper()) + "\n")
else:
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.val) + "\n")
else:
if hasattr(operacion[0].asignaciones.rightOperator,'column'):
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.column.upper()) + "\n")
else:
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.val) + "\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "=t" + str(ilabel-2) + " " + operacion[0].asignaciones.sign + " t" + str(ilabel-1) + "\n")
f.write(" " + str(operacion[0].variable.column.upper()) + "=t" + str(ilabel) + "\n")
else:
if hasattr(operacion[0].asignaciones.leftOperator,'column'):
ilabel = tempos.incTemporal()
if es_numero(operacion[0].asignaciones.leftOperator.column):
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.column.upper()) + "\n")
else:
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.column.upper()) + "\n")
else:
if hasattr(operacion[0].asignaciones.leftOperator.leftOperator,'column'):
ilabel = tempos.incTemporal()
if es_numero(operacion[0].asignaciones.leftOperator.leftOperator.column):
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.leftOperator.column.upper()) + "\n")
else:
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.leftOperator.column.upper()) + "\n")
ilabel = tempos.incTemporal()
if es_numero(operacion[0].asignaciones.leftOperator.rightOperator.column):
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.rightOperator.column.upper()) + "\n")
else:
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.rightOperator.column.upper()) + "\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "=t" + str(ilabel-2) + " " + operacion[0].asignaciones.leftOperator.sign + " t" + str(ilabel-1) + "\n")
#f.write(" " + str(operacion[0].leftOperator.variable.column.upper()) + "=t" + str(ilabel) + "\n")
if hasattr(operacion[0].asignaciones.rightOperator,'tipofuncionTrigonometrica'):
ilabel = tempos.incTemporal()
#f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.tipofuncionTrigonometrica.upper()) + "(" + str(operacion[0].asignaciones.rightOperator.arg1.val) + ")\n")
f.write(" t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.arg1.val) + "\n")
# operador operacion
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "=t" + str(ilabel-2) + " " + operacion[0].asignaciones.sign + " t" + str(ilabel-1) + "\n")
f.write(" " + str(operacion[0].variable.column.upper()) + "=t" + str(ilabel) + "\n")
f.write("\n")
f.close()
return 0
except:
return 1
# CREACION DE PROCEDIMIENTOS
def createProcedureC3D(database: str, nombre : str, arg2 : any, cuerpo : any) -> int:
try:
if not database.isidentifier():
raise Exception()
listaparamf = []
if arg2 == None:
arg2 = []
contadorParam = 0;
ilabel = tempos.incTemporal()
for parametrof in arg2:
paramf = {}
paramf['nombre'] = parametrof.table.upper()
parTipoUp = parametrof.column['ast'].type.upper()
parTipo = parTipoUp.find("TEXT")
if parTipo >=0 :
paramf['tipo'] = 'str'
else :
parTipo = parTipoUp.find("VARCHAR")
if parTipo >=0:
paramf['tipo'] = 'str'
else:
parTipo = parTipoUp.find("INTEGER")
if parTipo >=0:
paramf['tipo'] = 'int'
else:
paramf['tipo'] = parTipoUp
paramf['temporal'] = "pt" + str(ilabel)
listaparamf.append(paramf)
f = open("./c3d/codigo3Dgenerado.py", "a")
ilabel = tempos.incTemporal()
f.write(" @with_goto \n" )
f.write(" def " + nombre + "(" )
contadorParam = 0
for prm in listaparamf:
if contadorParam==0:
f.write(str(prm['nombre']) + ":" + str(prm['tipo']))
else:
f.write("," + str(prm['nombre']) + ":" + str(prm['tipo']))
contadorParam+=1
f.write("):\n")
# cadenaBloque1 = cadenaE.split('BEGIN')
# cadenaBloque2 = cadenaBloque1[1].rsplit('END')
# cadenabloque= cadenaBloque2[0].upper()
# cadenabloque= cadenabloque.replace('\n','')
# cadenabloque="["+ str(cadenabloque) +"]"
# operaciones = json.loads(cadenabloque)
# for operacion in operaciones:
# if(operacion['BLOQUE']=='RETURN'):
# varReturn = operacion['CADENA'].replace('RETURN','')
# varReturn = varReturn.replace(';','')
# varReturn = varReturn.strip()
# ilabel = tempos.incTemporal()
# f.write(" t" + str(ilabel) + "=" + str(varReturn) + "\n")
# f.write(" return t" + str(ilabel) + "\n")
for operacion in cuerpo.instrucciones:
if operacion.arg0.upper() == 'RETURN':
ilabel = tempos.incTemporal()
returnVal = operacion.arg1.upper().split('RETURN')
returnVal = returnVal[1].strip()
returnVal = returnVal.replace(';','')
f.write(" t" + str(ilabel) + "=" + str(returnVal) + "\n")
f.write(" return t" + str(ilabel) + "\n")
else:
if operacion.arg0.upper() == 'INSERT':
x = 'peracion[0].variable.column'
listaCamposIns = []
for lscamp in operacion.values:
if hasattr(lscamp,'tipofuncionfehca'):
listaCamposIns.append(lscamp.tipofuncionfehca.upper()+"()")
else:
listaCamposIns.append(lscamp.val)
# cadenaS = operacion.arg1.upper()
# cadenaS = 'INSERT ' + cadenaS
# cadenaS = cadenaS.strip()
f.write("\n")
f.close()
insertC3D(database, operacion.tableid.upper(), listaCamposIns, ' ')
f = open("./c3d/codigo3Dgenerado.py", "a")
ilabel = tempos.incTemporal()
#f.write(" " + str(operacion[0].variable.column.upper()) + "=t" + str(ilabel-1) + "\n")
f.write("\n")
f.write("\n")
f.close()
return 0
except:
return 1
# SELECT function
def select_functionC3D(nombre: str, parametros: any) -> int:
try:
f = open("./c3d/codigo3Dgenerado.py", "a")
ilabel = tempos.incTemporal()
listadeParametros = []
contadorParams = 0
if parametros==None:
parametros = []
for parmf in parametros:
ilabel = tempos.incTemporal()
contadorParams += 1
if str(parmf.type) =='string':
f.write(" t" + str(ilabel) + "='" + str(parmf.val) + "'\n")
else:
f.write(" t" + str(ilabel) + "=" + str(parmf.val) + "\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= " + nombre + "(")
i=0
while i < contadorParams :
if i==0:
f.write("t" + str(ilabel-contadorParams+i) )
else:
f.write(",t" + str(ilabel-contadorParams+i) )
i += 1
f.write(")\n")
f.write(" \n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= print(t" + str(ilabel-1) + ")\n")
f.write(" \n")
f.close()
return 0
except:
return 1
# SELECT procedure
def select_procedureC3D(nombre: str, parametros: any) -> int:
try:
f = open("./c3d/codigo3Dgenerado.py", "a")
ilabel = tempos.incTemporal()
listadeParametros = []
contadorParams = 0
if parametros==None:
parametros = []
for parmf in parametros:
ilabel = tempos.incTemporal()
contadorParams += 1
if str(parmf.type) =='string':
f.write(" t" + str(ilabel) + "='" + str(parmf.val) + "'\n")
else:
f.write(" t" + str(ilabel) + "=" + str(parmf.val) + "\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= " + nombre + "(")
i=0
while i < contadorParams :
if i==0:
f.write("t" + str(ilabel-contadorParams+i) )
else:
f.write(",t" + str(ilabel-contadorParams+i) )
i += 1
f.write(")\n")
f.write(" \n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= print(t" + str(ilabel-1) + ")\n")
f.write(" \n")
f.close()
return 0
except:
return 1
# def selectC3D1( instruccionsel : any) -> int:
# try:
# selTable = []
# fromData = instruccionsel.fromopcional
# where = fromData.whereopcional
# directorioTablas = {}
# tablasFromTemporales = []
# columnasFromTemporales = {}
# for tablasSeleccionadas in fromData.parametros:
# tablasFromTemporales = []
# tablasFromTemporales.append(tablasSeleccionadas.parametros.operador.upper())
# tablasFromTemporales.append(tablasSeleccionadas.asop)
# selTable.append(tablasFromTemporales)
# listaCampos = []
# for parametros in instruccionsel.parametros.listadeseleccion:
# listaCamposTemp = []
# listaCamposTemp.append(parametros.listaseleccionados.column)
# listaCamposTemp.append(parametros.listaseleccionados.table)
# listaCampos.append(listaCamposTemp)
# if not database.isidentifier() \
# or not table.isidentifier() :
# raise Exception()
# ilabel = tempos.incTemporal()
# f = open("./c3d/codigo3Dgenerado.py", "a")
# f.write(" t" + str(ilabel) + "='" + database + "'\n")
# ilabel = tempos.incTemporal()
# f.write(" t" + str(ilabel) + "='" + table + "'\n")
# contadorCol = 0
# ilabel = tempos.incTemporal()
# #f.write(" t" + str(ilabel) + "='" + str(register) + "'\n")
# ilabel = tempos.incTemporal()
# for valInsert in register :
# if contadorCol==0 :
# f.write(" t" + str(ilabel) + "=[]" "\n")
# if es_numero(valInsert):
# f.write(" t" + str(ilabel) + ".append(" + str(valInsert) + ")\n")
# else:
# f.write(" t" + str(ilabel) + ".append('" + str(valInsert) + "')\n")
# contadorCol +=1
# ilabel = tempos.incTemporal()
# f.write(" t" + str(ilabel) + "=t" + str(ilabel-1) + "\n")
# ilabel = tempos.incTemporal()
# f.write(" t" + str(ilabel) + "= existTableC3D(" + "t" + str(ilabel-5) + ",t" + str(ilabel-4) + ")\n")
# f.write(" if t" + str(ilabel) + " is False :\n")
# f.write(" goto .labelt" + str(ilabel+1) + " \n")
# f.write(" else :\n")
# f.write(" goto .labelt" + str(ilabel) + " \n")
# f.write(" label .labelt" + str(ilabel) + "\n")
# ilabel = tempos.incTemporal()
# f.write(" t" + str(ilabel) + "= insertC3D(" + "t" + str(ilabel-6) + ",t" + str(ilabel-5) + ",t" + str(ilabel-2) + ")\n")
# f.write(" label .labelt" + str(ilabel) + "\n")
# f#.write(" print(t" + str(ilabel-1) + ") \n")
# f.write(" \n")
# #f.write("main()\n")
# f.close()
# return 0
# except:
# return 1
def es_numero(variable : any):
try:
float(variable)
return True
except :
return False
| 40.383648 | 263 | 0.446099 |
import os
import json
import temporalesp as temposg
tempos = temposg.temporalesp()
path = 'c3d/'
os.incTemporal()
f.write(" t" + str(ilabel) + "= createDB(" + "t" + str(ilabel-1) + "" + ")\n")
f.write(" \n")
f.close()
return 0
except:
return 1
def useC3D(database: str) -> int:
try:
if not database.isidentifier():
raise Exception()
ilabel = tempos.incTemporal()
f = open("./c3d/codigo3Dgenerado.py", "a")
f.write(" t" + str(ilabel) + "='" + database + "'\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= useDatabase(" + "t" + str(ilabel-1) + "" + ")\n")
f.write(" \n")
f.close()
return 0
except:
return 1
on()
cadenaCreate = cadenaE.split('TABLE')
cadenaTabla = cadenaCreate[1].split('(',1)
cadenaCampos = cadenaTabla[1].split(',')
ultimoCampo = cadenaCampos[len(cadenaCampos)-1]
ultimoCampo = ultimoCampo.replace(')','')
ultimoCampo = ultimoCampo.replace(';','')
cadenaCampos[len(cadenaCampos)-1] = ultimoCampo
listaTablas = []
cadenaWhere = None
ilabel = tempos.incTemporal()
f = open("./c3d/codigo3Dgenerado.py", "a")
f.write(" t" + str(ilabel) + "='" + database + "'\n")
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "='" + table + "'\n")
contadorCol = 0
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "=[]\n")
for camposel in cadenaCampos:
f.write(" t" + str(ilabel) + ".append('" + camposel.strip() + "')\n")
contadorCol += 1
ilabel = tempos.incTemporal()
f.write(" t" + str(ilabel) + "= createTbl(" + "t" + str(ilabel-3) + ",t" + str(ilabel-2) + ",t" + str(ilabel-1) + ")\n")
f.write(" \n")
f.close()
return 0
except:
return 1
def insertC3D(database: str, table: str, register: list, posIdentado: str) -> int:
    """Append 3-address code for an INSERT statement to the generated file.

    Emits temporals for the database name, the table name and a list built
    from *register* (numeric values bare, others quoted), then an
    ``existTableC3D`` check with goto/label control flow and the final
    ``insertC3D`` runtime call.  *posIdentado* is the indentation prefix for
    every emitted line.  Returns 0 on success, 1 on failure.
    """
    try:
        # Guard against names that would break the generated source.
        if not database.isidentifier() or not table.isidentifier() :
            raise Exception()
        ilabel = tempos.incTemporal()
        f = open("./c3d/codigo3Dgenerado.py", "a")
        f.write(posIdentado + "t" + str(ilabel) + "='" + database + "'\n")
        ilabel = tempos.incTemporal()
        f.write(posIdentado + "t" + str(ilabel) + "='" + table + "'\n")
        contadorCol = 0
        # Two consecutive increments: the first label is skipped so that the
        # later ilabel-offsets line up with the emitted temporals.
        ilabel = tempos.incTemporal()
        ilabel = tempos.incTemporal()
        for valInsert in register :
            if contadorCol==0 :
                # First value: initialize the generated list temporal.
                f.write(posIdentado + "t" + str(ilabel) + "=[]" "\n")
            if es_numero(valInsert):
                f.write(posIdentado + "t" + str(ilabel) + ".append(" + str(valInsert) + ")\n")
            else:
                f.write(posIdentado + "t" + str(ilabel) + ".append('" + str(valInsert) + "')\n")
            contadorCol +=1
        ilabel = tempos.incTemporal()
        f.write(posIdentado + "t" + str(ilabel) + "=t" + str(ilabel-1) + "\n")
        ilabel = tempos.incTemporal()
        # Emit the table-existence check; offsets -5/-4 reach back to the
        # database and table temporals emitted above.
        f.write(posIdentado + "t" + str(ilabel) + "= existTableC3D(" + "t" + str(ilabel-5) + ",t" + str(ilabel-4) + ")\n")
        f.write(posIdentado + "if t" + str(ilabel) + " is False :\n")
        f.write(posIdentado + "    goto .labelt" + str(ilabel+1) + " \n")
        f.write(posIdentado + "else :\n")
        f.write(posIdentado + "    goto .labelt" + str(ilabel) + " \n")
        f.write(posIdentado + "label .labelt" + str(ilabel) + "\n")
        ilabel = tempos.incTemporal()
        f.write(posIdentado + "t" + str(ilabel) + "= insertC3D(" + "t" + str(ilabel-6) + ",t" + str(ilabel-5) + ",t" + str(ilabel-2) + ")\n")
        f.write(posIdentado + "label .labelt" + str(ilabel) + "\n")
        # NOTE(review): bare 'f' below is a no-op expression statement.
        f
        f.write("    \n")
        f.close()
        return 0
    except:
        return 1
def updateC3D(database: str, table: str, register: dict, columns: list) -> int:
    """Append 3-address code for an UPDATE statement to the generated file.

    Emits temporals for the database and table names, a dict temporal built
    from *register* (SET column -> value) and a list temporal built from
    *columns* (WHERE columns), then the ``existTableC3D`` guard with
    goto/label flow and the final ``updateC3D`` runtime call.
    Returns 0 on success, 1 on failure.
    """
    try:
        if not database.isidentifier() or not table.isidentifier() :
            raise Exception()
        ilabel = tempos.incTemporal()
        f = open("./c3d/codigo3Dgenerado.py", "a")
        f.write("    t" + str(ilabel) + "='" + database + "'\n")
        ilabel = tempos.incTemporal()
        f.write("    t" + str(ilabel) + "='" + table + "'\n")
        contadorCol = 0
        # Double increment keeps later relative offsets (-7/-6/-4/-2) aligned.
        ilabel = tempos.incTemporal()
        ilabel = tempos.incTemporal()
        for valInsert in register :
            if contadorCol==0 :
                # First entry: initialize the generated dict temporal.
                f.write("    t" + str(ilabel) + "={}" "\n")
            if es_numero(register[valInsert]):
                f.write("    t" + str(ilabel) + "[" + str(valInsert) + "] = " + str(register[valInsert]) + "\n")
            else:
                f.write("    t" + str(ilabel) + "[" + str(valInsert) + "] = '" + str(register[valInsert]) + "'\n")
            contadorCol +=1
        ilabel = tempos.incTemporal()
        f.write("    t" + str(ilabel) + "=t" + str(ilabel-1) + "\n")
        ilabel = tempos.incTemporal()
        contadorCol=0
        for valInsert in columns :
            if contadorCol==0 :
                f.write("    t" + str(ilabel) + "=[]" "\n")
            if es_numero(valInsert):
                f.write("    t" + str(ilabel) + ".append(" + str(valInsert) + ")\n")
            else:
                f.write("    t" + str(ilabel) + ".append('" + str(valInsert) + "')\n")
            contadorCol +=1
        ilabel = tempos.incTemporal()
        f.write("    t" + str(ilabel) + "=t" + str(ilabel-1) + "\n")
        ilabel = tempos.incTemporal()
        f.write("    t" + str(ilabel) + "= existTableC3D(" + "t" + str(ilabel-7) + ",t" + str(ilabel-6) + ")\n")
        f.write("    if t" + str(ilabel) + " is False :\n")
        f.write("        goto .labelt" + str(ilabel+1) + " \n")
        f.write("    else :\n")
        f.write("        goto .labelt" + str(ilabel) + " \n")
        f.write("    label .labelt" + str(ilabel) + "\n")
        ilabel = tempos.incTemporal()
        f.write("    t" + str(ilabel) + "= updateC3D(" + "t" + str(ilabel-8) + ",t" + str(ilabel-7) + ",t" + str(ilabel-4) + ",t" + str(ilabel-2) + ")\n")
        f.write("    label .labelt" + str(ilabel) + "\n")
        # NOTE(review): bare 'f' below is a no-op expression statement.
        f
        f.write("    \n")
        f.close()
        return 0
    except:
        return 1
def selectC3D(database: str, table: str, cadenaE: str, posIdentado: str ) -> int:
    """Append 3-address code for a SELECT statement to the generated file.

    Parses *cadenaE* (an upper-cased ``SELECT ... FROM ... [WHERE ...]``
    string) by splitting on the keywords, then emits temporals for the
    database name, the column list, the table list and the WHERE string,
    followed by the ``existTableC3D`` guard and the ``selectC3D`` runtime
    call.  *posIdentado* prefixes every emitted line.  Returns 0/1.
    """
    try:
        # Crude keyword-split parsing: [0] before SELECT, [1] after.
        cadenaSelect = cadenaE.split('SELECT')
        listaTablas = []
        cadenaWhere = None
        cadenaCampos = cadenaSelect[1].split('FROM')
        listacampos = cadenaCampos[0].split(',')
        if len(cadenaCampos)>1:
            cadenaTablas = cadenaCampos[1].split('WHERE')
            listaTablas = cadenaTablas[0].split(',')
            if len(cadenaTablas)>1:
                cadenaWhere = cadenaTablas[1]
        f = open("./c3d/codigo3Dgenerado.py", "a")
        ilabel = tempos.incTemporal()
        f.write(posIdentado + "t" + str(ilabel) + "='" + database + "'\n")
        contadorCol = 0
        ilabel = tempos.incTemporal()
        f.write(posIdentado + "t" + str(ilabel) + "=[]\n")
        for camposel in listacampos:
            f.write(posIdentado + "t" + str(ilabel) + ".append('" + camposel.strip() + "')\n")
            contadorCol += 1
        contadorTablas = 0
        ilabel = tempos.incTemporal()
        f.write(posIdentado + "t" + str(ilabel) + "=[]\n")
        for tabsel in listaTablas:
            f.write(posIdentado + "t" + str(ilabel) + ".append('" + tabsel.strip() + "')\n")
            contadorTablas += 1
        ilabel = tempos.incTemporal()
        if cadenaWhere == None:
            f.write(posIdentado + "t" + str(ilabel) + "=''\n")
        else:
            # Special case: a WHERE containing 'STR' marks an embedded
            # str(<param>) interpolation in the generated condition.
            fexpar=cadenaWhere.find('STR')
            cadenaPar = ''
            if fexpar>=0:
                cadenaWheres = cadenaWhere.split('STR')
                # NOTE(review): the first assignment's result is discarded —
                # the ')' stripped here is immediately lost by the second
                # assignment, so cadenaPar keeps its ')'; looks like a bug.
                cadenaPar = cadenaWheres[1].replace(')','')
                cadenaPar = cadenaWheres[1].replace('(','')
                cadenaWhere = cadenaWheres[0].strip()
                f.write(posIdentado + "t" + str(ilabel) + "='" + cadenaWhere.strip() + "' + str(" + str(cadenaPar) + "\n")
            else:
                f.write(posIdentado + "t" + str(ilabel) + "='" + cadenaWhere.strip() + "'\n")
        ilabel = tempos.incTemporal()
        # Existence check against the first table in the list.
        f.write(posIdentado + "t" + str(ilabel) + "= existTableC3D(" + "t" + str(ilabel-4) + ",t" + str(ilabel-2) + "[0])\n")
        f.write(posIdentado + "if t" + str(ilabel) + " is False :\n")
        f.write(posIdentado + "    goto .labelt" + str(ilabel+1) + " \n")
        f.write(posIdentado + "else :\n")
        f.write(posIdentado + "    goto .labelt" + str(ilabel) + " \n")
        f.write(posIdentado + "label .labelt" + str(ilabel) + "\n")
        ilabel = tempos.incTemporal()
        f.write(posIdentado + "t" + str(ilabel) + "= selectC3D(" + "t" + str(ilabel-5) + ",t" + str(ilabel-4) + ",t" + str(ilabel-3) + ",t" + str(ilabel-2) + ")\n")
        f.write(posIdentado + "label .labelt" + str(ilabel) + "\n")
        f.write("    \n")
        f.close()
        return 0
    except:
        return 1
def createFunctionC3D(database: str, nombre : str, arg1: any, arg2 : any, arg3 : any, arg4 : any, cuerpo : any) -> int:
    """Append 3-address code that defines a SQL function as a Python function.

    *arg2* is the AST parameter list (each with ``.table`` name and
    ``.column['ast'].type``); SQL types are mapped to Python annotations
    (TEXT/VARCHAR -> str, INTEGER -> int).  *cuerpo.instrucciones* is the
    function body AST: RETURN nodes, SELECT-based assignments (delegated to
    selectC3D) and arithmetic assignments are translated into temporals.
    Emits an ``@with_goto``-decorated ``def`` into the generated file.
    Returns 0 on success, 1 on failure.  arg1/arg4 are unused here.
    """
    try:
        if not database.isidentifier():
            raise Exception()
        listaparamf = []
        listaparamVars=[]
        contadorParam = 0;
        ilabel = tempos.incTemporal()
        # Build the generated signature: map each SQL parameter type to a
        # Python annotation.
        for parametrof in arg2:
            paramf = {}
            paramf['nombre'] = parametrof.table.upper()
            listaparamVars.append(parametrof.table.upper())
            parTipoUp = parametrof.column['ast'].type.upper()
            parTipo = parTipoUp.find("TEXT")
            if parTipo >=0 :
                paramf['tipo'] = 'str'
            else :
                parTipo = parTipoUp.find("VARCHAR")
                if parTipo >=0:
                    paramf['tipo'] = 'str'
                else:
                    parTipo = parTipoUp.find("INTEGER")
                    if parTipo >=0:
                        paramf['tipo'] = 'int'
                    else:
                        # Unknown SQL type: pass it through verbatim.
                        paramf['tipo'] = parTipoUp
            paramf['temporal'] = "pt" + str(ilabel)
            listaparamf.append(paramf)
        cadenaE = arg3
        f = open("./c3d/codigo3Dgenerado.py", "a")
        ilabel = tempos.incTemporal()
        f.write("    @with_goto \n" )
        f.write("    def " + nombre + "(" )
        contadorParam = 0
        for prm in listaparamf:
            if contadorParam==0:
                f.write(str(prm['nombre']) + ":" + str(prm['tipo']))
            else:
                f.write("," + str(prm['nombre']) + ":" + str(prm['tipo']))
            contadorParam+=1
        f.write("):\n")
        # Translate each body instruction into generated statements.
        for operacion in cuerpo.instrucciones:
            if hasattr(operacion,'paramReturn'):
                # RETURN <column>
                ilabel = tempos.incTemporal()
                f.write("        t" + str(ilabel) + "=" + str(operacion.paramArg.column.upper()) + "\n")
                f.write("        return t" + str(ilabel) + "\n")
            else:
                if hasattr(operacion[0].asignaciones,'operador'):
                    # Assignment from a SELECT: substitute str(<param>) for
                    # every parameter name occurring in the query text, then
                    # delegate code emission to selectC3D.
                    x = 'peracion[0].variable.column'
                    cadenaS = operacion[0].asignaciones.operador.arg1.upper()
                    cadenaS = cadenaS.strip()
                    cadenaConParams = cadenaS
                    for cadPar in listaparamVars:
                        cadenaConParamslist = cadenaConParams.split(cadPar)
                        ipar = 0
                        while ipar<(len(cadenaConParamslist)-1):
                            cadenaConParams = cadenaConParamslist[ipar] + " str(" + cadPar + ") " + cadenaConParamslist[ipar+1]
                            ipar+=1
                            cadenaConParamslist[ipar] = cadenaConParams
                    f.write("\n")
                    # Close before selectC3D appends to the same file, then
                    # reopen to continue emitting.
                    f.close()
                    valRetSel = selectC3D(database, 'tabla', cadenaConParams.upper(), '        ')
                    f = open("./c3d/codigo3Dgenerado.py", "a")
                    ilabel = tempos.incTemporal()
                    f.write("        " + str(operacion[0].variable.column.upper()) + "=t" + str(ilabel-1) + "\n")
                    f.write("\n")
                else:
                    # Arithmetic assignment: var = left <sign> right.
                    if (hasattr(operacion[0].asignaciones.leftOperator,'column') or hasattr(operacion[0].asignaciones.leftOperator,'val')) and (hasattr(operacion[0].asignaciones.rightOperator,'column') or hasattr(operacion[0].asignaciones.rightOperator,'val')) :
                        ilabel = tempos.incTemporal()
                        # NOTE(review): the 'if 1==1' blocks below make the
                        # else branches dead code (both branches identical).
                        if 1==1:
                            if hasattr(operacion[0].asignaciones.leftOperator,'column'):
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.column.upper()) + "\n")
                            else:
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.val) + "\n")
                        else:
                            if hasattr(operacion[0].asignaciones.leftOperator,'column'):
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.column.upper()) + "\n")
                            else:
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.val) + "\n")
                        ilabel = tempos.incTemporal()
                        if 1==1:
                            if hasattr(operacion[0].asignaciones.rightOperator,'column'):
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.column.upper()) + "\n")
                            else:
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.val) + "\n")
                        else:
                            if hasattr(operacion[0].asignaciones.rightOperator,'column'):
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.column.upper()) + "\n")
                            else:
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.val) + "\n")
                        ilabel = tempos.incTemporal()
                        f.write("        t" + str(ilabel) + "=t" + str(ilabel-2) + " " + operacion[0].asignaciones.sign + " t" + str(ilabel-1) + "\n")
                        f.write("        " + str(operacion[0].variable.column.upper()) + "=t" + str(ilabel) + "\n")
                    else:
                        # Nested expression on the left operand.
                        if hasattr(operacion[0].asignaciones.leftOperator,'column'):
                            ilabel = tempos.incTemporal()
                            # NOTE(review): both branches of this es_numero
                            # check emit the same line.
                            if es_numero(operacion[0].asignaciones.leftOperator.column):
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.column.upper()) + "\n")
                            else:
                                f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.column.upper()) + "\n")
                        else:
                            if hasattr(operacion[0].asignaciones.leftOperator.leftOperator,'column'):
                                ilabel = tempos.incTemporal()
                                if es_numero(operacion[0].asignaciones.leftOperator.leftOperator.column):
                                    f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.leftOperator.column.upper()) + "\n")
                                else:
                                    f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.leftOperator.column.upper()) + "\n")
                                ilabel = tempos.incTemporal()
                                if es_numero(operacion[0].asignaciones.leftOperator.rightOperator.column):
                                    f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.rightOperator.column.upper()) + "\n")
                                else:
                                    f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.leftOperator.rightOperator.column.upper()) + "\n")
                                ilabel = tempos.incTemporal()
                                f.write("        t" + str(ilabel) + "=t" + str(ilabel-2) + " " + operacion[0].asignaciones.leftOperator.sign + " t" + str(ilabel-1) + "\n")
                        if hasattr(operacion[0].asignaciones.rightOperator,'tipofuncionTrigonometrica'):
                            ilabel = tempos.incTemporal()
                            f.write("        t" + str(ilabel) + "=" + str(operacion[0].asignaciones.rightOperator.arg1.val) + "\n")
                        ilabel = tempos.incTemporal()
                        f.write("        t" + str(ilabel) + "=t" + str(ilabel-2) + " " + operacion[0].asignaciones.sign + " t" + str(ilabel-1) + "\n")
                        f.write("        " + str(operacion[0].variable.column.upper()) + "=t" + str(ilabel) + "\n")
        f.write("\n")
        f.close()
        return 0
    except:
        return 1
def createProcedureC3D(database: str, nombre : str, arg2 : any, cuerpo : any) -> int:
    """Append 3-address code that defines a SQL procedure as a Python function.

    *arg2* is the AST parameter list (may be None); SQL types are mapped to
    Python annotations exactly as in createFunctionC3D.  The body supports
    RETURN instructions and INSERT instructions (delegated to insertC3D).
    Emits an ``@with_goto``-decorated ``def`` into the generated file.
    Returns 0 on success, 1 on failure.
    """
    try:
        if not database.isidentifier():
            raise Exception()
        listaparamf = []
        if arg2 == None:
            arg2 = []
        contadorParam = 0;
        ilabel = tempos.incTemporal()
        # Map each SQL parameter type to a Python annotation.
        for parametrof in arg2:
            paramf = {}
            paramf['nombre'] = parametrof.table.upper()
            parTipoUp = parametrof.column['ast'].type.upper()
            parTipo = parTipoUp.find("TEXT")
            if parTipo >=0 :
                paramf['tipo'] = 'str'
            else :
                parTipo = parTipoUp.find("VARCHAR")
                if parTipo >=0:
                    paramf['tipo'] = 'str'
                else:
                    parTipo = parTipoUp.find("INTEGER")
                    if parTipo >=0:
                        paramf['tipo'] = 'int'
                    else:
                        paramf['tipo'] = parTipoUp
            paramf['temporal'] = "pt" + str(ilabel)
            listaparamf.append(paramf)
        f = open("./c3d/codigo3Dgenerado.py", "a")
        ilabel = tempos.incTemporal()
        f.write("    @with_goto \n" )
        f.write("    def " + nombre + "(" )
        contadorParam = 0
        for prm in listaparamf:
            if contadorParam==0:
                f.write(str(prm['nombre']) + ":" + str(prm['tipo']))
            else:
                f.write("," + str(prm['nombre']) + ":" + str(prm['tipo']))
            contadorParam+=1
        f.write("):\n")
        for operacion in cuerpo.instrucciones:
            if operacion.arg0.upper() == 'RETURN':
                # RETURN <expr>; strip the keyword and trailing semicolon.
                ilabel = tempos.incTemporal()
                returnVal = operacion.arg1.upper().split('RETURN')
                returnVal = returnVal[1].strip()
                returnVal = returnVal.replace(';','')
                f.write("        t" + str(ilabel) + "=" + str(returnVal) + "\n")
                f.write("        return t" + str(ilabel) + "\n")
            else:
                if operacion.arg0.upper() == 'INSERT':
                    x = 'peracion[0].variable.column'
                    listaCamposIns = []
                    # Collect literal values; date-function nodes become
                    # generated calls like NOW().
                    for lscamp in operacion.values:
                        if hasattr(lscamp,'tipofuncionfehca'):
                            listaCamposIns.append(lscamp.tipofuncionfehca.upper()+"()")
                        else:
                            listaCamposIns.append(lscamp.val)
                    f.write("\n")
                    # Close before insertC3D appends to the same file.
                    f.close()
                    insertC3D(database, operacion.tableid.upper(), listaCamposIns, '        ')
                    f = open("./c3d/codigo3Dgenerado.py", "a")
                    ilabel = tempos.incTemporal()
                    f.write("\n")
        f.write("\n")
        f.close()
        return 0
    except:
        return 1
def select_functionC3D(nombre: str, parametros: any) -> int:
    """Append 3-address code that calls generated function *nombre*.

    Emits one temporal per argument (string-typed values quoted), then the
    call itself and a print of its result.  Returns 0 on success, 1 on
    failure.  NOTE(review): identical to select_procedureC3D — the two are
    candidates for a shared helper.
    """
    try:
        f = open("./c3d/codigo3Dgenerado.py", "a")
        ilabel = tempos.incTemporal()
        listadeParametros = []
        contadorParams = 0
        if parametros==None:
            parametros = []
        for parmf in parametros:
            ilabel = tempos.incTemporal()
            contadorParams += 1
            if str(parmf.type) =='string':
                f.write("    t" + str(ilabel) + "='" + str(parmf.val) + "'\n")
            else:
                f.write("    t" + str(ilabel) + "=" + str(parmf.val) + "\n")
        ilabel = tempos.incTemporal()
        # Emit the call, referencing the argument temporals by offset.
        f.write("    t" + str(ilabel) + "= " + nombre + "(")
        i=0
        while i < contadorParams :
            if i==0:
                f.write("t" + str(ilabel-contadorParams+i) )
            else:
                f.write(",t" + str(ilabel-contadorParams+i) )
            i += 1
        f.write(")\n")
        f.write("    \n")
        ilabel = tempos.incTemporal()
        # Print the call's result in the generated program.
        f.write("    t" + str(ilabel) + "= print(t" + str(ilabel-1) + ")\n")
        f.write("    \n")
        f.close()
        return 0
    except:
        return 1
def select_procedureC3D(nombre: str, parametros: any) -> int:
    """Append 3-address code that calls generated procedure *nombre*.

    Emits one temporal per argument (string-typed values quoted), then the
    call itself and a print of its result.  Returns 0 on success, 1 on
    failure.  NOTE(review): identical to select_functionC3D — the two are
    candidates for a shared helper.
    """
    try:
        f = open("./c3d/codigo3Dgenerado.py", "a")
        ilabel = tempos.incTemporal()
        listadeParametros = []
        contadorParams = 0
        if parametros==None:
            parametros = []
        for parmf in parametros:
            ilabel = tempos.incTemporal()
            contadorParams += 1
            if str(parmf.type) =='string':
                f.write("    t" + str(ilabel) + "='" + str(parmf.val) + "'\n")
            else:
                f.write("    t" + str(ilabel) + "=" + str(parmf.val) + "\n")
        ilabel = tempos.incTemporal()
        # Emit the call, referencing the argument temporals by offset.
        f.write("    t" + str(ilabel) + "= " + nombre + "(")
        i=0
        while i < contadorParams :
            if i==0:
                f.write("t" + str(ilabel-contadorParams+i) )
            else:
                f.write(",t" + str(ilabel-contadorParams+i) )
            i += 1
        f.write(")\n")
        f.write("    \n")
        ilabel = tempos.incTemporal()
        # Print the call's result in the generated program.
        f.write("    t" + str(ilabel) + "= print(t" + str(ilabel-1) + ")\n")
        f.write("    \n")
        f.close()
        return 0
    except:
        return 1
float(variable)
return True
except :
return False
| true | true |
f7f90558156e48d2db7a90ffba5223496a2ef6c0 | 458 | py | Python | www/mobile/pcap2har/har.py | AutomationConsultant/webpagetest | 37aff455ea1b99ba319f6558a676c0e72ba6e1eb | [
"BSD-3-Clause"
] | 1 | 2017-09-24T13:59:56.000Z | 2017-09-24T13:59:56.000Z | www/mobile/pcap2har/har.py | AutomationConsultant/webpagetest | 37aff455ea1b99ba319f6558a676c0e72ba6e1eb | [
"BSD-3-Clause"
] | null | null | null | www/mobile/pcap2har/har.py | AutomationConsultant/webpagetest | 37aff455ea1b99ba319f6558a676c0e72ba6e1eb | [
"BSD-3-Clause"
] | null | null | null | """
functions and classes for generating HAR data from parsed http data
"""
import http
import logging
import simplejson as json
# custom json encoder
class JsonReprEncoder(json.JSONEncoder):
    """JSON encoder that delegates serialization to an object's json_repr().

    Any object exposing a ``json_repr`` attribute is encoded as the value
    returned by calling it; everything else falls back to the standard
    JSONEncoder behavior (which raises TypeError for unknown types).
    """

    def default(self, obj):
        has_custom_repr = hasattr(obj, 'json_repr')
        if not has_custom_repr:
            # No hook available: defer to the base encoder.
            return super(JsonReprEncoder, self).default(obj)
        return obj.json_repr()
| 24.105263 | 75 | 0.733624 | import http
import logging
import simplejson as json
class JsonReprEncoder(json.JSONEncoder):
    """JSON encoder that calls json_repr() on objects that provide it."""
    def default(self, obj):
        # Objects exposing json_repr serialize via that hook; everything
        # else falls back to the base encoder (raises TypeError).
        if hasattr(obj, 'json_repr'):
            return obj.json_repr()
        return json.JSONEncoder.default(self, obj)
| true | true |
f7f905ac5f3247bfbdabb9536e9345ff41b87012 | 7,057 | py | Python | eval_multipro.py | chenjun2hao/segmentation.pytorch | a319d0f006559dd58bd853065e6fe79ae8c23791 | [
"BSD-3-Clause"
] | 2 | 2021-03-30T12:51:15.000Z | 2021-03-31T02:39:14.000Z | eval_multipro.py | chenjun2hao/segmentation.pytorch | a319d0f006559dd58bd853065e6fe79ae8c23791 | [
"BSD-3-Clause"
] | null | null | null | eval_multipro.py | chenjun2hao/segmentation.pytorch | a319d0f006559dd58bd853065e6fe79ae8c23791 | [
"BSD-3-Clause"
] | null | null | null | # System libs
import os
import argparse
from distutils.version import LooseVersion
from multiprocessing import Queue, Process
# Numerical libs
import numpy as np
import math
import torch
import torch.nn as nn
from scipy.io import loadmat
# Our libs
from mit_semseg.config import cfg
from mit_semseg.dataset import ValDataset
from mit_semseg.models import ModelBuilder, SegmentationModule
from mit_semseg.utils import AverageMeter, colorEncode, accuracy, intersectionAndUnion, parse_devices, setup_logger
from mit_semseg.lib.nn import user_scattered_collate, async_copy_to
from mit_semseg.lib.utils import as_numpy
from PIL import Image
from tqdm import tqdm
colors = loadmat('data/color150.mat')['colors']
def visualize_result(data, pred, dir_result):
    """Save a side-by-side panel of input image, ground truth and prediction.

    ``data`` is an (image, segmentation, info-path) triple; the output PNG
    is written into ``dir_result`` under the sample's base name.
    """
    img, seg, info = data
    # Color-encode ground truth and prediction with the shared palette.
    colored_gt = colorEncode(seg, colors)
    colored_pred = colorEncode(pred, colors)
    # Stitch the three panels horizontally into a single uint8 image.
    panel = np.concatenate((img, colored_gt, colored_pred), axis=1).astype(np.uint8)
    # Derive the output file name from the sample path, swapping .jpg -> .png.
    out_name = info.split('/')[-1].replace('.jpg', '.png')
    Image.fromarray(panel).save(os.path.join(dir_result, out_name))
def evaluate(segmentation_module, loader, cfg, gpu_id, result_queue):
    """Run multi-scale inference over *loader* and push metrics to the master.

    For every sample, scores from each resized copy are averaged, the argmax
    prediction is compared against the ground truth, and
    (acc, pix, intersection, union) is put on *result_queue* for the master
    process to aggregate.
    """
    segmentation_module.eval()
    for batch_data in loader:
        # process data; batch_size is 1, so unwrap the singleton batch
        batch_data = batch_data[0]
        seg_label = as_numpy(batch_data['seg_label'][0])
        img_resized_list = batch_data['img_data']
        with torch.no_grad():
            segSize = (seg_label.shape[0], seg_label.shape[1])
            # Accumulator for the multi-scale score average.
            scores = torch.zeros(1, cfg.DATASET.num_class, segSize[0], segSize[1])
            scores = async_copy_to(scores, gpu_id)
            for img in img_resized_list:
                feed_dict = batch_data.copy()
                feed_dict['img_data'] = img
                # Drop fields the model does not consume.
                del feed_dict['img_ori']
                del feed_dict['info']
                feed_dict = async_copy_to(feed_dict, gpu_id)
                # forward pass; average scores across the scales
                scores_tmp = segmentation_module(feed_dict, segSize=segSize)
                scores = scores + scores_tmp / len(cfg.DATASET.imgSizes)
            _, pred = torch.max(scores, dim=1)
            pred = as_numpy(pred.squeeze(0).cpu())
        # calculate accuracy and SEND THEM TO MASTER
        acc, pix = accuracy(pred, seg_label)
        intersection, union = intersectionAndUnion(pred, seg_label, cfg.DATASET.num_class)
        result_queue.put_nowait((acc, pix, intersection, union))
        # visualization
        if cfg.VAL.visualize:
            visualize_result(
                (batch_data['img_ori'], seg_label, batch_data['info']),
                pred,
                os.path.join(cfg.DIR, 'result')
            )
def worker(cfg, gpu_id, start_idx, end_idx, result_queue):
    """Per-GPU worker: build model + data slice and evaluate it.

    Pins the process to *gpu_id*, loads the [start_idx, end_idx) slice of the
    validation list, builds the encoder/decoder from checkpoints, and streams
    metrics back through *result_queue*.
    """
    torch.cuda.set_device(gpu_id)
    # Dataset and Loader for this worker's slice of the file list
    dataset_val = ValDataset(
        cfg.DATASET.root_dataset,
        cfg.DATASET.list_val,
        cfg.DATASET,
        start_idx=start_idx, end_idx=end_idx)
    loader_val = torch.utils.data.DataLoader(
        dataset_val,
        batch_size=cfg.VAL.batch_size,
        shuffle=False,
        collate_fn=user_scattered_collate,
        num_workers=2)
    # Network Builders (weights restored from the checkpoint paths in cfg)
    net_encoder = ModelBuilder.build_encoder(
        arch=cfg.MODEL.arch_encoder.lower(),
        fc_dim=cfg.MODEL.fc_dim,
        weights=cfg.MODEL.weights_encoder)
    net_decoder = ModelBuilder.build_decoder(
        arch=cfg.MODEL.arch_decoder.lower(),
        fc_dim=cfg.MODEL.fc_dim,
        num_class=cfg.DATASET.num_class,
        weights=cfg.MODEL.weights_decoder,
        use_softmax=True)
    crit = nn.NLLLoss(ignore_index=-1)
    segmentation_module = SegmentationModule(net_encoder, net_decoder, crit)
    segmentation_module.cuda()
    # Main loop
    evaluate(segmentation_module, loader_val, cfg, gpu_id, result_queue)
def main(cfg, gpus):
    """Master: shard the validation list across GPUs and aggregate metrics.

    Spawns one worker process per GPU, collects per-sample
    (acc, pix, intersection, union) tuples from the shared queue, and prints
    per-class IoU plus the overall mean IoU / accuracy summary.
    """
    with open(cfg.DATASET.list_val, 'r') as f:
        lines = f.readlines()
        num_files = len(lines)
    # Even contiguous shards; the last GPU may get a short shard.
    num_files_per_gpu = math.ceil(num_files / len(gpus))
    pbar = tqdm(total=num_files)
    acc_meter = AverageMeter()
    intersection_meter = AverageMeter()
    union_meter = AverageMeter()
    result_queue = Queue(500)
    procs = []
    for idx, gpu_id in enumerate(gpus):
        start_idx = idx * num_files_per_gpu
        end_idx = min(start_idx + num_files_per_gpu, num_files)
        proc = Process(target=worker, args=(cfg, gpu_id, start_idx, end_idx, result_queue))
        print('gpu:{}, start_idx:{}, end_idx:{}'.format(gpu_id, start_idx, end_idx))
        proc.start()
        procs.append(proc)
    # master fetches results (busy-polls the queue until all samples arrive)
    processed_counter = 0
    while processed_counter < num_files:
        if result_queue.empty():
            continue
        (acc, pix, intersection, union) = result_queue.get()
        acc_meter.update(acc, pix)
        intersection_meter.update(intersection)
        union_meter.update(union)
        processed_counter += 1
        pbar.update(1)
    for p in procs:
        p.join()
    # summary
    iou = intersection_meter.sum / (union_meter.sum + 1e-10)
    for i, _iou in enumerate(iou):
        print('class [{}], IoU: {:.4f}'.format(i, _iou))
    print('[Eval Summary]:')
    print('Mean IoU: {:.4f}, Accuracy: {:.2f}%'
          .format(iou.mean(), acc_meter.average()*100))
    print('Evaluation Done!')
if __name__ == '__main__':
    # Entry point: parse CLI args, load the YACS config, resolve checkpoint
    # paths, prepare the output directory and launch the multi-GPU eval.
    assert LooseVersion(torch.__version__) >= LooseVersion('0.4.0'), \
        'PyTorch>=0.4.0 is required'
    parser = argparse.ArgumentParser(
        description="PyTorch Semantic Segmentation Validation"
    )
    parser.add_argument(
        "--cfg",
        default="config/ade20k-resnet50dilated-ppm_deepsup.yaml",
        metavar="FILE",
        help="path to config file",
        type=str,
    )
    parser.add_argument(
        "--gpus",
        default="0",
        help="gpus to use, e.g. 0-3 or 0,1,2,3"
    )
    parser.add_argument(
        "opts",
        help="Modify config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )
    args = parser.parse_args()
    cfg.merge_from_file(args.cfg)
    cfg.merge_from_list(args.opts)
    # cfg.freeze()
    logger = setup_logger(distributed_rank=0)   # TODO
    logger.info("Loaded configuration file {}".format(args.cfg))
    logger.info("Running with config:\n{}".format(cfg))
    # absolute paths of model weights
    cfg.MODEL.weights_encoder = os.path.join(
        cfg.DIR, 'encoder_' + cfg.VAL.checkpoint)
    cfg.MODEL.weights_decoder = os.path.join(
        cfg.DIR, 'decoder_' + cfg.VAL.checkpoint)
    # NOTE(review): "exitst" is a typo in the assertion message (runtime
    # string, intentionally left unchanged here).
    assert os.path.exists(cfg.MODEL.weights_encoder) and \
        os.path.exists(cfg.MODEL.weights_decoder), "checkpoint does not exitst!"
    if not os.path.isdir(os.path.join(cfg.DIR, "result")):
        os.makedirs(os.path.join(cfg.DIR, "result"))
    # Parse gpu ids
    gpus = parse_devices(args.gpus)
    gpus = [x.replace('gpu', '') for x in gpus]
    gpus = [int(x) for x in gpus]
    main(cfg, gpus)
import os
import argparse
from distutils.version import LooseVersion
from multiprocessing import Queue, Process
import numpy as np
import math
import torch
import torch.nn as nn
from scipy.io import loadmat
from mit_semseg.config import cfg
from mit_semseg.dataset import ValDataset
from mit_semseg.models import ModelBuilder, SegmentationModule
from mit_semseg.utils import AverageMeter, colorEncode, accuracy, intersectionAndUnion, parse_devices, setup_logger
from mit_semseg.lib.nn import user_scattered_collate, async_copy_to
from mit_semseg.lib.utils import as_numpy
from PIL import Image
from tqdm import tqdm
colors = loadmat('data/color150.mat')['colors']
def visualize_result(data, pred, dir_result):
    """Save input image, color-coded GT and prediction side by side as PNG."""
    (img, seg, info) = data
    seg_color = colorEncode(seg, colors)
    pred_color = colorEncode(pred, colors)
    # Horizontal concatenation: [image | ground truth | prediction]
    im_vis = np.concatenate((img, seg_color, pred_color),
                            axis=1).astype(np.uint8)
    img_name = info.split('/')[-1]
    Image.fromarray(im_vis).save(os.path.join(dir_result, img_name.replace('.jpg', '.png')))
def evaluate(segmentation_module, loader, cfg, gpu_id, result_queue):
    """Multi-scale inference over *loader*; metrics go to *result_queue*."""
    segmentation_module.eval()
    for batch_data in loader:
        # batch_size is 1: unwrap the singleton batch
        batch_data = batch_data[0]
        seg_label = as_numpy(batch_data['seg_label'][0])
        img_resized_list = batch_data['img_data']
        with torch.no_grad():
            segSize = (seg_label.shape[0], seg_label.shape[1])
            scores = torch.zeros(1, cfg.DATASET.num_class, segSize[0], segSize[1])
            scores = async_copy_to(scores, gpu_id)
            for img in img_resized_list:
                feed_dict = batch_data.copy()
                feed_dict['img_data'] = img
                del feed_dict['img_ori']
                del feed_dict['info']
                feed_dict = async_copy_to(feed_dict, gpu_id)
                # average the class scores over all input scales
                scores_tmp = segmentation_module(feed_dict, segSize=segSize)
                scores = scores + scores_tmp / len(cfg.DATASET.imgSizes)
            _, pred = torch.max(scores, dim=1)
            pred = as_numpy(pred.squeeze(0).cpu())
        # per-sample metrics are sent to the master process
        acc, pix = accuracy(pred, seg_label)
        intersection, union = intersectionAndUnion(pred, seg_label, cfg.DATASET.num_class)
        result_queue.put_nowait((acc, pix, intersection, union))
        if cfg.VAL.visualize:
            visualize_result(
                (batch_data['img_ori'], seg_label, batch_data['info']),
                pred,
                os.path.join(cfg.DIR, 'result')
            )
def worker(cfg, gpu_id, start_idx, end_idx, result_queue):
    """Per-GPU worker: evaluate the [start_idx, end_idx) slice on *gpu_id*."""
    torch.cuda.set_device(gpu_id)
    # Data slice for this worker
    dataset_val = ValDataset(
        cfg.DATASET.root_dataset,
        cfg.DATASET.list_val,
        cfg.DATASET,
        start_idx=start_idx, end_idx=end_idx)
    loader_val = torch.utils.data.DataLoader(
        dataset_val,
        batch_size=cfg.VAL.batch_size,
        shuffle=False,
        collate_fn=user_scattered_collate,
        num_workers=2)
    # Model restored from the checkpoint paths in cfg
    net_encoder = ModelBuilder.build_encoder(
        arch=cfg.MODEL.arch_encoder.lower(),
        fc_dim=cfg.MODEL.fc_dim,
        weights=cfg.MODEL.weights_encoder)
    net_decoder = ModelBuilder.build_decoder(
        arch=cfg.MODEL.arch_decoder.lower(),
        fc_dim=cfg.MODEL.fc_dim,
        num_class=cfg.DATASET.num_class,
        weights=cfg.MODEL.weights_decoder,
        use_softmax=True)
    crit = nn.NLLLoss(ignore_index=-1)
    segmentation_module = SegmentationModule(net_encoder, net_decoder, crit)
    segmentation_module.cuda()
    evaluate(segmentation_module, loader_val, cfg, gpu_id, result_queue)
def main(cfg, gpus):
    """Shard the validation list across GPUs, aggregate and print metrics."""
    with open(cfg.DATASET.list_val, 'r') as f:
        lines = f.readlines()
        num_files = len(lines)
    num_files_per_gpu = math.ceil(num_files / len(gpus))
    pbar = tqdm(total=num_files)
    acc_meter = AverageMeter()
    intersection_meter = AverageMeter()
    union_meter = AverageMeter()
    result_queue = Queue(500)
    procs = []
    for idx, gpu_id in enumerate(gpus):
        start_idx = idx * num_files_per_gpu
        end_idx = min(start_idx + num_files_per_gpu, num_files)
        proc = Process(target=worker, args=(cfg, gpu_id, start_idx, end_idx, result_queue))
        print('gpu:{}, start_idx:{}, end_idx:{}'.format(gpu_id, start_idx, end_idx))
        proc.start()
        procs.append(proc)
    # Master busy-polls the queue until every sample has reported.
    processed_counter = 0
    while processed_counter < num_files:
        if result_queue.empty():
            continue
        (acc, pix, intersection, union) = result_queue.get()
        acc_meter.update(acc, pix)
        intersection_meter.update(intersection)
        union_meter.update(union)
        processed_counter += 1
        pbar.update(1)
    for p in procs:
        p.join()
    # Per-class IoU plus overall summary
    iou = intersection_meter.sum / (union_meter.sum + 1e-10)
    for i, _iou in enumerate(iou):
        print('class [{}], IoU: {:.4f}'.format(i, _iou))
    print('[Eval Summary]:')
    print('Mean IoU: {:.4f}, Accuracy: {:.2f}%'
          .format(iou.mean(), acc_meter.average()*100))
    print('Evaluation Done!')
if __name__ == '__main__':
    # Entry point: parse CLI args, load the config, resolve checkpoints,
    # prepare the result directory and launch multi-GPU evaluation.
    assert LooseVersion(torch.__version__) >= LooseVersion('0.4.0'), \
        'PyTorch>=0.4.0 is required'
    parser = argparse.ArgumentParser(
        description="PyTorch Semantic Segmentation Validation"
    )
    parser.add_argument(
        "--cfg",
        default="config/ade20k-resnet50dilated-ppm_deepsup.yaml",
        metavar="FILE",
        help="path to config file",
        type=str,
    )
    parser.add_argument(
        "--gpus",
        default="0",
        help="gpus to use, e.g. 0-3 or 0,1,2,3"
    )
    parser.add_argument(
        "opts",
        help="Modify config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )
    args = parser.parse_args()
    cfg.merge_from_file(args.cfg)
    cfg.merge_from_list(args.opts)
    logger = setup_logger(distributed_rank=0)
    logger.info("Loaded configuration file {}".format(args.cfg))
    logger.info("Running with config:\n{}".format(cfg))
    # Absolute paths of the encoder/decoder checkpoints.
    cfg.MODEL.weights_encoder = os.path.join(
        cfg.DIR, 'encoder_' + cfg.VAL.checkpoint)
    cfg.MODEL.weights_decoder = os.path.join(
        cfg.DIR, 'decoder_' + cfg.VAL.checkpoint)
    # NOTE(review): "exitst" is a typo in the message (runtime string kept).
    assert os.path.exists(cfg.MODEL.weights_encoder) and \
        os.path.exists(cfg.MODEL.weights_decoder), "checkpoint does not exitst!"
    if not os.path.isdir(os.path.join(cfg.DIR, "result")):
        os.makedirs(os.path.join(cfg.DIR, "result"))
    # Expand the --gpus spec into a list of integer device ids.
    gpus = parse_devices(args.gpus)
    gpus = [x.replace('gpu', '') for x in gpus]
    gpus = [int(x) for x in gpus]
    main(cfg, gpus)
f7f905fdb9be139b0f079498f55a01dfc8360e6c | 3,840 | py | Python | backend/app/init_test_data.py | wu-clan/fastapi_mysql_demo | efa3bdff73aa4d366da5f12dbb58c0221205e39b | [
"MIT"
] | null | null | null | backend/app/init_test_data.py | wu-clan/fastapi_mysql_demo | efa3bdff73aa4d366da5f12dbb58c0221205e39b | [
"MIT"
] | null | null | null | backend/app/init_test_data.py | wu-clan/fastapi_mysql_demo | efa3bdff73aa4d366da5f12dbb58c0221205e39b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import asyncio
from email_validator import EmailNotValidError, validate_email
from faker import Faker
from backend.app.datebase.db_mysql import db_session
from backend.app.models import User
from backend.app.common.log import log
from backend.app.api.jwt_security import get_hash_password
db = db_session()
class InitData:
    """Seed the database with test accounts (one interactive admin plus
    faked normal/locked/admin users).  All user-facing prompts and log
    messages are in Chinese by design."""
    def __init__(self):
        # Chinese-locale faker for realistic usernames/emails.
        self.fake = Faker('zh_CN')
    @staticmethod
    async def create_superuser_by_yourself():
        """Interactively create an administrator account (prompts on stdin)."""
        print('请输入用户名:')
        username = input()
        print('请输入密码:')
        password = input()
        print('请输入邮箱:')
        # Re-prompt until a syntactically valid e-mail address is entered.
        while True:
            email = input()
            try:
                success_email = validate_email(email).email
            except EmailNotValidError:
                print('邮箱不符合规范,请重新输入:')
                continue
            new_email = success_email
            break
        user_obj = User(
            username=username,
            password=get_hash_password(password),  # store only the hash
            email=new_email,
            is_superuser=True,
        )
        db.add(user_obj)
        await db.commit()
        await db.refresh(user_obj)
        print(f'管理员用户创建成功,账号:{username},密码:{password}')
    async def fake_user(self):
        """Create a random active, non-admin user."""
        username = self.fake.user_name()
        password = self.fake.password()
        email = self.fake.email()
        user_obj = User(
            username=username,
            password=get_hash_password(password),
            email=email,
            is_superuser=False,
        )
        db.add(user_obj)
        await db.commit()
        await db.refresh(user_obj)
        log.info(f"普通用户创建成功,账号:{username},密码:{password}")
    async def fake_no_active_user(self):
        """Create a random locked (inactive) non-admin user."""
        username = self.fake.user_name()
        password = self.fake.password()
        email = self.fake.email()
        user_obj = User(
            username=username,
            password=get_hash_password(password),
            email=email,
            is_active=False,
            is_superuser=False,
        )
        db.add(user_obj)
        await db.commit()
        await db.refresh(user_obj)
        log.info(f"普通锁定用户创建成功,账号:{username},密码:{password}")
    async def fake_superuser(self):
        """Create a random active administrator user."""
        username = self.fake.user_name()
        password = self.fake.password()
        email = self.fake.email()
        user_obj = User(
            username=username,
            password=get_hash_password(password),
            email=email,
            is_superuser=True,
        )
        db.add(user_obj)
        await db.commit()
        await db.refresh(user_obj)
        log.info(f"管理员用户创建成功,账号:{username},密码:{password}")
    async def fake_no_active_superuser(self):
        """Create a random locked (inactive) administrator user."""
        username = self.fake.user_name()
        password = self.fake.password()
        email = self.fake.email()
        user_obj = User(
            username=username,
            password=get_hash_password(password),
            email=email,
            is_active=False,
            is_superuser=True,
        )
        db.add(user_obj)
        await db.commit()
        await db.refresh(user_obj)
        log.info(f"管理员锁定用户创建成功,账号:{username},密码:{password}")
    async def init_data(self):
        """Run the full seeding sequence (interactive admin first)."""
        log.info('----------------开始初始化数据----------------')
        await self.create_superuser_by_yourself()
        await self.fake_user()
        await self.fake_no_active_user()
        await self.fake_superuser()
        await self.fake_no_active_superuser()
        log.info('----------------数据初始化完成----------------')
if __name__ == '__main__':
    # Run the async seeding sequence on a fresh event loop.
    init = InitData()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(init.init_data())
| 29.312977 | 62 | 0.569271 |
import asyncio
from email_validator import EmailNotValidError, validate_email
from faker import Faker
from backend.app.datebase.db_mysql import db_session
from backend.app.models import User
from backend.app.common.log import log
from backend.app.api.jwt_security import get_hash_password
db = db_session()
class InitData:
    def __init__(self):
        # Chinese-locale faker for realistic usernames/emails.
        self.fake = Faker('zh_CN')
    @staticmethod
    async def create_superuser_by_yourself():
        """Interactively create an administrator account (prompts on stdin)."""
        print('请输入用户名:')
        username = input()
        print('请输入密码:')
        password = input()
        print('请输入邮箱:')
        # Re-prompt until a syntactically valid e-mail address is entered.
        while True:
            email = input()
            try:
                success_email = validate_email(email).email
            except EmailNotValidError:
                print('邮箱不符合规范,请重新输入:')
                continue
            new_email = success_email
            break
        user_obj = User(
            username=username,
            password=get_hash_password(password),  # store only the hash
            email=new_email,
            is_superuser=True,
        )
        db.add(user_obj)
        await db.commit()
        await db.refresh(user_obj)
        print(f'管理员用户创建成功,账号:{username},密码:{password}')
async def fake_user(self):
username = self.fake.user_name()
password = self.fake.password()
email = self.fake.email()
user_obj = User(
username=username,
password=get_hash_password(password),
email=email,
is_superuser=False,
)
db.add(user_obj)
await db.commit()
await db.refresh(user_obj)
log.info(f"普通用户创建成功,账号:{username},密码:{password}")
async def fake_no_active_user(self):
username = self.fake.user_name()
password = self.fake.password()
email = self.fake.email()
user_obj = User(
username=username,
password=get_hash_password(password),
email=email,
is_active=False,
is_superuser=False,
)
db.add(user_obj)
await db.commit()
await db.refresh(user_obj)
log.info(f"普通锁定用户创建成功,账号:{username},密码:{password}")
async def fake_superuser(self):
username = self.fake.user_name()
password = self.fake.password()
email = self.fake.email()
user_obj = User(
username=username,
password=get_hash_password(password),
email=email,
is_superuser=True,
)
db.add(user_obj)
await db.commit()
await db.refresh(user_obj)
log.info(f"管理员用户创建成功,账号:{username},密码:{password}")
async def fake_no_active_superuser(self):
username = self.fake.user_name()
password = self.fake.password()
email = self.fake.email()
user_obj = User(
username=username,
password=get_hash_password(password),
email=email,
is_active=False,
is_superuser=True,
)
db.add(user_obj)
await db.commit()
await db.refresh(user_obj)
log.info(f"管理员锁定用户创建成功,账号:{username},密码:{password}")
async def init_data(self):
log.info('----------------开始初始化数据----------------')
await self.create_superuser_by_yourself()
await self.fake_user()
await self.fake_no_active_user()
await self.fake_superuser()
await self.fake_no_active_superuser()
log.info('----------------数据初始化完成----------------')
if __name__ == '__main__':
    init = InitData()
    # asyncio.run() creates, runs and closes the event loop for us; the old
    # get_event_loop()/run_until_complete() pattern is deprecated since
    # Python 3.10 and left the loop open on exit.
    asyncio.run(init.init_data())
| true | true |
f7f9072287ea5e9b9965c70a954474c6ae29a058 | 5,004 | py | Python | ginga/cvw/CvHelp.py | chyan26/ginga | e00c887d8660e0a4178f9681ca7ea7784b7ca129 | [
"BSD-3-Clause"
] | 1 | 2019-04-27T01:34:27.000Z | 2019-04-27T01:34:27.000Z | ginga/cvw/CvHelp.py | chyan26/ginga | e00c887d8660e0a4178f9681ca7ea7784b7ca129 | [
"BSD-3-Clause"
] | null | null | null | ginga/cvw/CvHelp.py | chyan26/ginga | e00c887d8660e0a4178f9681ca7ea7784b7ca129 | [
"BSD-3-Clause"
] | null | null | null | #
# CvHelp.py -- help classes for the Cv drawing
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
import numpy as np
import cv2
from ginga import colors
from ginga.fonts import font_asst
def get_cached_font(fontname, fontsize):
    """Return a cv2 FreeType2 font for ``(fontname, fontsize)``, caching it.

    On a cache miss the font file is located via the font assistant
    (substitution allowed) and loaded with OpenCV's freetype module.
    """
    key = (fontname, fontsize)
    try:
        return font_asst.get_cache(key)
    except KeyError:
        # Not cached yet: see if we can build the font from disk.
        info = font_asst.get_font_info(fontname, subst_ok=True)
        font = cv2.freetype.createFreeType2()
        font.loadFontData(info.font_path, id=0)
        font_asst.add_cache(key, font)
        return font
def load_font(font_name, font_file):
    """Ensure *font_name* is registered with the font assistant.

    If the name is not yet known, *font_file* is registered under it.
    The font name is returned either way, for caller convenience.
    """
    already_registered = font_asst.have_font(font_name)
    if not already_registered:
        font_asst.add_font(font_file, font_name=font_name)
    return font_name
class Pen(object):
    """Value object holding stroke attributes for CvContext drawing calls."""

    def __init__(self, color='black', linewidth=1, alpha=1.0):
        # Stroke attributes are stored verbatim; CvContext reads them later.
        self.color, self.linewidth, self.alpha = color, linewidth, alpha
class Brush(object):
    """Value object holding fill attributes for CvContext drawing calls."""

    def __init__(self, color='black', fill=False, alpha=1.0):
        # Fill attributes are stored verbatim; CvContext reads them later.
        self.color, self.fill, self.alpha = color, fill, alpha
class Font(object):
    """Resolved font parameters for rendering with cv2's freetype module."""
    def __init__(self, fontname='ariel', fontsize=12.0, color='black',
                 linewidth=1, alpha=1.0):
        # Map any alias (e.g. a family name) to the registered font name.
        fontname = font_asst.resolve_alias(fontname, fontname)
        self.fontname = fontname
        self.fontsize = fontsize
        self.color = color
        # text is not filled unless linewidth value is negative
        self.linewidth = -linewidth
        # fonts are scaled by specifying a height--this should be
        # related to the fontsize more accurately here
        self.scale = int(round(fontsize * 1.5))
        self.alpha = alpha
        # note: opencv scales the fonts dynamically, so always
        # specify a 0 for caching
        self.font = get_cached_font(self.fontname, 0)
class CvContext(object):
    """Thin drawing context that wraps OpenCV primitives on a numpy canvas."""
    def __init__(self, canvas):
        self.canvas = canvas
    def set_canvas(self, canvas):
        """Point this context at a new canvas (numpy image array)."""
        self.canvas = canvas
    def get_color(self, color, alpha=1.0):
        """Convert a color name or float RGB 3-tuple to an RGBA 0-255 tuple."""
        if isinstance(color, str) or isinstance(color, type(u"")):
            r, g, b = colors.lookup_color(color)
        elif isinstance(color, tuple):
            # color is assumed to be a 3-tuple of RGB values as floats
            # between 0 and 1
            r, g, b = color
        else:
            # Unknown color spec: fall back to white.
            r, g, b = 1.0, 1.0, 1.0
        # According to documentation, OpenCV expects colors as BGRA tuple
        # BUT, seems we need to specify RGBA--I suppose we need to match
        # what is defined as rgb_order attribute in ImageViewCv class
        #return (int(alpha*255), int(b*255), int(g*255), int(r*255))
        return (int(r * 255), int(g * 255), int(b * 255), int(alpha * 255))
    def get_pen(self, color, linewidth=1, alpha=1.0):
        """Return a Pen with its color resolved to an RGBA tuple."""
        # TODO: support line styles
        # if hasattr(self, 'linestyle'):
        #     if self.linestyle == 'dash':
        #         cr.set_dash([ 3.0, 4.0, 6.0, 4.0], 5.0)
        color = self.get_color(color, alpha=alpha)
        return Pen(color=color, linewidth=linewidth, alpha=alpha)
    def get_brush(self, color, alpha=1.0):
        """Return a filling Brush with its color resolved to RGBA."""
        color = self.get_color(color, alpha=alpha)
        return Brush(color=color, fill=True, alpha=alpha)
    def get_font(self, name, size, color, linewidth=1, alpha=1.0):
        """Return a Font with its color resolved to RGBA."""
        color = self.get_color(color, alpha=alpha)
        return Font(fontname=name, fontsize=size, color=color,
                    linewidth=linewidth, alpha=alpha)
    def text_extents(self, text, font):
        """Return (width, height) in pixels of *text* rendered with *font*."""
        retval, baseline = font.font.getTextSize(text, font.scale,
                                                 font.linewidth)
        wd, ht = retval
        return wd, ht
    def text(self, pt, text, font):
        """Draw *text* with its bottom-left corner at point *pt*."""
        x, y = pt
        font.font.putText(self.canvas, text, (x, y), font.scale,
                          font.color, thickness=font.linewidth,
                          line_type=cv2.LINE_AA, bottomLeftOrigin=True)
    def line(self, pt1, pt2, pen):
        """Draw a line segment from *pt1* to *pt2* using *pen*."""
        x1, y1 = int(round(pt1[0])), int(round(pt1[1]))
        x2, y2 = int(round(pt2[0])), int(round(pt2[1]))
        cv2.line(self.canvas, (x1, y1), (x2, y2), pen.color, pen.linewidth)
    def circle(self, pt, radius, pen, brush):
        """Draw a circle at *pt*; filled first if *brush* requests it."""
        x, y = pt
        radius = int(radius)
        if (brush is not None) and brush.fill:
            # Thickness of -1 means "filled" in OpenCV.
            cv2.circle(self.canvas, (x, y), radius, brush.color, -1)
        cv2.circle(self.canvas, (x, y), radius, pen.color, pen.linewidth)
    def polygon(self, points, pen, brush):
        """Draw a closed polygon through *points*, optionally filled."""
        pts = np.array(points, np.int32)
        pts = pts.reshape((-1, 1, 2))
        cv2.polylines(self.canvas, [pts], True, pen.color, pen.linewidth)
        if (brush is not None) and brush.fill:
            cv2.fillPoly(self.canvas, [pts], brush.color)
    def path(self, points, pen):
        """Draw an open polyline through *points*."""
        pts = np.array(points, np.int32)
        pts = pts.reshape((-1, 1, 2))
        cv2.polylines(self.canvas, [pts], False, pen.color, pen.linewidth)
# END
| 34.040816 | 75 | 0.605715 |
import numpy as np
import cv2
from ginga import colors
from ginga.fonts import font_asst
def get_cached_font(fontname, fontsize):
    """Return a cached cv2 FreeType2 font for ``(fontname, fontsize)``.

    On a cache miss, locate the font file through the font assistant
    (substitution allowed), load it with OpenCV's freetype module and
    cache the result before returning it.
    """
    key = (fontname, fontsize)
    try:
        return font_asst.get_cache(key)
    except KeyError:
        # Not cached yet: try to build the font from its file on disk.
        info = font_asst.get_font_info(fontname, subst_ok=True)
        font = cv2.freetype.createFreeType2()
        font.loadFontData(info.font_path, id=0)
        font_asst.add_cache(key, font)
        return font
def load_font(font_name, font_file):
    """Register *font_file* under *font_name* if not already registered.

    Returns the font name either way, for caller convenience.
    """
    if not font_asst.have_font(font_name):
        font_asst.add_font(font_file, font_name=font_name)
    return font_name
class Pen(object):
    """Stroke attributes (color, line width, alpha) for drawing calls."""
    def __init__(self, color='black', linewidth=1, alpha=1.0):
        self.color = color
        self.linewidth = linewidth
        self.alpha = alpha
class Brush(object):
    """Fill attributes (color, fill flag, alpha) for drawing calls."""
    def __init__(self, color='black', fill=False, alpha=1.0):
        self.color = color
        self.fill = fill
        self.alpha = alpha
class Font(object):
    """Resolved font parameters for rendering with cv2's freetype module."""
    def __init__(self, fontname='ariel', fontsize=12.0, color='black',
                 linewidth=1, alpha=1.0):
        # Map any alias (e.g. a family name) to the registered font name.
        fontname = font_asst.resolve_alias(fontname, fontname)
        self.fontname = fontname
        self.fontsize = fontsize
        self.color = color
        # Text is only filled when the thickness value is negative.
        self.linewidth = -linewidth
        # cv2.freetype scales fonts by pixel height; approximated from size.
        self.scale = int(round(fontsize * 1.5))
        self.alpha = alpha
        # OpenCV scales fonts dynamically, so cache under size 0.
        self.font = get_cached_font(self.fontname, 0)
class CvContext(object):
    """Thin drawing context that wraps OpenCV primitives on a numpy canvas."""
    def __init__(self, canvas):
        self.canvas = canvas
    def set_canvas(self, canvas):
        """Point this context at a new canvas (numpy image array)."""
        self.canvas = canvas
    def get_color(self, color, alpha=1.0):
        """Convert a color name or float RGB 3-tuple to an RGBA 0-255 tuple."""
        if isinstance(color, str) or isinstance(color, type(u"")):
            r, g, b = colors.lookup_color(color)
        elif isinstance(color, tuple):
            # Assumed to be an RGB 3-tuple of floats in [0, 1].
            r, g, b = color
        else:
            # Unknown color spec: fall back to white.
            r, g, b = 1.0, 1.0, 1.0
        return (int(r * 255), int(g * 255), int(b * 255), int(alpha * 255))
    def get_pen(self, color, linewidth=1, alpha=1.0):
        """Return a Pen with its color resolved to an RGBA tuple."""
        color = self.get_color(color, alpha=alpha)
        return Pen(color=color, linewidth=linewidth, alpha=alpha)
    def get_brush(self, color, alpha=1.0):
        """Return a filling Brush with its color resolved to RGBA."""
        color = self.get_color(color, alpha=alpha)
        return Brush(color=color, fill=True, alpha=alpha)
    def get_font(self, name, size, color, linewidth=1, alpha=1.0):
        """Return a Font with its color resolved to RGBA."""
        color = self.get_color(color, alpha=alpha)
        return Font(fontname=name, fontsize=size, color=color,
                    linewidth=linewidth, alpha=alpha)
    def text_extents(self, text, font):
        """Return (width, height) in pixels of *text* rendered with *font*."""
        retval, baseline = font.font.getTextSize(text, font.scale,
                                                 font.linewidth)
        wd, ht = retval
        return wd, ht
    def text(self, pt, text, font):
        """Draw *text* with its bottom-left corner at point *pt*."""
        x, y = pt
        font.font.putText(self.canvas, text, (x, y), font.scale,
                          font.color, thickness=font.linewidth,
                          line_type=cv2.LINE_AA, bottomLeftOrigin=True)
    def line(self, pt1, pt2, pen):
        """Draw a line segment from *pt1* to *pt2* using *pen*."""
        x1, y1 = int(round(pt1[0])), int(round(pt1[1]))
        x2, y2 = int(round(pt2[0])), int(round(pt2[1]))
        cv2.line(self.canvas, (x1, y1), (x2, y2), pen.color, pen.linewidth)
    def circle(self, pt, radius, pen, brush):
        """Draw a circle at *pt*; filled first if *brush* requests it."""
        x, y = pt
        radius = int(radius)
        if (brush is not None) and brush.fill:
            # Thickness of -1 means "filled" in OpenCV.
            cv2.circle(self.canvas, (x, y), radius, brush.color, -1)
        cv2.circle(self.canvas, (x, y), radius, pen.color, pen.linewidth)
    def polygon(self, points, pen, brush):
        """Draw a closed polygon through *points*, optionally filled."""
        pts = np.array(points, np.int32)
        pts = pts.reshape((-1, 1, 2))
        cv2.polylines(self.canvas, [pts], True, pen.color, pen.linewidth)
        if (brush is not None) and brush.fill:
            cv2.fillPoly(self.canvas, [pts], brush.color)
    def path(self, points, pen):
        """Draw an open polyline through *points*."""
        pts = np.array(points, np.int32)
        pts = pts.reshape((-1, 1, 2))
        cv2.polylines(self.canvas, [pts], False, pen.color, pen.linewidth)
| true | true |
f7f9078107c1939db791d56780a432ef994073ea | 3,572 | py | Python | tensorflow/python/autograph/operators/__init__.py | fwtan/tensorflow | efa3fb28d94b7937edaafb5874c191ad0e2149ca | [
"Apache-2.0"
] | 1 | 2020-05-14T03:53:01.000Z | 2020-05-14T03:53:01.000Z | tensorflow/python/autograph/operators/__init__.py | fwtan/tensorflow | efa3fb28d94b7937edaafb5874c191ad0e2149ca | [
"Apache-2.0"
] | 2 | 2021-08-25T16:05:52.000Z | 2022-02-10T01:51:12.000Z | tensorflow/python/autograph/operators/__init__.py | taotesea/tensorflow | 5e6479904941624cf7ce58ab3d236375c8012ef4 | [
"Apache-2.0"
] | 1 | 2020-08-07T12:49:50.000Z | 2020-08-07T12:49:50.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This module implements operators that AutoGraph overloads.
Note that "operator" is used loosely here, and includes control structures like
conditionals and loops, implemented in functional form, using for example
closures for the body.
"""
# Naming conventions:
# * operator names match the name usually used for the respective Python
# idiom; examples: for_stmt, list_append
# * operator arguments match either of:
# - the corresponding Python AST attribute (e.g. the condition of an if
# statement is called test) if the operator represents an AST construct
# - the names used in the Python docs, if the operator is a function (e.g.
# list_ and x for append, see
# https://docs.python.org/3.7/tutorial/datastructures.html)
#
# All operators may accept a final argument named "opts", of a type that
# subclasses namedtuple and contains any arguments that are only required
# for some specializations of the operator.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.operators.control_flow import for_stmt
from tensorflow.python.autograph.operators.control_flow import if_stmt
from tensorflow.python.autograph.operators.control_flow import while_stmt
from tensorflow.python.autograph.operators.data_structures import list_append
from tensorflow.python.autograph.operators.data_structures import list_pop
from tensorflow.python.autograph.operators.data_structures import list_stack
from tensorflow.python.autograph.operators.data_structures import ListPopOpts
from tensorflow.python.autograph.operators.data_structures import ListStackOpts
from tensorflow.python.autograph.operators.data_structures import new_list
from tensorflow.python.autograph.operators.exceptions import assert_stmt
from tensorflow.python.autograph.operators.logical import and_
from tensorflow.python.autograph.operators.logical import eq
from tensorflow.python.autograph.operators.logical import not_
from tensorflow.python.autograph.operators.logical import not_eq
from tensorflow.python.autograph.operators.logical import or_
from tensorflow.python.autograph.operators.py_builtins import float_
from tensorflow.python.autograph.operators.py_builtins import int_
from tensorflow.python.autograph.operators.py_builtins import len_
from tensorflow.python.autograph.operators.py_builtins import print_
from tensorflow.python.autograph.operators.py_builtins import range_
from tensorflow.python.autograph.operators.slices import get_item
from tensorflow.python.autograph.operators.slices import GetItemOpts
from tensorflow.python.autograph.operators.slices import set_item
from tensorflow.python.autograph.operators.variables import ld
from tensorflow.python.autograph.operators.variables import Undefined
from tensorflow.python.autograph.operators.variables import UndefinedReturnValue
| 54.121212 | 80 | 0.81047 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.operators.control_flow import for_stmt
from tensorflow.python.autograph.operators.control_flow import if_stmt
from tensorflow.python.autograph.operators.control_flow import while_stmt
from tensorflow.python.autograph.operators.data_structures import list_append
from tensorflow.python.autograph.operators.data_structures import list_pop
from tensorflow.python.autograph.operators.data_structures import list_stack
from tensorflow.python.autograph.operators.data_structures import ListPopOpts
from tensorflow.python.autograph.operators.data_structures import ListStackOpts
from tensorflow.python.autograph.operators.data_structures import new_list
from tensorflow.python.autograph.operators.exceptions import assert_stmt
from tensorflow.python.autograph.operators.logical import and_
from tensorflow.python.autograph.operators.logical import eq
from tensorflow.python.autograph.operators.logical import not_
from tensorflow.python.autograph.operators.logical import not_eq
from tensorflow.python.autograph.operators.logical import or_
from tensorflow.python.autograph.operators.py_builtins import float_
from tensorflow.python.autograph.operators.py_builtins import int_
from tensorflow.python.autograph.operators.py_builtins import len_
from tensorflow.python.autograph.operators.py_builtins import print_
from tensorflow.python.autograph.operators.py_builtins import range_
from tensorflow.python.autograph.operators.slices import get_item
from tensorflow.python.autograph.operators.slices import GetItemOpts
from tensorflow.python.autograph.operators.slices import set_item
from tensorflow.python.autograph.operators.variables import ld
from tensorflow.python.autograph.operators.variables import Undefined
from tensorflow.python.autograph.operators.variables import UndefinedReturnValue
| true | true |
f7f909d58b8a77d036f474d253f03615abba9827 | 121,740 | py | Python | src/olympia/addons/tests/test_models.py | gponimansky/addons-server | 7013e50e0687b48bbd88c89fb8af867c1eca4843 | [
"BSD-3-Clause"
] | null | null | null | src/olympia/addons/tests/test_models.py | gponimansky/addons-server | 7013e50e0687b48bbd88c89fb8af867c1eca4843 | [
"BSD-3-Clause"
] | null | null | null | src/olympia/addons/tests/test_models.py | gponimansky/addons-server | 7013e50e0687b48bbd88c89fb8af867c1eca4843 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import json
import os
import time
from datetime import datetime, timedelta
from waffle.testutils import override_switch
from django import forms
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.db import IntegrityError
from django.utils import translation
import pytest
from mock import Mock, patch
from olympia import amo, core
from olympia.activity.models import ActivityLog, AddonLog
from olympia.addons.models import (
Addon, AddonApprovalsCounter, AddonCategory, AddonDependency,
AddonFeatureCompatibility, AddonReviewerFlags, AddonUser, AppSupport,
Category, CompatOverride, CompatOverrideRange, DeniedGuid, DeniedSlug,
FrozenAddon, IncompatibleVersions, Persona, Preview,
track_addon_status_change)
from olympia.amo.templatetags.jinja_helpers import absolutify, user_media_url
from olympia.amo.tests import (
TestCase, addon_factory, collection_factory, version_factory)
from olympia.applications.models import AppVersion
from olympia.bandwagon.models import Collection, FeaturedCollection
from olympia.constants.categories import CATEGORIES
from olympia.devhub.models import RssKey
from olympia.files.models import File
from olympia.files.tests.test_models import UploadTest
from olympia.ratings.models import Rating, RatingFlag
from olympia.translations.models import (
Translation, TranslationSequence, delete_translation)
from olympia.users.models import UserProfile
from olympia.versions.compare import version_int
from olympia.versions.models import ApplicationsVersions, Version
class TestCleanSlug(TestCase):
    """Tests for Addon.clean_slug(): uniqueness, denied slugs, length."""
    def test_clean_slug_new_object(self):
        # Make sure there's at least an addon with the "addon" slug, subsequent
        # ones should be "addon1", "addon2" ...
        a = Addon.objects.create(name='Addon')
        assert a.slug == 'addon'
        # Start with a first clash. This should give us 'addon1'.
        # We're not saving yet, we're testing the slug creation without an id.
        b = Addon(name='Addon')
        b.clean_slug()
        assert b.slug == 'addon1'
        # Now save the instance to the database for future clashes.
        b.save()
        # Test on another object without an id.
        c = Addon(name='Addon')
        c.clean_slug()
        assert c.slug == 'addon2'
        # Even if an addon is deleted, don't clash with its slug.
        c.status = amo.STATUS_DELETED
        # Now save the instance to the database for future clashes.
        c.save()
        # And yet another object without an id. Make sure we're not trying to
        # assign the 'addon2' slug from the deleted addon.
        d = Addon(name='Addon')
        d.clean_slug()
        assert d.slug == 'addon3'
    def test_clean_slug_no_name(self):
        # Create an addon and save it to have an id.
        a = Addon.objects.create()
        # Start over: don't use the name nor the id to generate the slug.
        a.slug = a.name = ''
        a.clean_slug()
        # Slugs that are generated from add-ons without a name use
        # uuid without the node bit so have the length 20.
        assert len(a.slug) == 20
    def test_clean_slug_with_name(self):
        # Make sure there's at least an addon with the 'fooname' slug,
        # subsequent ones should be 'fooname1', 'fooname2' ...
        a = Addon.objects.create(name='fooname')
        assert a.slug == 'fooname'
        b = Addon(name='fooname')
        b.clean_slug()
        assert b.slug == 'fooname1'
    def test_clean_slug_with_slug(self):
        # Make sure there's at least an addon with the 'fooslug' slug,
        # subsequent ones should be 'fooslug1', 'fooslug2' ...
        a = Addon.objects.create(name='fooslug')
        assert a.slug == 'fooslug'
        b = Addon(name='fooslug')
        b.clean_slug()
        assert b.slug == 'fooslug1'
    def test_clean_slug_denied_slug(self):
        denied_slug = 'foodenied'
        DeniedSlug.objects.create(name=denied_slug)
        a = Addon(slug=denied_slug)
        a.clean_slug()
        # Denied slugs (like 'activate' or IDs) have a "~" appended to
        # avoid clashing with URLs.
        assert a.slug == '%s~' % denied_slug
        # Now save the instance to the database for future clashes.
        a.save()
        b = Addon(slug=denied_slug)
        b.clean_slug()
        assert b.slug == '%s~1' % denied_slug
    def test_clean_slug_denied_slug_long_slug(self):
        long_slug = 'this_is_a_very_long_slug_that_is_longer_than_thirty_chars'
        DeniedSlug.objects.create(name=long_slug[:30])
        # If there's no clashing slug, just append a '~'.
        a = Addon.objects.create(slug=long_slug[:30])
        assert a.slug == '%s~' % long_slug[:29]
        # If there's a clash, use the standard clash resolution.
        a = Addon.objects.create(slug=long_slug[:30])
        assert a.slug == '%s1' % long_slug[:28]
    def test_clean_slug_long_slug(self):
        long_slug = 'this_is_a_very_long_slug_that_is_longer_than_thirty_chars'
        # If there's no clashing slug, don't over-shorten it.
        a = Addon.objects.create(slug=long_slug)
        assert a.slug == long_slug[:30]
        # Now that there is a clash, test the clash resolution.
        b = Addon(slug=long_slug)
        b.clean_slug()
        assert b.slug == '%s1' % long_slug[:28]
    def test_clean_slug_always_slugify(self):
        illegal_chars = 'some spaces and !?@'
        # Slugify if there's a slug provided.
        a = Addon(slug=illegal_chars)
        a.clean_slug()
        assert a.slug.startswith('some-spaces-and'), a.slug
        # Also slugify if there's no slug provided.
        b = Addon(name=illegal_chars)
        b.clean_slug()
        assert b.slug.startswith('some-spaces-and'), b.slug
    def test_clean_slug_worst_case_scenario(self):
        long_slug = 'this_is_a_very_long_slug_that_is_longer_than_thirty_chars'
        # Generate 100 addons with this very long slug. We should encounter the
        # worst case scenario where all the available clashes have been
        # avoided. Check the comment in addons.models.clean_slug, in the 'else'
        # part of the 'for' loop checking for available slugs not yet assigned.
        for i in range(100):
            Addon.objects.create(slug=long_slug)
        with self.assertRaises(RuntimeError):  # Fail on the 100th clash.
            Addon.objects.create(slug=long_slug)
    def test_clean_slug_ends_with_dash(self):
        """Addon name ending with a dash should still work: See bug 1206063."""
        a = Addon.objects.create(name='ends with dash -')
        assert a.slug == 'ends-with-dash-'
        assert a.slug == amo.utils.slugify(a.slug)
        b = Addon.objects.create(name='ends with dash -')
        assert b.slug == 'ends-with-dash-1'
        assert b.slug == amo.utils.slugify(b.slug)
    def test_clean_slug_unicode(self):
        # Non-ASCII characters are preserved in the slug.
        addon = Addon.objects.create(name=u'Addön 1')
        assert addon.slug == u'addön-1'
class TestAddonManager(TestCase):
    """Tests for the custom Addon managers (objects vs. unfiltered) and
    their queryset helpers (featured, listed, valid, public, ...)."""
    fixtures = ['base/appversion', 'base/users',
                'base/addon_3615', 'addons/featured', 'addons/test_manager',
                'base/collections', 'base/featured',
                'bandwagon/featured_collections', 'base/addon_5299_gcal']
    def setUp(self):
        super(TestAddonManager, self).setUp()
        # No "current" user for these tests.
        core.set_user(None)
        self.addon = Addon.objects.get(pk=3615)
    def test_managers_public(self):
        """A public add-on is visible through both managers."""
        assert self.addon in Addon.objects.all()
        assert self.addon in Addon.unfiltered.all()
    def test_managers_unlisted(self):
        """An unlisted add-on is still visible through both managers."""
        self.make_addon_unlisted(self.addon)
        assert self.addon in Addon.objects.all()
        assert self.addon in Addon.unfiltered.all()
    def test_managers_unlisted_deleted(self):
        """A deleted unlisted add-on only shows up in `unfiltered`."""
        self.make_addon_unlisted(self.addon)
        self.addon.update(status=amo.STATUS_DELETED)
        assert self.addon not in Addon.objects.all()
        assert self.addon in Addon.unfiltered.all()
    def test_managers_deleted(self):
        """A deleted add-on only shows up in `unfiltered`."""
        self.addon.update(status=amo.STATUS_DELETED)
        assert self.addon not in Addon.objects.all()
        assert self.addon in Addon.unfiltered.all()
    def test_featured(self):
        assert Addon.objects.featured(amo.FIREFOX).count() == 3
    def test_listed(self):
        # We need this for the fixtures, but it messes up the tests.
        self.addon.update(disabled_by_user=True)
        # Now continue as normal.
        Addon.objects.filter(id=5299).update(disabled_by_user=True)
        q = Addon.objects.listed(amo.FIREFOX, amo.STATUS_PUBLIC)
        assert len(q.all()) == 4
        # Pick one of the listed addons.
        addon = Addon.objects.get(pk=2464)
        assert addon in q.all()
        # Disabling hides it.
        addon.disabled_by_user = True
        addon.save()
        # Should be 3 now, since the one is now disabled.
        assert q.count() == 3
        # If we search for public or unreviewed we find it.
        addon.disabled_by_user = False
        addon.status = amo.STATUS_NOMINATED
        addon.save()
        assert q.count() == 3
        assert Addon.objects.listed(amo.FIREFOX, amo.STATUS_PUBLIC,
                                    amo.STATUS_NOMINATED).count() == 4
        # Can't find it without a file.
        addon.versions.get().files.get().delete()
        assert q.count() == 3
    def test_public(self):
        """public() only returns add-ons with STATUS_PUBLIC."""
        for a in Addon.objects.public():
            assert a.status == amo.STATUS_PUBLIC
    def test_valid(self):
        """valid() excludes user-disabled add-ons and invalid statuses."""
        addon = Addon.objects.get(pk=5299)
        addon.update(disabled_by_user=True)
        objs = Addon.objects.valid()
        for addon in objs:
            assert addon.status in amo.VALID_ADDON_STATUSES
            assert not addon.disabled_by_user
    def test_valid_disabled_by_user(self):
        # User-disabled add-ons stay in valid_and_disabled_and_pending().
        before = Addon.objects.valid_and_disabled_and_pending().count()
        addon = Addon.objects.get(pk=5299)
        addon.update(disabled_by_user=True)
        assert Addon.objects.valid_and_disabled_and_pending().count() == before
    def test_valid_disabled_by_admin(self):
        # Admin-disabled add-ons stay in valid_and_disabled_and_pending().
        before = Addon.objects.valid_and_disabled_and_pending().count()
        addon = Addon.objects.get(pk=5299)
        addon.update(status=amo.STATUS_DISABLED)
        assert Addon.objects.valid_and_disabled_and_pending().count() == before
    def test_invalid_deleted(self):
        # Deleted add-ons drop out of valid_and_disabled_and_pending().
        before = Addon.objects.valid_and_disabled_and_pending().count()
        addon = Addon.objects.get(pk=5299)
        addon.update(status=amo.STATUS_DELETED)
        assert Addon.objects.valid_and_disabled_and_pending().count() == (
            before - 1)
    def test_valid_disabled_pending(self):
        # Pending add-ons are included in valid_and_disabled_and_pending().
        before = Addon.objects.valid_and_disabled_and_pending().count()
        amo.tests.addon_factory(status=amo.STATUS_PENDING)
        assert Addon.objects.valid_and_disabled_and_pending().count() == (
            before + 1)
    def test_valid_disabled_version(self):
        before = Addon.objects.valid_and_disabled_and_pending().count()
        # Add-on, no version. Doesn't count.
        addon = amo.tests.addon_factory()
        addon.update(_current_version=None, _signal=False)
        assert Addon.objects.valid_and_disabled_and_pending().count() == before
        # Theme, no version. Counts.
        addon = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
        addon.update(_current_version=None, _signal=False)
        assert Addon.objects.valid_and_disabled_and_pending().count() == (
            before + 1)
    def test_new_featured(self):
        f = Addon.objects.featured(amo.FIREFOX)
        assert f.count() == 3
        assert sorted(x.id for x in f) == (
            [2464, 7661, 15679])
        # Nothing is featured for Thunderbird in the fixtures.
        f = Addon.objects.featured(amo.THUNDERBIRD)
        assert not f.exists()
    def test_filter_for_many_to_many(self):
        # Check https://bugzilla.mozilla.org/show_bug.cgi?id=1142035.
        collection = self.addon.collections.first()
        assert collection.addons.get() == self.addon
        # Addon shouldn't be listed in collection.addons if it's deleted.
        # Unlisted.
        self.make_addon_unlisted(self.addon)
        collection = Collection.objects.get(pk=collection.pk)
        assert collection.addons.get() == self.addon
        # Deleted and unlisted.
        self.addon.update(status=amo.STATUS_DELETED)
        collection = Collection.objects.get(pk=collection.pk)
        assert collection.addons.count() == 0
        # Only deleted.
        self.make_addon_listed(self.addon)
        collection = Collection.objects.get(pk=collection.pk)
        assert collection.addons.count() == 0
    def test_no_filter_for_relations(self):
        # Check https://bugzilla.mozilla.org/show_bug.cgi?id=1142035.
        version = self.addon.versions.first()
        assert version.addon == self.addon
        # Deleted or unlisted, version.addon should still work.
        # Unlisted.
        self.make_addon_unlisted(self.addon)
        version = Version.objects.get(pk=version.pk)  # Reload from db.
        assert version.addon == self.addon
        # Deleted and unlisted.
        self.addon.update(status=amo.STATUS_DELETED)
        version = Version.objects.get(pk=version.pk)  # Reload from db.
        assert version.addon == self.addon
        # Only deleted.
        self.make_addon_listed(self.addon)
        version = Version.objects.get(pk=version.pk)  # Reload from db.
        assert version.addon == self.addon
class TestAddonModels(TestCase):
fixtures = ['base/appversion',
'base/collections',
'base/featured',
'base/users',
'base/addon_5299_gcal',
'base/addon_3615',
'base/addon_3723_listed',
'base/addon_6704_grapple.json',
'base/addon_4594_a9',
'base/addon_4664_twitterbar',
'base/thunderbird',
'addons/featured',
'addons/invalid_latest_version',
'addons/denied',
'bandwagon/featured_collections']
    def setUp(self):
        super(TestAddonModels, self).setUp()
        TranslationSequence.objects.create(id=99243)
        # Pin the Firefox "latest version" these tests were written against;
        # restored in tearDown.
        self.old_version = amo.FIREFOX.latest_version
        amo.FIREFOX.latest_version = '3.6.15'
    def tearDown(self):
        # Restore the value stashed in setUp.
        amo.FIREFOX.latest_version = self.old_version
        super(TestAddonModels, self).tearDown()
    def test_current_version(self):
        """
        Tests that we get the current (latest public) version of an addon.
        """
        a = Addon.objects.get(pk=3615)
        # Version 81551 comes from the loaded fixtures.
        assert a.current_version.id == 81551
    def test_current_version_listed(self):
        """Current version of a listed add-on (pk=3723 from fixtures)."""
        a = Addon.objects.get(pk=3723)
        assert a.current_version.id == 89774
    def test_current_version_listed_no_version(self):
        """current_version is None once all versions are removed."""
        Addon.objects.filter(pk=3723).update(_current_version=None)
        Version.objects.filter(addon=3723).delete()
        a = Addon.objects.get(pk=3723)
        assert a.current_version is None
    def test_latest_unlisted_version(self):
        """latest_unlisted_version returns the newest non-disabled unlisted
        version; it is cached, resettable via `del`, and writeable."""
        addon = Addon.objects.get(pk=3615)
        an_unlisted_version = version_factory(
            addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        an_unlisted_version.update(created=self.days_ago(2))
        a_newer_unlisted_version = version_factory(
            addon=addon, version='4.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        a_newer_unlisted_version.update(created=self.days_ago(1))
        # A disabled unlisted version should be ignored.
        version_factory(
            addon=addon, version='5.0', channel=amo.RELEASE_CHANNEL_UNLISTED,
            file_kw={'status': amo.STATUS_DISABLED})
        assert addon.latest_unlisted_version == a_newer_unlisted_version
        # Make sure the property is cached.
        an_even_newer_unlisted_version = version_factory(
            addon=addon, version='6.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        assert addon.latest_unlisted_version == a_newer_unlisted_version
        # Make sure it can be deleted to reset it.
        del addon.latest_unlisted_version
        assert addon.latest_unlisted_version == an_even_newer_unlisted_version
        # Make sure it's writeable.
        addon.latest_unlisted_version = an_unlisted_version
        assert addon.latest_unlisted_version == an_unlisted_version
    def test_find_latest_version(self):
        """
        Tests that we get the latest version of an addon.
        """
        addon = Addon.objects.get(pk=3615)
        addon.current_version.update(created=self.days_ago(2))
        new_version = version_factory(addon=addon, version='2.0')
        new_version.update(created=self.days_ago(1))
        assert addon.find_latest_version(None) == new_version
        # With channel=None, unlisted versions are considered too.
        another_new_version = version_factory(
            addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        assert addon.find_latest_version(None) == another_new_version
    def test_find_latest_version_different_channel(self):
        """find_latest_version honors the requested release channel."""
        addon = Addon.objects.get(pk=3615)
        addon.current_version.update(created=self.days_ago(2))
        new_version = version_factory(addon=addon, version='2.0')
        new_version.update(created=self.days_ago(1))
        unlisted_version = version_factory(
            addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        assert (
            addon.find_latest_version(channel=amo.RELEASE_CHANNEL_LISTED) ==
            new_version)
        assert (
            addon.find_latest_version(channel=amo.RELEASE_CHANNEL_UNLISTED) ==
            unlisted_version)
    def test_find_latest_version_no_version(self):
        """find_latest_version returns None when the add-on has no versions."""
        Addon.objects.filter(pk=3723).update(_current_version=None)
        Version.objects.filter(addon=3723).delete()
        addon = Addon.objects.get(pk=3723)
        assert addon.find_latest_version(None) is None
    def test_find_latest_version_ignore_beta(self):
        """By default, beta versions are excluded from find_latest_version()."""
        addon = Addon.objects.get(pk=3615)
        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(1))
        assert addon.find_latest_version(None).id == v1.id
        version_factory(addon=addon, version='2.0beta',
                        file_kw={'status': amo.STATUS_BETA})
        # Still should be v1
        assert addon.find_latest_version(None).id == v1.id
    def test_find_latest_version_ignore_disabled(self):
        """By default, versions whose files are disabled are excluded from
        find_latest_version()."""
        addon = Addon.objects.get(pk=3615)
        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(1))
        assert addon.find_latest_version(None).id == v1.id
        version_factory(addon=addon, version='2.0',
                        file_kw={'status': amo.STATUS_DISABLED})
        # Still should be v1
        assert addon.find_latest_version(None).id == v1.id
    def test_find_latest_version_only_exclude_beta(self):
        """With exclude=(STATUS_BETA,), disabled versions are considered but
        beta versions are not."""
        addon = Addon.objects.get(pk=3615)
        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(2))
        assert addon.find_latest_version(
            None, exclude=(amo.STATUS_BETA,)).id == v1.id
        v2 = version_factory(addon=addon, version='2.0',
                             file_kw={'status': amo.STATUS_DISABLED})
        v2.update(created=self.days_ago(1))
        version_factory(addon=addon, version='3.0beta',
                        file_kw={'status': amo.STATUS_BETA})
        # Should be v2 since we don't exclude disabled, but do exclude beta.
        assert addon.find_latest_version(
            None, exclude=(amo.STATUS_BETA,)).id == v2.id
    @override_switch('beta-versions', active=True)
    def test_find_latest_version_dont_exclude_anything_with_beta(self):
        """With exclude=() and the beta waffle switch enabled, even beta and
        disabled versions are candidates; the newest one wins."""
        addon = Addon.objects.get(pk=3615)
        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(2))
        assert addon.find_latest_version(None, exclude=()).id == v1.id
        v2 = version_factory(addon=addon, version='2.0',
                             file_kw={'status': amo.STATUS_DISABLED})
        v2.update(created=self.days_ago(1))
        v3 = version_factory(addon=addon, version='3.0beta',
                             file_kw={'status': amo.STATUS_BETA})
        # Should be v3 since we don't exclude anything.
        assert addon.find_latest_version(None, exclude=()).id == v3.id
    def test_find_latest_version_dont_exclude_anything(self):
        """With exclude=(), disabled versions are candidates too; the newest
        one wins."""
        addon = Addon.objects.get(pk=3615)
        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(2))
        assert addon.find_latest_version(None, exclude=()).id == v1.id
        v2 = version_factory(addon=addon, version='2.0',
                             file_kw={'status': amo.STATUS_DISABLED})
        v2.update(created=self.days_ago(1))
        # Should be v2 since we don't exclude anything.
        assert addon.find_latest_version(None, exclude=()).id == v2.id
@override_switch('beta-versions', active=True)
def test_find_latest_version_dont_exclude_anything_w_channel_w_beta(self):
addon = Addon.objects.get(pk=3615)
v1 = version_factory(addon=addon, version='1.0')
v1.update(created=self.days_ago(3))
assert addon.find_latest_version(
amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v1.id
v2 = version_factory(addon=addon, version='2.0',
file_kw={'status': amo.STATUS_DISABLED})
v2.update(created=self.days_ago(2))
v3 = version_factory(addon=addon, version='3.0beta',
file_kw={'status': amo.STATUS_BETA})
v2.update(created=self.days_ago(1))
version_factory(
addon=addon, version='4.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
# Should be v3 since we don't exclude anything, but do have a channel
# set to listed, and version 4.0 is unlisted.
assert addon.find_latest_version(
amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v3.id
    def test_find_latest_version_dont_exclude_anything_with_channel(self):
        """With exclude=() but a channel set to listed, the newest listed
        version (even disabled) wins; the unlisted 4.0 is ignored."""
        addon = Addon.objects.get(pk=3615)
        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(3))
        assert addon.find_latest_version(
            amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v1.id
        v2 = version_factory(addon=addon, version='2.0',
                             file_kw={'status': amo.STATUS_DISABLED})
        v2.update(created=self.days_ago(1))
        version_factory(
            addon=addon, version='4.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        # Should be v2 since we don't exclude anything, but do have a channel
        # set to listed, and version 4.0 is unlisted.
        assert addon.find_latest_version(
            amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v2.id
    def test_current_version_unsaved(self):
        """current_version is None when the cached version object has never
        been saved (no pk)."""
        addon = Addon()
        addon._current_version = Version()
        assert addon.current_version is None
def test_find_latest_version_unsaved(self):
addon = Addon()
assert addon.find_latest_version(None) is None
    @override_switch('beta-versions', active=True)
    def test_current_beta_version_with_beta(self):
        """With the beta waffle switch on, current_beta_version returns the
        fixture add-on's beta version (pk 50000)."""
        addon = Addon.objects.get(pk=5299)
        assert addon.current_beta_version.id == 50000
    def test_current_beta_version(self):
        """Without the beta waffle switch, current_beta_version is None even
        though the fixture add-on has a beta version."""
        addon = Addon.objects.get(pk=5299)
        assert addon.current_beta_version is None
    def test_transformer(self):
        """The queryset transformer prefetches current_version, so accessing
        it afterwards should not hit the database."""
        addon = Addon.objects.get(pk=3615)
        # If the transformer works then we won't have any more queries.
        with self.assertNumQueries(0):
            assert addon.current_version
    def _delete(self, addon_id):
        """Test deleting add-ons.

        Deletes the add-on with pk ``addon_id`` and asserts the soft-delete
        contract: row kept with STATUS_DELETED, slug and current_version
        cleared, guid preserved, one notification mail per deleted add-on,
        and a DELETE_ADDON activity log entry.
        """
        core.set_user(UserProfile.objects.last())
        addon_count = Addon.unfiltered.count()
        addon = Addon.objects.get(pk=addon_id)
        guid = addon.guid
        addon.delete('bye')
        assert addon_count == Addon.unfiltered.count()  # Soft deletion.
        assert addon.status == amo.STATUS_DELETED
        assert addon.slug is None
        assert addon.current_version is None
        assert addon.guid == guid  # We don't clear it anymore.
        deleted_count = Addon.unfiltered.filter(
            status=amo.STATUS_DELETED).count()
        assert len(mail.outbox) == deleted_count
        log = AddonLog.objects.order_by('-id').first().activity_log
        assert log.action == amo.LOG.DELETE_ADDON.id
        assert log.to_string() == (
            'Addon id {0} with GUID {1} has been deleted'.format(
                addon_id, guid))
    def test_delete(self):
        """Soft-delete two add-ons; the second checks that nullified unique
        fields (e.g. slug) don't cause integrity errors."""
        addon = Addon.unfiltered.get(pk=3615)
        addon.name = u'é'  # Make sure we don't have encoding issues.
        addon.save()
        self._delete(3615)
        # Delete another add-on, and make sure we don't have integrity errors
        # with unique constraints on fields that got nullified.
        self._delete(5299)
    def test_delete_persona(self):
        """Deleting a persona (which has no GUID) follows the same
        soft-delete contract."""
        addon = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
        assert addon.guid is None  # Personas don't have GUIDs.
        self._delete(addon.pk)
    def _delete_url(self):
        """Test deleting addon has URL in the email.

        Helper: deletes fixture add-on 4594 and asserts its absolute detail
        URL appears in the notification mail body.
        """
        a = Addon.objects.get(pk=4594)
        url = a.get_url_path()
        a.delete('bye')
        assert absolutify(url) in mail.outbox[0].body
    def test_delete_url(self):
        """The URL-in-mail deletion is a soft delete: the unfiltered add-on
        count stays the same."""
        count = Addon.unfiltered.count()
        self._delete_url()
        assert count == Addon.unfiltered.count()
    def test_delete_reason(self):
        """Test deleting with a reason gives the reason in the mail."""
        reason = u'trêason'
        a = Addon.objects.get(pk=3615)
        # NOTE(review): the name is assigned but never saved here — looks
        # like it only exercises in-memory unicode handling; confirm intent.
        a.name = u'é'
        assert len(mail.outbox) == 0
        a.delete(msg='bye', reason=reason)
        assert len(mail.outbox) == 1
        assert reason in mail.outbox[0].body
    def test_delete_incomplete_no_versions(self):
        """Test deleting incomplete add-ons.

        An incomplete add-on with no versions left is hard-deleted: no mail
        is sent and the unfiltered count drops by one.
        """
        count = Addon.unfiltered.count()
        addon = Addon.objects.get(pk=3615)
        addon.current_version.delete(hard=True)
        # The addon status will have been changed when we deleted the version,
        # and the instance should be the same, so we shouldn't need to reload.
        assert addon.status == amo.STATUS_NULL
        addon.delete(None)
        assert len(mail.outbox) == 0
        assert Addon.unfiltered.count() == (count - 1)
    def test_delete_incomplete_with_versions(self):
        """Test deleting incomplete add-ons.

        An incomplete add-on that still has versions is soft-deleted and a
        notification mail is sent.
        """
        count = Addon.unfiltered.count()
        a = Addon.objects.get(pk=3615)
        a.status = 0
        a.save()
        a.delete('oh looky here')
        assert len(mail.outbox) == 1
        assert count == Addon.unfiltered.count()
    def test_delete_searchengine(self):
        """
        Test deleting searchengines (which have no guids) should not barf up
        the deletion machine.
        """
        a = Addon.objects.get(pk=4594)
        a.delete('bye')
        assert len(mail.outbox) == 1
    def test_delete_disabled_addon_is_added_to_deniedguids(self):
        """Deleting an admin-disabled add-on records its GUID in
        DeniedGuid so it can't be re-uploaded."""
        addon = Addon.unfiltered.get(pk=3615)
        addon.update(status=amo.STATUS_DISABLED)
        self._delete(3615)
        assert DeniedGuid.objects.filter(guid=addon.guid).exists()
    def test_delete_disabled_addon_when_guid_is_already_in_deniedguids(self):
        """Deletion still succeeds when the GUID is already present in
        DeniedGuid (no duplicate-key failure)."""
        addon = Addon.unfiltered.get(pk=3615)
        DeniedGuid.objects.create(guid=addon.guid)
        addon.update(status=amo.STATUS_DISABLED)
        self._delete(3615)
        assert DeniedGuid.objects.filter(guid=addon.guid).exists()
    def test_incompatible_latest_apps(self):
        """incompatible_latest_apps() lists apps whose latest release is
        newer than the add-on's max supported app version."""
        a = Addon.objects.get(pk=3615)
        assert a.incompatible_latest_apps() == []
        av = ApplicationsVersions.objects.get(pk=47881)
        av.max = AppVersion.objects.get(pk=97)  # Firefox 2.0
        av.save()
        a = Addon.objects.get(pk=3615)
        assert a.incompatible_latest_apps() == [amo.FIREFOX]
        # Check a search engine addon.
        a = Addon.objects.get(pk=4594)
        assert a.incompatible_latest_apps() == []
    def test_incompatible_asterix(self):
        """A wildcard max version like '5.*' is treated as compatible with
        the latest app release."""
        av = ApplicationsVersions.objects.get(pk=47881)
        av.max = AppVersion.objects.create(application=amo.FIREFOX.id,
                                           version_int=version_int('5.*'),
                                           version='5.*')
        av.save()
        a = Addon.objects.get(pk=3615)
        assert a.incompatible_latest_apps() == []
    def test_icon_url(self):
        """
        Tests for various icons.
        1. Test for an icon that is set.
        2. Test for an icon that is set, with an icon hash
        3. Test for default THEME icon.
        4. Test for default non-THEME icon.
        """
        addon = Addon.objects.get(pk=3615)
        # Cache-busting query string uses the modified timestamp by default.
        assert addon.icon_url.endswith('/3/3615-32.png?modified=1275037317')
        # ...and the explicit icon_hash when one is set.
        addon.icon_hash = 'somehash'
        assert addon.icon_url.endswith('/3/3615-32.png?modified=somehash')
        addon = Addon.objects.get(pk=6704)
        addon.icon_type = None
        assert addon.icon_url.endswith('/icons/default-theme.png'), (
            'No match for %s' % addon.icon_url)
        addon = Addon.objects.get(pk=3615)
        addon.icon_type = None
        assert addon.icon_url.endswith('icons/default-32.png')
    def test_icon_url_default(self):
        """With no icon_type set, icon_url falls back to the size-specific
        default icon, unless use_default=False, which yields None."""
        a = Addon.objects.get(pk=3615)
        a.update(icon_type='')
        default = 'icons/default-32.png'
        assert a.icon_url.endswith(default)
        assert a.get_icon_url(32).endswith(default)
        assert a.get_icon_url(32, use_default=True).endswith(default)
        assert a.get_icon_url(32, use_default=False) is None
    def test_thumbnail_url(self):
        """
        Test for the actual thumbnail URL if it should exist, or the no-preview
        url.
        """
        a = Addon.objects.get(pk=4664)
        # str.index raises ValueError if the expected path is missing.
        a.thumbnail_url.index('/previews/thumbs/20/20397.png?modified=')
        a = Addon.objects.get(pk=5299)
        assert a.thumbnail_url.endswith('/icons/no-preview.png'), (
            'No match for %s' % a.thumbnail_url)
    def test_is_unreviewed(self):
        """Test if add-on is unreviewed or not"""
        # public add-on
        a = Addon.objects.get(pk=3615)
        assert not a.is_unreviewed(), 'public add-on: is_unreviewed=False'
        a.status = amo.STATUS_NOMINATED
        assert a.is_unreviewed(), 'pending add-on: is_unreviewed=True'
    def test_is_public(self):
        """is_public() requires both STATUS_PUBLIC and not being disabled
        by the developer."""
        # Public add-on.
        addon = Addon.objects.get(pk=3615)
        assert addon.status == amo.STATUS_PUBLIC
        assert addon.is_public()
        # Should be public by status, but since it's disabled add-on it's not.
        addon.disabled_by_user = True
        assert not addon.is_public()
    def test_is_restart_required(self):
        """Addon.is_restart_required mirrors the flag on the current
        version's file, and is False when there is no current version."""
        addon = Addon.objects.get(pk=3615)
        file_ = addon.current_version.all_files[0]
        assert not file_.is_restart_required
        assert not addon.is_restart_required
        file_.update(is_restart_required=True)
        # Reload: the property is derived from the file's flag.
        assert Addon.objects.get(pk=3615).is_restart_required
        addon.versions.all().delete()
        addon._current_version = None
        assert not addon.is_restart_required
    def test_is_featured(self):
        """Test if an add-on is globally featured"""
        a = Addon.objects.get(pk=1003)
        assert a.is_featured(amo.FIREFOX, 'en-US'), (
            'globally featured add-on not recognized')
    def test_get_featured_by_app(self):
        """get_featured_by_app() maps application id -> set of locales the
        add-on is featured for (None meaning all locales)."""
        addon = Addon.objects.get(pk=1003)
        featured_coll = addon.collections.get().featuredcollection_set.get()
        assert featured_coll.locale is None
        # Get the applications this addon is featured for.
        assert addon.get_featured_by_app() == {amo.FIREFOX.id: {None}}
        featured_coll.update(locale='fr')
        # Check the locale works.
        assert addon.get_featured_by_app() == {amo.FIREFOX.id: {'fr'}}
        pt_coll = collection_factory()
        pt_coll.add_addon(addon)
        FeaturedCollection.objects.create(collection=pt_coll,
                                          application=amo.FIREFOX.id,
                                          locale='pt-PT')
        # Add another featured collection for the same application.
        assert addon.get_featured_by_app() == {amo.FIREFOX.id: {'fr', 'pt-PT'}}
        mobile_coll = collection_factory()
        mobile_coll.add_addon(addon)
        FeaturedCollection.objects.create(collection=mobile_coll,
                                          application=amo.ANDROID.id,
                                          locale='pt-PT')
        # Add a featured collection for the a different application.
        assert addon.get_featured_by_app() == {
            amo.FIREFOX.id: {'fr', 'pt-PT'},
            amo.ANDROID.id: {'pt-PT'}}
    def newlines_helper(self, string_before):
        """Store ``string_before`` as the add-on's privacy policy and return
        the cleaned (sanitized) localized string, so tests can compare the
        cleaner's input and output."""
        addon = Addon.objects.get(pk=3615)
        addon.privacy_policy = string_before
        addon.save()
        return addon.privacy_policy.localized_string_clean
def test_newlines_normal(self):
before = ("Paragraph one.\n"
"This should be on the very next line.\n\n"
"Should be two nl's before this line.\n\n\n"
"Should be three nl's before this line.\n\n\n\n"
"Should be four nl's before this line.")
after = before # Nothing special; this shouldn't change.
assert self.newlines_helper(before) == after
    def test_newlines_ul(self):
        """Newlines between <ul> and <li> (and between <li>s) are dropped;
        newlines inside an <li>'s text are kept."""
        before = ("<ul>\n\n"
                  "<li>No nl's between the ul and the li.</li>\n\n"
                  "<li>No nl's between li's.\n\n"
                  "But there should be two before this line.</li>\n\n"
                  "</ul>")
        after = ("<ul>"
                 "<li>No nl's between the ul and the li.</li>"
                 "<li>No nl's between li's.\n\n"
                 "But there should be two before this line.</li>"
                 "</ul>")
        assert self.newlines_helper(before) == after
    def test_newlines_ul_tight(self):
        """A single newline before a <ul> survives; a single newline after
        the </ul> is dropped."""
        before = ("There should be one nl between this and the ul.\n"
                  "<ul><li>test</li><li>test</li></ul>\n"
                  "There should be no nl's above this line.")
        after = ("There should be one nl between this and the ul.\n"
                 "<ul><li>test</li><li>test</li></ul>"
                 "There should be no nl's above this line.")
        assert self.newlines_helper(before) == after
    def test_newlines_ul_loose(self):
        """Two newlines before a <ul> survive; two after the </ul> collapse
        to one."""
        before = ("There should be two nl's between this and the ul.\n\n"
                  "<ul><li>test</li><li>test</li></ul>\n\n"
                  "There should be one nl above this line.")
        after = ("There should be two nl's between this and the ul.\n\n"
                 "<ul><li>test</li><li>test</li></ul>\n"
                 "There should be one nl above this line.")
        assert self.newlines_helper(before) == after
    def test_newlines_blockquote_tight(self):
        """A single newline before a <blockquote> survives; one after it is
        dropped."""
        before = ("There should be one nl below this.\n"
                  "<blockquote>Hi</blockquote>\n"
                  "There should be no nl's above this.")
        after = ("There should be one nl below this.\n"
                 "<blockquote>Hi</blockquote>"
                 "There should be no nl's above this.")
        assert self.newlines_helper(before) == after
    def test_newlines_blockquote_loose(self):
        """Two newlines before a <blockquote> survive; two after it collapse
        to one."""
        before = ("There should be two nls below this.\n\n"
                  "<blockquote>Hi</blockquote>\n\n"
                  "There should be one nl above this.")
        after = ("There should be two nls below this.\n\n"
                 "<blockquote>Hi</blockquote>\n"
                 "There should be one nl above this.")
        assert self.newlines_helper(before) == after
def test_newlines_inline(self):
before = ("If we end a paragraph w/ a <b>non-block-level tag</b>\n\n"
"<b>The newlines</b> should be kept")
after = before # Should stay the same
assert self.newlines_helper(before) == after
def test_newlines_code_inline(self):
before = ("Code tags aren't blocks.\n\n"
"<code>alert(test);</code>\n\n"
"See?")
after = before # Should stay the same
assert self.newlines_helper(before) == after
    def test_newlines_li_newlines(self):
        """Leading/trailing newlines inside an <li> are stripped; newlines
        in the middle of an <li>'s text are kept."""
        before = ("<ul><li>\nxx</li></ul>")
        after = ("<ul><li>xx</li></ul>")
        assert self.newlines_helper(before) == after
        before = ("<ul><li>xx\n</li></ul>")
        after = ("<ul><li>xx</li></ul>")
        assert self.newlines_helper(before) == after
        before = ("<ul><li>xx\nxx</li></ul>")
        after = ("<ul><li>xx\nxx</li></ul>")
        assert self.newlines_helper(before) == after
        before = ("<ul><li></li></ul>")
        after = ("<ul><li></li></ul>")
        assert self.newlines_helper(before) == after
        # All together now
        before = ("<ul><li>\nxx</li> <li>xx\n</li> <li>xx\nxx</li> "
                  "<li></li>\n</ul>")
        after = ("<ul><li>xx</li> <li>xx</li> <li>xx\nxx</li> "
                 "<li></li></ul>")
        assert self.newlines_helper(before) == after
def test_newlines_empty_tag(self):
before = ("This is a <b></b> test!")
after = before
assert self.newlines_helper(before) == after
def test_newlines_empty_tag_nested(self):
before = ("This is a <b><i></i></b> test!")
after = before
assert self.newlines_helper(before) == after
    def test_newlines_empty_tag_block_nested(self):
        """The newline after a nested empty block structure is dropped;
        the two before it are kept."""
        b = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>\ntest.")
        a = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>test.")
        assert self.newlines_helper(b) == a
    def test_newlines_empty_tag_block_nested_spaced(self):
        """Newlines between nested empty block-level tags are all removed."""
        before = ("Test.\n\n<blockquote>\n\n<ul>\n\n<li>"
                  "</li>\n\n</ul>\n\n</blockquote>\ntest.")
        after = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>test.")
        assert self.newlines_helper(before) == after
    def test_newlines_li_newlines_inline(self):
        """Newlines at the edges of an <li> are stripped even around inline
        tags; newlines inside the inline tag are kept."""
        before = ("<ul><li>\n<b>test\ntest\n\ntest</b>\n</li>"
                  "<li>Test <b>test</b> test.</li></ul>")
        after = ("<ul><li><b>test\ntest\n\ntest</b></li>"
                 "<li>Test <b>test</b> test.</li></ul>")
        assert self.newlines_helper(before) == after
def test_newlines_li_all_inline(self):
before = ("Test with <b>no newlines</b> and <code>block level "
"stuff</code> to see what happens.")
after = before # Should stay the same
assert self.newlines_helper(before) == after
    def test_newlines_spaced_blocks(self):
        """Newlines separating nested block-level tags are all removed."""
        before = ("<blockquote>\n\n<ul>\n\n<li>\n\ntest\n\n</li>\n\n"
                  "</ul>\n\n</blockquote>")
        after = "<blockquote><ul><li>test</li></ul></blockquote>"
        assert self.newlines_helper(before) == after
def test_newlines_spaced_inline(self):
before = "Line.\n\n<b>\nThis line is bold.\n</b>\n\nThis isn't."
after = before
assert self.newlines_helper(before) == after
def test_newlines_nested_inline(self):
before = "<b>\nThis line is bold.\n\n<i>This is also italic</i></b>"
after = before
assert self.newlines_helper(before) == after
    def test_newlines_xss_script(self):
        """A <script> tag is neutralized by the cleaner while its inner
        newlines are preserved."""
        before = "<script>\n\nalert('test');\n</script>"
        after = "<script>\n\nalert('test');\n</script>"
        assert self.newlines_helper(before) == after
    def test_newlines_xss_inline(self):
        """Event-handler attributes (onclick) are stripped from otherwise
        allowed inline tags."""
        before = "<b onclick=\"alert('test');\">test</b>"
        after = "<b>test</b>"
        assert self.newlines_helper(before) == after
    @patch(
        'olympia.amo.templatetags.jinja_helpers.urlresolvers.get_outgoing_url')
    def test_newlines_attribute_link_doublequote(self, mock_get_outgoing_url):
        """Links get rel="nofollow" added by the cleaner (outgoing-URL
        rewriting is mocked out)."""
        mock_get_outgoing_url.return_value = 'http://google.com'
        before = '<a href="http://google.com">test</a>'
        parsed = self.newlines_helper(before)
        assert 'rel="nofollow"' in parsed
    def test_newlines_attribute_singlequote(self):
        """Single-quoted attributes are normalized to double quotes."""
        before = "<abbr title='laugh out loud'>lol</abbr>"
        after = '<abbr title="laugh out loud">lol</abbr>'
        assert self.newlines_helper(before) == after
def test_newlines_attribute_doublequote(self):
before = '<abbr title="laugh out loud">lol</abbr>'
after = before
assert self.newlines_helper(before) == after
def test_newlines_attribute_nestedquotes_doublesingle(self):
before = '<abbr title="laugh \'out\' loud">lol</abbr>'
after = before
assert self.newlines_helper(before) == after
def test_newlines_attribute_nestedquotes_singledouble(self):
before = '<abbr title=\'laugh "out" loud\'>lol</abbr>'
after = before
assert self.newlines_helper(before) == after
    def test_newlines_unclosed_b(self):
        """An unclosed <b> is auto-closed by the cleaner."""
        before = ("<b>test")
        after = ("<b>test</b>")
        assert self.newlines_helper(before) == after
    def test_newlines_unclosed_b_wrapped(self):
        """An unclosed <b> mid-sentence is auto-closed at the end."""
        before = ("This is a <b>test")
        after = ("This is a <b>test</b>")
        assert self.newlines_helper(before) == after
    def test_newlines_unclosed_li(self):
        """An unclosed <li> is auto-closed before the enclosing </ul>."""
        before = ("<ul><li>test</ul>")
        after = ("<ul><li>test</li></ul>")
        assert self.newlines_helper(before) == after
    def test_newlines_malformed_faketag(self):
        """An unterminated unknown tag is removed entirely."""
        before = "<madonna"
        after = ""
        assert self.newlines_helper(before) == after
    def test_newlines_correct_faketag(self):
        """A well-formed unknown tag is neutralized but kept in the text."""
        before = "<madonna>"
        after = "<madonna>"
        assert self.newlines_helper(before) == after
    def test_newlines_malformed_tag(self):
        """An unterminated known tag is removed entirely."""
        before = "<strong"
        after = ""
        assert self.newlines_helper(before) == after
    def test_newlines_malformed_faketag_surrounded(self):
        """Everything from an unterminated unknown tag onwards is dropped."""
        before = "This is a <test of bleach"
        after = 'This is a'
        assert self.newlines_helper(before) == after
    def test_newlines_malformed_tag_surrounded(self):
        """Everything from an unterminated known tag onwards is dropped."""
        before = "This is a <strong of bleach"
        after = "This is a"
        assert self.newlines_helper(before) == after
    def test_newlines_less_than(self):
        """A bare less-than with spaces around it is handled safely by the
        cleaner."""
        before = "3 < 5"
        after = "3 < 5"
        assert self.newlines_helper(before) == after
    def test_newlines_less_than_tight(self):
        """A less-than with no surrounding spaces is not mistaken for a tag
        opener."""
        before = "abc 3<5 def"
        after = "abc 3<5 def"
        assert self.newlines_helper(before) == after
    def test_app_categories(self):
        """all_categories / app_categories aggregate the add-on's categories
        per application, picking up categories added at runtime."""
        def get_addon():
            # Re-fetch to drop any cached category lists.
            return Addon.objects.get(pk=3615)

        # This add-on is already associated with three Firefox categories
        # using fixtures: Bookmarks, Feeds, Social.
        FIREFOX_EXT_CATS = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]
        expected_firefox_cats = [
            FIREFOX_EXT_CATS['bookmarks'],
            FIREFOX_EXT_CATS['feeds-news-blogging'],
            FIREFOX_EXT_CATS['social-communication']
        ]

        addon = get_addon()
        assert set(addon.all_categories) == set(expected_firefox_cats)
        assert addon.app_categories == {amo.FIREFOX: expected_firefox_cats}

        # Let's add a thunderbird category.
        thunderbird_static_cat = (
            CATEGORIES[amo.THUNDERBIRD.id][amo.ADDON_EXTENSION]['tags'])
        tb_category = Category.from_static_category(thunderbird_static_cat)
        tb_category.save()
        AddonCategory.objects.create(addon=addon, category=tb_category)

        # Reload the addon to get a fresh, uncached categories list.
        addon = get_addon()

        # Test that the thunderbird category was added correctly.
        assert set(addon.all_categories) == set(
            expected_firefox_cats + [thunderbird_static_cat])
        assert set(addon.app_categories.keys()) == set(
            [amo.FIREFOX, amo.THUNDERBIRD])
        assert set(addon.app_categories[amo.FIREFOX]) == set(
            expected_firefox_cats)
        assert set(addon.app_categories[amo.THUNDERBIRD]) == set(
            [thunderbird_static_cat])
    def test_app_categories_ignore_unknown_cats(self):
        """Categories that don't match a static category constant are
        silently ignored by all_categories / app_categories."""
        def get_addon():
            # Re-fetch to drop any cached category lists.
            return Addon.objects.get(pk=3615)

        # This add-on is already associated with three Firefox categories
        # using fixtures: Bookmarks, Feeds, Social.
        FIREFOX_EXT_CATS = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]
        expected_firefox_cats = [
            FIREFOX_EXT_CATS['bookmarks'],
            FIREFOX_EXT_CATS['feeds-news-blogging'],
            FIREFOX_EXT_CATS['social-communication']
        ]

        addon = get_addon()
        assert set(addon.all_categories) == set(expected_firefox_cats)
        assert addon.app_categories == {amo.FIREFOX: expected_firefox_cats}

        # Associate this add-on with a couple more categories, including
        # one that does not exist in the constants.
        unknown_cat = Category.objects.create(
            application=amo.SUNBIRD.id, id=123456, type=amo.ADDON_EXTENSION,
            name='Sunny D')
        AddonCategory.objects.create(addon=addon, category=unknown_cat)
        thunderbird_static_cat = (
            CATEGORIES[amo.THUNDERBIRD.id][amo.ADDON_EXTENSION]['appearance'])
        tb_category = Category.from_static_category(thunderbird_static_cat)
        tb_category.save()
        AddonCategory.objects.create(addon=addon, category=tb_category)

        # Reload the addon to get a fresh, uncached categories list.
        addon = get_addon()

        # The sunbird category should not be present since it does not match
        # an existing static category, thunderbird one should have been added.
        assert set(addon.all_categories) == set(
            expected_firefox_cats + [thunderbird_static_cat])
        assert set(addon.app_categories.keys()) == set(
            [amo.FIREFOX, amo.THUNDERBIRD])
        assert set(addon.app_categories[amo.FIREFOX]) == set(
            expected_firefox_cats)
        assert set(addon.app_categories[amo.THUNDERBIRD]) == set(
            [thunderbird_static_cat])
    def test_review_replies(self):
        """
        Make sure that developer replies are not returned as if they were
        original reviews.
        """
        addon = Addon.objects.get(id=3615)
        u = UserProfile.objects.get(pk=999)
        version = addon.current_version
        new_rating = Rating(version=version, user=u, rating=2, body='hello',
                            addon=addon)
        new_rating.save()
        # A reply is linked to the original rating via reply_to.
        new_reply = Rating(version=version, user=addon.authors.all()[0],
                           addon=addon, reply_to=new_rating,
                           rating=2, body='my reply')
        new_reply.save()

        review_list = [rating.pk for rating in addon.ratings]

        assert new_rating.pk in review_list, (
            'Original review must show up in review list.')
        assert new_reply.pk not in review_list, (
            'Developer reply must not show up in review list.')
    def test_show_beta(self):
        # Addon.current_beta_version will be empty, so show_beta is False.
        a = Addon(status=amo.STATUS_PUBLIC)
        assert not a.show_beta
    @patch('olympia.addons.models.Addon.current_beta_version')
    def test_show_beta_with_beta_version(self, beta_mock):
        """show_beta is True only when the add-on is public AND has a beta
        version."""
        # NOTE(review): patching a property replaces it with the mock object
        # itself, which is truthy; the return_value assignment below is
        # presumably redundant — confirm.
        beta_mock.return_value = object()
        # Fake current_beta_version to return something truthy.
        a = Addon(status=amo.STATUS_PUBLIC)
        assert a.show_beta

        # We have a beta version but status has to be public.
        a.status = amo.STATUS_NOMINATED
        assert not a.show_beta
    def test_update_logs(self):
        """Deleting all versions triggers a CHANGE_STATUS activity log
        entry."""
        addon = Addon.objects.get(id=3615)
        core.set_user(UserProfile.objects.all()[0])
        addon.versions.all().delete()

        entries = ActivityLog.objects.all()
        assert entries[0].action == amo.LOG.CHANGE_STATUS.id
    def setup_files(self, status):
        """Create a bare add-on with one version whose single file has the
        given status; return (addon, version)."""
        addon = Addon.objects.create(type=1)
        version = Version.objects.create(addon=addon)
        File.objects.create(status=status, version=version)
        return addon, version
    def test_no_change_disabled_user(self):
        """Saving a version doesn't change the status of an add-on disabled
        by its developer."""
        addon, version = self.setup_files(amo.STATUS_AWAITING_REVIEW)
        addon.update(status=amo.STATUS_PUBLIC)
        addon.update(disabled_by_user=True)
        version.save()
        assert addon.status == amo.STATUS_PUBLIC
        assert addon.is_disabled
    def test_no_change_disabled(self):
        """Saving a version doesn't change the status of an admin-disabled
        add-on."""
        addon = Addon.objects.create(type=1)
        version = Version.objects.create(addon=addon)
        addon.update(status=amo.STATUS_DISABLED)
        version.save()
        assert addon.status == amo.STATUS_DISABLED
        assert addon.is_disabled
    def test_no_change_deleted(self):
        """Saving a version doesn't change the status of a deleted add-on."""
        addon = Addon.objects.create(type=1)
        version = Version.objects.create(addon=addon)
        addon.update(status=amo.STATUS_DELETED)
        version.save()
        assert addon.status == amo.STATUS_DELETED
        assert addon.is_deleted
    def test_removing_public(self):
        """A public add-on whose only file awaits review drops back to
        STATUS_NOMINATED when the version is saved."""
        addon, version = self.setup_files(amo.STATUS_AWAITING_REVIEW)
        addon.update(status=amo.STATUS_PUBLIC)
        version.save()
        assert addon.status == amo.STATUS_NOMINATED
    def test_can_request_review_no_files(self):
        """An add-on whose versions have no files can't request review."""
        addon = Addon.objects.get(pk=3615)
        addon.versions.all()[0].files.all().delete()
        assert addon.can_request_review() is False
    def test_can_request_review_rejected(self):
        """An add-on whose latest listed version was rejected (files
        disabled) can't request review."""
        addon = Addon.objects.get(pk=3615)
        latest_version = addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
        latest_version.files.update(status=amo.STATUS_DISABLED)
        assert addon.can_request_review() is False
    def check_can_request_review(self, status, expected, extra_update_kw=None):
        """Helper: set the add-on's status (and any extra fields) and assert
        can_request_review() returns ``expected``."""
        if extra_update_kw is None:
            extra_update_kw = {}
        addon = Addon.objects.get(pk=3615)
        changes = {'status': status, 'disabled_by_user': False}
        changes.update(**extra_update_kw)
        addon.update(**changes)
        assert addon.can_request_review() == expected
    def test_can_request_review_null(self):
        # Incomplete (STATUS_NULL) add-ons may request review.
        self.check_can_request_review(amo.STATUS_NULL, True)
    def test_can_request_review_null_disabled(self):
        # ...unless the developer has disabled the add-on.
        self.check_can_request_review(
            amo.STATUS_NULL, False, extra_update_kw={'disabled_by_user': True})
    def test_can_request_review_nominated(self):
        # Already nominated: a second review request is not allowed.
        self.check_can_request_review(amo.STATUS_NOMINATED, False)
    def test_can_request_review_public(self):
        # Already public: nothing to review.
        self.check_can_request_review(amo.STATUS_PUBLIC, False)
    def test_can_request_review_disabled(self):
        # Admin-disabled add-ons can't request review.
        self.check_can_request_review(amo.STATUS_DISABLED, False)
    def test_can_request_review_deleted(self):
        # Deleted add-ons can't request review.
        self.check_can_request_review(amo.STATUS_DELETED, False)
    def test_none_homepage(self):
        # There was an odd error when a translation was set to None.
        # Creating with homepage=None must not raise.
        Addon.objects.create(homepage=None, type=amo.ADDON_EXTENSION)
    def test_slug_isdigit(self):
        """All-numeric slugs get a '~' appended so they can't be confused
        with primary keys in URLs."""
        a = Addon.objects.create(type=1, name='xx', slug='123')
        assert a.slug == '123~'
        a.slug = '44'
        a.save()
        assert a.slug == '44~'
    def test_slug_isdenied(self):
        # When an addon is uploaded, it doesn't use the form validation,
        # so we'll just mangle the slug if its denied.
        a = Addon.objects.create(type=1, name='xx', slug='validate')
        assert a.slug == 'validate~'
        a.slug = 'validate'
        a.save()
        assert a.slug == 'validate~'
    def delete(self):
        """Helper: delete fixture add-on 3615 and assert exactly one
        notification mail is sent."""
        addon = Addon.objects.get(id=3615)
        assert len(mail.outbox) == 0
        addon.delete('so long and thanks for all the fish')
        assert len(mail.outbox) == 1
    def test_delete_to(self):
        """The deletion mail goes to the configured admin address."""
        self.delete()
        assert mail.outbox[0].to == [settings.FLIGTAR]
    def test_delete_by(self):
        """When a user is set, the deletion mail names the deleting user."""
        try:
            user = Addon.objects.get(id=3615).authors.all()[0]
            core.set_user(user)
            self.delete()
            assert 'DELETED BY: 55021' in mail.outbox[0].body
        finally:
            # Always reset the thread-local user for later tests.
            core.set_user(None)
    def test_delete_by_unknown(self):
        """With no user set, the deletion mail says 'Unknown'."""
        self.delete()
        assert 'DELETED BY: Unknown' in mail.outbox[0].body
    def test_delete_mail_not_localized(self):
        """Don't localize the email sent to the admins using the user's
        locale."""
        with self.activate('pl'):
            self.delete()
        admin_mail = mail.outbox[0]
        # Make sure the type (EXTENSION) isn't localized.
        assert 'Deleting EXTENSION a3615 (3615)' in admin_mail.subject
        assert 'The following EXTENSION was deleted' in admin_mail.body
    def test_view_source(self):
        # view_source should default to True.
        a = Addon.objects.create(type=1)
        assert a.view_source
    @patch('olympia.files.models.File.hide_disabled_file')
    def test_admin_disabled_file_hidden(self, hide_mock):
        """Saving an add-on with STATUS_DISABLED hides its files; saving
        with STATUS_PUBLIC does not."""
        a = Addon.objects.get(id=3615)
        a.status = amo.STATUS_PUBLIC
        a.save()
        assert not hide_mock.called

        a.status = amo.STATUS_DISABLED
        a.save()
        assert hide_mock.called
    @patch('olympia.files.models.File.hide_disabled_file')
    def test_user_disabled_file_hidden(self, hide_mock):
        """Saving with disabled_by_user=True hides the add-on's files."""
        a = Addon.objects.get(id=3615)
        a.disabled_by_user = False
        a.save()
        assert not hide_mock.called

        a.disabled_by_user = True
        a.save()
        assert hide_mock.called
    def test_category_transform(self):
        """get_category() returns one of the add-on's categories for the
        given application."""
        addon = Addon.objects.get(id=3615)
        cats = addon.categories.filter(application=amo.FIREFOX.id)
        names = [c.name for c in cats]

        assert addon.get_category(amo.FIREFOX.id).name in names
    def test_binary_property(self):
        """Addon.binary is True when any current file is flagged binary."""
        addon = Addon.objects.get(id=3615)
        file = addon.current_version.files.all()[0]
        file.update(binary=True)
        assert addon.binary
    def test_binary_components_property(self):
        """Addon.binary_components is True when any current file is flagged
        as containing binary components."""
        addon = Addon.objects.get(id=3615)
        file = addon.current_version.files.all()[0]
        file.update(binary_components=True)
        assert addon.binary_components
    def test_listed_has_complete_metadata_no_categories(self):
        """A listed add-on without categories has incomplete metadata;
        without listed versions, categories aren't required."""
        addon = Addon.objects.get(id=3615)
        assert addon.has_complete_metadata()  # Confirm complete already.

        AddonCategory.objects.filter(addon=addon).delete()
        addon = Addon.objects.get(id=3615)
        assert not addon.has_complete_metadata()
        assert addon.has_complete_metadata(has_listed_versions=False)
    def test_listed_has_complete_metadata_no_summary(self):
        """A listed add-on without a summary has incomplete metadata;
        without listed versions, the summary isn't required."""
        addon = Addon.objects.get(id=3615)
        assert addon.has_complete_metadata()  # Confirm complete already.

        delete_translation(addon, 'summary')
        addon = Addon.objects.get(id=3615)
        assert not addon.has_complete_metadata()
        assert addon.has_complete_metadata(
            has_listed_versions=False)
    def test_listed_has_complete_metadata_no_license(self):
        """A listed add-on whose current version lacks a license has
        incomplete metadata; without listed versions, it isn't required."""
        addon = Addon.objects.get(id=3615)
        assert addon.has_complete_metadata()  # Confirm complete already.

        addon.current_version.update(license=None)
        addon = Addon.objects.get(id=3615)
        assert not addon.has_complete_metadata()
        assert addon.has_complete_metadata(
            has_listed_versions=False)
    def test_unlisted_has_complete_metadata(self):
        """A purely unlisted add-on counts as complete even with no license,
        categories or summary — unless listed versions are assumed."""
        addon = Addon.objects.get(id=3615)
        self.make_addon_unlisted(addon)
        assert addon.has_complete_metadata()  # Confirm complete already.

        # Clear everything
        addon.versions.update(license=None)
        AddonCategory.objects.filter(addon=addon).delete()
        delete_translation(addon, 'summary')
        addon = Addon.objects.get(id=3615)
        assert addon.has_complete_metadata()  # Still complete
        assert not addon.has_complete_metadata(has_listed_versions=True)
    def test_can_review(self):
        """Anyone can review an add-on except its own authors."""
        user = AnonymousUser()
        addon = Addon.objects.get(id=3615)
        assert addon.can_review(user)

        user = addon.addonuser_set.all()[0].user
        assert not addon.can_review(user)

        user = UserProfile.objects.get(pk=2519)
        assert addon.can_review(user)
    def test_has_author(self):
        """has_author() is True only for users linked through
        addonuser_set."""
        addon = Addon.objects.get(id=3615)
        user = addon.addonuser_set.all()[0].user
        assert addon.has_author(user)

        user = UserProfile.objects.get(pk=2519)
        assert not addon.has_author(user)
    def test_auto_approval_disabled_property(self):
        """auto_approval_disabled proxies the AddonReviewerFlags row:
        None without flags, otherwise the flag's boolean value."""
        addon = Addon.objects.get(pk=3615)
        # No flags: None
        assert addon.auto_approval_disabled is None
        # Flag present, value is False (default): False.
        flags = AddonReviewerFlags.objects.create(addon=addon)
        assert flags.auto_approval_disabled is False
        assert addon.auto_approval_disabled is False
        # Flag present, value is True: True.
        flags.update(auto_approval_disabled=True)
        assert addon.auto_approval_disabled is True
def test_needs_admin_code_review_property(self):
addon = Addon.objects.get(pk=3615)
# No flags: None
assert addon.needs_admin_code_review is None
# Flag present, value is False (default): False.
flags = AddonReviewerFlags.objects.create(addon=addon)
assert flags.needs_admin_code_review is False
assert addon.needs_admin_code_review is False
# Flag present, value is True: True.
flags.update(needs_admin_code_review=True)
assert addon.needs_admin_code_review is True
def test_needs_admin_content_review_property(self):
addon = Addon.objects.get(pk=3615)
# No flags: None
assert addon.needs_admin_content_review is None
# Flag present, value is False (default): False.
flags = AddonReviewerFlags.objects.create(addon=addon)
assert flags.needs_admin_content_review is False
assert addon.needs_admin_content_review is False
# Flag present, value is True: True.
flags.update(needs_admin_content_review=True)
assert addon.needs_admin_content_review is True
def test_pending_info_request_property(self):
addon = Addon.objects.get(pk=3615)
# No flags: None
assert addon.pending_info_request is None
# Flag present, value is None (default): None.
flags = AddonReviewerFlags.objects.create(addon=addon)
assert flags.pending_info_request is None
assert addon.pending_info_request is None
# Flag present, value is a date.
in_the_past = self.days_ago(1)
flags.update(pending_info_request=in_the_past)
assert addon.pending_info_request == in_the_past
def test_expired_info_request_property(self):
addon = Addon.objects.get(pk=3615)
# No flags: None
assert addon.expired_info_request is None
# Flag present, value is None (default): None.
flags = AddonReviewerFlags.objects.create(addon=addon)
assert flags.pending_info_request is None
assert addon.expired_info_request is None
# Flag present, value is a date in the past.
in_the_past = self.days_ago(1)
flags.update(pending_info_request=in_the_past)
assert addon.expired_info_request
# Flag present, value is a date in the future.
in_the_future = datetime.now() + timedelta(days=2)
flags.update(pending_info_request=in_the_future)
assert not addon.expired_info_request
class TestShouldRedirectToSubmitFlow(TestCase):
    """Tests for Addon.should_redirect_to_submit_flow()."""
    fixtures = ['base/addon_3615']

    def test_no_versions_doesnt_redirect(self):
        addon = Addon.objects.get(id=3615)
        assert not addon.should_redirect_to_submit_flow()

        # Break the add-on: incomplete metadata + STATUS_NULL redirects...
        delete_translation(addon, 'summary')
        addon = Addon.objects.get(id=3615)
        assert not addon.has_complete_metadata()
        addon.update(status=amo.STATUS_NULL)
        assert addon.should_redirect_to_submit_flow()

        # ...but not once every version has been deleted.
        for ver in addon.versions.all():
            ver.delete()
        assert not addon.should_redirect_to_submit_flow()

    def test_disabled_versions_doesnt_redirect(self):
        addon = Addon.objects.get(id=3615)
        assert not addon.should_redirect_to_submit_flow()

        # Break the add-on: incomplete metadata + STATUS_NULL redirects...
        delete_translation(addon, 'summary')
        addon = Addon.objects.get(id=3615)
        assert not addon.has_complete_metadata()
        addon.update(status=amo.STATUS_NULL)
        assert addon.should_redirect_to_submit_flow()

        # ...but not once every file has been disabled.
        for ver in addon.versions.all():
            for file_ in ver.all_files:
                file_.update(status=amo.STATUS_DISABLED)
        assert not addon.should_redirect_to_submit_flow()

    def test_only_null_redirects(self):
        addon = Addon.objects.get(id=3615)
        assert not addon.should_redirect_to_submit_flow()

        # Break the add-on's metadata.
        delete_translation(addon, 'summary')
        addon = Addon.objects.get(id=3615)
        assert not addon.has_complete_metadata()

        status_exc_null = dict(amo.STATUS_CHOICES_ADDON)
        status_exc_null.pop(amo.STATUS_NULL)
        for status in status_exc_null:
            # Bug fix: the status was previously never applied inside the
            # loop, so every iteration re-checked the unchanged fixture
            # status instead of exercising each non-NULL status.
            addon.update(status=status)
            assert not addon.should_redirect_to_submit_flow()

        # Only STATUS_NULL (with incomplete metadata) triggers the redirect.
        addon.update(status=amo.STATUS_NULL)
        assert addon.should_redirect_to_submit_flow()
class TestHasListedAndUnlistedVersions(TestCase):
    """Checks for Addon.has_listed_versions()/has_unlisted_versions()."""

    def setUp(self):
        self.addon = addon_factory()
        # addon_factory creates one listed version; hard-delete it so every
        # test starts from an add-on with no versions at all.
        seed_version = self.addon.find_latest_version(
            channel=amo.RELEASE_CHANNEL_LISTED)
        seed_version.delete(hard=True)
        assert self.addon.versions.count() == 0

    def test_no_versions(self):
        assert not self.addon.has_listed_versions()
        assert not self.addon.has_unlisted_versions()

    def test_listed_version(self):
        version_factory(channel=amo.RELEASE_CHANNEL_LISTED, addon=self.addon)
        assert self.addon.has_listed_versions()
        assert not self.addon.has_unlisted_versions()

    def test_unlisted_version(self):
        version_factory(channel=amo.RELEASE_CHANNEL_UNLISTED, addon=self.addon)
        assert not self.addon.has_listed_versions()
        assert self.addon.has_unlisted_versions()

    def test_unlisted_and_listed_versions(self):
        # One version in each channel: both predicates hold.
        version_factory(channel=amo.RELEASE_CHANNEL_LISTED, addon=self.addon)
        version_factory(channel=amo.RELEASE_CHANNEL_UNLISTED, addon=self.addon)
        assert self.addon.has_listed_versions()
        assert self.addon.has_unlisted_versions()
class TestAddonNomination(TestCase):
    """How version nomination dates are set, inherited and reset."""
    fixtures = ['base/addon_3615']

    def test_set_nomination(self):
        # Nominating an add-on stamps a nomination date on its latest
        # version.
        a = Addon.objects.get(id=3615)
        a.update(status=amo.STATUS_NULL)
        a.versions.latest().update(nomination=None)
        a.update(status=amo.STATUS_NOMINATED)
        assert a.versions.latest().nomination

    def test_new_version_inherits_nomination(self):
        a = Addon.objects.get(id=3615)
        a.update(status=amo.STATUS_NOMINATED)
        old_ver = a.versions.latest()
        # A version created while the add-on is under review keeps the
        # previous version's nomination date. (Dropped an unused `ver`
        # counter that was incremented but never read again.)
        v = Version.objects.create(addon=a, version='10')
        assert v.nomination == old_ver.nomination

    @override_switch('beta-versions', active=True)
    def test_beta_version_does_not_inherit_nomination(self):
        a = Addon.objects.get(id=3615)
        a.update(status=amo.STATUS_NULL)
        v = Version.objects.create(addon=a, version='1.0')
        v.nomination = None
        v.save()
        a.update(status=amo.STATUS_NOMINATED)
        File.objects.create(version=v, status=amo.STATUS_BETA,
                            filename='foobar.xpi')
        v.version = '1.1'
        v.save()
        # Beta versions never pick up a nomination date.
        assert v.nomination is None

    def test_lone_version_does_not_inherit_nomination(self):
        a = Addon.objects.get(id=3615)
        Version.objects.all().delete()
        # With no previous version there is nothing to inherit from.
        v = Version.objects.create(addon=a, version='1.0')
        assert v.nomination is None

    def test_reviewed_addon_does_not_inherit_nomination(self):
        a = Addon.objects.get(id=3615)
        ver = 10
        # Whatever the (non-nominated) status, a new version starts without
        # a nomination date.
        for st in (amo.STATUS_PUBLIC, amo.STATUS_BETA, amo.STATUS_NULL):
            a.update(status=st)
            v = Version.objects.create(addon=a, version=str(ver))
            assert v.nomination is None
            ver += 1

    def test_nomination_no_version(self):
        # Check that the on_change method still works if there are no
        # versions.
        a = Addon.objects.get(id=3615)
        a.versions.all().delete()
        a.update(status=amo.STATUS_NOMINATED)

    def test_nomination_already_set(self):
        # Re-nominating keeps an already existing (earlier) nomination date.
        addon = Addon.objects.get(id=3615)
        earlier = datetime.today() - timedelta(days=2)
        addon.versions.latest().update(nomination=earlier)
        addon.update(status=amo.STATUS_NOMINATED)
        assert addon.versions.latest().nomination.date() == earlier.date()

    def setup_nomination(self, addon_status=amo.STATUS_NOMINATED,
                         file_status=amo.STATUS_AWAITING_REVIEW):
        """Create a nominated add-on; return (addon, its nomination date)."""
        addon = Addon.objects.create()
        version = Version.objects.create(addon=addon)
        File.objects.create(status=file_status, version=version)
        # Cheating date to make sure we don't have a date on the same second
        # the code we test is running.
        past = self.days_ago(1)
        version.update(nomination=past, created=past, modified=past)
        addon.update(status=addon_status)
        nomination = addon.versions.latest().nomination
        assert nomination
        return addon, nomination

    def test_new_version_of_under_review_addon_does_not_reset_nomination(self):
        addon, nomination = self.setup_nomination()
        version = Version.objects.create(addon=addon, version='0.2')
        File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
        assert addon.versions.latest().nomination == nomination

    def test_nomination_not_reset_if_adding_new_versions_and_files(self):
        """
        When under review, adding new versions and files should not
        reset nomination.
        """
        addon, nomination = self.setup_nomination()
        # Switching it to a public status.
        version = Version.objects.create(addon=addon, version="0.1")
        File.objects.create(status=amo.STATUS_PUBLIC, version=version)
        assert addon.versions.latest().nomination == nomination
        # Adding a new unreviewed version.
        version = Version.objects.create(addon=addon, version="0.2")
        File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
        assert addon.versions.latest().nomination == nomination
        # Adding a new unreviewed version.
        version = Version.objects.create(addon=addon, version="0.3")
        File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
        assert addon.versions.latest().nomination == nomination

    def check_nomination_reset_with_new_version(self, addon, nomination):
        """Assert a newly-created version gets a fresh nomination date."""
        version = Version.objects.create(addon=addon, version="0.2")
        assert version.nomination is None
        File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
        assert addon.versions.latest().nomination != nomination

    def test_new_version_of_approved_addon_should_reset_nomination(self):
        addon, nomination = self.setup_nomination(
            addon_status=amo.STATUS_PUBLIC, file_status=amo.STATUS_PUBLIC)
        # Now create a new version with an attached file, and update status.
        self.check_nomination_reset_with_new_version(addon, nomination)
class TestThemeDelete(TestCase):

    def setUp(self):
        super(TestThemeDelete, self).setUp()
        self.addon = addon_factory(type=amo.ADDON_PERSONA)
        # Backdate created/modified so a change to `modified` is observable.
        self.addon.update(created=self.days_ago(1), modified=self.days_ago(1))

    def test_remove_theme_update_m_time(self):
        """Deleting a theme must bump its modification timestamp."""
        before = self.addon.modified
        self.addon.delete('enough', 'no reason at all')
        assert before != self.addon.modified
class TestAddonDelete(TestCase):
    """Deleting add-ons: hard-delete cascades and soft-delete behaviour."""

    def test_cascades(self):
        # Create one row in each related table, then hard-delete the add-on
        # and expect the cascades to clean everything up.
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        AddonCategory.objects.create(
            addon=addon,
            category=Category.objects.create(type=amo.ADDON_EXTENSION))
        AddonDependency.objects.create(
            addon=addon, dependent_addon=addon)
        AddonUser.objects.create(
            addon=addon, user=UserProfile.objects.create())
        AppSupport.objects.create(addon=addon, app=1)
        CompatOverride.objects.create(addon=addon)
        FrozenAddon.objects.create(addon=addon)
        Persona.objects.create(addon=addon, persona_id=0)
        Preview.objects.create(addon=addon)
        AddonLog.objects.create(
            addon=addon, activity_log=ActivityLog.objects.create(action=0))
        RssKey.objects.create(addon=addon)
        # This should not throw any FK errors if all the cascades work.
        addon.delete()
        # Make sure it was actually a hard delete.
        assert not Addon.unfiltered.filter(pk=addon.pk).exists()

    def test_review_delete(self):
        # A public add-on is only soft-deleted, but its ratings (and their
        # flags) are removed.
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
                                     status=amo.STATUS_PUBLIC)
        rating = Rating.objects.create(addon=addon, rating=1, body='foo',
                                       user=UserProfile.objects.create())
        flag = RatingFlag(rating=rating)
        addon.delete()
        # Soft delete: the row is still reachable via the unfiltered manager.
        assert Addon.unfiltered.filter(pk=addon.pk).exists()
        assert not Rating.objects.filter(pk=rating.pk).exists()
        assert not RatingFlag.objects.filter(pk=flag.pk).exists()

    def test_delete_with_deleted_versions(self):
        # Deleting an add-on that has (only) deleted versions soft-deletes.
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        version = Version.objects.create(addon=addon, version="1.0")
        version.delete()
        addon.delete()
        assert Addon.unfiltered.filter(pk=addon.pk).exists()
class TestAddonFeatureCompatibility(TestCase):
    fixtures = ['base/addon_3615']

    def test_feature_compatibility_not_present(self):
        """Without a DB row the property still returns an (unsaved) object."""
        addon = Addon.objects.get(pk=3615)
        placeholder = addon.feature_compatibility
        assert placeholder
        assert not placeholder.pk

    def test_feature_compatibility_present(self):
        """With a DB row the property returns the saved instance."""
        addon = Addon.objects.get(pk=3615)
        AddonFeatureCompatibility.objects.create(addon=addon)
        saved = addon.feature_compatibility
        assert saved
        assert saved.pk
class TestUpdateStatus(TestCase):
    """How the add-on status reacts to version/file changes."""

    def _db_status(self, addon):
        """Fetch the add-on's status straight from the DB, bypassing cache."""
        return Addon.objects.no_cache().get(pk=addon.pk).status

    def test_no_file_ends_with_NULL(self):
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        addon.status = amo.STATUS_NOMINATED
        addon.save()
        assert self._db_status(addon) == amo.STATUS_NOMINATED
        # Creating a version with no files knocks the add-on back to NULL.
        Version.objects.create(addon=addon)
        assert self._db_status(addon) == amo.STATUS_NULL

    def test_no_valid_file_ends_with_NULL(self):
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        version = Version.objects.create(addon=addon)
        only_file = File.objects.create(status=amo.STATUS_AWAITING_REVIEW,
                                        version=version)
        addon.status = amo.STATUS_NOMINATED
        addon.save()
        assert self._db_status(addon) == amo.STATUS_NOMINATED
        # Disabling the only file leaves no valid files -> back to NULL.
        only_file.status = amo.STATUS_DISABLED
        only_file.save()
        assert self._db_status(addon) == amo.STATUS_NULL

    def test_unlisted_versions_ignored(self):
        addon = addon_factory(status=amo.STATUS_PUBLIC)
        addon.update_status()
        assert self._db_status(addon) == amo.STATUS_PUBLIC
        # Moving the only version to the unlisted channel leaves no listed
        # versions; update_status (triggered by the channel change) resets
        # the add-on to NULL.
        addon.current_version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
        assert self._db_status(addon) == amo.STATUS_NULL
class TestGetVersion(TestCase):
    """Tests for Addon.find_latest_public_listed_version()."""
    fixtures = ['base/addon_3615', ]

    def setUp(self):
        super(TestGetVersion, self).setUp()
        self.addon = Addon.objects.get(id=3615)
        self.version = self.addon.current_version

    def test_public_new_public_version(self):
        # A newer public version supersedes the current one.
        newer = version_factory(
            addon=self.addon, file_kw={'status': amo.STATUS_PUBLIC})
        assert self.addon.find_latest_public_listed_version() == newer

    def test_public_new_unreviewed_version(self):
        # An unreviewed version is not picked up.
        version_factory(
            addon=self.addon, file_kw={'status': amo.STATUS_AWAITING_REVIEW})
        assert self.addon.find_latest_public_listed_version() == self.version

    def test_should_promote_previous_valid_version_if_latest_is_disabled(self):
        # A disabled version is skipped in favour of the older valid one.
        version_factory(
            addon=self.addon, file_kw={'status': amo.STATUS_DISABLED})
        assert self.addon.find_latest_public_listed_version() == self.version

    def test_should_be_listed(self):
        # A public but *unlisted* version never wins over a listed one.
        unlisted = version_factory(
            addon=self.addon,
            channel=amo.RELEASE_CHANNEL_UNLISTED,
            file_kw={'status': amo.STATUS_PUBLIC})
        assert unlisted != self.version
        assert self.addon.find_latest_public_listed_version() == self.version
class TestAddonGetURLPath(TestCase):
    """Tests for Addon.get_url_path()."""

    def test_get_url_path(self):
        assert addon_factory(slug='woo').get_url_path() == (
            '/en-US/firefox/addon/woo/')

    def test_get_url_path_more(self):
        assert addon_factory(slug='yeah').get_url_path(more=True) == (
            '/en-US/firefox/addon/yeah/more')

    def test_unlisted_addon_get_url_path(self):
        # Unlisted-only add-ons have no public detail page.
        unlisted = addon_factory(
            slug='woo', version_kw={'channel': amo.RELEASE_CHANNEL_UNLISTED})
        assert unlisted.get_url_path() == ''
class TestAddonModelsFeatured(TestCase):
    """Addon.featured_random() results per application and locale."""
    fixtures = ['base/appversion', 'base/users',
                'addons/featured', 'bandwagon/featured_collections',
                'base/addon_3615', 'base/collections', 'base/featured']

    def setUp(self):
        super(TestAddonModelsFeatured, self).setUp()
        # Addon._featured keeps an in-process cache we need to clear.
        if hasattr(Addon, '_featured'):
            del Addon._featured

    def _test_featured_random(self):
        # Expected add-on ids come from the fixtures listed above.
        f = Addon.featured_random(amo.FIREFOX, 'en-US')
        assert sorted(f) == [1001, 1003, 2464, 3481, 7661, 15679]
        # 'fr' sees one fewer feature than en-US (per the fixtures).
        f = Addon.featured_random(amo.FIREFOX, 'fr')
        assert sorted(f) == [1001, 1003, 2464, 7661, 15679]
        # Thunderbird has no featured add-ons in the fixtures.
        f = Addon.featured_random(amo.THUNDERBIRD, 'en-US')
        assert f == []

    def test_featured_random(self):
        self._test_featured_random()
class TestBackupVersion(TestCase):
    """current_version bookkeeping when versions change or disappear."""
    fixtures = ['addons/update', 'base/appversion']

    def setUp(self):
        super(TestBackupVersion, self).setUp()
        # Primary key of version 1.2.0 in the 'addons/update' fixture.
        self.version_1_2_0 = 105387
        self.addon = Addon.objects.get(pk=1865)
        core.set_user(None)

    def setup_new_version(self):
        # Raise the minimum app version of every version >= 1.2.0 so the
        # compatible-apps data changes for the tests below.
        for version in Version.objects.filter(pk__gte=self.version_1_2_0):
            appversion = version.apps.all()[0]
            appversion.min = AppVersion.objects.get(version='4.0b1')
            appversion.save()

    def test_no_current_version(self):
        # Deleting all versions should fall back to no current version.
        for v in Version.objects.all():
            v.delete()
        self.addon.update(_current_version=None)
        assert self.addon.current_version is None

    def test_current_version_listed_only(self):
        version = self.addon.current_version
        version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
        # The call above should have triggered update_version().
        assert self.addon.current_version != version
        # new current_version should be version 1.2.1, since 1.2.2 is unlisted.
        assert self.addon.current_version == Version.objects.get(pk=112396)

    def test_firefox_versions(self):
        self.setup_new_version()
        self.addon.update_version()
        current = self.addon.current_version.compatible_apps[amo.FIREFOX]
        assert current.max.version == '4.0b8pre'
        assert current.min.version == '3.0.12'

    def test_version_signals(self):
        # Saving a version should repopulate the add-on's current_version.
        self.addon.update(_current_version=None)
        self.setup_new_version()
        version = self.addon.versions.all()[0]
        assert not self.addon.current_version
        version.save()
        assert Addon.objects.get(pk=1865).current_version

    def test_update_version_theme(self):
        # Test versions do not get deleted when calling with theme.
        self.addon.update(type=amo.ADDON_PERSONA)
        assert not self.addon.update_version()
        assert self.addon._current_version

        # Test latest version copied to current version if no current version.
        self.addon.update(_current_version=None, _signal=False)
        assert self.addon.update_version()
        assert self.addon._current_version == (
            self.addon.find_latest_version(None))
class TestCategoryModel(TestCase):
    """Category URL generation and name resolution."""

    def test_category_url(self):
        """Every type must have a url path for its categories."""
        for t in amo.ADDON_TYPE.keys():
            # NOTE(review): the original comment here said "language packs",
            # but the constant checked is ADDON_DICT (dictionaries) — confirm
            # which type is really category-less.
            if t == amo.ADDON_DICT:
                continue
            cat = Category(type=t, slug='omg')
            assert cat.get_url_path()

    @pytest.mark.needs_locales_compilation
    def test_name_from_constants(self):
        # A slug known to the category constants resolves to a translated
        # name (requires compiled locales, hence the marker).
        category = Category(
            type=amo.ADDON_EXTENSION, application=amo.FIREFOX.id,
            slug='alerts-updates')
        assert category.name == u'Alerts & Updates'
        with translation.override('fr'):
            assert category.name == u'Alertes et mises à jour'

    def test_name_fallback_to_db(self):
        # An unknown slug falls back to the database name, which is not
        # translated (same value regardless of active language).
        category = Category.objects.create(
            type=amo.ADDON_EXTENSION, application=amo.FIREFOX.id,
            slug='this-cat-does-not-exist', db_name=u'ALAAAAAAARM')
        assert category.name == u'ALAAAAAAARM'
        with translation.override('fr'):
            assert category.name == u'ALAAAAAAARM'
class TestPersonaModel(TestCase):
    """Image URLs and theme_data JSON generated for Persona (theme) add-ons."""
    fixtures = ['addons/persona']

    def setUp(self):
        super(TestPersonaModel, self).setUp()
        self.addon = Addon.objects.get(id=15663)
        self.persona = self.addon.persona
        self.persona.header = 'header.png'
        self.persona.footer = 'footer.png'
        self.persona.popularity = 12345
        self.persona.save()

    def _expected_url(self, img_name, modified_suffix):
        # All persona image URLs end with "<addon id>/<file>?modified=...";
        # only the leading media host differs, hence the endswith checks.
        return '/15663/%s?modified=%s' % (img_name, modified_suffix)

    def test_image_urls(self):
        # persona_id=0 marks an AMO-uploaded theme; with a checksum set, the
        # checksum is used as the cachebusting `modified` query value.
        self.persona.persona_id = 0
        self.persona.checksum = 'fakehash'
        self.persona.save()
        modified = 'fakehash'
        assert self.persona.thumb_url.endswith(
            self._expected_url('preview.png', modified))
        assert self.persona.icon_url.endswith(
            self._expected_url('icon.png', modified))
        assert self.persona.preview_url.endswith(
            self._expected_url('preview.png', modified))
        assert self.persona.header_url.endswith(
            self._expected_url('header.png', modified))
        assert self.persona.footer_url.endswith(
            self._expected_url('footer.png', modified))

    def test_image_urls_no_checksum(self):
        # AMO-uploaded themes have `persona_id=0`.
        self.persona.persona_id = 0
        self.persona.save()
        # Without a checksum, the add-on's modified timestamp is used as the
        # cachebusting value instead.
        modified = int(time.mktime(self.persona.addon.modified.timetuple()))
        assert self.persona.thumb_url.endswith(
            self._expected_url('preview.png', modified))
        assert self.persona.icon_url.endswith(
            self._expected_url('icon.png', modified))
        assert self.persona.preview_url.endswith(
            self._expected_url('preview.png', modified))
        assert self.persona.header_url.endswith(
            self._expected_url('header.png', modified))
        assert self.persona.footer_url.endswith(
            self._expected_url('footer.png', modified))

    def test_old_image_urls(self):
        # Non-AMO ("old") personas use the getpersonas-era filenames and a
        # modified value of 0 when the add-on has no modified date.
        self.persona.addon.modified = None
        modified = 0
        assert self.persona.thumb_url.endswith(
            self._expected_url('preview.jpg', modified))
        assert self.persona.icon_url.endswith(
            self._expected_url('preview_small.jpg', modified))
        assert self.persona.preview_url.endswith(
            self._expected_url('preview_large.jpg', modified))
        assert self.persona.header_url.endswith(
            self._expected_url('header.png', modified))
        assert self.persona.footer_url.endswith(
            self._expected_url('footer.png', modified))

    def test_update_url(self):
        # The update URL embeds the active locale and the add-on id.
        with self.settings(LANGUAGE_CODE='fr', LANGUAGE_URL_MAP={}):
            url_ = self.persona.update_url
            assert url_.endswith('/fr/themes/update-check/15663')

    def test_json_data(self):
        # theme_data for an old (getpersonas-era) persona.
        self.persona.addon.all_categories = [Category(db_name='Yolo Art')]

        VAMO = 'https://vamo/%(locale)s/themes/update-check/%(id)d'

        with self.settings(LANGUAGE_CODE='fr',
                           LANGUAGE_URL_MAP={},
                           NEW_PERSONAS_UPDATE_URL=VAMO,
                           SITE_URL='https://omgsh.it'):
            data = self.persona.theme_data

            id_ = str(self.persona.addon.id)

            assert data['id'] == id_
            assert data['name'] == unicode(self.persona.addon.name)
            assert data['accentcolor'] == '#8d8d97'
            assert data['textcolor'] == '#ffffff'
            assert data['category'] == 'Yolo Art'
            assert data['author'] == 'persona_author'
            assert data['description'] == unicode(self.addon.description)

            # Old personas use the preview_large/preview_small filenames.
            assert data['headerURL'].startswith(
                '%s%s/header.png?' % (user_media_url('addons'), id_))
            assert data['footerURL'].startswith(
                '%s%s/footer.png?' % (user_media_url('addons'), id_))
            assert data['previewURL'].startswith(
                '%s%s/preview_large.jpg?' % (user_media_url('addons'), id_))
            assert data['iconURL'].startswith(
                '%s%s/preview_small.jpg?' % (user_media_url('addons'), id_))

            assert data['detailURL'] == (
                'https://omgsh.it%s' % self.persona.addon.get_url_path())
            assert data['updateURL'] == (
                'https://vamo/fr/themes/update-check/' + id_)
            assert data['version'] == '1.0'

    def test_json_data_new_persona(self):
        self.persona.persona_id = 0  # Make this a "new" theme.
        self.persona.save()

        self.persona.addon.all_categories = [Category(db_name='Yolo Art')]

        VAMO = 'https://vamo/%(locale)s/themes/update-check/%(id)d'

        with self.settings(LANGUAGE_CODE='fr',
                           LANGUAGE_URL_MAP={},
                           NEW_PERSONAS_UPDATE_URL=VAMO,
                           SITE_URL='https://omgsh.it'):
            data = self.persona.theme_data

            id_ = str(self.persona.addon.id)

            assert data['id'] == id_
            assert data['name'] == unicode(self.persona.addon.name)
            assert data['accentcolor'] == '#8d8d97'
            assert data['textcolor'] == '#ffffff'
            assert data['category'] == 'Yolo Art'
            assert data['author'] == 'persona_author'
            assert data['description'] == unicode(self.addon.description)

            # New themes use the preview.png/icon.png filenames instead.
            assert data['headerURL'].startswith(
                '%s%s/header.png?' % (user_media_url('addons'), id_))
            assert data['footerURL'].startswith(
                '%s%s/footer.png?' % (user_media_url('addons'), id_))
            assert data['previewURL'].startswith(
                '%s%s/preview.png?' % (user_media_url('addons'), id_))
            assert data['iconURL'].startswith(
                '%s%s/icon.png?' % (user_media_url('addons'), id_))

            assert data['detailURL'] == (
                'https://omgsh.it%s' % self.persona.addon.get_url_path())
            assert data['updateURL'] == (
                'https://vamo/fr/themes/update-check/' + id_)
            assert data['version'] == '1.0'

    def test_image_urls_without_footer(self):
        # A persona without a footer image has an empty footer URL.
        self.persona.footer = ''
        self.persona.save()
        assert self.persona.footer_url == ''

    def test_json_data_without_footer(self):
        self.persona.footer = ''
        self.persona.save()
        data = self.persona.theme_data
        assert data['footerURL'] == ''
        assert data['footer'] == ''

    def test_theme_data_with_null_description(self):
        addon = addon_factory(type=amo.ADDON_PERSONA, description=None)
        assert addon.persona.theme_data['description'] is None
class TestPreviewModel(TestCase):
    """Preview path/url helpers and file cleanup on delete."""
    fixtures = ['base/previews']

    def test_filename(self):
        preview = Preview.objects.get(pk=24)
        assert 'png' in preview.thumbnail_path
        assert 'png' in preview.image_path

    def test_filename_in_url(self):
        preview = Preview.objects.get(pk=24)
        assert 'png' in preview.thumbnail_url
        assert 'png' in preview.image_url

    def check_delete(self, preview, filename):
        """
        Test that when the Preview object is deleted, its image and thumb
        are deleted from the filesystem.
        """
        try:
            with storage.open(filename, 'w') as f:
                f.write('sample data\n')
            assert storage.exists(filename)
            preview.delete()
            assert not storage.exists(filename)
        finally:
            # Never leave the sample file behind, even if an assert failed.
            if storage.exists(filename):
                storage.delete(filename)

    def test_delete_image(self):
        preview = Preview.objects.get(pk=24)
        self.check_delete(preview, preview.image_path)

    def test_delete_thumbnail(self):
        preview = Preview.objects.get(pk=24)
        self.check_delete(preview, preview.thumbnail_path)
class TestAddonDependencies(TestCase):
    """AddonDependency relations and the all_dependencies property."""
    fixtures = ['base/appversion',
                'base/users',
                'base/addon_5299_gcal',
                'base/addon_3615',
                'base/addon_3723_listed',
                'base/addon_6704_grapple',
                'base/addon_4664_twitterbar']

    def test_dependencies(self):
        ids = [3615, 3723, 4664, 6704]
        addon = Addon.objects.get(id=5299)
        dependencies = Addon.objects.in_bulk(ids)

        for dependency in dependencies.values():
            AddonDependency(addon=addon, dependent_addon=dependency).save()

        # Make sure all dependencies were saved correctly.
        assert sorted([a.id for a in addon.dependencies.all()]) == sorted(ids)

        # Add-on 3723 is disabled and won't show up in `all_dependencies`
        # property.
        assert addon.all_dependencies == [
            dependencies[3615], dependencies[4664], dependencies[6704]]

        # Adding another dependency won't change anything because we're already
        # at the maximum (3).
        new_dep = amo.tests.addon_factory()
        AddonDependency.objects.create(addon=addon, dependent_addon=new_dep)
        assert addon.all_dependencies == [
            dependencies[3615], dependencies[4664], dependencies[6704]]

        # Removing the first dependency will allow the one we just created to
        # be visible.
        dependencies[3615].delete()
        assert addon.all_dependencies == [
            dependencies[4664], dependencies[6704], new_dep]

    def test_unique_dependencies(self):
        # The (addon, dependent_addon) pair is unique at the DB level.
        a = Addon.objects.get(id=5299)
        b = Addon.objects.get(id=3615)

        AddonDependency.objects.create(addon=a, dependent_addon=b)
        assert list(a.dependencies.values_list('id', flat=True)) == [3615]

        with self.assertRaises(IntegrityError):
            AddonDependency.objects.create(addon=a, dependent_addon=b)
class TestListedAddonTwoVersions(TestCase):
    """Regression test: loading an add-on with two listed versions works."""
    fixtures = ['addons/listed-two-versions']

    def test_listed_two_versions(self):
        # Simply fetching the add-on must not raise (bug 563967).
        Addon.objects.get(id=2795)  # bug 563967
class TestAddonFromUpload(UploadTest):
fixtures = ['base/users']
    def setUp(self):
        """Create the app versions and request user the upload tests need."""
        super(TestAddonFromUpload, self).setUp()
        u = UserProfile.objects.get(pk=999)
        core.set_user(u)
        self.platform = amo.PLATFORM_MAC.id
        # Minimal AppVersions so the uploaded add-ons' targets validate.
        for version in ('3.0', '3.6.*'):
            AppVersion.objects.create(application=1, version=version)
        # Some tests switch the active translation; always restore it.
        self.addCleanup(translation.deactivate)
def manifest(self, basename):
return os.path.join(
settings.ROOT, 'src', 'olympia', 'devhub', 'tests', 'addons',
basename)
    def test_denied_guid(self):
        """Add-ons that have been disabled by Mozilla are added to
        DeniedGuid in order to prevent resubmission after deletion."""
        DeniedGuid.objects.create(guid='guid@xpi')
        with self.assertRaises(forms.ValidationError) as e:
            Addon.from_upload(self.get_upload('extension.xpi'),
                              [self.platform])
        assert e.exception.messages == ['Duplicate add-on ID found.']
def test_existing_guid(self):
# Upload addon so we can delete it.
deleted = Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
deleted.update(status=amo.STATUS_PUBLIC)
deleted.delete()
assert deleted.guid == 'guid@xpi'
# Now upload the same add-on again (so same guid).
with self.assertRaises(forms.ValidationError) as e:
Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
assert e.exception.messages == ['Duplicate add-on ID found.']
    def test_existing_guid_same_author(self):
        """The author of a deleted add-on may re-upload the same GUID; the
        deleted add-on's GUID is renamed to free it up."""
        # Upload addon so we can delete it.
        deleted = Addon.from_upload(self.get_upload('extension.xpi'),
                                    [self.platform])
        # Claim the add-on.
        AddonUser(addon=deleted, user=UserProfile.objects.get(pk=999)).save()
        deleted.update(status=amo.STATUS_PUBLIC)
        deleted.delete()
        assert deleted.guid == 'guid@xpi'

        # Now upload the same add-on again (so same guid), checking no
        # validationError is raised this time.
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  [self.platform])
        deleted.reload()
        assert addon.guid == 'guid@xpi'
        # The deleted add-on's guid gets rewritten so it no longer clashes.
        assert deleted.guid == 'guid-reused-by-pk-%s' % addon.pk
    def test_old_soft_deleted_addons_and_upload_non_extension(self):
        """We used to just null out GUIDs on soft deleted addons. This test
        makes sure we don't fail badly when uploading an add-on which isn't an
        extension (has no GUID).
        See https://github.com/mozilla/addons-server/issues/1659."""
        # Upload a couple of addons so we can pretend they were soft deleted.
        deleted1 = Addon.from_upload(
            self.get_upload('extension.xpi'), [self.platform])
        deleted2 = Addon.from_upload(
            self.get_upload('alt-rdf.xpi'), [self.platform])
        AddonUser(addon=deleted1, user=UserProfile.objects.get(pk=999)).save()
        AddonUser(addon=deleted2, user=UserProfile.objects.get(pk=999)).save()

        # Soft delete them like they were before, by nullifying their GUIDs.
        deleted1.update(status=amo.STATUS_PUBLIC, guid=None)
        deleted2.update(status=amo.STATUS_PUBLIC, guid=None)

        # Now upload a new add-on which isn't an extension, and has no GUID.
        # This fails if we try to reclaim the GUID from deleted add-ons: the
        # GUID is None, so it'll try to get the add-on that has a GUID which is
        # None, but many are returned. So make sure we're not trying to reclaim
        # the GUID.
        # The upload itself is the assertion: it must not raise.
        Addon.from_upload(
            self.get_upload('search.xml'), [self.platform])
    def test_xpi_attributes(self):
        """Addon fields are populated from the parsed XPI metadata."""
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  [self.platform])
        assert addon.name == 'xpi name'
        assert addon.guid == 'guid@xpi'
        assert addon.type == amo.ADDON_EXTENSION
        assert addon.status == amo.STATUS_NULL
        assert addon.homepage == 'http://homepage.com'
        assert addon.summary == 'xpi description'
        assert addon.description is None
        # The slug is derived from the add-on name.
        assert addon.slug == 'xpi-name'
def test_xpi_version(self):
addon = Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
v = addon.versions.get()
assert v.version == '0.1'
assert v.files.get().platform == self.platform
assert v.files.get().status == amo.STATUS_AWAITING_REVIEW
def test_xpi_for_multiple_platforms(self):
platforms = [amo.PLATFORM_LINUX.id, amo.PLATFORM_MAC.id]
addon = Addon.from_upload(self.get_upload('extension.xpi'),
platforms)
v = addon.versions.get()
assert sorted([f.platform for f in v.all_files]) == (
sorted(platforms))
    def test_search_attributes(self):
        """Search-engine uploads populate Addon fields from the XML."""
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        assert addon.name == 'search tool'
        # Search engines have no GUID.
        assert addon.guid is None
        assert addon.type == amo.ADDON_SEARCH
        assert addon.status == amo.STATUS_NULL
        assert addon.homepage is None
        assert addon.description is None
        assert addon.slug == 'search-tool'
        assert addon.summary == 'Search Engine for Firefox'
def test_search_version(self):
addon = Addon.from_upload(self.get_upload('search.xml'),
[self.platform])
v = addon.versions.get()
assert v.version == datetime.now().strftime('%Y%m%d')
assert v.files.get().platform == amo.PLATFORM_ALL.id
assert v.files.get().status == amo.STATUS_AWAITING_REVIEW
    def test_no_homepage(self):
        """An XPI without a homepage leaves the field as None."""
        addon = Addon.from_upload(self.get_upload('extension-no-homepage.xpi'),
                                  [self.platform])
        assert addon.homepage is None
def test_default_locale(self):
# Make sure default_locale follows the active translation.
addon = Addon.from_upload(self.get_upload('search.xml'),
[self.platform])
assert addon.default_locale == 'en-US'
translation.activate('es')
addon = Addon.from_upload(self.get_upload('search.xml'),
[self.platform])
assert addon.default_locale == 'es'
def test_validation_completes(self):
upload = self.get_upload('extension.xpi')
assert not upload.validation_timeout
addon = Addon.from_upload(upload, [self.platform])
assert not addon.needs_admin_code_review
def test_validation_timeout(self):
upload = self.get_upload('extension.xpi')
validation = json.loads(upload.validation)
timeout_message = {
'id': ['validator', 'unexpected_exception', 'validation_timeout'],
}
validation['messages'] = [timeout_message] + validation['messages']
upload.validation = json.dumps(validation)
assert upload.validation_timeout
addon = Addon.from_upload(upload, [self.platform])
assert addon.needs_admin_code_review
def test_webextension_generate_guid(self):
addon = Addon.from_upload(
self.get_upload('webextension_no_id.xpi'),
[self.platform])
assert addon.guid is not None
assert addon.guid.startswith('{')
assert addon.guid.endswith('}')
# Uploading the same addon without a id works.
new_addon = Addon.from_upload(
self.get_upload('webextension_no_id.xpi'),
[self.platform])
assert new_addon.guid is not None
assert new_addon.guid != addon.guid
assert addon.guid.startswith('{')
assert addon.guid.endswith('}')
    def test_webextension_reuse_guid(self):
        """A WebExtension with an explicit id keeps it; duplicates are
        rejected."""
        addon = Addon.from_upload(
            self.get_upload('webextension.xpi'),
            [self.platform])

        assert addon.guid == '@webextension-guid'

        # Uploading the same addon with pre-existing id fails
        with self.assertRaises(forms.ValidationError) as e:
            Addon.from_upload(self.get_upload('webextension.xpi'),
                              [self.platform])
        assert e.exception.messages == ['Duplicate add-on ID found.']
    def test_basic_extension_is_marked_as_e10s_unknown(self):
        """A plain extension gets E10S_UNKNOWN feature compatibility."""
        # extension.xpi does not have multiprocessCompatible set to true, so
        # it's marked as not-compatible.
        addon = Addon.from_upload(
            self.get_upload('extension.xpi'),
            [self.platform])

        assert addon.guid
        feature_compatibility = addon.feature_compatibility
        assert feature_compatibility.pk
        assert feature_compatibility.e10s == amo.E10S_UNKNOWN
    def test_extension_is_marked_as_e10s_incompatible(self):
        """An explicitly multiprocess-incompatible extension is flagged."""
        addon = Addon.from_upload(
            self.get_upload('multiprocess_incompatible_extension.xpi'),
            [self.platform])

        assert addon.guid
        feature_compatibility = addon.feature_compatibility
        assert feature_compatibility.pk
        assert feature_compatibility.e10s == amo.E10S_INCOMPATIBLE
    def test_multiprocess_extension_is_marked_as_e10s_compatible(self):
        """A multiprocess-compatible extension is flagged E10S_COMPATIBLE."""
        addon = Addon.from_upload(
            self.get_upload('multiprocess_compatible_extension.xpi'),
            [self.platform])

        assert addon.guid
        feature_compatibility = addon.feature_compatibility
        assert feature_compatibility.pk
        assert feature_compatibility.e10s == amo.E10S_COMPATIBLE
    def test_webextension_is_marked_as_e10s_compatible(self):
        """WebExtensions are inherently e10s compatible."""
        addon = Addon.from_upload(
            self.get_upload('webextension.xpi'),
            [self.platform])

        assert addon.guid
        feature_compatibility = addon.feature_compatibility
        assert feature_compatibility.pk
        assert feature_compatibility.e10s == amo.E10S_COMPATIBLE_WEBEXTENSION
    def test_webextension_resolve_translations(self):
        """__MSG_*__ placeholders are resolved per-locale from the xpi."""
        addon = Addon.from_upload(
            self.get_upload('notify-link-clicks-i18n.xpi'),
            [self.platform])

        # Normalized from `en` to `en-US`
        assert addon.default_locale == 'en-US'
        assert addon.name == 'Notify link clicks i18n'
        assert addon.summary == (
            'Shows a notification when the user clicks on links.')

        # Make sure we set the correct slug
        assert addon.slug == 'notify-link-clicks-i18n'

        # Translations in other locales resolve as well.
        translation.activate('de')
        addon.reload()
        assert addon.name == 'Meine Beispielerweiterung'
        assert addon.summary == u'Benachrichtigt den Benutzer über Linkklicks'
    @patch('olympia.addons.models.parse_addon')
    def test_webext_resolve_translations_corrects_locale(self, parse_addon):
        """Make sure we correct invalid `default_locale` values"""
        # Simulate the manifest parser returning a bare `sv` locale.
        parse_addon.return_value = {
            'default_locale': u'sv',
            'e10s_compatibility': 2,
            'guid': u'notify-link-clicks-i18n@notzilla.org',
            'name': u'__MSG_extensionName__',
            'is_webextension': True,
            'type': 1,
            'apps': [],
            'summary': u'__MSG_extensionDescription__',
            'version': u'1.0',
            'homepage': '...'
        }

        addon = Addon.from_upload(
            self.get_upload('notify-link-clicks-i18n.xpi'),
            [self.platform])

        # Normalized from `sv` to `sv-SE`
        assert addon.default_locale == 'sv-SE'
    @patch('olympia.addons.models.parse_addon')
    def test_webext_resolve_translations_unknown_locale(self, parse_addon):
        """Make sure we use our default language as default
        for invalid locales
        """
        # `xxx` is not a locale we know about at all.
        parse_addon.return_value = {
            'default_locale': u'xxx',
            'e10s_compatibility': 2,
            'guid': u'notify-link-clicks-i18n@notzilla.org',
            'name': u'__MSG_extensionName__',
            'is_webextension': True,
            'type': 1,
            'apps': [],
            'summary': u'__MSG_extensionDescription__',
            'version': u'1.0',
            'homepage': '...'
        }

        addon = Addon.from_upload(
            self.get_upload('notify-link-clicks-i18n.xpi'),
            [self.platform])

        # Normalized from `en` to `en-US`
        assert addon.default_locale == 'en-US'
REDIRECT_URL = 'https://outgoing.prod.mozaws.net/v1/'
class TestFrozenAddons(TestCase):
    """FrozenAddon rows should zero out an add-on's hotness."""

    def test_immediate_freeze(self):
        # Adding a FrozenAddon should immediately drop the addon's hotness.
        addon = Addon.objects.create(type=1, hotness=22)
        FrozenAddon.objects.create(addon=addon)
        assert Addon.objects.get(id=addon.id).hotness == 0
class TestRemoveLocale(TestCase):
    """Addon.remove_locale() should delete only the targeted locale."""

    def test_remove(self):
        a = Addon.objects.create(type=1)
        a.name = {'en-US': 'woo', 'el': 'yeah'}
        a.description = {'en-US': 'woo', 'el': 'yeah', 'he': 'ola'}
        a.save()
        a.remove_locale('el')
        qs = (Translation.objects.filter(localized_string__isnull=False)
              .values_list('locale', flat=True))
        # Only the 'el' translations are gone; the other locales survive.
        assert sorted(qs.filter(id=a.name_id)) == ['en-US']
        assert sorted(qs.filter(id=a.description_id)) == ['en-US', 'he']

    def test_remove_version_locale(self):
        addon = Addon.objects.create(type=amo.ADDON_THEME)
        version = Version.objects.create(addon=addon)
        version.releasenotes = {'fr': 'oui'}
        version.save()
        addon.remove_locale('fr')
        # The version's release notes in that locale are removed as well.
        assert not (Translation.objects.filter(localized_string__isnull=False)
                    .values_list('locale', flat=True))
class TestAddonWatchDisabled(TestCase):
    """Saving an add-on should hide/unhide its files when its disabled
    state changes (user-disabled or admin-disabled)."""

    def setUp(self):
        super(TestAddonWatchDisabled, self).setUp()
        self.addon = Addon(type=amo.ADDON_THEME, disabled_by_user=False,
                           status=amo.STATUS_PUBLIC)
        self.addon.save()

    @patch('olympia.addons.models.File.objects.filter')
    def test_no_disabled_change(self, file_mock):
        # A save with no disabled change touches no files.
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.save()
        assert not mock.unhide_disabled_file.called
        assert not mock.hide_disabled_file.called

    @patch('olympia.addons.models.File.objects.filter')
    def test_disable_addon(self, file_mock):
        # Developer-disabling hides the files.
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.update(disabled_by_user=True)
        assert not mock.unhide_disabled_file.called
        assert mock.hide_disabled_file.called

    @patch('olympia.addons.models.File.objects.filter')
    def test_admin_disable_addon(self, file_mock):
        # Admin-disabling (STATUS_DISABLED) also hides the files.
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.update(status=amo.STATUS_DISABLED)
        assert not mock.unhide_disabled_file.called
        assert mock.hide_disabled_file.called

    @patch('olympia.addons.models.File.objects.filter')
    def test_enable_addon(self, file_mock):
        # Re-enabling a disabled add-on unhides the files.
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.update(status=amo.STATUS_DISABLED)
        mock.reset_mock()
        self.addon.update(status=amo.STATUS_PUBLIC)
        assert mock.unhide_disabled_file.called
        assert not mock.hide_disabled_file.called
class TestTrackAddonStatusChange(TestCase):
    """track_addon_status_change() should fire on creation and on status
    changes, but not on unrelated field updates."""

    def create_addon(self, **kwargs):
        """Create an add-on, defaulting its status to STATUS_NULL."""
        return addon_factory(kwargs.pop('status', amo.STATUS_NULL), **kwargs)

    def test_increment_new_status(self):
        with patch('olympia.addons.models.track_addon_status_change') as mock_:
            addon = Addon()
            addon.save()
        mock_.assert_called_with(addon)

    def test_increment_updated_status(self):
        addon = self.create_addon()
        with patch('olympia.addons.models.track_addon_status_change') as mock_:
            addon.update(status=amo.STATUS_PUBLIC)

        addon.reload()
        # Bug fix: the original line was a bare comparison without `assert`,
        # so this test could never fail.
        assert mock_.call_args[0][0].status == addon.status

    def test_ignore_non_status_changes(self):
        addon = self.create_addon()
        with patch('olympia.addons.models.track_addon_status_change') as mock_:
            addon.update(type=amo.ADDON_THEME)
        assert not mock_.called, (
            # Bug fix: the original formatted `self.mock_incr.call_args`, an
            # attribute that does not exist, so a failure would raise
            # AttributeError instead of reporting the unexpected call.
            'Unexpected call: {}'.format(mock_.call_args)
        )

    def test_increment_all_addon_statuses(self):
        addon = self.create_addon(status=amo.STATUS_PUBLIC)
        with patch('olympia.addons.models.statsd.incr') as mock_incr:
            track_addon_status_change(addon)
        mock_incr.assert_any_call(
            'addon_status_change.all.status_{}'.format(amo.STATUS_PUBLIC)
        )
class TestSearchSignals(amo.tests.ESTestCase):
    """Add-on create/update/disable/delete should be reflected in the
    Elasticsearch index via signals."""

    def setUp(self):
        super(TestSearchSignals, self).setUp()
        self.addCleanup(self.cleanup)

    def cleanup(self):
        # Wipe the index so tests don't leak documents into each other.
        self.empty_index('default')

    def test_no_addons(self):
        assert Addon.search_public().count() == 0

    def test_create(self):
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1
        assert Addon.search_public().query(name='woo')[0].id == addon.id

    def test_update(self):
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1

        addon.name = 'yeah'
        addon.save()
        self.refresh()

        # The document is updated in place, not duplicated.
        assert Addon.search_public().count() == 1
        assert Addon.search_public().query(name='woo').count() == 0
        assert Addon.search_public().query(name='yeah')[0].id == addon.id

    def test_user_disable(self):
        """Test that add-ons are removed from search results after being
        disabled by their developers."""
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1

        addon.update(disabled_by_user=True)
        self.refresh()
        assert Addon.search_public().count() == 0

    def test_switch_to_unlisted(self):
        """Test that add-ons are removed from search results after being
        switched to unlisted."""
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1

        addon.current_version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
        self.refresh()
        assert Addon.search_public().count() == 0

    def test_switch_to_listed(self):
        """Test that add-ons created as unlisted do not appear in search
        results until switched to listed."""
        addon = addon_factory(
            name='woo', version_kw={'channel': amo.RELEASE_CHANNEL_UNLISTED},
            status=amo.STATUS_NULL)
        self.refresh()
        assert Addon.search_public().count() == 0

        latest_version = addon.find_latest_version(
            channel=amo.RELEASE_CHANNEL_UNLISTED)
        latest_version.update(channel=amo.RELEASE_CHANNEL_LISTED)
        addon.update(status=amo.STATUS_PUBLIC)
        self.refresh()
        assert Addon.search_public().count() == 1

    def test_delete(self):
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1

        addon.delete('woo')
        self.refresh()
        assert Addon.search_public().count() == 0
class TestLanguagePack(TestCase, amo.tests.AMOPaths):
    """get_localepicker() should extract localepicker data only for
    language packs with a mobile-platform file."""

    def setUp(self):
        super(TestLanguagePack, self).setUp()
        self.addon = amo.tests.addon_factory(type=amo.ADDON_LPAPP,
                                             status=amo.STATUS_PUBLIC)
        self.platform_all = amo.PLATFORM_ALL.id
        self.platform_mob = amo.PLATFORM_ANDROID.id
        self.version = self.addon.current_version

    def test_extract(self):
        File.objects.create(platform=self.platform_mob, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker()
        assert 'title=Select a language' in self.addon.get_localepicker()

    def test_extract_no_file(self):
        # The 'langpack' xpi has no localepicker payload.
        File.objects.create(platform=self.platform_mob, version=self.version,
                            filename=self.xpi_path('langpack'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker() == ''

    def test_extract_no_files(self):
        assert self.addon.get_localepicker() == ''

    def test_extract_not_language_pack(self):
        File.objects.create(platform=self.platform_mob, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker()
        # Switching the add-on type away from language pack disables it.
        self.addon.update(type=amo.ADDON_EXTENSION)
        assert self.addon.get_localepicker() == ''

    def test_extract_not_platform_mobile(self):
        # A PLATFORM_ALL file does not count; a mobile file is required.
        File.objects.create(platform=self.platform_all, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker() == ''
class TestCompatOverride(TestCase):
    """CompatOverride / CompatOverrideRange behavior, in particular the
    collapsed_ranges() grouping of version/app ranges."""

    def setUp(self):
        super(TestCompatOverride, self).setUp()
        self.app = amo.APP_IDS[1]

        # 'one': a single default range (min/max version '0'..'*').
        one = CompatOverride.objects.create(guid='one')
        CompatOverrideRange.objects.create(compat=one, app=self.app.id)

        # 'two': two ranges over the same add-on versions but different
        # application version ranges.
        two = CompatOverride.objects.create(guid='two')
        CompatOverrideRange.objects.create(compat=two, app=self.app.id,
                                           min_version='1', max_version='2')
        CompatOverrideRange.objects.create(compat=two, app=self.app.id,
                                           min_version='1', max_version='2',
                                           min_app_version='3',
                                           max_app_version='4')

    def check(self, obj, **kw):
        """Check that key/value pairs in kw match attributes of obj."""
        for key, expected in kw.items():
            actual = getattr(obj, key)
            assert actual == expected

    def test_is_hosted(self):
        # is_hosted() is True only when an Addon with the same guid exists.
        c = CompatOverride.objects.create(guid='a')
        assert not c.is_hosted()

        Addon.objects.create(type=1, guid='b')
        c = CompatOverride.objects.create(guid='b')
        assert c.is_hosted()

    def test_override_type(self):
        one = CompatOverride.objects.get(guid='one')

        # The default is incompatible.
        c = CompatOverrideRange.objects.create(compat=one, app=1)
        assert c.override_type() == 'incompatible'

        c = CompatOverrideRange.objects.create(compat=one, app=1, type=0)
        assert c.override_type() == 'compatible'

    def test_guid_match(self):
        # We hook up the add-on automatically if we see a matching guid.
        addon = Addon.objects.create(id=1, guid='oh yeah', type=1)
        c = CompatOverride.objects.create(guid=addon.guid)
        assert c.addon_id == addon.id

        c = CompatOverride.objects.create(guid='something else')
        assert c.addon is None

    def test_transformer(self):
        compats = list(CompatOverride.objects
                       .transform(CompatOverride.transformer))
        ranges = list(CompatOverrideRange.objects.all())
        # If the transformer works then we won't have any more queries.
        with self.assertNumQueries(0):
            for c in compats:
                assert c.compat_ranges == (
                    [r for r in ranges if r.compat_id == c.id])

    def test_collapsed_ranges(self):
        # Test that we get back the right structures from collapsed_ranges().
        c = CompatOverride.objects.get(guid='one')
        r = c.collapsed_ranges()

        assert len(r) == 1
        compat_range = r[0]
        self.check(compat_range, type='incompatible', min='0', max='*')

        assert len(compat_range.apps) == 1
        self.check(compat_range.apps[0], app=amo.FIREFOX, min='0', max='*')

    def test_collapsed_ranges_multiple_versions(self):
        # Distinct add-on version ranges produce separate entries.
        c = CompatOverride.objects.get(guid='one')
        CompatOverrideRange.objects.create(compat=c, app=1,
                                           min_version='1', max_version='2',
                                           min_app_version='3',
                                           max_app_version='3.*')
        r = c.collapsed_ranges()

        assert len(r) == 2

        self.check(r[0], type='incompatible', min='0', max='*')
        assert len(r[0].apps) == 1
        self.check(r[0].apps[0], app=amo.FIREFOX, min='0', max='*')

        self.check(r[1], type='incompatible', min='1', max='2')
        assert len(r[1].apps) == 1
        self.check(r[1].apps[0], app=amo.FIREFOX, min='3', max='3.*')

    def test_collapsed_ranges_different_types(self):
        # If the override ranges have different types they should be separate
        # entries.
        c = CompatOverride.objects.get(guid='one')
        CompatOverrideRange.objects.create(compat=c, app=1, type=0,
                                           min_app_version='3',
                                           max_app_version='3.*')
        r = c.collapsed_ranges()

        assert len(r) == 2

        self.check(r[0], type='compatible', min='0', max='*')
        assert len(r[0].apps) == 1
        self.check(r[0].apps[0], app=amo.FIREFOX, min='3', max='3.*')

        self.check(r[1], type='incompatible', min='0', max='*')
        assert len(r[1].apps) == 1
        self.check(r[1].apps[0], app=amo.FIREFOX, min='0', max='*')

    def test_collapsed_ranges_multiple_apps(self):
        # Equal add-on version ranges collapse into one entry with
        # multiple app ranges.
        c = CompatOverride.objects.get(guid='two')
        r = c.collapsed_ranges()

        assert len(r) == 1
        compat_range = r[0]
        self.check(compat_range, type='incompatible', min='1', max='2')

        assert len(compat_range.apps) == 2
        self.check(compat_range.apps[0], app=amo.FIREFOX, min='0', max='*')
        self.check(compat_range.apps[1], app=amo.FIREFOX, min='3', max='4')

    def test_collapsed_ranges_multiple_versions_and_apps(self):
        c = CompatOverride.objects.get(guid='two')
        CompatOverrideRange.objects.create(min_version='5', max_version='6',
                                           compat=c, app=1)
        r = c.collapsed_ranges()

        assert len(r) == 2
        self.check(r[0], type='incompatible', min='1', max='2')

        assert len(r[0].apps) == 2
        self.check(r[0].apps[0], app=amo.FIREFOX, min='0', max='*')
        self.check(r[0].apps[1], app=amo.FIREFOX, min='3', max='4')

        self.check(r[1], type='incompatible', min='5', max='6')
        assert len(r[1].apps) == 1
        self.check(r[1].apps[0], app=amo.FIREFOX, min='0', max='*')
class TestIncompatibleVersions(TestCase):
    """Version save/delete signals should create/remove IncompatibleVersions
    rows when the version falls inside a CompatOverrideRange."""

    def setUp(self):
        super(TestIncompatibleVersions, self).setUp()
        self.app = amo.APP_IDS[amo.FIREFOX.id]
        self.addon = Addon.objects.create(guid='r@b', type=amo.ADDON_EXTENSION)

    def test_signals_min(self):
        assert IncompatibleVersions.objects.count() == 0

        c = CompatOverride.objects.create(guid='r@b')
        CompatOverrideRange.objects.create(compat=c, app=self.app.id,
                                           min_version='0',
                                           max_version='1.0')

        # Test the max version matched.
        version1 = Version.objects.create(id=2, addon=self.addon,
                                          version='1.0')
        assert IncompatibleVersions.objects.filter(
            version=version1).count() == 1
        assert IncompatibleVersions.objects.count() == 1

        # Test the lower range.
        version2 = Version.objects.create(id=1, addon=self.addon,
                                          version='0.5')
        assert IncompatibleVersions.objects.filter(
            version=version2).count() == 1
        assert IncompatibleVersions.objects.count() == 2

        # Test delete signals.
        version1.delete()
        assert IncompatibleVersions.objects.count() == 1

        version2.delete()
        assert IncompatibleVersions.objects.count() == 0

    def test_signals_max(self):
        assert IncompatibleVersions.objects.count() == 0

        c = CompatOverride.objects.create(guid='r@b')
        CompatOverrideRange.objects.create(compat=c, app=self.app.id,
                                           min_version='1.0',
                                           max_version='*')

        # Test the min_version matched.
        version1 = Version.objects.create(addon=self.addon, version='1.0')
        assert IncompatibleVersions.objects.filter(
            version=version1).count() == 1
        assert IncompatibleVersions.objects.count() == 1

        # Test the upper range.
        version2 = Version.objects.create(addon=self.addon, version='99.0')
        assert IncompatibleVersions.objects.filter(
            version=version2).count() == 1
        assert IncompatibleVersions.objects.count() == 2

        # Test delete signals.
        version1.delete()
        assert IncompatibleVersions.objects.count() == 1

        version2.delete()
        assert IncompatibleVersions.objects.count() == 0
class TestAddonApprovalsCounter(TestCase):
    """AddonApprovalsCounter class-method helpers: increment, reset and
    content-approval, with or without a pre-existing row."""

    def setUp(self):
        self.addon = addon_factory()

    def test_increment_existing(self):
        # Incrementing creates the row if it does not exist yet...
        assert not AddonApprovalsCounter.objects.filter(
            addon=self.addon).exists()
        AddonApprovalsCounter.increment_for_addon(self.addon)
        approval_counter = AddonApprovalsCounter.objects.get(addon=self.addon)
        assert approval_counter.counter == 1
        self.assertCloseToNow(approval_counter.last_human_review)
        self.assertCloseToNow(approval_counter.last_content_review)
        # ...and bumps counter + review dates on subsequent calls.
        approval_counter.update(
            last_human_review=self.days_ago(100),
            last_content_review=self.days_ago(100))
        AddonApprovalsCounter.increment_for_addon(self.addon)
        approval_counter.reload()
        assert approval_counter.counter == 2
        self.assertCloseToNow(approval_counter.last_human_review)
        self.assertCloseToNow(approval_counter.last_content_review)

    def test_increment_non_existing(self):
        approval_counter = AddonApprovalsCounter.objects.create(
            addon=self.addon, counter=0)
        AddonApprovalsCounter.increment_for_addon(self.addon)
        approval_counter.reload()
        assert approval_counter.counter == 1
        self.assertCloseToNow(approval_counter.last_human_review)
        self.assertCloseToNow(approval_counter.last_content_review)

    def test_reset_existing(self):
        approval_counter = AddonApprovalsCounter.objects.create(
            addon=self.addon, counter=42,
            last_content_review=self.days_ago(60),
            last_human_review=self.days_ago(30))
        AddonApprovalsCounter.reset_for_addon(self.addon)
        approval_counter.reload()
        assert approval_counter.counter == 0
        # Dates were not touched.
        self.assertCloseToNow(
            approval_counter.last_human_review, now=self.days_ago(30))
        self.assertCloseToNow(
            approval_counter.last_content_review, now=self.days_ago(60))

    def test_reset_non_existing(self):
        assert not AddonApprovalsCounter.objects.filter(
            addon=self.addon).exists()
        AddonApprovalsCounter.reset_for_addon(self.addon)
        approval_counter = AddonApprovalsCounter.objects.get(addon=self.addon)
        assert approval_counter.counter == 0

    def test_approve_content_non_existing(self):
        assert not AddonApprovalsCounter.objects.filter(
            addon=self.addon).exists()
        AddonApprovalsCounter.approve_content_for_addon(self.addon)
        approval_counter = AddonApprovalsCounter.objects.get(addon=self.addon)
        assert approval_counter.counter == 0
        assert approval_counter.last_human_review is None
        self.assertCloseToNow(approval_counter.last_content_review)

    def test_approve_content_existing(self):
        approval_counter = AddonApprovalsCounter.objects.create(
            addon=self.addon, counter=42,
            last_content_review=self.days_ago(367),
            last_human_review=self.days_ago(10))
        AddonApprovalsCounter.approve_content_for_addon(self.addon)
        approval_counter.reload()
        # This was updated to now.
        self.assertCloseToNow(approval_counter.last_content_review)
        # Those fields were not touched.
        assert approval_counter.counter == 42
        self.assertCloseToNow(
            approval_counter.last_human_review, now=self.days_ago(10))
| 39.195106 | 79 | 0.639256 |
import json
import os
import time
from datetime import datetime, timedelta
from waffle.testutils import override_switch
from django import forms
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.db import IntegrityError
from django.utils import translation
import pytest
from mock import Mock, patch
from olympia import amo, core
from olympia.activity.models import ActivityLog, AddonLog
from olympia.addons.models import (
Addon, AddonApprovalsCounter, AddonCategory, AddonDependency,
AddonFeatureCompatibility, AddonReviewerFlags, AddonUser, AppSupport,
Category, CompatOverride, CompatOverrideRange, DeniedGuid, DeniedSlug,
FrozenAddon, IncompatibleVersions, Persona, Preview,
track_addon_status_change)
from olympia.amo.templatetags.jinja_helpers import absolutify, user_media_url
from olympia.amo.tests import (
TestCase, addon_factory, collection_factory, version_factory)
from olympia.applications.models import AppVersion
from olympia.bandwagon.models import Collection, FeaturedCollection
from olympia.constants.categories import CATEGORIES
from olympia.devhub.models import RssKey
from olympia.files.models import File
from olympia.files.tests.test_models import UploadTest
from olympia.ratings.models import Rating, RatingFlag
from olympia.translations.models import (
Translation, TranslationSequence, delete_translation)
from olympia.users.models import UserProfile
from olympia.versions.compare import version_int
from olympia.versions.models import ApplicationsVersions, Version
class TestCleanSlug(TestCase):
    """Addon.clean_slug() should produce unique, valid, length-capped
    slugs, avoiding denied slugs and clashes with deleted add-ons."""

    def test_clean_slug_new_object(self):
        # Make sure there's at least an addon with the "addon" slug;
        # clashing ones should be "addon1", "addon2", ...
        a = Addon.objects.create(name='Addon')
        assert a.slug == 'addon'

        # Start with a first clash. This should give us 'addon1'.
        # We're not saving yet, we're testing the slug creation without an id.
        b = Addon(name='Addon')
        b.clean_slug()
        assert b.slug == 'addon1'
        # Now save the instance to the database for future clashes.
        b.save()

        # Test on another object without an id.
        c = Addon(name='Addon')
        c.clean_slug()
        assert c.slug == 'addon2'

        # Even if an addon is deleted, don't clash with its slug.
        c.status = amo.STATUS_DELETED
        # Now save the instance to the database for future clashes.
        c.save()

        # And yet another object without an id. Make sure we're not trying to
        # assign the 'addon2' slug from the deleted addon.
        d = Addon(name='Addon')
        d.clean_slug()
        assert d.slug == 'addon3'

    def test_clean_slug_no_name(self):
        # Create an addon and save it to have an id.
        a = Addon.objects.create()
        # Start over: don't use the name nor the id to generate the slug.
        a.slug = a.name = ''
        a.clean_slug()

        # Slugs that are generated from add-ons without an name use
        # uuid without the node bit so have the length 20.
        assert len(a.slug) == 20

    def test_clean_slug_with_name(self):
        # Make sure there's at least an addon with the 'fooname' slug,
        # subsequent ones should be 'fooname1', 'fooname2' ...
        a = Addon.objects.create(name='fooname')
        assert a.slug == 'fooname'

        b = Addon(name='fooname')
        b.clean_slug()
        assert b.slug == 'fooname1'

    def test_clean_slug_with_slug(self):
        # Make sure there's at least an addon with the 'fooslug' slug,
        # subsequent ones should be 'fooslug1', 'fooslug2' ...
        a = Addon.objects.create(name='fooslug')
        assert a.slug == 'fooslug'

        b = Addon(name='fooslug')
        b.clean_slug()
        assert b.slug == 'fooslug1'

    def test_clean_slug_denied_slug(self):
        denied_slug = 'foodenied'
        DeniedSlug.objects.create(name=denied_slug)

        a = Addon(slug=denied_slug)
        a.clean_slug()
        # Denied slugs (like 'activate' or IDs) get a "~" appended to them.
        assert a.slug == '%s~' % denied_slug
        a.save()

        b = Addon(slug=denied_slug)
        b.clean_slug()
        assert b.slug == '%s~1' % denied_slug

    def test_clean_slug_denied_slug_long_slug(self):
        long_slug = 'this_is_a_very_long_slug_that_is_longer_than_thirty_chars'
        DeniedSlug.objects.create(name=long_slug[:30])

        # The denied slug is truncated to fit the "~" suffix.
        a = Addon.objects.create(slug=long_slug[:30])
        assert a.slug == '%s~' % long_slug[:29]

        # If there's a clash, use the standard clash resolution.
        a = Addon.objects.create(slug=long_slug[:30])
        assert a.slug == '%s1' % long_slug[:28]

    def test_clean_slug_long_slug(self):
        long_slug = 'this_is_a_very_long_slug_that_is_longer_than_thirty_chars'

        # Slugs are capped at 30 characters.
        a = Addon.objects.create(slug=long_slug)
        assert a.slug == long_slug[:30]

        # Clash resolution truncates further to make room for the counter.
        b = Addon(slug=long_slug)
        b.clean_slug()
        assert b.slug == '%s1' % long_slug[:28]

    def test_clean_slug_always_slugify(self):
        illegal_chars = 'some spaces and !?@'

        # Slugify if there's a slug provided.
        a = Addon(slug=illegal_chars)
        a.clean_slug()
        assert a.slug.startswith('some-spaces-and'), a.slug

        # Also slugify if there's no slug provided.
        b = Addon(name=illegal_chars)
        b.clean_slug()
        assert b.slug.startswith('some-spaces-and'), b.slug

    def test_clean_slug_worst_case_scenario(self):
        long_slug = 'this_is_a_very_long_slug_that_is_longer_than_thirty_chars'

        # Exhaust all 100 clash-resolution attempts.
        for i in range(100):
            Addon.objects.create(slug=long_slug)

        with self.assertRaises(RuntimeError):  # Fail on the 100th clash.
            Addon.objects.create(slug=long_slug)

    def test_clean_slug_ends_with_dash(self):
        """Addon name ending with a dash should still work: See bug 1206063.
        Trailing dashes are kept and the clash counter is appended after."""
        a = Addon.objects.create(name='ends with dash -')
        assert a.slug == 'ends-with-dash-'
        assert a.slug == amo.utils.slugify(a.slug)

        b = Addon.objects.create(name='ends with dash -')
        assert b.slug == 'ends-with-dash-1'
        assert b.slug == amo.utils.slugify(b.slug)

    def test_clean_slug_unicode(self):
        # Unicode characters are preserved in slugs.
        addon = Addon.objects.create(name=u'Addön 1')
        assert addon.slug == u'addön-1'
class TestAddonManager(TestCase):
    """Behavior of the Addon managers (objects vs unfiltered) and their
    queryset helpers (featured, listed, public, valid, ...)."""

    fixtures = ['base/appversion', 'base/users',
                'base/addon_3615', 'addons/featured', 'addons/test_manager',
                'base/collections', 'base/featured',
                'bandwagon/featured_collections', 'base/addon_5299_gcal']

    def setUp(self):
        super(TestAddonManager, self).setUp()
        core.set_user(None)
        self.addon = Addon.objects.get(pk=3615)

    def test_managers_public(self):
        assert self.addon in Addon.objects.all()
        assert self.addon in Addon.unfiltered.all()

    def test_managers_unlisted(self):
        # Unlisted add-ons are still visible through both managers.
        self.make_addon_unlisted(self.addon)
        assert self.addon in Addon.objects.all()
        assert self.addon in Addon.unfiltered.all()

    def test_managers_unlisted_deleted(self):
        # Deleted add-ons only show up in the unfiltered manager.
        self.make_addon_unlisted(self.addon)
        self.addon.update(status=amo.STATUS_DELETED)
        assert self.addon not in Addon.objects.all()
        assert self.addon in Addon.unfiltered.all()

    def test_managers_deleted(self):
        self.addon.update(status=amo.STATUS_DELETED)
        assert self.addon not in Addon.objects.all()
        assert self.addon in Addon.unfiltered.all()

    def test_featured(self):
        assert Addon.objects.featured(amo.FIREFOX).count() == 3

    def test_listed(self):
        # We need this for the fixtures, but it messes up the tests.
        self.addon.update(disabled_by_user=True)
        # Now continue as normal.
        Addon.objects.filter(id=5299).update(disabled_by_user=True)
        q = Addon.objects.listed(amo.FIREFOX, amo.STATUS_PUBLIC)
        assert len(q.all()) == 4

        # Pick one of the listed addons.
        addon = Addon.objects.get(pk=2464)
        assert addon in q.all()

        # Disabling hides it.
        addon.disabled_by_user = True
        addon.save()

        # Should be 3 now, since the one is now disabled.
        assert q.count() == 3

        # If we search for public or unreviewed we find it.
        addon.disabled_by_user = False
        addon.status = amo.STATUS_NOMINATED
        addon.save()
        assert q.count() == 3
        assert Addon.objects.listed(amo.FIREFOX, amo.STATUS_PUBLIC,
                                    amo.STATUS_NOMINATED).count() == 4

        # Can't find it without a file.
        addon.versions.get().files.get().delete()
        assert q.count() == 3

    def test_public(self):
        for a in Addon.objects.public():
            assert a.status == amo.STATUS_PUBLIC

    def test_valid(self):
        addon = Addon.objects.get(pk=5299)
        addon.update(disabled_by_user=True)
        objs = Addon.objects.valid()

        for addon in objs:
            assert addon.status in amo.VALID_ADDON_STATUSES
            assert not addon.disabled_by_user

    def test_valid_disabled_by_user(self):
        # valid_and_disabled_and_pending keeps user-disabled add-ons.
        before = Addon.objects.valid_and_disabled_and_pending().count()
        addon = Addon.objects.get(pk=5299)
        addon.update(disabled_by_user=True)
        assert Addon.objects.valid_and_disabled_and_pending().count() == before

    def test_valid_disabled_by_admin(self):
        # ...and admin-disabled ones too.
        before = Addon.objects.valid_and_disabled_and_pending().count()
        addon = Addon.objects.get(pk=5299)
        addon.update(status=amo.STATUS_DISABLED)
        assert Addon.objects.valid_and_disabled_and_pending().count() == before

    def test_invalid_deleted(self):
        # ...but deleted add-ons drop out.
        before = Addon.objects.valid_and_disabled_and_pending().count()
        addon = Addon.objects.get(pk=5299)
        addon.update(status=amo.STATUS_DELETED)
        assert Addon.objects.valid_and_disabled_and_pending().count() == (
            before - 1)

    def test_valid_disabled_pending(self):
        # Pending add-ons are included.
        before = Addon.objects.valid_and_disabled_and_pending().count()
        amo.tests.addon_factory(status=amo.STATUS_PENDING)
        assert Addon.objects.valid_and_disabled_and_pending().count() == (
            before + 1)

    def test_valid_disabled_version(self):
        before = Addon.objects.valid_and_disabled_and_pending().count()

        # Add-on, no version. Doesn't count.
        addon = amo.tests.addon_factory()
        addon.update(_current_version=None, _signal=False)
        assert Addon.objects.valid_and_disabled_and_pending().count() == before

        # Theme, no version. Counts.
        addon = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
        addon.update(_current_version=None, _signal=False)
        assert Addon.objects.valid_and_disabled_and_pending().count() == (
            before + 1)

    def test_new_featured(self):
        f = Addon.objects.featured(amo.FIREFOX)
        assert f.count() == 3
        assert sorted(x.id for x in f) == (
            [2464, 7661, 15679])
        f = Addon.objects.featured(amo.THUNDERBIRD)
        assert not f.exists()

    def test_filter_for_many_to_many(self):
        # Check https://bugzilla.mozilla.org/show_bug.cgi?id=1142035.
        collection = self.addon.collections.first()
        assert collection.addons.get() == self.addon

        # Addon shouldn't be listed in collection.addons if it's deleted.

        # Unlisted.
        self.make_addon_unlisted(self.addon)
        collection = Collection.objects.get(pk=collection.pk)
        assert collection.addons.get() == self.addon

        # Deleted and unlisted.
        self.addon.update(status=amo.STATUS_DELETED)
        collection = Collection.objects.get(pk=collection.pk)
        assert collection.addons.count() == 0

        # Only deleted.
        self.make_addon_listed(self.addon)
        collection = Collection.objects.get(pk=collection.pk)
        assert collection.addons.count() == 0

    def test_no_filter_for_relations(self):
        # Check https://bugzilla.mozilla.org/show_bug.cgi?id=1142035.
        version = self.addon.versions.first()
        assert version.addon == self.addon

        # Deleted or unlisted, version.addon should still work.

        # Unlisted.
        self.make_addon_unlisted(self.addon)
        version = Version.objects.get(pk=version.pk)  # Reload from db.
        assert version.addon == self.addon

        # Deleted and unlisted.
        self.addon.update(status=amo.STATUS_DELETED)
        version = Version.objects.get(pk=version.pk)  # Reload from db.
        assert version.addon == self.addon

        # Only deleted.
        self.make_addon_listed(self.addon)
        version = Version.objects.get(pk=version.pk)  # Reload from db.
        assert version.addon == self.addon
class TestAddonModels(TestCase):
fixtures = ['base/appversion',
'base/collections',
'base/featured',
'base/users',
'base/addon_5299_gcal',
'base/addon_3615',
'base/addon_3723_listed',
'base/addon_6704_grapple.json',
'base/addon_4594_a9',
'base/addon_4664_twitterbar',
'base/thunderbird',
'addons/featured',
'addons/invalid_latest_version',
'addons/denied',
'bandwagon/featured_collections']
    def setUp(self):
        super(TestAddonModels, self).setUp()
        TranslationSequence.objects.create(id=99243)
        # Pin FIREFOX.latest_version to a fixed value for determinism;
        # restored in tearDown().
        self.old_version = amo.FIREFOX.latest_version
        amo.FIREFOX.latest_version = '3.6.15'
    def tearDown(self):
        # Restore the value patched in setUp().
        amo.FIREFOX.latest_version = self.old_version
        super(TestAddonModels, self).tearDown()
    def test_current_version(self):
        """current_version should point at the fixture's expected version."""
        a = Addon.objects.get(pk=3615)
        assert a.current_version.id == 81551
    def test_current_version_listed(self):
        """A listed add-on's current_version matches the fixture."""
        a = Addon.objects.get(pk=3723)
        assert a.current_version.id == 89774
    def test_current_version_listed_no_version(self):
        """With no versions left, current_version is None."""
        Addon.objects.filter(pk=3723).update(_current_version=None)
        Version.objects.filter(addon=3723).delete()
        a = Addon.objects.get(pk=3723)
        assert a.current_version is None
    def test_latest_unlisted_version(self):
        """latest_unlisted_version picks the newest non-disabled unlisted
        version; the property is cached, resettable via `del`, and
        writeable."""
        addon = Addon.objects.get(pk=3615)
        an_unlisted_version = version_factory(
            addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        an_unlisted_version.update(created=self.days_ago(2))
        a_newer_unlisted_version = version_factory(
            addon=addon, version='4.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        a_newer_unlisted_version.update(created=self.days_ago(1))
        # Newest of all but disabled: must not be picked.
        version_factory(
            addon=addon, version='5.0', channel=amo.RELEASE_CHANNEL_UNLISTED,
            file_kw={'status': amo.STATUS_DISABLED})
        assert addon.latest_unlisted_version == a_newer_unlisted_version

        # Make sure the property is cached.
        an_even_newer_unlisted_version = version_factory(
            addon=addon, version='6.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        assert addon.latest_unlisted_version == a_newer_unlisted_version

        # Make sure it can be deleted to reset it.
        del addon.latest_unlisted_version
        assert addon.latest_unlisted_version == an_even_newer_unlisted_version

        # Make sure it's writeable.
        addon.latest_unlisted_version = an_unlisted_version
        assert addon.latest_unlisted_version == an_unlisted_version
    def test_find_latest_version(self):
        """find_latest_version(None) returns the newest version regardless
        of channel."""
        addon = Addon.objects.get(pk=3615)
        addon.current_version.update(created=self.days_ago(2))
        new_version = version_factory(addon=addon, version='2.0')
        new_version.update(created=self.days_ago(1))
        assert addon.find_latest_version(None) == new_version
        another_new_version = version_factory(
            addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
        assert addon.find_latest_version(None) == another_new_version
    def test_find_latest_version_different_channel(self):
        """Passing a channel restricts find_latest_version() to versions in
        that channel."""
        addon = Addon.objects.get(pk=3615)
        addon.current_version.update(created=self.days_ago(2))
        new_version = version_factory(addon=addon, version='2.0')
        new_version.update(created=self.days_ago(1))
        unlisted_version = version_factory(
            addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)

        assert (
            addon.find_latest_version(channel=amo.RELEASE_CHANNEL_LISTED) ==
            new_version)
        assert (
            addon.find_latest_version(channel=amo.RELEASE_CHANNEL_UNLISTED) ==
            unlisted_version)
def test_find_latest_version_no_version(self):
Addon.objects.filter(pk=3723).update(_current_version=None)
Version.objects.filter(addon=3723).delete()
addon = Addon.objects.get(pk=3723)
assert addon.find_latest_version(None) is None
    def test_find_latest_version_ignore_beta(self):
        """Beta versions are excluded by default."""
        addon = Addon.objects.get(pk=3615)

        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(1))
        assert addon.find_latest_version(None).id == v1.id

        version_factory(addon=addon, version='2.0beta',
                        file_kw={'status': amo.STATUS_BETA})
        # Still should be v1
        assert addon.find_latest_version(None).id == v1.id
    def test_find_latest_version_ignore_disabled(self):
        """Disabled versions are excluded by default."""
        addon = Addon.objects.get(pk=3615)

        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(1))
        assert addon.find_latest_version(None).id == v1.id

        version_factory(addon=addon, version='2.0',
                        file_kw={'status': amo.STATUS_DISABLED})
        # Still should be v1
        assert addon.find_latest_version(None).id == v1.id
    def test_find_latest_version_only_exclude_beta(self):
        """Excluding only STATUS_BETA keeps disabled versions eligible."""
        addon = Addon.objects.get(pk=3615)

        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(2))
        assert addon.find_latest_version(
            None, exclude=(amo.STATUS_BETA,)).id == v1.id

        v2 = version_factory(addon=addon, version='2.0',
                             file_kw={'status': amo.STATUS_DISABLED})
        v2.update(created=self.days_ago(1))

        version_factory(addon=addon, version='3.0beta',
                        file_kw={'status': amo.STATUS_BETA})
        # Should be v2 since we don't exclude disabled, but do exclude beta.
        assert addon.find_latest_version(
            None, exclude=(amo.STATUS_BETA,)).id == v2.id
    @override_switch('beta-versions', active=True)
    def test_find_latest_version_dont_exclude_anything_with_beta(self):
        """With exclude=() even beta versions are eligible."""
        addon = Addon.objects.get(pk=3615)

        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(2))

        assert addon.find_latest_version(None, exclude=()).id == v1.id

        v2 = version_factory(addon=addon, version='2.0',
                             file_kw={'status': amo.STATUS_DISABLED})
        v2.update(created=self.days_ago(1))

        v3 = version_factory(addon=addon, version='3.0beta',
                             file_kw={'status': amo.STATUS_BETA})

        # Should be v3 since we don't exclude anything.
        assert addon.find_latest_version(None, exclude=()).id == v3.id
    def test_find_latest_version_dont_exclude_anything(self):
        """With exclude=() even disabled versions are eligible."""
        addon = Addon.objects.get(pk=3615)

        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(2))

        assert addon.find_latest_version(None, exclude=()).id == v1.id

        v2 = version_factory(addon=addon, version='2.0',
                             file_kw={'status': amo.STATUS_DISABLED})
        v2.update(created=self.days_ago(1))

        # Should be v2 since we don't exclude anything.
        assert addon.find_latest_version(None, exclude=()).id == v2.id
@override_switch('beta-versions', active=True)
def test_find_latest_version_dont_exclude_anything_w_channel_w_beta(self):
addon = Addon.objects.get(pk=3615)
v1 = version_factory(addon=addon, version='1.0')
v1.update(created=self.days_ago(3))
assert addon.find_latest_version(
amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v1.id
v2 = version_factory(addon=addon, version='2.0',
file_kw={'status': amo.STATUS_DISABLED})
v2.update(created=self.days_ago(2))
v3 = version_factory(addon=addon, version='3.0beta',
file_kw={'status': amo.STATUS_BETA})
v2.update(created=self.days_ago(1))
version_factory(
addon=addon, version='4.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
# Should be v3 since we don't exclude anything, but do have a channel
# set to listed, and version 4.0 is unlisted.
assert addon.find_latest_version(
amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v3.id
    def test_find_latest_version_dont_exclude_anything_with_channel(self):
        """exclude=() still respects the channel argument."""
        addon = Addon.objects.get(pk=3615)

        v1 = version_factory(addon=addon, version='1.0')
        v1.update(created=self.days_ago(3))

        assert addon.find_latest_version(
            amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v1.id

        v2 = version_factory(addon=addon, version='2.0',
                             file_kw={'status': amo.STATUS_DISABLED})
        v2.update(created=self.days_ago(1))

        version_factory(
            addon=addon, version='4.0', channel=amo.RELEASE_CHANNEL_UNLISTED)

        # Should be v2 since we don't exclude anything, but do have a channel
        # set to listed, and version 4.0 is unlisted.
        assert addon.find_latest_version(
            amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v2.id
    def test_current_version_unsaved(self):
        """An unsaved _current_version is never exposed."""
        addon = Addon()
        addon._current_version = Version()
        assert addon.current_version is None
    def test_find_latest_version_unsaved(self):
        """find_latest_version() on an unsaved add-on returns None."""
        addon = Addon()
        assert addon.find_latest_version(None) is None
    @override_switch('beta-versions', active=True)
    def test_current_beta_version_with_beta(self):
        """With the beta-versions waffle on, current_beta_version is set."""
        addon = Addon.objects.get(pk=5299)
        assert addon.current_beta_version.id == 50000
    def test_current_beta_version(self):
        """With the beta-versions waffle off, current_beta_version is None."""
        addon = Addon.objects.get(pk=5299)
        assert addon.current_beta_version is None
    def test_transformer(self):
        """The manager's transformer pre-fetches current_version, so
        accessing it costs no extra queries."""
        addon = Addon.objects.get(pk=3615)
        # If the transformer works then we won't have any more queries.
        with self.assertNumQueries(0):
            assert addon.current_version
    def _delete(self, addon_id):
        """Soft-delete the given add-on and assert all side effects:
        row kept, status/slug/current_version reset, guid retained, one
        notification mail per deleted add-on, activity log written."""
        core.set_user(UserProfile.objects.last())
        addon_count = Addon.unfiltered.count()
        addon = Addon.objects.get(pk=addon_id)
        guid = addon.guid
        addon.delete('bye')
        assert addon_count == Addon.unfiltered.count()  # Soft deletion.
        assert addon.status == amo.STATUS_DELETED
        assert addon.slug is None
        assert addon.current_version is None
        assert addon.guid == guid  # We don't clear it anymore.
        deleted_count = Addon.unfiltered.filter(
            status=amo.STATUS_DELETED).count()
        assert len(mail.outbox) == deleted_count
        log = AddonLog.objects.order_by('-id').first().activity_log
        assert log.action == amo.LOG.DELETE_ADDON.id
        assert log.to_string() == (
            'Addon id {0} with GUID {1} has been deleted'.format(
                addon_id, guid))
    def test_delete(self):
        """Soft-delete works with non-ascii names and can be repeated
        without tripping unique constraints on nullified fields."""
        addon = Addon.unfiltered.get(pk=3615)
        addon.name = u'é'  # Make sure we don't have encoding issues.
        addon.save()
        self._delete(3615)

        # Delete another add-on, and make sure we don't have integrity errors
        # with unique constraints on fields that got nullified.
        self._delete(5299)
    def test_delete_persona(self):
        """Personas (which have no GUID) can be soft-deleted too."""
        addon = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
        assert addon.guid is None  # Personas don't have GUIDs.
        self._delete(addon.pk)
    def _delete_url(self):
        """Delete the search add-on and check its detail URL shows up in
        the notification email body."""
        a = Addon.objects.get(pk=4594)
        url = a.get_url_path()
        a.delete('bye')
        assert absolutify(url) in mail.outbox[0].body
def test_delete_url(self):
count = Addon.unfiltered.count()
self._delete_url()
assert count == Addon.unfiltered.count()
    def test_delete_reason(self):
        """The optional deletion reason (including non-ascii text) appears
        in the notification email."""
        reason = u'trêason'
        a = Addon.objects.get(pk=3615)
        a.name = u'é'
        assert len(mail.outbox) == 0
        a.delete(msg='bye', reason=reason)
        assert len(mail.outbox) == 1
        assert reason in mail.outbox[0].body
    def test_delete_incomplete_no_versions(self):
        """An incomplete add-on with no versions left is hard-deleted and
        no mail is sent."""
        count = Addon.unfiltered.count()
        addon = Addon.objects.get(pk=3615)
        addon.current_version.delete(hard=True)
        # The addon status will have been changed when we deleted the version,
        # and the instance should be the same, so we shouldn't need to reload.
        assert addon.status == amo.STATUS_NULL
        addon.delete(None)
        assert len(mail.outbox) == 0
        assert Addon.unfiltered.count() == (count - 1)
    def test_delete_incomplete_with_versions(self):
        """An incomplete add-on that still has versions is soft-deleted and
        the notification mail is sent."""
        count = Addon.unfiltered.count()
        a = Addon.objects.get(pk=3615)
        a.status = 0
        a.save()
        a.delete('oh looky here')
        assert len(mail.outbox) == 1
        assert count == Addon.unfiltered.count()
def test_delete_searchengine(self):
a = Addon.objects.get(pk=4594)
a.delete('bye')
assert len(mail.outbox) == 1
    def test_delete_disabled_addon_is_added_to_deniedguids(self):
        """Deleting a disabled add-on denylists its GUID."""
        addon = Addon.unfiltered.get(pk=3615)
        addon.update(status=amo.STATUS_DISABLED)
        self._delete(3615)
        assert DeniedGuid.objects.filter(guid=addon.guid).exists()
    def test_delete_disabled_addon_when_guid_is_already_in_deniedguids(self):
        """Deletion succeeds even when the GUID is already denylisted
        (no duplicate-key error)."""
        addon = Addon.unfiltered.get(pk=3615)
        DeniedGuid.objects.create(guid=addon.guid)
        addon.update(status=amo.STATUS_DISABLED)
        self._delete(3615)
        assert DeniedGuid.objects.filter(guid=addon.guid).exists()
def test_incompatible_latest_apps(self):
a = Addon.objects.get(pk=3615)
assert a.incompatible_latest_apps() == []
av = ApplicationsVersions.objects.get(pk=47881)
av.max = AppVersion.objects.get(pk=97) # Firefox 2.0
av.save()
a = Addon.objects.get(pk=3615)
assert a.incompatible_latest_apps() == [amo.FIREFOX]
# Check a search engine addon.
a = Addon.objects.get(pk=4594)
assert a.incompatible_latest_apps() == []
def test_incompatible_asterix(self):
av = ApplicationsVersions.objects.get(pk=47881)
av.max = AppVersion.objects.create(application=amo.FIREFOX.id,
version_int=version_int('5.*'),
version='5.*')
av.save()
a = Addon.objects.get(pk=3615)
assert a.incompatible_latest_apps() == []
    def test_icon_url(self):
        """icon_url carries a cache-busting suffix (icon hash, else modified
        timestamp) and falls back to a per-type default when icon_type is
        unset."""
        addon = Addon.objects.get(pk=3615)
        assert addon.icon_url.endswith('/3/3615-32.png?modified=1275037317')

        addon.icon_hash = 'somehash'
        assert addon.icon_url.endswith('/3/3615-32.png?modified=somehash')

        addon = Addon.objects.get(pk=6704)
        addon.icon_type = None
        assert addon.icon_url.endswith('/icons/default-theme.png'), (
            'No match for %s' % addon.icon_url)

        addon = Addon.objects.get(pk=3615)
        addon.icon_type = None

        assert addon.icon_url.endswith('icons/default-32.png')
def test_icon_url_default(self):
a = Addon.objects.get(pk=3615)
a.update(icon_type='')
default = 'icons/default-32.png'
assert a.icon_url.endswith(default)
assert a.get_icon_url(32).endswith(default)
assert a.get_icon_url(32, use_default=True).endswith(default)
assert a.get_icon_url(32, use_default=False) is None
def test_thumbnail_url(self):
a = Addon.objects.get(pk=4664)
a.thumbnail_url.index('/previews/thumbs/20/20397.png?modified=')
a = Addon.objects.get(pk=5299)
assert a.thumbnail_url.endswith('/icons/no-preview.png'), (
'No match for %s' % a.thumbnail_url)
    def test_is_unreviewed(self):
        """is_unreviewed() is False for public add-ons, True for nominated
        ones."""
        # public add-on
        a = Addon.objects.get(pk=3615)
        assert not a.is_unreviewed(), 'public add-on: is_unreviewed=False'

        a.status = amo.STATUS_NOMINATED
        assert a.is_unreviewed(), 'pending add-on: is_unreviewed=True'
    def test_is_public(self):
        """is_public() requires public status AND not being user-disabled."""
        # Public add-on.
        addon = Addon.objects.get(pk=3615)
        assert addon.status == amo.STATUS_PUBLIC
        assert addon.is_public()

        # Should be public by status, but since it's disabled add-on it's not.
        addon.disabled_by_user = True
        assert not addon.is_public()
    def test_is_restart_required(self):
        """is_restart_required mirrors the current version's file flag and
        is False without a current version."""
        addon = Addon.objects.get(pk=3615)
        file_ = addon.current_version.all_files[0]
        assert not file_.is_restart_required
        assert not addon.is_restart_required

        file_.update(is_restart_required=True)
        assert Addon.objects.get(pk=3615).is_restart_required

        addon.versions.all().delete()
        addon._current_version = None
        assert not addon.is_restart_required
def test_is_featured(self):
a = Addon.objects.get(pk=1003)
assert a.is_featured(amo.FIREFOX, 'en-US'), (
'globally featured add-on not recognized')
def test_get_featured_by_app(self):
addon = Addon.objects.get(pk=1003)
featured_coll = addon.collections.get().featuredcollection_set.get()
assert featured_coll.locale is None
# Get the applications this addon is featured for.
assert addon.get_featured_by_app() == {amo.FIREFOX.id: {None}}
featured_coll.update(locale='fr')
# Check the locale works.
assert addon.get_featured_by_app() == {amo.FIREFOX.id: {'fr'}}
pt_coll = collection_factory()
pt_coll.add_addon(addon)
FeaturedCollection.objects.create(collection=pt_coll,
application=amo.FIREFOX.id,
locale='pt-PT')
# Add another featured collection for the same application.
assert addon.get_featured_by_app() == {amo.FIREFOX.id: {'fr', 'pt-PT'}}
mobile_coll = collection_factory()
mobile_coll.add_addon(addon)
FeaturedCollection.objects.create(collection=mobile_coll,
application=amo.ANDROID.id,
locale='pt-PT')
# Add a featured collection for the a different application.
assert addon.get_featured_by_app() == {
amo.FIREFOX.id: {'fr', 'pt-PT'},
amo.ANDROID.id: {'pt-PT'}}
    def newlines_helper(self, string_before):
        """Store *string_before* as the add-on's privacy policy and return
        the sanitized/cleaned localized string, for the newline tests."""
        addon = Addon.objects.get(pk=3615)
        addon.privacy_policy = string_before
        addon.save()
        return addon.privacy_policy.localized_string_clean
def test_newlines_normal(self):
before = ("Paragraph one.\n"
"This should be on the very next line.\n\n"
"Should be two nl's before this line.\n\n\n"
"Should be three nl's before this line.\n\n\n\n"
"Should be four nl's before this line.")
after = before # Nothing special; this shouldn't change.
assert self.newlines_helper(before) == after
def test_newlines_ul(self):
before = ("<ul>\n\n"
"<li>No nl's between the ul and the li.</li>\n\n"
"<li>No nl's between li's.\n\n"
"But there should be two before this line.</li>\n\n"
"</ul>")
after = ("<ul>"
"<li>No nl's between the ul and the li.</li>"
"<li>No nl's between li's.\n\n"
"But there should be two before this line.</li>"
"</ul>")
assert self.newlines_helper(before) == after
def test_newlines_ul_tight(self):
before = ("There should be one nl between this and the ul.\n"
"<ul><li>test</li><li>test</li></ul>\n"
"There should be no nl's above this line.")
after = ("There should be one nl between this and the ul.\n"
"<ul><li>test</li><li>test</li></ul>"
"There should be no nl's above this line.")
assert self.newlines_helper(before) == after
def test_newlines_ul_loose(self):
before = ("There should be two nl's between this and the ul.\n\n"
"<ul><li>test</li><li>test</li></ul>\n\n"
"There should be one nl above this line.")
after = ("There should be two nl's between this and the ul.\n\n"
"<ul><li>test</li><li>test</li></ul>\n"
"There should be one nl above this line.")
assert self.newlines_helper(before) == after
def test_newlines_blockquote_tight(self):
before = ("There should be one nl below this.\n"
"<blockquote>Hi</blockquote>\n"
"There should be no nl's above this.")
after = ("There should be one nl below this.\n"
"<blockquote>Hi</blockquote>"
"There should be no nl's above this.")
assert self.newlines_helper(before) == after
def test_newlines_blockquote_loose(self):
before = ("There should be two nls below this.\n\n"
"<blockquote>Hi</blockquote>\n\n"
"There should be one nl above this.")
after = ("There should be two nls below this.\n\n"
"<blockquote>Hi</blockquote>\n"
"There should be one nl above this.")
assert self.newlines_helper(before) == after
def test_newlines_inline(self):
before = ("If we end a paragraph w/ a <b>non-block-level tag</b>\n\n"
"<b>The newlines</b> should be kept")
after = before # Should stay the same
assert self.newlines_helper(before) == after
def test_newlines_code_inline(self):
before = ("Code tags aren't blocks.\n\n"
"<code>alert(test);</code>\n\n"
"See?")
after = before # Should stay the same
assert self.newlines_helper(before) == after
def test_newlines_li_newlines(self):
before = ("<ul><li>\nxx</li></ul>")
after = ("<ul><li>xx</li></ul>")
assert self.newlines_helper(before) == after
before = ("<ul><li>xx\n</li></ul>")
after = ("<ul><li>xx</li></ul>")
assert self.newlines_helper(before) == after
before = ("<ul><li>xx\nxx</li></ul>")
after = ("<ul><li>xx\nxx</li></ul>")
assert self.newlines_helper(before) == after
before = ("<ul><li></li></ul>")
after = ("<ul><li></li></ul>")
assert self.newlines_helper(before) == after
# All together now
before = ("<ul><li>\nxx</li> <li>xx\n</li> <li>xx\nxx</li> "
"<li></li>\n</ul>")
after = ("<ul><li>xx</li> <li>xx</li> <li>xx\nxx</li> "
"<li></li></ul>")
assert self.newlines_helper(before) == after
def test_newlines_empty_tag(self):
before = ("This is a <b></b> test!")
after = before
assert self.newlines_helper(before) == after
def test_newlines_empty_tag_nested(self):
before = ("This is a <b><i></i></b> test!")
after = before
assert self.newlines_helper(before) == after
def test_newlines_empty_tag_block_nested(self):
b = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>\ntest.")
a = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>test.")
assert self.newlines_helper(b) == a
def test_newlines_empty_tag_block_nested_spaced(self):
before = ("Test.\n\n<blockquote>\n\n<ul>\n\n<li>"
"</li>\n\n</ul>\n\n</blockquote>\ntest.")
after = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>test.")
assert self.newlines_helper(before) == after
def test_newlines_li_newlines_inline(self):
before = ("<ul><li>\n<b>test\ntest\n\ntest</b>\n</li>"
"<li>Test <b>test</b> test.</li></ul>")
after = ("<ul><li><b>test\ntest\n\ntest</b></li>"
"<li>Test <b>test</b> test.</li></ul>")
assert self.newlines_helper(before) == after
def test_newlines_li_all_inline(self):
before = ("Test with <b>no newlines</b> and <code>block level "
"stuff</code> to see what happens.")
after = before # Should stay the same
assert self.newlines_helper(before) == after
def test_newlines_spaced_blocks(self):
before = ("<blockquote>\n\n<ul>\n\n<li>\n\ntest\n\n</li>\n\n"
"</ul>\n\n</blockquote>")
after = "<blockquote><ul><li>test</li></ul></blockquote>"
assert self.newlines_helper(before) == after
def test_newlines_spaced_inline(self):
before = "Line.\n\n<b>\nThis line is bold.\n</b>\n\nThis isn't."
after = before
assert self.newlines_helper(before) == after
def test_newlines_nested_inline(self):
before = "<b>\nThis line is bold.\n\n<i>This is also italic</i></b>"
after = before
assert self.newlines_helper(before) == after
def test_newlines_xss_script(self):
before = "<script>\n\nalert('test');\n</script>"
after = "<script>\n\nalert('test');\n</script>"
assert self.newlines_helper(before) == after
def test_newlines_xss_inline(self):
before = "<b onclick=\"alert('test');\">test</b>"
after = "<b>test</b>"
assert self.newlines_helper(before) == after
@patch(
'olympia.amo.templatetags.jinja_helpers.urlresolvers.get_outgoing_url')
def test_newlines_attribute_link_doublequote(self, mock_get_outgoing_url):
mock_get_outgoing_url.return_value = 'http://google.com'
before = '<a href="http://google.com">test</a>'
parsed = self.newlines_helper(before)
assert 'rel="nofollow"' in parsed
def test_newlines_attribute_singlequote(self):
before = "<abbr title='laugh out loud'>lol</abbr>"
after = '<abbr title="laugh out loud">lol</abbr>'
assert self.newlines_helper(before) == after
def test_newlines_attribute_doublequote(self):
before = '<abbr title="laugh out loud">lol</abbr>'
after = before
assert self.newlines_helper(before) == after
def test_newlines_attribute_nestedquotes_doublesingle(self):
before = '<abbr title="laugh \'out\' loud">lol</abbr>'
after = before
assert self.newlines_helper(before) == after
def test_newlines_attribute_nestedquotes_singledouble(self):
before = '<abbr title=\'laugh "out" loud\'>lol</abbr>'
after = before
assert self.newlines_helper(before) == after
def test_newlines_unclosed_b(self):
before = ("<b>test")
after = ("<b>test</b>")
assert self.newlines_helper(before) == after
def test_newlines_unclosed_b_wrapped(self):
before = ("This is a <b>test")
after = ("This is a <b>test</b>")
assert self.newlines_helper(before) == after
def test_newlines_unclosed_li(self):
before = ("<ul><li>test</ul>")
after = ("<ul><li>test</li></ul>")
assert self.newlines_helper(before) == after
def test_newlines_malformed_faketag(self):
before = "<madonna"
after = ""
assert self.newlines_helper(before) == after
def test_newlines_correct_faketag(self):
before = "<madonna>"
after = "<madonna>"
assert self.newlines_helper(before) == after
def test_newlines_malformed_tag(self):
before = "<strong"
after = ""
assert self.newlines_helper(before) == after
def test_newlines_malformed_faketag_surrounded(self):
before = "This is a <test of bleach"
after = 'This is a'
assert self.newlines_helper(before) == after
def test_newlines_malformed_tag_surrounded(self):
before = "This is a <strong of bleach"
after = "This is a"
assert self.newlines_helper(before) == after
def test_newlines_less_than(self):
before = "3 < 5"
after = "3 < 5"
assert self.newlines_helper(before) == after
def test_newlines_less_than_tight(self):
before = "abc 3<5 def"
after = "abc 3<5 def"
assert self.newlines_helper(before) == after
def test_app_categories(self):
def get_addon():
return Addon.objects.get(pk=3615)
# This add-on is already associated with three Firefox categories
# using fixtures: Bookmarks, Feeds, Social.
FIREFOX_EXT_CATS = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]
expected_firefox_cats = [
FIREFOX_EXT_CATS['bookmarks'],
FIREFOX_EXT_CATS['feeds-news-blogging'],
FIREFOX_EXT_CATS['social-communication']
]
addon = get_addon()
assert set(addon.all_categories) == set(expected_firefox_cats)
assert addon.app_categories == {amo.FIREFOX: expected_firefox_cats}
# Let's add a thunderbird category.
thunderbird_static_cat = (
CATEGORIES[amo.THUNDERBIRD.id][amo.ADDON_EXTENSION]['tags'])
tb_category = Category.from_static_category(thunderbird_static_cat)
tb_category.save()
AddonCategory.objects.create(addon=addon, category=tb_category)
# Reload the addon to get a fresh, uncached categories list.
addon = get_addon()
# Test that the thunderbird category was added correctly.
assert set(addon.all_categories) == set(
expected_firefox_cats + [thunderbird_static_cat])
assert set(addon.app_categories.keys()) == set(
[amo.FIREFOX, amo.THUNDERBIRD])
assert set(addon.app_categories[amo.FIREFOX]) == set(
expected_firefox_cats)
assert set(addon.app_categories[amo.THUNDERBIRD]) == set(
[thunderbird_static_cat])
def test_app_categories_ignore_unknown_cats(self):
def get_addon():
return Addon.objects.get(pk=3615)
# This add-on is already associated with three Firefox categories
# using fixtures: Bookmarks, Feeds, Social.
FIREFOX_EXT_CATS = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]
expected_firefox_cats = [
FIREFOX_EXT_CATS['bookmarks'],
FIREFOX_EXT_CATS['feeds-news-blogging'],
FIREFOX_EXT_CATS['social-communication']
]
addon = get_addon()
assert set(addon.all_categories) == set(expected_firefox_cats)
assert addon.app_categories == {amo.FIREFOX: expected_firefox_cats}
# Associate this add-on with a couple more categories, including
# one that does not exist in the constants.
unknown_cat = Category.objects.create(
application=amo.SUNBIRD.id, id=123456, type=amo.ADDON_EXTENSION,
name='Sunny D')
AddonCategory.objects.create(addon=addon, category=unknown_cat)
thunderbird_static_cat = (
CATEGORIES[amo.THUNDERBIRD.id][amo.ADDON_EXTENSION]['appearance'])
tb_category = Category.from_static_category(thunderbird_static_cat)
tb_category.save()
AddonCategory.objects.create(addon=addon, category=tb_category)
# Reload the addon to get a fresh, uncached categories list.
addon = get_addon()
# The sunbird category should not be present since it does not match
# an existing static category, thunderbird one should have been added.
assert set(addon.all_categories) == set(
expected_firefox_cats + [thunderbird_static_cat])
assert set(addon.app_categories.keys()) == set(
[amo.FIREFOX, amo.THUNDERBIRD])
assert set(addon.app_categories[amo.FIREFOX]) == set(
expected_firefox_cats)
assert set(addon.app_categories[amo.THUNDERBIRD]) == set(
[thunderbird_static_cat])
def test_review_replies(self):
addon = Addon.objects.get(id=3615)
u = UserProfile.objects.get(pk=999)
version = addon.current_version
new_rating = Rating(version=version, user=u, rating=2, body='hello',
addon=addon)
new_rating.save()
new_reply = Rating(version=version, user=addon.authors.all()[0],
addon=addon, reply_to=new_rating,
rating=2, body='my reply')
new_reply.save()
review_list = [rating.pk for rating in addon.ratings]
assert new_rating.pk in review_list, (
'Original review must show up in review list.')
assert new_reply.pk not in review_list, (
'Developer reply must not show up in review list.')
    def test_show_beta(self):
        """show_beta is False when there is no current beta version."""
        # Addon.current_beta_version will be empty, so show_beta is False.
        a = Addon(status=amo.STATUS_PUBLIC)
        assert not a.show_beta
    @patch('olympia.addons.models.Addon.current_beta_version')
    def test_show_beta_with_beta_version(self, beta_mock):
        """show_beta needs both a (truthy) beta version and public status."""
        # NOTE(review): patching the attribute makes current_beta_version a
        # truthy MagicMock; the return_value assignment below is presumably
        # redundant for that — confirm against Addon.show_beta.
        beta_mock.return_value = object()
        # Fake current_beta_version to return something truthy.
        a = Addon(status=amo.STATUS_PUBLIC)
        assert a.show_beta

        # We have a beta version but status has to be public.
        a.status = amo.STATUS_NOMINATED
        assert not a.show_beta
def test_update_logs(self):
addon = Addon.objects.get(id=3615)
core.set_user(UserProfile.objects.all()[0])
addon.versions.all().delete()
entries = ActivityLog.objects.all()
assert entries[0].action == amo.LOG.CHANGE_STATUS.id
def setup_files(self, status):
addon = Addon.objects.create(type=1)
version = Version.objects.create(addon=addon)
File.objects.create(status=status, version=version)
return addon, version
def test_no_change_disabled_user(self):
addon, version = self.setup_files(amo.STATUS_AWAITING_REVIEW)
addon.update(status=amo.STATUS_PUBLIC)
addon.update(disabled_by_user=True)
version.save()
assert addon.status == amo.STATUS_PUBLIC
assert addon.is_disabled
def test_no_change_disabled(self):
addon = Addon.objects.create(type=1)
version = Version.objects.create(addon=addon)
addon.update(status=amo.STATUS_DISABLED)
version.save()
assert addon.status == amo.STATUS_DISABLED
assert addon.is_disabled
def test_no_change_deleted(self):
addon = Addon.objects.create(type=1)
version = Version.objects.create(addon=addon)
addon.update(status=amo.STATUS_DELETED)
version.save()
assert addon.status == amo.STATUS_DELETED
assert addon.is_deleted
def test_removing_public(self):
addon, version = self.setup_files(amo.STATUS_AWAITING_REVIEW)
addon.update(status=amo.STATUS_PUBLIC)
version.save()
assert addon.status == amo.STATUS_NOMINATED
def test_can_request_review_no_files(self):
addon = Addon.objects.get(pk=3615)
addon.versions.all()[0].files.all().delete()
assert addon.can_request_review() is False
def test_can_request_review_rejected(self):
addon = Addon.objects.get(pk=3615)
latest_version = addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
latest_version.files.update(status=amo.STATUS_DISABLED)
assert addon.can_request_review() is False
    def check_can_request_review(self, status, expected, extra_update_kw=None):
        """Set the add-on's status (plus any extra fields) and assert that
        can_request_review() returns *expected*."""
        if extra_update_kw is None:
            extra_update_kw = {}
        addon = Addon.objects.get(pk=3615)
        changes = {'status': status, 'disabled_by_user': False}
        changes.update(**extra_update_kw)
        addon.update(**changes)
        assert addon.can_request_review() == expected
def test_can_request_review_null(self):
self.check_can_request_review(amo.STATUS_NULL, True)
def test_can_request_review_null_disabled(self):
self.check_can_request_review(
amo.STATUS_NULL, False, extra_update_kw={'disabled_by_user': True})
def test_can_request_review_nominated(self):
self.check_can_request_review(amo.STATUS_NOMINATED, False)
def test_can_request_review_public(self):
self.check_can_request_review(amo.STATUS_PUBLIC, False)
def test_can_request_review_disabled(self):
self.check_can_request_review(amo.STATUS_DISABLED, False)
def test_can_request_review_deleted(self):
self.check_can_request_review(amo.STATUS_DELETED, False)
    def test_none_homepage(self):
        """Creating an add-on with homepage=None must not raise."""
        # There was an odd error when a translation was set to None.
        Addon.objects.create(homepage=None, type=amo.ADDON_EXTENSION)
    def test_slug_isdigit(self):
        """All-numeric slugs get a '~' appended so they can't be mistaken
        for primary keys in URLs."""
        a = Addon.objects.create(type=1, name='xx', slug='123')
        assert a.slug == '123~'

        a.slug = '44'
        a.save()
        assert a.slug == '44~'
    def test_slug_isdenied(self):
        """Denied slugs are mangled with a trailing '~' on save."""
        # When an addon is uploaded, it doesn't use the form validation,
        # so we'll just mangle the slug if its denied.
        a = Addon.objects.create(type=1, name='xx', slug='validate')
        assert a.slug == 'validate~'

        a.slug = 'validate'
        a.save()
        assert a.slug == 'validate~'
    def delete(self):
        """Helper: delete add-on 3615 and check exactly one email went out."""
        addon = Addon.objects.get(id=3615)
        assert len(mail.outbox) == 0
        addon.delete('so long and thanks for all the fish')
        assert len(mail.outbox) == 1
def test_delete_to(self):
self.delete()
assert mail.outbox[0].to == [settings.FLIGTAR]
def test_delete_by(self):
try:
user = Addon.objects.get(id=3615).authors.all()[0]
core.set_user(user)
self.delete()
assert 'DELETED BY: 55021' in mail.outbox[0].body
finally:
core.set_user(None)
def test_delete_by_unknown(self):
self.delete()
assert 'DELETED BY: Unknown' in mail.outbox[0].body
def test_delete_mail_not_localized(self):
with self.activate('pl'):
self.delete()
admin_mail = mail.outbox[0]
# Make sure the type (EXTENSION) isn't localized.
assert 'Deleting EXTENSION a3615 (3615)' in admin_mail.subject
assert 'The following EXTENSION was deleted' in admin_mail.body
def test_view_source(self):
# view_source should default to True.
a = Addon.objects.create(type=1)
assert a.view_source
@patch('olympia.files.models.File.hide_disabled_file')
def test_admin_disabled_file_hidden(self, hide_mock):
a = Addon.objects.get(id=3615)
a.status = amo.STATUS_PUBLIC
a.save()
assert not hide_mock.called
a.status = amo.STATUS_DISABLED
a.save()
assert hide_mock.called
@patch('olympia.files.models.File.hide_disabled_file')
def test_user_disabled_file_hidden(self, hide_mock):
a = Addon.objects.get(id=3615)
a.disabled_by_user = False
a.save()
assert not hide_mock.called
a.disabled_by_user = True
a.save()
assert hide_mock.called
def test_category_transform(self):
addon = Addon.objects.get(id=3615)
cats = addon.categories.filter(application=amo.FIREFOX.id)
names = [c.name for c in cats]
assert addon.get_category(amo.FIREFOX.id).name in names
def test_binary_property(self):
addon = Addon.objects.get(id=3615)
file = addon.current_version.files.all()[0]
file.update(binary=True)
assert addon.binary
def test_binary_components_property(self):
addon = Addon.objects.get(id=3615)
file = addon.current_version.files.all()[0]
file.update(binary_components=True)
assert addon.binary_components
def test_listed_has_complete_metadata_no_categories(self):
addon = Addon.objects.get(id=3615)
assert addon.has_complete_metadata() # Confirm complete already.
AddonCategory.objects.filter(addon=addon).delete()
addon = Addon.objects.get(id=3615)
assert not addon.has_complete_metadata()
assert addon.has_complete_metadata(has_listed_versions=False)
def test_listed_has_complete_metadata_no_summary(self):
addon = Addon.objects.get(id=3615)
assert addon.has_complete_metadata() # Confirm complete already.
delete_translation(addon, 'summary')
addon = Addon.objects.get(id=3615)
assert not addon.has_complete_metadata()
assert addon.has_complete_metadata(
has_listed_versions=False)
    def test_listed_has_complete_metadata_no_license(self):
        """Removing the current version's license makes metadata incomplete."""
        addon = Addon.objects.get(id=3615)
        assert addon.has_complete_metadata()  # Confirm complete already.

        addon.current_version.update(license=None)
        addon = Addon.objects.get(id=3615)  # Reload to drop cached version.
        assert not addon.has_complete_metadata()
        # A license is only required when there are listed versions.
        assert addon.has_complete_metadata(
            has_listed_versions=False)
    def test_unlisted_has_complete_metadata(self):
        """Unlisted-only add-ons count as complete even with no metadata."""
        addon = Addon.objects.get(id=3615)
        self.make_addon_unlisted(addon)
        assert addon.has_complete_metadata()  # Confirm complete already.

        # Clear everything
        addon.versions.update(license=None)
        AddonCategory.objects.filter(addon=addon).delete()
        delete_translation(addon, 'summary')

        addon = Addon.objects.get(id=3615)  # Reload after the bulk changes.
        assert addon.has_complete_metadata()  # Still complete
        # But it would be incomplete if it were treated as listed.
        assert not addon.has_complete_metadata(has_listed_versions=True)
    def test_can_review(self):
        """Anyone except the add-on's own authors may write a review."""
        user = AnonymousUser()
        addon = Addon.objects.get(id=3615)
        assert addon.can_review(user)

        # An author of the add-on cannot review it.
        user = addon.addonuser_set.all()[0].user
        assert not addon.can_review(user)

        # An unrelated user can.
        user = UserProfile.objects.get(pk=2519)
        assert addon.can_review(user)
    def test_has_author(self):
        """has_author() is True only for users in the add-on's author set."""
        addon = Addon.objects.get(id=3615)
        user = addon.addonuser_set.all()[0].user
        assert addon.has_author(user)

        user = UserProfile.objects.get(pk=2519)
        assert not addon.has_author(user)
    def test_auto_approval_disabled_property(self):
        """auto_approval_disabled proxies the AddonReviewerFlags value."""
        addon = Addon.objects.get(pk=3615)
        # No flags: None
        assert addon.auto_approval_disabled is None

        # Flag present, value is False (default): False.
        flags = AddonReviewerFlags.objects.create(addon=addon)
        assert flags.auto_approval_disabled is False
        assert addon.auto_approval_disabled is False

        # Flag present, value is True: True.
        flags.update(auto_approval_disabled=True)
        assert addon.auto_approval_disabled is True
    def test_needs_admin_code_review_property(self):
        """needs_admin_code_review proxies the AddonReviewerFlags value."""
        addon = Addon.objects.get(pk=3615)
        # No flags: None
        assert addon.needs_admin_code_review is None

        # Flag present, value is False (default): False.
        flags = AddonReviewerFlags.objects.create(addon=addon)
        assert flags.needs_admin_code_review is False
        assert addon.needs_admin_code_review is False

        # Flag present, value is True: True.
        flags.update(needs_admin_code_review=True)
        assert addon.needs_admin_code_review is True
    def test_needs_admin_content_review_property(self):
        """needs_admin_content_review proxies the AddonReviewerFlags value."""
        addon = Addon.objects.get(pk=3615)
        # No flags: None
        assert addon.needs_admin_content_review is None

        # Flag present, value is False (default): False.
        flags = AddonReviewerFlags.objects.create(addon=addon)
        assert flags.needs_admin_content_review is False
        assert addon.needs_admin_content_review is False

        # Flag present, value is True: True.
        flags.update(needs_admin_content_review=True)
        assert addon.needs_admin_content_review is True
    def test_pending_info_request_property(self):
        """pending_info_request proxies the AddonReviewerFlags datetime."""
        addon = Addon.objects.get(pk=3615)
        # No flags: None
        assert addon.pending_info_request is None

        # Flag present, value is None (default): None.
        flags = AddonReviewerFlags.objects.create(addon=addon)
        assert flags.pending_info_request is None
        assert addon.pending_info_request is None

        # Flag present, value is a date.
        in_the_past = self.days_ago(1)
        flags.update(pending_info_request=in_the_past)
        assert addon.pending_info_request == in_the_past
    def test_expired_info_request_property(self):
        """expired_info_request is True only for past pending_info_request."""
        addon = Addon.objects.get(pk=3615)
        # No flags: None
        assert addon.expired_info_request is None

        # Flag present, value is None (default): None.
        flags = AddonReviewerFlags.objects.create(addon=addon)
        assert flags.pending_info_request is None
        assert addon.expired_info_request is None

        # Flag present, value is a date in the past.
        in_the_past = self.days_ago(1)
        flags.update(pending_info_request=in_the_past)
        assert addon.expired_info_request

        # Flag present, value is a date in the future.
        in_the_future = datetime.now() + timedelta(days=2)
        flags.update(pending_info_request=in_the_future)
        assert not addon.expired_info_request
class TestShouldRedirectToSubmitFlow(TestCase):
    """Tests for Addon.should_redirect_to_submit_flow().

    The redirect should only happen for STATUS_NULL add-ons that have
    incomplete metadata and at least one non-disabled version.
    """

    fixtures = ['base/addon_3615']

    def test_no_versions_doesnt_redirect(self):
        """An add-on with no versions at all must not redirect."""
        addon = Addon.objects.get(id=3615)
        assert not addon.should_redirect_to_submit_flow()

        # Now break addon.
        delete_translation(addon, 'summary')
        addon = Addon.objects.get(id=3615)  # Reload to drop cached summary.
        assert not addon.has_complete_metadata()
        addon.update(status=amo.STATUS_NULL)
        assert addon.should_redirect_to_submit_flow()

        # Deleting every version removes the redirect again.
        for ver in addon.versions.all():
            ver.delete()
        assert not addon.should_redirect_to_submit_flow()

    def test_disabled_versions_doesnt_redirect(self):
        """An add-on whose files are all disabled must not redirect."""
        addon = Addon.objects.get(id=3615)
        assert not addon.should_redirect_to_submit_flow()

        # Now break addon.
        delete_translation(addon, 'summary')
        addon = Addon.objects.get(id=3615)  # Reload to drop cached summary.
        assert not addon.has_complete_metadata()
        addon.update(status=amo.STATUS_NULL)
        assert addon.should_redirect_to_submit_flow()

        # Disabling every file removes the redirect again.
        for ver in addon.versions.all():
            for file_ in ver.all_files:
                file_.update(status=amo.STATUS_DISABLED)
        assert not addon.should_redirect_to_submit_flow()

    def test_only_null_redirects(self):
        """Only STATUS_NULL triggers the redirect, no other status does."""
        addon = Addon.objects.get(id=3615)
        assert not addon.should_redirect_to_submit_flow()

        # Now break addon.
        delete_translation(addon, 'summary')
        addon = Addon.objects.get(id=3615)  # Reload to drop cached summary.
        assert not addon.has_complete_metadata()

        # Every status except NULL must not redirect.
        status_exc_null = dict(amo.STATUS_CHOICES_ADDON)
        status_exc_null.pop(amo.STATUS_NULL)
        for status in status_exc_null:
            assert not addon.should_redirect_to_submit_flow()
        addon.update(status=amo.STATUS_NULL)
        assert addon.should_redirect_to_submit_flow()
class TestHasListedAndUnlistedVersions(TestCase):
    """Tests for Addon.has_listed_versions() / has_unlisted_versions()."""

    def setUp(self):
        # Start from an add-on with no versions at all.
        self.addon = addon_factory()
        latest_version = self.addon.find_latest_version(
            channel=amo.RELEASE_CHANNEL_LISTED)
        latest_version.delete(hard=True)
        assert self.addon.versions.count() == 0

    def test_no_versions(self):
        """With no versions, both checks are False."""
        assert not self.addon.has_listed_versions()
        assert not self.addon.has_unlisted_versions()

    def test_listed_version(self):
        """A single listed version flips only has_listed_versions()."""
        version_factory(channel=amo.RELEASE_CHANNEL_LISTED, addon=self.addon)
        assert self.addon.has_listed_versions()
        assert not self.addon.has_unlisted_versions()

    def test_unlisted_version(self):
        """A single unlisted version flips only has_unlisted_versions()."""
        version_factory(channel=amo.RELEASE_CHANNEL_UNLISTED, addon=self.addon)
        assert not self.addon.has_listed_versions()
        assert self.addon.has_unlisted_versions()

    def test_unlisted_and_listed_versions(self):
        """With one of each, both checks are True."""
        version_factory(channel=amo.RELEASE_CHANNEL_LISTED, addon=self.addon)
        version_factory(channel=amo.RELEASE_CHANNEL_UNLISTED, addon=self.addon)
        assert self.addon.has_listed_versions()
        assert self.addon.has_unlisted_versions()
class TestAddonNomination(TestCase):
    """Tests for how version nomination dates are set, inherited and reset."""

    fixtures = ['base/addon_3615']

    def test_set_nomination(self):
        """Moving an add-on to NOMINATED stamps its latest version."""
        a = Addon.objects.get(id=3615)
        a.update(status=amo.STATUS_NULL)
        a.versions.latest().update(nomination=None)
        a.update(status=amo.STATUS_NOMINATED)
        assert a.versions.latest().nomination

    def test_new_version_inherits_nomination(self):
        """A new version of a nominated add-on keeps the old nomination date."""
        a = Addon.objects.get(id=3615)
        ver = 10
        a.update(status=amo.STATUS_NOMINATED)
        old_ver = a.versions.latest()
        v = Version.objects.create(addon=a, version=str(ver))
        assert v.nomination == old_ver.nomination
        ver += 1

    @override_switch('beta-versions', active=True)
    def test_beta_version_does_not_inherit_nomination(self):
        """Versions with beta files never inherit a nomination date."""
        a = Addon.objects.get(id=3615)
        a.update(status=amo.STATUS_NULL)
        v = Version.objects.create(addon=a, version='1.0')
        v.nomination = None
        v.save()
        a.update(status=amo.STATUS_NOMINATED)
        File.objects.create(version=v, status=amo.STATUS_BETA,
                            filename='foobar.xpi')
        v.version = '1.1'
        v.save()
        assert v.nomination is None

    def test_lone_version_does_not_inherit_nomination(self):
        """The very first version has no previous nomination to inherit."""
        a = Addon.objects.get(id=3615)
        Version.objects.all().delete()
        v = Version.objects.create(addon=a, version='1.0')
        assert v.nomination is None

    def test_reviewed_addon_does_not_inherit_nomination(self):
        """Versions of non-nominated add-ons get no nomination date."""
        a = Addon.objects.get(id=3615)
        ver = 10
        for st in (amo.STATUS_PUBLIC, amo.STATUS_BETA, amo.STATUS_NULL):
            a.update(status=st)
            v = Version.objects.create(addon=a, version=str(ver))
            assert v.nomination is None
            ver += 1

    def test_nomination_no_version(self):
        """Nominating an add-on with no versions must not raise."""
        # Check that the on_change method still works if there are no versions.
        a = Addon.objects.get(id=3615)
        a.versions.all().delete()
        a.update(status=amo.STATUS_NOMINATED)

    def test_nomination_already_set(self):
        """Re-nominating does not overwrite an existing nomination date."""
        addon = Addon.objects.get(id=3615)
        earlier = datetime.today() - timedelta(days=2)
        addon.versions.latest().update(nomination=earlier)
        addon.update(status=amo.STATUS_NOMINATED)
        assert addon.versions.latest().nomination.date() == earlier.date()

    def setup_nomination(self, addon_status=amo.STATUS_NOMINATED,
                         file_status=amo.STATUS_AWAITING_REVIEW):
        """Create an add-on nominated a day ago; return (addon, nomination)."""
        addon = Addon.objects.create()
        version = Version.objects.create(addon=addon)
        File.objects.create(status=file_status, version=version)
        # Cheating date to make sure we don't have a date on the same second
        # the code we test is running.
        past = self.days_ago(1)
        version.update(nomination=past, created=past, modified=past)
        addon.update(status=addon_status)
        nomination = addon.versions.latest().nomination
        assert nomination
        return addon, nomination

    def test_new_version_of_under_review_addon_does_not_reset_nomination(self):
        """Uploading to a still-under-review add-on keeps the nomination."""
        addon, nomination = self.setup_nomination()
        version = Version.objects.create(addon=addon, version='0.2')
        File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
        assert addon.versions.latest().nomination == nomination

    def test_nomination_not_reset_if_adding_new_versions_and_files(self):
        """Neither public nor unreviewed uploads reset the nomination."""
        addon, nomination = self.setup_nomination()
        # Switching it to a public status.
        version = Version.objects.create(addon=addon, version="0.1")
        File.objects.create(status=amo.STATUS_PUBLIC, version=version)
        assert addon.versions.latest().nomination == nomination
        # Adding a new unreviewed version.
        version = Version.objects.create(addon=addon, version="0.2")
        File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
        assert addon.versions.latest().nomination == nomination
        # Adding a new unreviewed version.
        version = Version.objects.create(addon=addon, version="0.3")
        File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
        assert addon.versions.latest().nomination == nomination

    def check_nomination_reset_with_new_version(self, addon, nomination):
        """Assert that a new unreviewed version gets a fresh nomination."""
        version = Version.objects.create(addon=addon, version="0.2")
        assert version.nomination is None
        File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
        assert addon.versions.latest().nomination != nomination

    def test_new_version_of_approved_addon_should_reset_nomination(self):
        """Uploading to an already-approved add-on resets the nomination."""
        addon, nomination = self.setup_nomination(
            addon_status=amo.STATUS_PUBLIC, file_status=amo.STATUS_PUBLIC)
        # Now create a new version with an attached file, and update status.
        self.check_nomination_reset_with_new_version(addon, nomination)
class TestThemeDelete(TestCase):
    """Tests that deleting a persona/theme bumps its modified timestamp."""

    def setUp(self):
        super(TestThemeDelete, self).setUp()
        self.addon = addon_factory(type=amo.ADDON_PERSONA)

        # Taking the creation and modified time back 1 day
        self.addon.update(created=self.days_ago(1), modified=self.days_ago(1))

    def test_remove_theme_update_m_time(self):
        """delete() must update the theme's modified time."""
        m_time_before = self.addon.modified
        self.addon.delete('enough', 'no reason at all')
        m_time_after = self.addon.modified

        assert m_time_before != m_time_after
class TestAddonDelete(TestCase):
    """Tests for hard vs. soft deletion of add-ons and their related rows."""

    def test_cascades(self):
        """Hard-deleting an add-on with no versions cascades to related rows."""
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)

        AddonCategory.objects.create(
            addon=addon,
            category=Category.objects.create(type=amo.ADDON_EXTENSION))
        AddonDependency.objects.create(
            addon=addon, dependent_addon=addon)
        AddonUser.objects.create(
            addon=addon, user=UserProfile.objects.create())
        AppSupport.objects.create(addon=addon, app=1)
        CompatOverride.objects.create(addon=addon)
        FrozenAddon.objects.create(addon=addon)
        Persona.objects.create(addon=addon, persona_id=0)
        Preview.objects.create(addon=addon)

        AddonLog.objects.create(
            addon=addon, activity_log=ActivityLog.objects.create(action=0))
        RssKey.objects.create(addon=addon)

        # This should not throw any FK errors if all the cascades work.
        addon.delete()
        # Make sure it was actually a hard delete.
        assert not Addon.unfiltered.filter(pk=addon.pk).exists()

    def test_review_delete(self):
        """Soft-deleting an add-on removes its ratings and their flags."""
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
                                     status=amo.STATUS_PUBLIC)

        rating = Rating.objects.create(addon=addon, rating=1, body='foo',
                                       user=UserProfile.objects.create())

        # NOTE(review): this flag is never saved, so flag.pk is None and the
        # final assertion below is vacuous — consider objects.create() instead.
        flag = RatingFlag(rating=rating)

        addon.delete()

        # Soft delete: the add-on row remains in the unfiltered manager.
        assert Addon.unfiltered.filter(pk=addon.pk).exists()
        assert not Rating.objects.filter(pk=rating.pk).exists()
        assert not RatingFlag.objects.filter(pk=flag.pk).exists()

    def test_delete_with_deleted_versions(self):
        """Deleting succeeds even when versions were already deleted."""
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        version = Version.objects.create(addon=addon, version="1.0")
        version.delete()
        addon.delete()
        assert Addon.unfiltered.filter(pk=addon.pk).exists()
class TestAddonFeatureCompatibility(TestCase):
    """Tests for the Addon.feature_compatibility accessor."""

    fixtures = ['base/addon_3615']

    def test_feature_compatibility_not_present(self):
        """Without a DB row, an unsaved (pk-less) object is returned."""
        addon = Addon.objects.get(pk=3615)
        assert addon.feature_compatibility
        assert not addon.feature_compatibility.pk

    def test_feature_compatibility_present(self):
        """With a DB row, the saved object (with a pk) is returned."""
        addon = Addon.objects.get(pk=3615)
        AddonFeatureCompatibility.objects.create(addon=addon)
        assert addon.feature_compatibility
        assert addon.feature_compatibility.pk
class TestUpdateStatus(TestCase):
    """Tests for how Addon.update_status() reacts to version/file changes."""

    def test_no_file_ends_with_NULL(self):
        """A nominated add-on whose version has no files falls back to NULL."""
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        addon.status = amo.STATUS_NOMINATED
        addon.save()
        assert Addon.objects.no_cache().get(pk=addon.pk).status == (
            amo.STATUS_NOMINATED)
        # Creating a file-less version triggers the status recalculation.
        Version.objects.create(addon=addon)
        assert Addon.objects.no_cache().get(pk=addon.pk).status == (
            amo.STATUS_NULL)

    def test_no_valid_file_ends_with_NULL(self):
        """Disabling the only file drops the add-on back to NULL."""
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        version = Version.objects.create(addon=addon)
        f = File.objects.create(status=amo.STATUS_AWAITING_REVIEW,
                                version=version)
        addon.status = amo.STATUS_NOMINATED
        addon.save()
        assert Addon.objects.no_cache().get(pk=addon.pk).status == (
            amo.STATUS_NOMINATED)
        f.status = amo.STATUS_DISABLED
        f.save()
        assert Addon.objects.no_cache().get(pk=addon.pk).status == (
            amo.STATUS_NULL)

    def test_unlisted_versions_ignored(self):
        """Only listed versions count towards the add-on status."""
        addon = addon_factory(status=amo.STATUS_PUBLIC)
        addon.update_status()
        assert Addon.objects.no_cache().get(pk=addon.pk).status == (
            amo.STATUS_PUBLIC)

        addon.current_version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
        # update_status will have been called via versions.models.update_status
        assert Addon.objects.no_cache().get(pk=addon.pk).status == (
            amo.STATUS_NULL)  # No listed versions so now NULL
class TestGetVersion(TestCase):
    """Tests for Addon.find_latest_public_listed_version()."""

    fixtures = ['base/addon_3615', ]

    def setUp(self):
        super(TestGetVersion, self).setUp()
        self.addon = Addon.objects.get(id=3615)
        self.version = self.addon.current_version

    def test_public_new_public_version(self):
        """A newer public listed version becomes the latest."""
        new_version = version_factory(
            addon=self.addon, file_kw={'status': amo.STATUS_PUBLIC})
        assert self.addon.find_latest_public_listed_version() == new_version

    def test_public_new_unreviewed_version(self):
        """An unreviewed upload does not displace the current version."""
        version_factory(
            addon=self.addon, file_kw={'status': amo.STATUS_AWAITING_REVIEW})
        assert self.addon.find_latest_public_listed_version() == self.version

    def test_should_promote_previous_valid_version_if_latest_is_disabled(self):
        """A disabled upload leaves the previous public version in place."""
        version_factory(
            addon=self.addon, file_kw={'status': amo.STATUS_DISABLED})
        assert self.addon.find_latest_public_listed_version() == self.version

    def test_should_be_listed(self):
        """Unlisted versions are never returned, even when public."""
        new_version = version_factory(
            addon=self.addon,
            channel=amo.RELEASE_CHANNEL_UNLISTED,
            file_kw={'status': amo.STATUS_PUBLIC})
        assert new_version != self.version
        # Since the new version is unlisted, find_latest_public_listed_version
        # should still find the current one.
        assert self.addon.find_latest_public_listed_version() == self.version
class TestAddonGetURLPath(TestCase):
    """Tests for Addon.get_url_path()."""

    def test_get_url_path(self):
        """A listed add-on gets a localized detail-page path."""
        listed = addon_factory(slug='woo')
        path = listed.get_url_path()
        assert path == '/en-US/firefox/addon/woo/'

    def test_get_url_path_more(self):
        """more=True appends the /more suffix to the detail path."""
        listed = addon_factory(slug='yeah')
        path = listed.get_url_path(more=True)
        assert path == '/en-US/firefox/addon/yeah/more'

    def test_unlisted_addon_get_url_path(self):
        """Unlisted-only add-ons have no public URL path."""
        unlisted = addon_factory(
            slug='woo', version_kw={'channel': amo.RELEASE_CHANNEL_UNLISTED})
        assert unlisted.get_url_path() == ''
class TestAddonModelsFeatured(TestCase):
    """Tests for Addon.featured_random()."""

    fixtures = ['base/appversion', 'base/users',
                'addons/featured', 'bandwagon/featured_collections',
                'base/addon_3615', 'base/collections', 'base/featured']

    def setUp(self):
        super(TestAddonModelsFeatured, self).setUp()
        # Addon._featured keeps an in-process cache we need to clear.
        if hasattr(Addon, '_featured'):
            del Addon._featured

    def _test_featured_random(self):
        """Shared assertions for featured ids per app/locale combination."""
        f = Addon.featured_random(amo.FIREFOX, 'en-US')
        assert sorted(f) == [1001, 1003, 2464, 3481, 7661, 15679]
        f = Addon.featured_random(amo.FIREFOX, 'fr')
        assert sorted(f) == [1001, 1003, 2464, 7661, 15679]
        # Thunderbird has no featured add-ons in the fixtures.
        f = Addon.featured_random(amo.THUNDERBIRD, 'en-US')
        assert f == []

    def test_featured_random(self):
        self._test_featured_random()
class TestBackupVersion(TestCase):
    """Tests for Addon.update_version() and the current_version fallback."""

    fixtures = ['addons/update', 'base/appversion']

    def setUp(self):
        super(TestBackupVersion, self).setUp()
        # Pk of the 1.2.0 version in the fixtures.
        self.version_1_2_0 = 105387
        self.addon = Addon.objects.get(pk=1865)
        core.set_user(None)

    def setup_new_version(self):
        """Bump the min app version of every version >= 1.2.0 to 4.0b1."""
        for version in Version.objects.filter(pk__gte=self.version_1_2_0):
            appversion = version.apps.all()[0]
            appversion.min = AppVersion.objects.get(version='4.0b1')
            appversion.save()

    def test_no_current_version(self):
        """With no versions left, current_version is None."""
        # Cleanup fixture version and pretend there are no versions.
        for v in Version.objects.all():
            v.delete()
        self.addon.update(_current_version=None)
        assert self.addon.current_version is None

    def test_current_version_listed_only(self):
        """Unlisting the current version promotes the next listed one."""
        version = self.addon.current_version
        version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
        # The call above should have triggerred update_version().
        assert self.addon.current_version != version
        # new current_version should be version 1.2.1, since 1.2.2 is unlisted.
        assert self.addon.current_version == Version.objects.get(pk=112396)

    def test_firefox_versions(self):
        """update_version() keeps compatible app version info intact."""
        self.setup_new_version()
        self.addon.update_version()
        current = self.addon.current_version.compatible_apps[amo.FIREFOX]
        assert current.max.version == '4.0b8pre'
        assert current.min.version == '3.0.12'

    def test_version_signals(self):
        """Saving a version triggers the add-on's current_version update."""
        self.addon.update(_current_version=None)
        self.setup_new_version()
        version = self.addon.versions.all()[0]
        assert not self.addon.current_version
        version.save()
        assert Addon.objects.get(pk=1865).current_version

    def test_update_version_theme(self):
        """Personas keep their versions; missing current gets backfilled."""
        # Test versions do not get deleted when calling with theme.
        self.addon.update(type=amo.ADDON_PERSONA)
        assert not self.addon.update_version()
        assert self.addon._current_version

        # Test latest version copied to current version if no current version.
        self.addon.update(_current_version=None, _signal=False)
        assert self.addon.update_version()
        assert self.addon._current_version == (
            self.addon.find_latest_version(None))
class TestCategoryModel(TestCase):
    """Tests for the Category model's URL and name resolution."""

    def test_category_url(self):
        """Every categorized add-on type yields a usable category URL."""
        for t in amo.ADDON_TYPE.keys():
            if t == amo.ADDON_DICT:
                # NOTE(review): comment says "Language packs" but the check
                # skips ADDON_DICT (dictionaries) — confirm intent.
                continue  # Language packs don't have categories.
            cat = Category(type=t, slug='omg')
            assert cat.get_url_path()

    @pytest.mark.needs_locales_compilation
    def test_name_from_constants(self):
        """Names for known category slugs come from translated constants."""
        category = Category(
            type=amo.ADDON_EXTENSION, application=amo.FIREFOX.id,
            slug='alerts-updates')
        assert category.name == u'Alerts & Updates'
        with translation.override('fr'):
            assert category.name == u'Alertes et mises à jour'

    def test_name_fallback_to_db(self):
        """Unknown slugs fall back to the untranslated db_name."""
        category = Category.objects.create(
            type=amo.ADDON_EXTENSION, application=amo.FIREFOX.id,
            slug='this-cat-does-not-exist', db_name=u'ALAAAAAAARM')
        assert category.name == u'ALAAAAAAARM'
        with translation.override('fr'):
            # No constant for this slug, so the db_name is used unchanged.
            assert category.name == u'ALAAAAAAARM'
class TestPersonaModel(TestCase):
    """Tests for Persona image URLs, update URL and theme JSON payload."""

    fixtures = ['addons/persona']

    def setUp(self):
        super(TestPersonaModel, self).setUp()
        self.addon = Addon.objects.get(id=15663)
        self.persona = self.addon.persona
        self.persona.header = 'header.png'
        self.persona.footer = 'footer.png'
        self.persona.popularity = 12345
        self.persona.save()

    def _expected_url(self, img_name, modified_suffix):
        """Build the expected image URL suffix for this persona."""
        return '/15663/%s?modified=%s' % (img_name, modified_suffix)

    def test_image_urls(self):
        """AMO themes with a checksum use it as the cache-busting suffix."""
        # AMO-uploaded themes have `persona_id=0`.
        self.persona.persona_id = 0
        self.persona.checksum = 'fakehash'
        self.persona.save()
        modified = 'fakehash'
        assert self.persona.thumb_url.endswith(
            self._expected_url('preview.png', modified))
        assert self.persona.icon_url.endswith(
            self._expected_url('icon.png', modified))
        assert self.persona.preview_url.endswith(
            self._expected_url('preview.png', modified))
        assert self.persona.header_url.endswith(
            self._expected_url('header.png', modified))
        assert self.persona.footer_url.endswith(
            self._expected_url('footer.png', modified))

    def test_image_urls_no_checksum(self):
        """Without a checksum, the modified timestamp is used instead."""
        # AMO-uploaded themes have `persona_id=0`.
        self.persona.persona_id = 0
        self.persona.save()
        modified = int(time.mktime(self.persona.addon.modified.timetuple()))
        assert self.persona.thumb_url.endswith(
            self._expected_url('preview.png', modified))
        assert self.persona.icon_url.endswith(
            self._expected_url('icon.png', modified))
        assert self.persona.preview_url.endswith(
            self._expected_url('preview.png', modified))
        assert self.persona.header_url.endswith(
            self._expected_url('header.png', modified))
        assert self.persona.footer_url.endswith(
            self._expected_url('footer.png', modified))

    def test_old_image_urls(self):
        """Legacy (getpersonas) themes use the old jpg file names."""
        self.persona.addon.modified = None
        modified = 0
        assert self.persona.thumb_url.endswith(
            self._expected_url('preview.jpg', modified))
        assert self.persona.icon_url.endswith(
            self._expected_url('preview_small.jpg', modified))
        assert self.persona.preview_url.endswith(
            self._expected_url('preview_large.jpg', modified))
        assert self.persona.header_url.endswith(
            self._expected_url('header.png', modified))
        assert self.persona.footer_url.endswith(
            self._expected_url('footer.png', modified))

    def test_update_url(self):
        """The update URL is localized with the active language code."""
        with self.settings(LANGUAGE_CODE='fr', LANGUAGE_URL_MAP={}):
            url_ = self.persona.update_url
            assert url_.endswith('/fr/themes/update-check/15663')

    def test_json_data(self):
        """theme_data for a legacy persona uses the old jpg image names."""
        self.persona.addon.all_categories = [Category(db_name='Yolo Art')]

        VAMO = 'https://vamo/%(locale)s/themes/update-check/%(id)d'

        with self.settings(LANGUAGE_CODE='fr',
                           LANGUAGE_URL_MAP={},
                           NEW_PERSONAS_UPDATE_URL=VAMO,
                           SITE_URL='https://omgsh.it'):
            data = self.persona.theme_data

            id_ = str(self.persona.addon.id)

            assert data['id'] == id_
            assert data['name'] == unicode(self.persona.addon.name)
            assert data['accentcolor'] == '#8d8d97'
            assert data['textcolor'] == '#ffffff'
            assert data['category'] == 'Yolo Art'
            assert data['author'] == 'persona_author'
            assert data['description'] == unicode(self.addon.description)

            assert data['headerURL'].startswith(
                '%s%s/header.png?' % (user_media_url('addons'), id_))
            assert data['footerURL'].startswith(
                '%s%s/footer.png?' % (user_media_url('addons'), id_))
            assert data['previewURL'].startswith(
                '%s%s/preview_large.jpg?' % (user_media_url('addons'), id_))
            assert data['iconURL'].startswith(
                '%s%s/preview_small.jpg?' % (user_media_url('addons'), id_))

            assert data['detailURL'] == (
                'https://omgsh.it%s' % self.persona.addon.get_url_path())
            assert data['updateURL'] == (
                'https://vamo/fr/themes/update-check/' + id_)
            assert data['version'] == '1.0'

    def test_json_data_new_persona(self):
        """theme_data for an AMO-uploaded theme uses the new png names."""
        self.persona.persona_id = 0  # Make this a "new" theme.
        self.persona.save()

        self.persona.addon.all_categories = [Category(db_name='Yolo Art')]

        VAMO = 'https://vamo/%(locale)s/themes/update-check/%(id)d'

        with self.settings(LANGUAGE_CODE='fr',
                           LANGUAGE_URL_MAP={},
                           NEW_PERSONAS_UPDATE_URL=VAMO,
                           SITE_URL='https://omgsh.it'):
            data = self.persona.theme_data

            id_ = str(self.persona.addon.id)

            assert data['id'] == id_
            assert data['name'] == unicode(self.persona.addon.name)
            assert data['accentcolor'] == '#8d8d97'
            assert data['textcolor'] == '#ffffff'
            assert data['category'] == 'Yolo Art'
            assert data['author'] == 'persona_author'
            assert data['description'] == unicode(self.addon.description)

            assert data['headerURL'].startswith(
                '%s%s/header.png?' % (user_media_url('addons'), id_))
            assert data['footerURL'].startswith(
                '%s%s/footer.png?' % (user_media_url('addons'), id_))
            assert data['previewURL'].startswith(
                '%s%s/preview.png?' % (user_media_url('addons'), id_))
            assert data['iconURL'].startswith(
                '%s%s/icon.png?' % (user_media_url('addons'), id_))

            assert data['detailURL'] == (
                'https://omgsh.it%s' % self.persona.addon.get_url_path())
            assert data['updateURL'] == (
                'https://vamo/fr/themes/update-check/' + id_)
            assert data['version'] == '1.0'

    def test_image_urls_without_footer(self):
        """A persona without a footer yields an empty footer URL."""
        self.persona.footer = ''
        self.persona.save()
        assert self.persona.footer_url == ''

    def test_json_data_without_footer(self):
        """theme_data carries empty footer fields when no footer exists."""
        self.persona.footer = ''
        self.persona.save()
        data = self.persona.theme_data
        assert data['footerURL'] == ''
        assert data['footer'] == ''

    def test_theme_data_with_null_description(self):
        """A null description stays None in theme_data."""
        addon = addon_factory(type=amo.ADDON_PERSONA, description=None)
        assert addon.persona.theme_data['description'] is None
class TestPreviewModel(TestCase):
    """Tests for Preview file paths, URLs and file cleanup on delete."""

    fixtures = ['base/previews']

    def test_filename(self):
        """Preview paths use png file names."""
        preview = Preview.objects.get(pk=24)
        assert 'png' in preview.thumbnail_path
        assert 'png' in preview.image_path

    def test_filename_in_url(self):
        """Preview URLs use png file names."""
        preview = Preview.objects.get(pk=24)
        assert 'png' in preview.thumbnail_url
        assert 'png' in preview.image_url

    def check_delete(self, preview, filename):
        """Write a file at `filename`, delete the preview, assert cleanup."""
        try:
            with storage.open(filename, 'w') as f:
                f.write('sample data\n')
            assert storage.exists(filename)
            preview.delete()
            assert not storage.exists(filename)
        finally:
            # Best-effort cleanup in case the delete did not remove the file.
            if storage.exists(filename):
                storage.delete(filename)

    def test_delete_image(self):
        """Deleting a preview removes its full-size image from storage."""
        preview = Preview.objects.get(pk=24)
        self.check_delete(preview, preview.image_path)

    def test_delete_thumbnail(self):
        """Deleting a preview removes its thumbnail from storage."""
        preview = Preview.objects.get(pk=24)
        self.check_delete(preview, preview.thumbnail_path)
class TestAddonDependencies(TestCase):
    """Tests for AddonDependency and the all_dependencies property."""

    fixtures = ['base/appversion',
                'base/users',
                'base/addon_5299_gcal',
                'base/addon_3615',
                'base/addon_3723_listed',
                'base/addon_6704_grapple',
                'base/addon_4664_twitterbar']

    def test_dependencies(self):
        """all_dependencies filters invalid add-ons and caps the list at 3."""
        ids = [3615, 3723, 4664, 6704]
        addon = Addon.objects.get(id=5299)
        dependencies = Addon.objects.in_bulk(ids)

        for dependency in dependencies.values():
            AddonDependency(addon=addon, dependent_addon=dependency).save()

        # Make sure all dependencies were saved correctly.
        assert sorted([a.id for a in addon.dependencies.all()]) == sorted(ids)

        # Add-on 3723 is disabled and won't show up in `all_dependencies`
        # property.
        assert addon.all_dependencies == [
            dependencies[3615], dependencies[4664], dependencies[6704]]

        # Adding another dependency won't change anything because we're already
        # at the maximum (3).
        new_dep = amo.tests.addon_factory()
        AddonDependency.objects.create(addon=addon, dependent_addon=new_dep)
        assert addon.all_dependencies == [
            dependencies[3615], dependencies[4664], dependencies[6704]]

        # Removing the first dependency will allow the one we just created to
        # be visible.
        dependencies[3615].delete()
        assert addon.all_dependencies == [
            dependencies[4664], dependencies[6704], new_dep]

    def test_unique_dependencies(self):
        """The same (addon, dependent_addon) pair cannot be saved twice."""
        a = Addon.objects.get(id=5299)
        b = Addon.objects.get(id=3615)

        AddonDependency.objects.create(addon=a, dependent_addon=b)
        assert list(a.dependencies.values_list('id', flat=True)) == [3615]

        with self.assertRaises(IntegrityError):
            AddonDependency.objects.create(addon=a, dependent_addon=b)
class TestListedAddonTwoVersions(TestCase):
    """Regression test for bug 563967."""

    fixtures = ['addons/listed-two-versions']

    def test_listed_two_versions(self):
        # Fetching an add-on with two listed versions must not raise
        # (bug 563967).
        Addon.objects.get(id=2795)
class TestAddonFromUpload(UploadTest):
fixtures = ['base/users']
    def setUp(self):
        """Log a user in, pick a platform and create required AppVersions."""
        super(TestAddonFromUpload, self).setUp()
        u = UserProfile.objects.get(pk=999)
        core.set_user(u)
        self.platform = amo.PLATFORM_MAC.id
        for version in ('3.0', '3.6.*'):
            AppVersion.objects.create(application=1, version=version)
        # Reset the active translation when each test finishes.
        self.addCleanup(translation.deactivate)
def manifest(self, basename):
return os.path.join(
settings.ROOT, 'src', 'olympia', 'devhub', 'tests', 'addons',
basename)
    def test_denied_guid(self):
        """Uploading an add-on whose guid is on the deny list must fail."""
        DeniedGuid.objects.create(guid='guid@xpi')
        with self.assertRaises(forms.ValidationError) as e:
            Addon.from_upload(self.get_upload('extension.xpi'),
                              [self.platform])
        assert e.exception.messages == ['Duplicate add-on ID found.']
    def test_existing_guid(self):
        """Re-uploading a deleted add-on's guid fails for other users."""
        # Upload addon so we can delete it.
        deleted = Addon.from_upload(self.get_upload('extension.xpi'),
                                    [self.platform])
        deleted.update(status=amo.STATUS_PUBLIC)
        deleted.delete()
        assert deleted.guid == 'guid@xpi'

        # Now upload the same add-on again (so same guid).
        with self.assertRaises(forms.ValidationError) as e:
            Addon.from_upload(self.get_upload('extension.xpi'),
                              [self.platform])
        assert e.exception.messages == ['Duplicate add-on ID found.']
    def test_existing_guid_same_author(self):
        """The author of a deleted add-on may re-upload its guid."""
        # Upload addon so we can delete it.
        deleted = Addon.from_upload(self.get_upload('extension.xpi'),
                                    [self.platform])
        # Claim the add-on.
        AddonUser(addon=deleted, user=UserProfile.objects.get(pk=999)).save()
        deleted.update(status=amo.STATUS_PUBLIC)
        deleted.delete()
        assert deleted.guid == 'guid@xpi'

        # Now upload the same add-on again (so same guid), checking no
        # validationError is raised this time.
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  [self.platform])
        deleted.reload()
        assert addon.guid == 'guid@xpi'
        # The deleted add-on's guid is renamed to free it for the new one.
        assert deleted.guid == 'guid-reused-by-pk-%s' % addon.pk
    def test_old_soft_deleted_addons_and_upload_non_extension(self):
        """Guid-less uploads must not try to reclaim guids of old deletions."""
        # Upload a couple of addons so we can pretend they were soft deleted.
        deleted1 = Addon.from_upload(
            self.get_upload('extension.xpi'), [self.platform])
        deleted2 = Addon.from_upload(
            self.get_upload('alt-rdf.xpi'), [self.platform])
        AddonUser(addon=deleted1, user=UserProfile.objects.get(pk=999)).save()
        AddonUser(addon=deleted2, user=UserProfile.objects.get(pk=999)).save()

        # Soft delete them like they were before, by nullifying their GUIDs.
        deleted1.update(status=amo.STATUS_PUBLIC, guid=None)
        deleted2.update(status=amo.STATUS_PUBLIC, guid=None)

        # Now upload a new add-on which isn't an extension, and has no GUID.
        # This fails if we try to reclaim the GUID from deleted add-ons: the
        # GUID is None, so it'll try to get the add-on that has a GUID which is
        # None, but many are returned. So make sure we're not trying to reclaim
        # the GUID.
        Addon.from_upload(
            self.get_upload('search.xml'), [self.platform])
    def test_xpi_attributes(self):
        """Attributes parsed from the xpi manifest land on the Addon."""
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  [self.platform])
        assert addon.name == 'xpi name'
        assert addon.guid == 'guid@xpi'
        assert addon.type == amo.ADDON_EXTENSION
        assert addon.status == amo.STATUS_NULL
        assert addon.homepage == 'http://homepage.com'
        # The xpi description is stored as the add-on summary.
        assert addon.summary == 'xpi description'
        assert addon.description is None
        assert addon.slug == 'xpi-name'
    def test_xpi_version(self):
        """A version and file are created from the xpi with the right state."""
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  [self.platform])
        v = addon.versions.get()
        assert v.version == '0.1'
        assert v.files.get().platform == self.platform
        assert v.files.get().status == amo.STATUS_AWAITING_REVIEW
    def test_xpi_for_multiple_platforms(self):
        """Uploading for several platforms creates one file per platform."""
        platforms = [amo.PLATFORM_LINUX.id, amo.PLATFORM_MAC.id]
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  platforms)
        v = addon.versions.get()
        assert sorted([f.platform for f in v.all_files]) == (
            sorted(platforms))
    def test_search_attributes(self):
        """Attributes parsed from a search xml land on the Addon."""
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        assert addon.name == 'search tool'
        # Search engines have no guid or homepage.
        assert addon.guid is None
        assert addon.type == amo.ADDON_SEARCH
        assert addon.status == amo.STATUS_NULL
        assert addon.homepage is None
        assert addon.description is None
        assert addon.slug == 'search-tool'
        assert addon.summary == 'Search Engine for Firefox'
    def test_search_version(self):
        """Search-engine versions are date-stamped and platform-agnostic."""
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        v = addon.versions.get()
        # The version string is the upload date.
        assert v.version == datetime.now().strftime('%Y%m%d')
        assert v.files.get().platform == amo.PLATFORM_ALL.id
        assert v.files.get().status == amo.STATUS_AWAITING_REVIEW
    def test_no_homepage(self):
        """An xpi without a homepage yields homepage=None."""
        addon = Addon.from_upload(self.get_upload('extension-no-homepage.xpi'),
                                  [self.platform])
        assert addon.homepage is None
    def test_default_locale(self):
        """The add-on's default_locale follows the active translation."""
        # Make sure default_locale follows the active translation.
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        assert addon.default_locale == 'en-US'

        translation.activate('es')
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        assert addon.default_locale == 'es'
def test_validation_completes(self):
upload = self.get_upload('extension.xpi')
assert not upload.validation_timeout
addon = Addon.from_upload(upload, [self.platform])
assert not addon.needs_admin_code_review
    def test_validation_timeout(self):
        # Injecting a validation_timeout message into the stored validation
        # JSON must flag the resulting add-on for admin code review.
        upload = self.get_upload('extension.xpi')
        validation = json.loads(upload.validation)
        timeout_message = {
            'id': ['validator', 'unexpected_exception', 'validation_timeout'],
        }
        validation['messages'] = [timeout_message] + validation['messages']
        upload.validation = json.dumps(validation)
        assert upload.validation_timeout
        addon = Addon.from_upload(upload, [self.platform])
        assert addon.needs_admin_code_review
def test_webextension_generate_guid(self):
addon = Addon.from_upload(
self.get_upload('webextension_no_id.xpi'),
[self.platform])
assert addon.guid is not None
assert addon.guid.startswith('{')
assert addon.guid.endswith('}')
# Uploading the same addon without a id works.
new_addon = Addon.from_upload(
self.get_upload('webextension_no_id.xpi'),
[self.platform])
assert new_addon.guid is not None
assert new_addon.guid != addon.guid
assert addon.guid.startswith('{')
assert addon.guid.endswith('}')
    def test_webextension_reuse_guid(self):
        # The guid embedded in the WebExtension manifest is reused, and a
        # second upload carrying the same guid is rejected as a duplicate.
        addon = Addon.from_upload(
            self.get_upload('webextension.xpi'),
            [self.platform])
        assert addon.guid == '@webextension-guid'
        # Uploading the same addon with pre-existing id fails
        with self.assertRaises(forms.ValidationError) as e:
            Addon.from_upload(self.get_upload('webextension.xpi'),
                              [self.platform])
        assert e.exception.messages == ['Duplicate add-on ID found.']
    def test_basic_extension_is_marked_as_e10s_unknown(self):
        # extension.xpi does not have multiprocessCompatible set to true, so
        # it's marked as not-compatible.
        addon = Addon.from_upload(
            self.get_upload('extension.xpi'),
            [self.platform])
        assert addon.guid
        feature_compatibility = addon.feature_compatibility
        assert feature_compatibility.pk
        assert feature_compatibility.e10s == amo.E10S_UNKNOWN
    def test_extension_is_marked_as_e10s_incompatible(self):
        # Fixture declares multiprocess incompatibility -> E10S_INCOMPATIBLE.
        addon = Addon.from_upload(
            self.get_upload('multiprocess_incompatible_extension.xpi'),
            [self.platform])
        assert addon.guid
        feature_compatibility = addon.feature_compatibility
        assert feature_compatibility.pk
        assert feature_compatibility.e10s == amo.E10S_INCOMPATIBLE
    def test_multiprocess_extension_is_marked_as_e10s_compatible(self):
        # Fixture declares multiprocess compatibility -> E10S_COMPATIBLE.
        addon = Addon.from_upload(
            self.get_upload('multiprocess_compatible_extension.xpi'),
            [self.platform])
        assert addon.guid
        feature_compatibility = addon.feature_compatibility
        assert feature_compatibility.pk
        assert feature_compatibility.e10s == amo.E10S_COMPATIBLE
    def test_webextension_is_marked_as_e10s_compatible(self):
        # WebExtensions get the dedicated webextension-compatible marker.
        addon = Addon.from_upload(
            self.get_upload('webextension.xpi'),
            [self.platform])
        assert addon.guid
        feature_compatibility = addon.feature_compatibility
        assert feature_compatibility.pk
        assert feature_compatibility.e10s == amo.E10S_COMPATIBLE_WEBEXTENSION
    def test_webextension_resolve_translations(self):
        # __MSG_*__ placeholders are resolved from the bundled locale files,
        # following the active translation after reload.
        addon = Addon.from_upload(
            self.get_upload('notify-link-clicks-i18n.xpi'),
            [self.platform])
        # Normalized from `en` to `en-US`
        assert addon.default_locale == 'en-US'
        assert addon.name == 'Notify link clicks i18n'
        assert addon.summary == (
            'Shows a notification when the user clicks on links.')
        # Make sure we set the correct slug
        assert addon.slug == 'notify-link-clicks-i18n'
        translation.activate('de')
        addon.reload()
        assert addon.name == 'Meine Beispielerweiterung'
        assert addon.summary == u'Benachrichtigt den Benutzer über Linkklicks'
    @patch('olympia.addons.models.parse_addon')
    def test_webext_resolve_translations_corrects_locale(self, parse_addon):
        # A short locale code from the manifest is expanded to the full tag.
        parse_addon.return_value = {
            'default_locale': u'sv',
            'e10s_compatibility': 2,
            'guid': u'notify-link-clicks-i18n@notzilla.org',
            'name': u'__MSG_extensionName__',
            'is_webextension': True,
            'type': 1,
            'apps': [],
            'summary': u'__MSG_extensionDescription__',
            'version': u'1.0',
            'homepage': '...'
        }
        addon = Addon.from_upload(
            self.get_upload('notify-link-clicks-i18n.xpi'),
            [self.platform])
        # Normalized from `sv` to `sv-SE`
        assert addon.default_locale == 'sv-SE'
    @patch('olympia.addons.models.parse_addon')
    def test_webext_resolve_translations_unknown_locale(self, parse_addon):
        # An unrecognized locale code falls back to the default locale.
        parse_addon.return_value = {
            'default_locale': u'xxx',
            'e10s_compatibility': 2,
            'guid': u'notify-link-clicks-i18n@notzilla.org',
            'name': u'__MSG_extensionName__',
            'is_webextension': True,
            'type': 1,
            'apps': [],
            'summary': u'__MSG_extensionDescription__',
            'version': u'1.0',
            'homepage': '...'
        }
        addon = Addon.from_upload(
            self.get_upload('notify-link-clicks-i18n.xpi'),
            [self.platform])
        # Unknown locale `xxx`: falls back to `en-US`.
        assert addon.default_locale == 'en-US'
REDIRECT_URL = 'https://outgoing.prod.mozaws.net/v1/'
class TestFrozenAddons(TestCase):
    """Freezing an add-on must take effect without any deferred task."""

    def test_immediate_freeze(self):
        # Adding a FrozenAddon should immediately drop the addon's hotness.
        addon = Addon.objects.create(type=1, hotness=22)
        FrozenAddon.objects.create(addon=addon)
        assert Addon.objects.get(id=addon.id).hotness == 0
class TestRemoveLocale(TestCase):
    """Addon.remove_locale must delete translations for that locale only."""
    def test_remove(self):
        a = Addon.objects.create(type=1)
        a.name = {'en-US': 'woo', 'el': 'yeah'}
        a.description = {'en-US': 'woo', 'el': 'yeah', 'he': 'ola'}
        a.save()
        a.remove_locale('el')
        # Only the 'el' strings disappear; other locales stay intact.
        qs = (Translation.objects.filter(localized_string__isnull=False)
              .values_list('locale', flat=True))
        assert sorted(qs.filter(id=a.name_id)) == ['en-US']
        assert sorted(qs.filter(id=a.description_id)) == ['en-US', 'he']
    def test_remove_version_locale(self):
        # Version-level translated fields (releasenotes) are cleaned up too.
        addon = Addon.objects.create(type=amo.ADDON_THEME)
        version = Version.objects.create(addon=addon)
        version.releasenotes = {'fr': 'oui'}
        version.save()
        addon.remove_locale('fr')
        assert not (Translation.objects.filter(localized_string__isnull=False)
                    .values_list('locale', flat=True))
class TestAddonWatchDisabled(TestCase):
    """Saving an Addon must (un)hide its files when disabled state changes.

    File.objects.filter is patched so only the hide/unhide calls on the
    mocked file are observed -- no real file moves happen.
    """
    def setUp(self):
        super(TestAddonWatchDisabled, self).setUp()
        self.addon = Addon(type=amo.ADDON_THEME, disabled_by_user=False,
                           status=amo.STATUS_PUBLIC)
        self.addon.save()
    @patch('olympia.addons.models.File.objects.filter')
    def test_no_disabled_change(self, file_mock):
        # A plain save with no status change must not touch the files.
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.save()
        assert not mock.unhide_disabled_file.called
        assert not mock.hide_disabled_file.called
    @patch('olympia.addons.models.File.objects.filter')
    def test_disable_addon(self, file_mock):
        # User-disabling hides the files.
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.update(disabled_by_user=True)
        assert not mock.unhide_disabled_file.called
        assert mock.hide_disabled_file.called
    @patch('olympia.addons.models.File.objects.filter')
    def test_admin_disable_addon(self, file_mock):
        # Admin-disabling (status change) hides the files as well.
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.update(status=amo.STATUS_DISABLED)
        assert not mock.unhide_disabled_file.called
        assert mock.hide_disabled_file.called
    @patch('olympia.addons.models.File.objects.filter')
    def test_enable_addon(self, file_mock):
        # Re-enabling a disabled add-on unhides the files.
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.update(status=amo.STATUS_DISABLED)
        mock.reset_mock()
        self.addon.update(status=amo.STATUS_PUBLIC)
        assert mock.unhide_disabled_file.called
        assert not mock.hide_disabled_file.called
class TestTrackAddonStatusChange(TestCase):
    """Status changes on Addon must be forwarded to statsd tracking."""

    def create_addon(self, **kwargs):
        # Default to STATUS_NULL unless the caller overrides 'status'.
        return addon_factory(kwargs.pop('status', amo.STATUS_NULL), **kwargs)

    def test_increment_new_status(self):
        with patch('olympia.addons.models.track_addon_status_change') as mock_:
            addon = Addon()
            addon.save()
            mock_.assert_called_with(addon)

    def test_increment_updated_status(self):
        addon = self.create_addon()
        with patch('olympia.addons.models.track_addon_status_change') as mock_:
            addon.update(status=amo.STATUS_PUBLIC)
            addon.reload()
            # Bug fix: this was previously a bare comparison expression with
            # no `assert`, so the check could never fail.
            assert mock_.call_args[0][0].status == addon.status

    def test_ignore_non_status_changes(self):
        addon = self.create_addon()
        with patch('olympia.addons.models.track_addon_status_change') as mock_:
            addon.update(type=amo.ADDON_THEME)
        # Bug fix: the failure message referenced `self.mock_incr`, which
        # does not exist on this class and would raise AttributeError when
        # the assertion failed; report the patched mock's call args instead.
        assert not mock_.called, (
            'Unexpected call: {}'.format(mock_.call_args)
        )

    def test_increment_all_addon_statuses(self):
        addon = self.create_addon(status=amo.STATUS_PUBLIC)
        with patch('olympia.addons.models.statsd.incr') as mock_incr:
            track_addon_status_change(addon)
            mock_incr.assert_any_call(
                'addon_status_change.all.status_{}'.format(amo.STATUS_PUBLIC)
            )
class TestSearchSignals(amo.tests.ESTestCase):
    """Model save/update signals must keep the Elasticsearch index in sync."""
    def setUp(self):
        super(TestSearchSignals, self).setUp()
        self.addCleanup(self.cleanup)
    def cleanup(self):
        self.empty_index('default')
    def test_no_addons(self):
        assert Addon.search_public().count() == 0
    def test_create(self):
        # A new public add-on becomes searchable after a refresh.
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1
        assert Addon.search_public().query(name='woo')[0].id == addon.id
    def test_update(self):
        # Renaming re-indexes: the old name stops matching, the new one hits.
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1
        addon.name = 'yeah'
        addon.save()
        self.refresh()
        assert Addon.search_public().count() == 1
        assert Addon.search_public().query(name='woo').count() == 0
        assert Addon.search_public().query(name='yeah')[0].id == addon.id
    def test_user_disable(self):
        # User-disabled add-ons are removed from public search.
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1
        addon.update(disabled_by_user=True)
        self.refresh()
        assert Addon.search_public().count() == 0
    def test_switch_to_unlisted(self):
        # Moving the current version to the unlisted channel hides the addon.
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1
        addon.current_version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
        self.refresh()
        assert Addon.search_public().count() == 0
    def test_switch_to_listed(self):
        # Moving an unlisted version to listed (and making the add-on public)
        # makes it searchable.
        addon = addon_factory(
            name='woo', version_kw={'channel': amo.RELEASE_CHANNEL_UNLISTED},
            status=amo.STATUS_NULL)
        self.refresh()
        assert Addon.search_public().count() == 0
        latest_version = addon.find_latest_version(
            channel=amo.RELEASE_CHANNEL_UNLISTED)
        latest_version.update(channel=amo.RELEASE_CHANNEL_LISTED)
        addon.update(status=amo.STATUS_PUBLIC)
        self.refresh()
        assert Addon.search_public().count() == 1
    def test_delete(self):
        # Deleting removes the document from the index.
        addon = addon_factory(name='woo')
        self.refresh()
        assert Addon.search_public().count() == 1
        addon.delete('woo')
        self.refresh()
        assert Addon.search_public().count() == 0
class TestLanguagePack(TestCase, amo.tests.AMOPaths):
    """get_localepicker must only return content for mobile language packs."""
    def setUp(self):
        super(TestLanguagePack, self).setUp()
        self.addon = amo.tests.addon_factory(type=amo.ADDON_LPAPP,
                                             status=amo.STATUS_PUBLIC)
        self.platform_all = amo.PLATFORM_ALL.id
        self.platform_mob = amo.PLATFORM_ANDROID.id
        self.version = self.addon.current_version
    def test_extract(self):
        # Android langpack with a localepicker file: content is extracted.
        File.objects.create(platform=self.platform_mob, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker()
        assert 'title=Select a language' in self.addon.get_localepicker()
    def test_extract_no_file(self):
        # Langpack without a localepicker file: empty result.
        File.objects.create(platform=self.platform_mob, version=self.version,
                            filename=self.xpi_path('langpack'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker() == ''
    def test_extract_no_files(self):
        assert self.addon.get_localepicker() == ''
    def test_extract_not_language_pack(self):
        # Switching the add-on type away from LPAPP disables extraction.
        File.objects.create(platform=self.platform_mob, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker()
        self.addon.update(type=amo.ADDON_EXTENSION)
        assert self.addon.get_localepicker() == ''
    def test_extract_not_platform_mobile(self):
        # Non-Android platforms are ignored even with a localepicker present.
        File.objects.create(platform=self.platform_all, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker() == ''
class TestCompatOverride(TestCase):
    """CompatOverride / CompatOverrideRange behavior.

    Fixtures: 'one' has a single open range (0..*); 'two' has two ranges
    (versions 1..2, one restricted to app versions 3..4).
    """
    def setUp(self):
        super(TestCompatOverride, self).setUp()
        self.app = amo.APP_IDS[1]
        one = CompatOverride.objects.create(guid='one')
        CompatOverrideRange.objects.create(compat=one, app=self.app.id)
        two = CompatOverride.objects.create(guid='two')
        CompatOverrideRange.objects.create(compat=two, app=self.app.id,
                                           min_version='1', max_version='2')
        CompatOverrideRange.objects.create(compat=two, app=self.app.id,
                                           min_version='1', max_version='2',
                                           min_app_version='3',
                                           max_app_version='4')
    def check(self, obj, **kw):
        # Helper: assert each named attribute on obj equals the given value.
        for key, expected in kw.items():
            actual = getattr(obj, key)
            assert actual == expected
    def test_is_hosted(self):
        # is_hosted() is true only when an Addon with a matching guid exists.
        c = CompatOverride.objects.create(guid='a')
        assert not c.is_hosted()
        Addon.objects.create(type=1, guid='b')
        c = CompatOverride.objects.create(guid='b')
        assert c.is_hosted()
    def test_override_type(self):
        one = CompatOverride.objects.get(guid='one')
        # The default is incompatible.
        c = CompatOverrideRange.objects.create(compat=one, app=1)
        assert c.override_type() == 'incompatible'
        c = CompatOverrideRange.objects.create(compat=one, app=1, type=0)
        assert c.override_type() == 'compatible'
    def test_guid_match(self):
        # We hook up the add-on automatically if we see a matching guid.
        addon = Addon.objects.create(id=1, guid='oh yeah', type=1)
        c = CompatOverride.objects.create(guid=addon.guid)
        assert c.addon_id == addon.id
        c = CompatOverride.objects.create(guid='something else')
        assert c.addon is None
    def test_transformer(self):
        compats = list(CompatOverride.objects
                       .transform(CompatOverride.transformer))
        ranges = list(CompatOverrideRange.objects.all())
        # If the transformer works then we won't have any more queries.
        with self.assertNumQueries(0):
            for c in compats:
                assert c.compat_ranges == (
                    [r for r in ranges if r.compat_id == c.id])
    def test_collapsed_ranges(self):
        # Test that we get back the right structures from collapsed_ranges().
        c = CompatOverride.objects.get(guid='one')
        r = c.collapsed_ranges()
        assert len(r) == 1
        compat_range = r[0]
        self.check(compat_range, type='incompatible', min='0', max='*')
        assert len(compat_range.apps) == 1
        self.check(compat_range.apps[0], app=amo.FIREFOX, min='0', max='*')
    def test_collapsed_ranges_multiple_versions(self):
        # Distinct version ranges stay separate entries.
        c = CompatOverride.objects.get(guid='one')
        CompatOverrideRange.objects.create(compat=c, app=1,
                                           min_version='1', max_version='2',
                                           min_app_version='3',
                                           max_app_version='3.*')
        r = c.collapsed_ranges()
        assert len(r) == 2
        self.check(r[0], type='incompatible', min='0', max='*')
        assert len(r[0].apps) == 1
        self.check(r[0].apps[0], app=amo.FIREFOX, min='0', max='*')
        self.check(r[1], type='incompatible', min='1', max='2')
        assert len(r[1].apps) == 1
        self.check(r[1].apps[0], app=amo.FIREFOX, min='3', max='3.*')
    def test_collapsed_ranges_different_types(self):
        # If the override ranges have different types they should be separate
        # entries.
        c = CompatOverride.objects.get(guid='one')
        CompatOverrideRange.objects.create(compat=c, app=1, type=0,
                                           min_app_version='3',
                                           max_app_version='3.*')
        r = c.collapsed_ranges()
        assert len(r) == 2
        self.check(r[0], type='compatible', min='0', max='*')
        assert len(r[0].apps) == 1
        self.check(r[0].apps[0], app=amo.FIREFOX, min='3', max='3.*')
        self.check(r[1], type='incompatible', min='0', max='*')
        assert len(r[1].apps) == 1
        self.check(r[1].apps[0], app=amo.FIREFOX, min='0', max='*')
    def test_collapsed_ranges_multiple_apps(self):
        # Ranges with the same versions but different app ranges collapse
        # into one entry with several app sub-ranges.
        c = CompatOverride.objects.get(guid='two')
        r = c.collapsed_ranges()
        assert len(r) == 1
        compat_range = r[0]
        self.check(compat_range, type='incompatible', min='1', max='2')
        assert len(compat_range.apps) == 2
        self.check(compat_range.apps[0], app=amo.FIREFOX, min='0', max='*')
        self.check(compat_range.apps[1], app=amo.FIREFOX, min='3', max='4')
    def test_collapsed_ranges_multiple_versions_and_apps(self):
        c = CompatOverride.objects.get(guid='two')
        CompatOverrideRange.objects.create(min_version='5', max_version='6',
                                           compat=c, app=1)
        r = c.collapsed_ranges()
        assert len(r) == 2
        self.check(r[0], type='incompatible', min='1', max='2')
        assert len(r[0].apps) == 2
        self.check(r[0].apps[0], app=amo.FIREFOX, min='0', max='*')
        self.check(r[0].apps[1], app=amo.FIREFOX, min='3', max='4')
        self.check(r[1], type='incompatible', min='5', max='6')
        assert len(r[1].apps) == 1
        self.check(r[1].apps[0], app=amo.FIREFOX, min='0', max='*')
class TestIncompatibleVersions(TestCase):
    """Version save/delete signals must maintain IncompatibleVersions rows
    for versions falling inside a CompatOverrideRange."""
    def setUp(self):
        super(TestIncompatibleVersions, self).setUp()
        self.app = amo.APP_IDS[amo.FIREFOX.id]
        self.addon = Addon.objects.create(guid='r@b', type=amo.ADDON_EXTENSION)
    def test_signals_min(self):
        # Range 0..1.0: versions at and below the max are marked.
        assert IncompatibleVersions.objects.count() == 0
        c = CompatOverride.objects.create(guid='r@b')
        CompatOverrideRange.objects.create(compat=c, app=self.app.id,
                                           min_version='0',
                                           max_version='1.0')
        # Test the max version matched.
        version1 = Version.objects.create(id=2, addon=self.addon,
                                          version='1.0')
        assert IncompatibleVersions.objects.filter(
            version=version1).count() == 1
        assert IncompatibleVersions.objects.count() == 1
        # Test the lower range.
        version2 = Version.objects.create(id=1, addon=self.addon,
                                          version='0.5')
        assert IncompatibleVersions.objects.filter(
            version=version2).count() == 1
        assert IncompatibleVersions.objects.count() == 2
        # Test delete signals.
        version1.delete()
        assert IncompatibleVersions.objects.count() == 1
        version2.delete()
        assert IncompatibleVersions.objects.count() == 0
    def test_signals_max(self):
        # Range 1.0..*: versions at and above the min are marked.
        assert IncompatibleVersions.objects.count() == 0
        c = CompatOverride.objects.create(guid='r@b')
        CompatOverrideRange.objects.create(compat=c, app=self.app.id,
                                           min_version='1.0',
                                           max_version='*')
        # Test the min_version matched.
        version1 = Version.objects.create(addon=self.addon, version='1.0')
        assert IncompatibleVersions.objects.filter(
            version=version1).count() == 1
        assert IncompatibleVersions.objects.count() == 1
        # Test the upper range.
        version2 = Version.objects.create(addon=self.addon, version='99.0')
        assert IncompatibleVersions.objects.filter(
            version=version2).count() == 1
        assert IncompatibleVersions.objects.count() == 2
        # Test delete signals.
        version1.delete()
        assert IncompatibleVersions.objects.count() == 1
        version2.delete()
        assert IncompatibleVersions.objects.count() == 0
class TestAddonApprovalsCounter(TestCase):
    """increment/reset/approve_content class helpers on AddonApprovalsCounter
    must create-or-update the per-addon row with the right fields."""
    def setUp(self):
        self.addon = addon_factory()
    def test_increment_existing(self):
        # increment_for_addon creates the row on first call, then bumps the
        # counter and refreshes both review timestamps on later calls.
        assert not AddonApprovalsCounter.objects.filter(
            addon=self.addon).exists()
        AddonApprovalsCounter.increment_for_addon(self.addon)
        approval_counter = AddonApprovalsCounter.objects.get(addon=self.addon)
        assert approval_counter.counter == 1
        self.assertCloseToNow(approval_counter.last_human_review)
        self.assertCloseToNow(approval_counter.last_content_review)
        approval_counter.update(
            last_human_review=self.days_ago(100),
            last_content_review=self.days_ago(100))
        AddonApprovalsCounter.increment_for_addon(self.addon)
        approval_counter.reload()
        assert approval_counter.counter == 2
        self.assertCloseToNow(approval_counter.last_human_review)
        self.assertCloseToNow(approval_counter.last_content_review)
    def test_increment_non_existing(self):
        approval_counter = AddonApprovalsCounter.objects.create(
            addon=self.addon, counter=0)
        AddonApprovalsCounter.increment_for_addon(self.addon)
        approval_counter.reload()
        assert approval_counter.counter == 1
        self.assertCloseToNow(approval_counter.last_human_review)
        self.assertCloseToNow(approval_counter.last_content_review)
    def test_reset_existing(self):
        # reset_for_addon zeroes the counter but preserves the timestamps.
        approval_counter = AddonApprovalsCounter.objects.create(
            addon=self.addon, counter=42,
            last_content_review=self.days_ago(60),
            last_human_review=self.days_ago(30))
        AddonApprovalsCounter.reset_for_addon(self.addon)
        approval_counter.reload()
        assert approval_counter.counter == 0
        # Dates were not touched.
        self.assertCloseToNow(
            approval_counter.last_human_review, now=self.days_ago(30))
        self.assertCloseToNow(
            approval_counter.last_content_review, now=self.days_ago(60))
    def test_reset_non_existing(self):
        assert not AddonApprovalsCounter.objects.filter(
            addon=self.addon).exists()
        AddonApprovalsCounter.reset_for_addon(self.addon)
        approval_counter = AddonApprovalsCounter.objects.get(addon=self.addon)
        assert approval_counter.counter == 0
    def test_approve_content_non_existing(self):
        # approve_content only touches last_content_review.
        assert not AddonApprovalsCounter.objects.filter(
            addon=self.addon).exists()
        AddonApprovalsCounter.approve_content_for_addon(self.addon)
        approval_counter = AddonApprovalsCounter.objects.get(addon=self.addon)
        assert approval_counter.counter == 0
        assert approval_counter.last_human_review is None
        self.assertCloseToNow(approval_counter.last_content_review)
    def test_approve_content_existing(self):
        approval_counter = AddonApprovalsCounter.objects.create(
            addon=self.addon, counter=42,
            last_content_review=self.days_ago(367),
            last_human_review=self.days_ago(10))
        AddonApprovalsCounter.approve_content_for_addon(self.addon)
        approval_counter.reload()
        # This was updated to now.
        self.assertCloseToNow(approval_counter.last_content_review)
        # Those fields were not touched.
        assert approval_counter.counter == 42
        self.assertCloseToNow(
            approval_counter.last_human_review, now=self.days_ago(10))
| true | true |
f7f90a0540bf0fec399a75e22bd5cc243b8cfd32 | 2,495 | py | Python | dj_rest_auth/forms.py | mka142/dj-rest-auth | cf750277edacf1a14ac270bd5e6463a74719ead2 | [
"MIT"
] | null | null | null | dj_rest_auth/forms.py | mka142/dj-rest-auth | cf750277edacf1a14ac270bd5e6463a74719ead2 | [
"MIT"
] | null | null | null | dj_rest_auth/forms.py | mka142/dj-rest-auth | cf750277edacf1a14ac270bd5e6463a74719ead2 | [
"MIT"
] | null | null | null |
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
if 'allauth' in settings.INSTALLED_APPS:
from allauth.account import app_settings
from allauth.account.adapter import get_adapter
from allauth.account.forms import \
ResetPasswordForm as DefaultPasswordResetForm
from allauth.account.forms import default_token_generator
from allauth.account.utils import (filter_users_by_email,
user_pk_to_url_str, user_username)
from allauth.utils import build_absolute_uri
    class AllAuthPasswordResetForm(DefaultPasswordResetForm):
        def clean_email(self):
            """
            Invalid email should not raise error, as this would leak users
            for unit test: test_password_reset_with_invalid_email
            """
            # An unknown address simply yields an empty user list, so the
            # response is indistinguishable from a known address.
            email = self.cleaned_data["email"]
            email = get_adapter().clean_email(email)
            self.users = filter_users_by_email(email, is_active=True)
            return self.cleaned_data["email"]
        def save(self, request, **kwargs):
            """Send a password-reset email to every matched active user.

            Returns the cleaned email address. A custom token generator may
            be supplied via kwargs['token_generator'].
            """
            current_site = get_current_site(request)
            email = self.cleaned_data['email']
            token_generator = kwargs.get('token_generator', default_token_generator)
            for user in self.users:
                temp_key = token_generator.make_token(user)
                # save it to the password reset model
                # password_reset = PasswordReset(user=user, temp_key=temp_key)
                # password_reset.save()
                # send the password reset email
                path = reverse(
                    'password_reset_confirm',
                    args=[user_pk_to_url_str(user), temp_key],
                )
                # Optionally build the URL from the Sites framework domain
                # instead of the incoming request.
                if getattr(settings, 'REST_AUTH_PW_RESET_USE_SITES_DOMAIN', False) is True:
                    url = build_absolute_uri(None, path)
                else:
                    url = build_absolute_uri(request, path)
                context = {
                    'current_site': current_site,
                    'user': user,
                    'password_reset_url': url,
                    'request': request,
                }
                # Only include the username when it is a distinct credential.
                if app_settings.AUTHENTICATION_METHOD != app_settings.AuthenticationMethod.EMAIL:
                    context['username'] = user_username(user)
                get_adapter(request).send_mail(
                    'account/email/password_reset_key', email, context
                )
            return self.cleaned_data['email']
| 38.984375 | 94 | 0.626052 |
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
if 'allauth' in settings.INSTALLED_APPS:
from allauth.account import app_settings
from allauth.account.adapter import get_adapter
from allauth.account.forms import \
ResetPasswordForm as DefaultPasswordResetForm
from allauth.account.forms import default_token_generator
from allauth.account.utils import (filter_users_by_email,
user_pk_to_url_str, user_username)
from allauth.utils import build_absolute_uri
    class AllAuthPasswordResetForm(DefaultPasswordResetForm):
        def clean_email(self):
            # Unknown addresses produce an empty user list rather than a
            # validation error, so existing accounts are not leaked.
            email = self.cleaned_data["email"]
            email = get_adapter().clean_email(email)
            self.users = filter_users_by_email(email, is_active=True)
            return self.cleaned_data["email"]
        def save(self, request, **kwargs):
            # Send a password-reset email (with a tokenized confirm URL) to
            # every matched active user; returns the cleaned email address.
            current_site = get_current_site(request)
            email = self.cleaned_data['email']
            token_generator = kwargs.get('token_generator', default_token_generator)
            for user in self.users:
                temp_key = token_generator.make_token(user)
                path = reverse(
                    'password_reset_confirm',
                    args=[user_pk_to_url_str(user), temp_key],
                )
                # Either the Sites framework domain or the request host.
                if getattr(settings, 'REST_AUTH_PW_RESET_USE_SITES_DOMAIN', False) is True:
                    url = build_absolute_uri(None, path)
                else:
                    url = build_absolute_uri(request, path)
                context = {
                    'current_site': current_site,
                    'user': user,
                    'password_reset_url': url,
                    'request': request,
                }
                if app_settings.AUTHENTICATION_METHOD != app_settings.AuthenticationMethod.EMAIL:
                    context['username'] = user_username(user)
                get_adapter(request).send_mail(
                    'account/email/password_reset_key', email, context
                )
            return self.cleaned_data['email']
| true | true |
f7f90a3583fd605043fb9cabd4286008020b0c89 | 3,554 | py | Python | testdata/common/cgroups.py | suifengzhuliu/impala | 611f4c6f3b18cfcddff3b2956cbb87c295a87655 | [
"Apache-2.0"
] | 1,523 | 2015-01-01T03:42:24.000Z | 2022-02-06T22:24:04.000Z | testdata/common/cgroups.py | xwzbupt/impala | 97dda2b27da99367f4d07699aa046b16cda16dd4 | [
"Apache-2.0"
] | 10 | 2015-01-09T06:46:05.000Z | 2022-03-29T21:57:57.000Z | testdata/common/cgroups.py | xwzbupt/impala | 97dda2b27da99367f4d07699aa046b16cda16dd4 | [
"Apache-2.0"
] | 647 | 2015-01-02T04:01:40.000Z | 2022-03-30T15:57:35.000Z | #!/usr/bin/env impala-python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Utility code for creating cgroups for the Impala development environment.
# May be used as a library or as a command-line utility for manual testing.
import os
import sys
import errno
from optparse import OptionParser
# Options
# Command-line flags for running this module as a manual-testing script.
parser = OptionParser()
parser.add_option("-s", "--cluster_size", type="int", dest="cluster_size", default=3,
                  help="Size of the cluster (number of impalad instances to start).")
def get_cpu_controller_root():
  """Return the filesystem path of the CPU cgroup controller.

  Currently assumes the CPU controller is mounted in the standard location.
  TODO: Read /etc/mounts to find where cpu controller is mounted.
  """
  cpu_root = "/sys/fs/cgroup/cpu"
  if os.path.isdir(cpu_root):
    return cpu_root
  raise Exception("Cgroup CPU controller is not mounted at %s" % (cpu_root,))
def get_session_cpu_path():
  """Return the CPU cgroup hierarchy path for this session.

  The hierarchy is reported as an absolute path under the CPU controller
  root and is writable by the impalad processes.
  """
  PROC_SELF_CGROUP = '/proc/self/cgroup'
  # Each line of /proc/self/cgroup looks like "<id>:<controller>:<path>".
  with open(PROC_SELF_CGROUP) as cgroup_file:
    for line in cgroup_file:
      fields = line.strip().split(':')
      if len(fields) == 3 and fields[1] == 'cpu':
        return fields[2]
  raise Exception("Process cgroup CPU hierarchy not found in %s" % (PROC_SELF_CGROUP))
def create_impala_cgroup_path(instance_num):
  """Returns the full filesystem path of a CPU controller cgroup hierarchy which is
  writeable by an impalad. The base cgroup path is read from the environment variable
  IMPALA_CGROUP_BASE_PATH if it is set, otherwise it is set to a child of the path of
  the cgroup for this process.
  instance_num is used to provide different (sibiling) cgroups for each impalad
  instance. The returned cgroup is created if necessary.
  """
  parent_cgroup = os.getenv('IMPALA_CGROUP_BASE_PATH')
  if parent_cgroup is None:
    # Join root path with the cpu hierarchy path by concatenating the strings. Can't use
    # path.join() because the session cpu hierarchy path looks like an absolute FS path.
    parent_cgroup = "%s%s" % (get_cpu_controller_root(), get_session_cpu_path())
  cgroup_path = os.path.join(parent_cgroup, ("impala-%s" % instance_num))
  try:
    os.makedirs(cgroup_path)
  except OSError as ex:
    # Bug fix: "except OSError, ex" is Python-2-only syntax; "as" works on
    # Python 2.6+ and 3.x. A pre-existing directory is fine (idempotent).
    if ex.errno == errno.EEXIST and os.path.isdir(cgroup_path):
      pass
    else:
      raise
  return cgroup_path
if __name__ == "__main__":
  # Bug fix: 'options' was referenced without ever calling parse_args(),
  # which raised a NameError whenever the script was run directly.
  (options, args) = parser.parse_args()
  if options.cluster_size < 0:
    # Parenthesized form works on both Python 2 and 3.
    print('Please specify a cluster size >= 0')
    sys.exit(1)
  # Create one writable cgroup per impalad instance.
  for i in range(options.cluster_size):
    create_impala_cgroup_path(i)
| 40.386364 | 88 | 0.735791 |
import os
import sys
import errno
from optparse import OptionParser
# Command-line flags for running this module as a manual-testing script.
parser = OptionParser()
parser.add_option("-s", "--cluster_size", type="int", dest="cluster_size", default=3,
                  help="Size of the cluster (number of impalad instances to start).")
def get_cpu_controller_root():
  """Returns the filesystem path of the CPU cgroup controller.
  Currently assumes the CPU controller is mounted in the standard location.
  TODO: Read /etc/mounts to find where cpu controller is mounted.
  """
  CGROUP_CPU_ROOT = "/sys/fs/cgroup/cpu"
  if not os.path.isdir(CGROUP_CPU_ROOT):
    raise Exception("Cgroup CPU controller is not mounted at %s" % (CGROUP_CPU_ROOT))
  return CGROUP_CPU_ROOT
def get_session_cpu_path():
  """Returns the path of the CPU cgroup hierarchy for this session, which is writable
  by the impalad processes. The cgroup hierarchy is specified as an absolute path
  under the CPU controller root.
  """
  PROC_SELF_CGROUP = '/proc/self/cgroup'
  cgroup_paths = open(PROC_SELF_CGROUP)
  try:
    # Lines look like "<id>:<controller>:<path>"; pick the cpu controller.
    for line in cgroup_paths:
      parts = line.strip().split(':')
      if len(parts) == 3 and parts[1] == 'cpu':
        return parts[2]
  finally:
    cgroup_paths.close()
  raise Exception("Process cgroup CPU hierarchy not found in %s" % (PROC_SELF_CGROUP))
def create_impala_cgroup_path(instance_num):
  """Returns the full filesystem path of a CPU controller cgroup hierarchy which is
  writeable by an impalad. The base cgroup path is read from the environment variable
  IMPALA_CGROUP_BASE_PATH if it is set, otherwise it is set to a child of the path of
  the cgroup for this process.
  instance_num is used to provide different (sibiling) cgroups for each impalad
  instance. The returned cgroup is created if necessary.
  """
  parent_cgroup = os.getenv('IMPALA_CGROUP_BASE_PATH')
  if parent_cgroup is None:
    # Strings are concatenated because the session path is already absolute:
    # path.join() because the session cpu hierarchy path looks like an absolute FS path.
    parent_cgroup = "%s%s" % (get_cpu_controller_root(), get_session_cpu_path())
  cgroup_path = os.path.join(parent_cgroup, ("impala-%s" % instance_num))
  try:
    os.makedirs(cgroup_path)
  # NOTE(review): "except OSError, ex" is Python-2-only syntax.
  except OSError, ex:
    # A pre-existing cgroup directory is fine; anything else is re-raised.
    if ex.errno == errno.EEXIST and os.path.isdir(cgroup_path):
      pass
    else: raise
  return cgroup_path
if __name__ == "__main__":
  # Bug fix: 'options' was referenced without ever calling parse_args(),
  # which raised a NameError whenever the script was run directly.
  (options, args) = parser.parse_args()
  if options.cluster_size < 0:
    # Parenthesized form works on both Python 2 and 3.
    print('Please specify a cluster size >= 0')
    sys.exit(1)
  # Create one writable cgroup per impalad instance.
  for i in range(options.cluster_size):
    create_impala_cgroup_path(i)
| false | true |
f7f90ab8093b615c87e2065480a30e60ee540897 | 5,384 | py | Python | _unittest/test_31_Q3D.py | Samuelopez-ansys/pyaedt | 87df97641aeff02024e9d5756ae82d78bb4bc033 | [
"MIT"
] | null | null | null | _unittest/test_31_Q3D.py | Samuelopez-ansys/pyaedt | 87df97641aeff02024e9d5756ae82d78bb4bc033 | [
"MIT"
] | null | null | null | _unittest/test_31_Q3D.py | Samuelopez-ansys/pyaedt | 87df97641aeff02024e9d5756ae82d78bb4bc033 | [
"MIT"
] | null | null | null | import os
# Setup paths for module imports
from _unittest.conftest import scratch_path, local_path
import gc
# Import required modules
from pyaedt import Q3d, Q2d
from pyaedt.generic.filesystem import Scratch
# Project names: the save target for the coax model and the bundled bondwire
# example copied into the scratch area.
test_project_name = "coax_Q3D"
bondwire_project_name = "bondwireq3d"
class TestClass:
    def setup_class(self):
        # set a scratch directory and the environment / test data
        # NOTE(review): the Scratch context manager exits before any test
        # runs, yet teardown_class still calls self.local_scratch.remove();
        # presumably Scratch.__exit__ does not delete the directory on a
        # clean exit -- confirm against pyaedt.generic.filesystem.Scratch.
        with Scratch(scratch_path) as self.local_scratch:
            self.aedtapp = Q3d()
            example_project = os.path.join(local_path, "example_models", bondwire_project_name + ".aedt")
            self.test_project = self.local_scratch.copyfile(example_project)
    def teardown_class(self):
        # Drain desktop messages, close the project without saving, and
        # remove the scratch directory.
        self.aedtapp._desktop.ClearMessages("", "", 3)
        assert self.aedtapp.close_project(self.aedtapp.project_name, saveproject=False)
        self.local_scratch.remove()
        gc.collect()
    def test_01_save(self):
        # Saving under the scratch path must create the .aedt file on disk.
        test_project = os.path.join(self.local_scratch.path, test_project_name + ".aedt")
        self.aedtapp.save_project(test_project)
        assert os.path.exists(test_project)
def test_02_create_primitive(self):
udp = self.aedtapp.modeler.Position(0, 0, 0)
coax_dimension = 30
o = self.aedtapp.modeler.primitives.create_cylinder(
self.aedtapp.PLANE.XY, udp, 3, coax_dimension, 0, matname="brass", name="MyCylinder"
)
assert isinstance(o.id, int)
def test_03_get_properties(self):
assert self.aedtapp.odefinition_manager
assert self.aedtapp.omaterial_manager
assert self.aedtapp.design_file
def test_06a_create_setup(self):
mysetup = self.aedtapp.create_setup()
mysetup.props["SaveFields"] = True
assert mysetup.update()
sweep = self.aedtapp.create_discrete_sweep(mysetup.name, sweepname="mysweep", freqstart=1, units="GHz")
assert sweep
assert sweep.props["RangeStart"] == "1GHz"
def test_06b_create_setup(self):
mysetup = self.aedtapp.create_setup()
mysetup.props["SaveFields"] = True
assert mysetup.update()
sweep2 = self.aedtapp.create_frequency_sweep(
mysetup.name, sweepname="mysweep2", units="GHz", freqstart=1, freqstop=4
)
assert sweep2
assert sweep2.props["RangeEnd"] == "4GHz"
def test_06c_autoidentify(self):
assert self.aedtapp.auto_identify_nets()
pass
def test_07_create_source_sinks(self):
source = self.aedtapp.assign_source_to_objectface("MyCylinder", axisdir=0, source_name="Source1")
sink = self.aedtapp.assign_sink_to_objectface("MyCylinder", axisdir=3, sink_name="Sink1")
assert source.name == "Source1"
assert sink.name == "Sink1"
def test_07B_create_source_tosheet(self):
self.aedtapp.modeler.primitives.create_circle(self.aedtapp.PLANE.XY, [0, 0, 0], 4, name="Source1")
self.aedtapp.modeler.primitives.create_circle(self.aedtapp.PLANE.XY, [10, 10, 10], 4, name="Sink1")
source = self.aedtapp.assign_source_to_sheet("Source1", sourcename="Source3")
sink = self.aedtapp.assign_sink_to_sheet("Sink1", sinkname="Sink3")
assert source.name == "Source3"
assert sink.name == "Sink3"
self.aedtapp.modeler.primitives.create_circle(self.aedtapp.PLANE.XY, [0, 0, 0], 4, name="Source1")
self.aedtapp.modeler.primitives.create_circle(self.aedtapp.PLANE.XY, [10, 10, 10], 4, name="Sink1")
source = self.aedtapp.assign_source_to_sheet("Source1", netname="GND", objectname="Cylinder1")
sink = self.aedtapp.assign_sink_to_sheet("Sink1", netname="GND", objectname="Cylinder1")
assert source
assert sink
sink.name = "My_new_name"
assert sink.update()
assert sink.name == "My_new_name"
assert len(self.aedtapp.nets) > 0
assert len(self.aedtapp.net_sources("GND")) > 0
assert len(self.aedtapp.net_sinks("GND")) > 0
assert len(self.aedtapp.net_sources("PGND")) == 0
assert len(self.aedtapp.net_sinks("PGND")) == 0
def test_08_create_faceted_bondwire(self):
self.aedtapp.load_project(self.test_project, close_active_proj=True)
test = self.aedtapp.modeler.create_faceted_bondwire_from_true_surface(
"bondwire_example", self.aedtapp.AXIS.Z, min_size=0.2, numberofsegments=8
)
assert test
pass
def test_10_q2d(self):
q2d = Q2d()
assert q2d
assert q2d.dim == "2D"
pass
def test_11_assign_net(self):
box = self.aedtapp.modeler.create_box([30, 30, 30], [10, 10, 10], name="mybox")
net_name = "my_net"
net = self.aedtapp.assign_net(box, net_name)
assert net
assert net.name == net_name
box = self.aedtapp.modeler.create_box([40, 30, 30], [10, 10, 10], name="mybox2")
net = self.aedtapp.assign_net(box, None, "Ground")
assert net
box = self.aedtapp.modeler.create_box([60, 30, 30], [10, 10, 10], name="mybox3")
net = self.aedtapp.assign_net(box, None, "Floating")
assert net
net.name = "new_net_name"
assert net.update()
assert net.name == "new_net_name"
def test_12_mesh_settings(self):
assert self.aedtapp.mesh.initial_mesh_settings
assert self.aedtapp.mesh.initial_mesh_settings.props
| 40.481203 | 111 | 0.66679 | import os
from _unittest.conftest import scratch_path, local_path
import gc
from pyaedt import Q3d, Q2d
from pyaedt.generic.filesystem import Scratch
test_project_name = "coax_Q3D"
bondwire_project_name = "bondwireq3d"
class TestClass:
    """Integration tests for the pyaedt Q3D (3-D extractor) wrapper.

    One shared ``Q3d`` session is created per class; the test methods are
    order-dependent, reusing objects created by earlier tests.
    """

    def setup_class(self):
        """Start a Q3d session and copy the bondwire example into scratch space."""
        # NOTE(review): scratch dir is removed explicitly in teardown_class, so
        # Scratch.__exit__ presumably does not delete it -- confirm.
        with Scratch(scratch_path) as self.local_scratch:
            self.aedtapp = Q3d()
            example_project = os.path.join(local_path, "example_models", bondwire_project_name + ".aedt")
            self.test_project = self.local_scratch.copyfile(example_project)

    def teardown_class(self):
        """Close the project without saving and remove the scratch directory."""
        self.aedtapp._desktop.ClearMessages("", "", 3)
        assert self.aedtapp.close_project(self.aedtapp.project_name, saveproject=False)
        self.local_scratch.remove()
        gc.collect()

    def test_01_save(self):
        """Saving the project writes an .aedt file to the scratch directory."""
        test_project = os.path.join(self.local_scratch.path, test_project_name + ".aedt")
        self.aedtapp.save_project(test_project)
        assert os.path.exists(test_project)

    def test_02_create_primitive(self):
        """A cylinder primitive is created and gets an integer object id."""
        udp = self.aedtapp.modeler.Position(0, 0, 0)
        coax_dimension = 30
        o = self.aedtapp.modeler.primitives.create_cylinder(
            self.aedtapp.PLANE.XY, udp, 3, coax_dimension, 0, matname="brass", name="MyCylinder"
        )
        assert isinstance(o.id, int)

    def test_03_get_properties(self):
        """Core manager handles and the design file path are exposed."""
        assert self.aedtapp.odefinition_manager
        assert self.aedtapp.omaterial_manager
        assert self.aedtapp.design_file

    def test_06a_create_setup(self):
        """A setup plus a discrete sweep can be created and updated."""
        mysetup = self.aedtapp.create_setup()
        mysetup.props["SaveFields"] = True
        assert mysetup.update()
        sweep = self.aedtapp.create_discrete_sweep(mysetup.name, sweepname="mysweep", freqstart=1, units="GHz")
        assert sweep
        assert sweep.props["RangeStart"] == "1GHz"

    def test_06b_create_setup(self):
        """A setup plus a frequency (interpolating) sweep can be created."""
        mysetup = self.aedtapp.create_setup()
        mysetup.props["SaveFields"] = True
        assert mysetup.update()
        sweep2 = self.aedtapp.create_frequency_sweep(
            mysetup.name, sweepname="mysweep2", units="GHz", freqstart=1, freqstop=4
        )
        assert sweep2
        assert sweep2.props["RangeEnd"] == "4GHz"

    def test_06c_autoidentify(self):
        """Automatic net identification succeeds on the current design."""
        assert self.aedtapp.auto_identify_nets()
        pass

    def test_07_create_source_sinks(self):
        """Sources/sinks assigned to object faces keep the requested names."""
        source = self.aedtapp.assign_source_to_objectface("MyCylinder", axisdir=0, source_name="Source1")
        sink = self.aedtapp.assign_sink_to_objectface("MyCylinder", axisdir=3, sink_name="Sink1")
        assert source.name == "Source1"
        assert sink.name == "Sink1"

    def test_07B_create_source_tosheet(self):
        """Sources/sinks can be assigned to sheet objects and renamed."""
        self.aedtapp.modeler.primitives.create_circle(self.aedtapp.PLANE.XY, [0, 0, 0], 4, name="Source1")
        self.aedtapp.modeler.primitives.create_circle(self.aedtapp.PLANE.XY, [10, 10, 10], 4, name="Sink1")
        source = self.aedtapp.assign_source_to_sheet("Source1", sourcename="Source3")
        sink = self.aedtapp.assign_sink_to_sheet("Sink1", sinkname="Sink3")
        assert source.name == "Source3"
        assert sink.name == "Sink3"
        # Duplicate circle names: AEDT presumably auto-renames the new sheets.
        self.aedtapp.modeler.primitives.create_circle(self.aedtapp.PLANE.XY, [0, 0, 0], 4, name="Source1")
        self.aedtapp.modeler.primitives.create_circle(self.aedtapp.PLANE.XY, [10, 10, 10], 4, name="Sink1")
        source = self.aedtapp.assign_source_to_sheet("Source1", netname="GND", objectname="Cylinder1")
        sink = self.aedtapp.assign_sink_to_sheet("Sink1", netname="GND", objectname="Cylinder1")
        assert source
        assert sink
        sink.name = "My_new_name"
        assert sink.update()
        assert sink.name == "My_new_name"
        # Net bookkeeping: GND now has excitations, an unknown net has none.
        assert len(self.aedtapp.nets) > 0
        assert len(self.aedtapp.net_sources("GND")) > 0
        assert len(self.aedtapp.net_sinks("GND")) > 0
        assert len(self.aedtapp.net_sources("PGND")) == 0
        assert len(self.aedtapp.net_sinks("PGND")) == 0

    def test_08_create_faceted_bondwire(self):
        """A faceted bondwire is built from the true-surface example project."""
        self.aedtapp.load_project(self.test_project, close_active_proj=True)
        test = self.aedtapp.modeler.create_faceted_bondwire_from_true_surface(
            "bondwire_example", self.aedtapp.AXIS.Z, min_size=0.2, numberofsegments=8
        )
        assert test
        pass

    def test_10_q2d(self):
        """A Q2d (2-D extractor) session can be started alongside Q3d."""
        q2d = Q2d()
        assert q2d
        assert q2d.dim == "2D"
        pass

    def test_11_assign_net(self):
        """Nets of each type (signal/Ground/Floating) can be assigned and renamed."""
        box = self.aedtapp.modeler.create_box([30, 30, 30], [10, 10, 10], name="mybox")
        net_name = "my_net"
        net = self.aedtapp.assign_net(box, net_name)
        assert net
        assert net.name == net_name
        box = self.aedtapp.modeler.create_box([40, 30, 30], [10, 10, 10], name="mybox2")
        net = self.aedtapp.assign_net(box, None, "Ground")
        assert net
        box = self.aedtapp.modeler.create_box([60, 30, 30], [10, 10, 10], name="mybox3")
        net = self.aedtapp.assign_net(box, None, "Floating")
        assert net
        net.name = "new_net_name"
        assert net.update()
        assert net.name == "new_net_name"

    def test_12_mesh_settings(self):
        """Initial mesh settings object and its property map are available."""
        assert self.aedtapp.mesh.initial_mesh_settings
        assert self.aedtapp.mesh.initial_mesh_settings.props
| true | true |
f7f90c6d5c788b9ab72e73ebedb40c4fab332b5a | 600 | py | Python | pythonhblib/hbrandlib.py | hbachchas/myexp | a5e055b9565d20b824210e4d18e260a569321971 | [
"MIT"
] | null | null | null | pythonhblib/hbrandlib.py | hbachchas/myexp | a5e055b9565d20b824210e4d18e260a569321971 | [
"MIT"
] | null | null | null | pythonhblib/hbrandlib.py | hbachchas/myexp | a5e055b9565d20b824210e4d18e260a569321971 | [
"MIT"
] | null | null | null | import math
import numpy.random as nprand
def get_rand_list(alist, th=.5):
    """Randomly split ``alist`` into two lists, in place.

    A fraction ``th`` of the elements (rounded up) is moved from ``alist``
    into a new list; the rest stay in ``alist``, which is mutated.

    Args:
        alist: list to split; modified in place.
        th: fraction of elements to extract, in [0, 1]. Default 0.5.

    Returns:
        ``[alist, extracted]`` where ``extracted`` holds
        ``ceil(len(alist) * th)`` randomly chosen elements.

    Example:
        remainder, extracted = get_rand_list([1, 2, 3, 4, 5, 6, 7, 8, 9], 0.2)
        # extracted holds ~20% (2 elements), remainder the other ~80%
    """
    nlist = []
    counter = math.ceil(len(alist) * th)
    while counter:
        # randint's upper bound is exclusive, so num is always a valid index.
        num = nprand.randint(0, len(alist))
        # pop() both returns and removes the chosen element.
        nlist.append(alist.pop(num))
        counter -= 1
    return [alist, nlist]
| 23.076923 | 73 | 0.58 | import math
import numpy.random as nprand
def get_rand_list(alist, th=.5):
    """Move a random ``ceil(len(alist) * th)`` subset of *alist* into a new list.

    *alist* is shrunk in place; returns ``[alist, moved]``.
    """
    moved = []
    n_to_move = math.ceil(len(alist) * th)
    for _ in range(n_to_move):
        idx = nprand.randint(0, len(alist))
        moved.append(alist.pop(idx))
    return [alist, moved]
| true | true |
f7f90c9689aab51534d1ab3ed46ca29f8349b052 | 1,727 | py | Python | pype/hosts/resolve/plugins/publish/collect_workfile.py | simonebarbieri/pype | a6dc83aa1300738749cbe8e5e2e6d2d1794e0289 | [
"MIT"
] | null | null | null | pype/hosts/resolve/plugins/publish/collect_workfile.py | simonebarbieri/pype | a6dc83aa1300738749cbe8e5e2e6d2d1794e0289 | [
"MIT"
] | null | null | null | pype/hosts/resolve/plugins/publish/collect_workfile.py | simonebarbieri/pype | a6dc83aa1300738749cbe8e5e2e6d2d1794e0289 | [
"MIT"
] | null | null | null | import pyblish.api
from pype.hosts import resolve
from avalon import api as avalon
from pprint import pformat
# dev
from importlib import reload
from pype.hosts.resolve.otio import davinci_export
reload(davinci_export)
class CollectWorkfile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    label = "Collect Workfile"
    order = pyblish.api.CollectorOrder - 0.6

    def process(self, context):
        """Create a 'workfile' instance for the active Resolve project and
        stash project-level data (OTIO timeline, tracks, fps) on the context."""
        # Name the workfile instance after the active avalon asset.
        asset = avalon.Session["AVALON_ASSET"]
        subset = "workfile"

        project = resolve.get_current_project()
        fps = project.GetSetting("timelineFrameRate")
        # NOTE(review): active_timeline is never used below -- presumably kept
        # for a side effect of touching the current timeline; confirm.
        active_timeline = resolve.get_current_timeline()
        video_tracks = resolve.get_video_track_names()

        # adding otio timeline to context
        otio_timeline = davinci_export.create_otio_timeline(project)

        instance_data = {
            "name": "{}_{}".format(asset, subset),
            "asset": asset,
            "subset": "{}{}".format(asset, subset.capitalize()),
            "item": project,
            "family": "workfile"
        }

        # create instance with workfile
        instance = context.create_instance(**instance_data)

        # update context with main project attributes
        context_data = {
            "activeProject": project,
            "otioTimeline": otio_timeline,
            "videoTracks": video_tracks,
            "currentFile": project.GetName(),
            "fps": fps,
        }
        context.data.update(context_data)

        self.log.info("Creating instance: {}".format(instance))
        self.log.debug("__ instance.data: {}".format(pformat(instance.data)))
        self.log.debug("__ context_data: {}".format(pformat(context_data)))
| 31.4 | 77 | 0.640996 | import pyblish.api
from pype.hosts import resolve
from avalon import api as avalon
from pprint import pformat
from importlib import reload
from pype.hosts.resolve.otio import davinci_export
reload(davinci_export)
class CollectWorkfile(pyblish.api.ContextPlugin):
    """Collect the current DaVinci Resolve workfile into the publish context."""

    label = "Collect Workfile"
    order = pyblish.api.CollectorOrder - 0.6

    def process(self, context):
        """Create a 'workfile' instance and record project-level context data."""
        asset = avalon.Session["AVALON_ASSET"]
        subset = "workfile"

        project = resolve.get_current_project()
        fps = project.GetSetting("timelineFrameRate")
        active_timeline = resolve.get_current_timeline()
        video_tracks = resolve.get_video_track_names()

        # Export the whole project as an OTIO timeline for later plugins.
        otio_timeline = davinci_export.create_otio_timeline(project)

        # One publish instance represents the workfile itself.
        instance = context.create_instance(
            name=f"{asset}_{subset}",
            asset=asset,
            subset=f"{asset}{subset.capitalize()}",
            item=project,
            family="workfile",
        )

        # Share the main project attributes with every downstream plugin.
        context_data = {
            "activeProject": project,
            "otioTimeline": otio_timeline,
            "videoTracks": video_tracks,
            "currentFile": project.GetName(),
            "fps": fps,
        }
        context.data.update(context_data)

        self.log.info(f"Creating instance: {instance}")
        self.log.debug(f"__ instance.data: {pformat(instance.data)}")
        self.log.debug(f"__ context_data: {pformat(context_data)}")
| true | true |
f7f90ccf566c15fd7b12d22dc94c7d9c8f2c00ff | 10,426 | py | Python | docs/conf.py | huanghua1994/FMM3D | 0f635079b0c0468c0544648780b83902d66fb13a | [
"Apache-2.0"
] | null | null | null | docs/conf.py | huanghua1994/FMM3D | 0f635079b0c0468c0544648780b83902d66fb13a | [
"Apache-2.0"
] | null | null | null | docs/conf.py | huanghua1994/FMM3D | 0f635079b0c0468c0544648780b83902d66fb13a | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# fmm3d documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 1 16:19:13 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sphinx.ext.autodoc
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.abspath('sphinxext'))
sys.path.insert(0,os.path.abspath('../../texext'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.6' # dylan, but I only have 1.3.6
needs_sphinx = '1.3'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
# 'sphinx.ext.autosectionlabel', # needs v 1.4; can :ref: other files w/o this; removed 7/29/18
'texext',
'sphinxcontrib.bibtex',
]
bibtex_bibfiles=['references.bib']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'fmm3d'
copyright = u'2018-2019 The Simons Foundation, Inc. - All Rights Reserved'
author = u"Travis Askham, Zydrunas Gimbutas, Leslie Greengard, Libin Lu, Jeremy Magland, Dhairya Malhotra, Mike O'Neil, Manas Rachh, Vladimir Rokhlin, and Felipe Vico"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'alabaster'
html_theme = 'classic'
#html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {'collapsiblesidebar': 'true', 'sidebarwidth': '270px'}
#html_theme_options = {"codebgcolor":"rgb(240,240,240)"}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'fmm3ddoc'
# To fix location of equation numbering. Barnett tried 6/19/18
# see https://samnicholls.net/2016/06/15/how-to-sphinx-readthedocs/
def setup(app):
app.add_css_file('theme_overrides.css')
app.add_css_file('custom.css')
# it doesn't fail if this file not found in _static :(
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'fmm3d.tex', u'fmm3d Documentation',
u"Travis Askham \\and Zydrunas Gimbutas \\and Leslie Greengard \\and Libin Lu \\and Jeremy Magland \\and Dhairya Malhotra \\and Mike O'Neil \\and Manas Rachh \\and Vladimir Rokhlin \\and Felipe Vico", 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'fmm3d', u'fmm3d Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'fmm3d', u'fmm3d Documentation',
author, 'fmm3d', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 33.7411 | 216 | 0.716766 |
import sys
import os
import sphinx.ext.autodoc
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.abspath('sphinxext'))
sys.path.insert(0,os.path.abspath('../../texext'))
nsions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
['references.bib']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'fmm3d'
copyright = u'2018-2019 The Simons Foundation, Inc. - All Rights Reserved'
author = u"Travis Askham, Zydrunas Gimbutas, Leslie Greengard, Libin Lu, Jeremy Magland, Dhairya Malhotra, Mike O'Neil, Manas Rachh, Vladimir Rokhlin, and Felipe Vico"
# The version info for the project you're documenting, acts as replacement for
version = u'1.0.0'
release = u'1.0.0'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = True
html_theme = 'classic'
html_theme_options = {'collapsiblesidebar': 'true', 'sidebarwidth': '270px'}
html_static_path = ['_static']
htmlhelp_basename = 'fmm3ddoc'
def setup(app):
    """Sphinx extension hook: register custom CSS files at build start."""
    app.add_css_file('theme_overrides.css')
    app.add_css_file('custom.css')
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'fmm3d.tex', u'fmm3d Documentation',
u"Travis Askham \\and Zydrunas Gimbutas \\and Leslie Greengard \\and Libin Lu \\and Jeremy Magland \\and Dhairya Malhotra \\and Mike O'Neil \\and Manas Rachh \\and Vladimir Rokhlin \\and Felipe Vico", 'manual'),
]
man_pages = [
(master_doc, 'fmm3d', u'fmm3d Documentation',
[author], 1)
]
texinfo_documents = [
(master_doc, 'fmm3d', u'fmm3d Documentation',
author, 'fmm3d', 'One line description of project.',
'Miscellaneous'),
]
#texinfo_no_detailmenu = False
| true | true |
f7f90d3a53db73f2706a7a910f1d6609cc798e4f | 1,192 | py | Python | Assignments 2/question3.py | rohitmalik10/MOJOwork | 8767a39b4397032b98d878bbc9aea7642a83f75c | [
"Apache-2.0"
] | null | null | null | Assignments 2/question3.py | rohitmalik10/MOJOwork | 8767a39b4397032b98d878bbc9aea7642a83f75c | [
"Apache-2.0"
] | null | null | null | Assignments 2/question3.py | rohitmalik10/MOJOwork | 8767a39b4397032b98d878bbc9aea7642a83f75c | [
"Apache-2.0"
] | null | null | null | import requests
import json
url = "https://awm16002.srv.wifi.arista.com/new/webservice/login/modScanWifi/86400"
payload = "{\n\"type\":\"apikeycredentials\",\n\"keyId\":\"KEY-ATN565039-674\",\n\"keyValue\":\"16d7b32456a7700568d359fa452818bd\"\n}"
headers1 = {
'Content-Type': "application/json",
'cache-control': "no-cache",
'Postman-Token': "305e8595-dcd0-4d7b-82c7-0b27f86ba1fe"
}
response = requests.request("POST", url, data=payload, headers=headers1)
print(response)
url1 = "https://awm16002.srv.wifi.arista.com/new/webservice/v2/devices/clients"
headers1 = {
'User-Agent': "PostmanRuntime/7.15.2",
'Accept': "*/*",
'Cache-Control': "no-cache",
'Postman-Token': "0bdf46b9-a7fc-40d0-a7be-dafe1405f330,11166b29-8af0-43ff-a155-eb71a74dafb7",
'Host': "awm16002.srv.wifi.arista.com",
'Cookie': "JSESSIONID=22E95AE729DED106F391529AFE1855EA",
'Accept-Encoding': "gzip, deflate",
'Connection': "keep-alive",
'cache-control': "no-cache"
}
response1 = requests.request("GET", url1, headers=headers1)
print(response1)
data=response1.json()
import pandas as pd
df=pd.DataFrame(data)
print(df)
df.to_csv("Visualize.csv", index = False) | 31.368421 | 134 | 0.697987 | import requests
import json
url = "https://awm16002.srv.wifi.arista.com/new/webservice/login/modScanWifi/86400"
payload = "{\n\"type\":\"apikeycredentials\",\n\"keyId\":\"KEY-ATN565039-674\",\n\"keyValue\":\"16d7b32456a7700568d359fa452818bd\"\n}"
headers1 = {
'Content-Type': "application/json",
'cache-control': "no-cache",
'Postman-Token': "305e8595-dcd0-4d7b-82c7-0b27f86ba1fe"
}
response = requests.request("POST", url, data=payload, headers=headers1)
print(response)
url1 = "https://awm16002.srv.wifi.arista.com/new/webservice/v2/devices/clients"
headers1 = {
'User-Agent': "PostmanRuntime/7.15.2",
'Accept': "*/*",
'Cache-Control': "no-cache",
'Postman-Token': "0bdf46b9-a7fc-40d0-a7be-dafe1405f330,11166b29-8af0-43ff-a155-eb71a74dafb7",
'Host': "awm16002.srv.wifi.arista.com",
'Cookie': "JSESSIONID=22E95AE729DED106F391529AFE1855EA",
'Accept-Encoding': "gzip, deflate",
'Connection': "keep-alive",
'cache-control': "no-cache"
}
response1 = requests.request("GET", url1, headers=headers1)
print(response1)
data=response1.json()
import pandas as pd
df=pd.DataFrame(data)
print(df)
df.to_csv("Visualize.csv", index = False) | true | true |
f7f90d3c3725723f59c631163e5506ce02225ba9 | 146 | py | Python | item_engine/lin_dag_network/constants.py | GabrielAmare/TextEngine | 39ceb323a63af35e32c4be34ae35a77e811bc973 | [
"MIT"
] | null | null | null | item_engine/lin_dag_network/constants.py | GabrielAmare/TextEngine | 39ceb323a63af35e32c4be34ae35a77e811bc973 | [
"MIT"
] | null | null | null | item_engine/lin_dag_network/constants.py | GabrielAmare/TextEngine | 39ceb323a63af35e32c4be34ae35a77e811bc973 | [
"MIT"
] | null | null | null | from typing import TypeVar
from ..elements import Element
__all__ = ["E", "F"]
E = TypeVar("E", bound=Element)
F = TypeVar("F", bound=Element)
| 16.222222 | 31 | 0.678082 | from typing import TypeVar
from ..elements import Element
__all__ = ["E", "F"]
E = TypeVar("E", bound=Element)
F = TypeVar("F", bound=Element)
| true | true |
f7f90de8f73ea29de48c5fb2f56ea65818c0886a | 2,763 | py | Python | aws_parsecf/parser.py | dklawson/aws-parsecf | 0ef89231821bd5848acff0e0646f45c414e0b340 | [
"MIT"
] | 10 | 2018-04-08T23:25:19.000Z | 2021-04-30T01:50:07.000Z | aws_parsecf/parser.py | dklawson/aws-parsecf | 0ef89231821bd5848acff0e0646f45c414e0b340 | [
"MIT"
] | 3 | 2018-06-04T15:17:56.000Z | 2020-03-17T22:51:07.000Z | aws_parsecf/parser.py | dklawson/aws-parsecf | 0ef89231821bd5848acff0e0646f45c414e0b340 | [
"MIT"
] | 7 | 2017-07-12T06:01:21.000Z | 2021-06-17T12:36:26.000Z | from aws_parsecf.common import DELETE
from aws_parsecf.conditions import Conditions
from aws_parsecf.functions import Functions
class Parser:
    """Recursively resolve CloudFormation intrinsic functions and conditions.

    Walks a template tree in place, evaluating intrinsic functions (Fn::*,
    Ref) via ``Functions`` and conditions via ``Conditions``.  Subtrees whose
    condition evaluates false are marked with the ``DELETE`` sentinel and
    pruned by :meth:`cleanup`.
    """

    def __init__(self, root, default_region, parameters=None):
        """Build the function/condition evaluators for template ``root``.

        ``parameters`` maps template parameter names to values.  (Fixed: the
        original used a mutable ``{}`` default argument.)
        """
        if parameters is None:
            parameters = {}
        self.functions = Functions(self, root, default_region, parameters)
        self.conditions = Conditions(self, root, default_region)

    def explode(self, current):
        """Evaluate ``current`` in place; return a replacement value,
        ``DELETE`` to prune it, or None to keep the (mutated) node."""
        # object
        if isinstance(current, dict):
            # '_exploded' marks already-visited dicts (guards against cycles
            # and double evaluation); cleanup() strips the marker later.
            if '_exploded' in current:
                return
            current['_exploded'] = True
            # explode children first
            for key, value in current.items():
                self.exploded(current, key)

            condition_name = current.get('Condition')
            if condition_name and isinstance(condition_name, str):
                # condition
                if not self.conditions.evaluate(condition_name):
                    return DELETE
            elif len(current) == 2:  # including '_exploded'
                # single-key dict: possibly an intrinsic function or condition
                key, value = next((key, value) for key, value in current.items() if key != '_exploded')
                try:
                    return self.functions.evaluate(key, value)
                except KeyError as e:
                    if e.args != (key,):
                        raise
                    # not an intrinsic function
                if key != 'Condition':  # 'Condition' means a name of a condition, would make a mess
                    try:
                        return self.conditions.evaluate({key: value})
                    except KeyError as e:
                        if e.args != (key,):
                            raise
                        # not a condition
        # array
        elif isinstance(current, list):
            for index, value in enumerate(current):
                self.exploded(current, index)

    def cleanup(self, current):
        """Recursively strip '_exploded' markers and remove DELETE entries."""
        if isinstance(current, dict):
            if '_exploded' in current:
                del current['_exploded']
            for key, value in list(current.items()):
                if value is DELETE:
                    del current[key]
                else:
                    self.cleanup(value)
        elif isinstance(current, list):
            # Track deletions so indexes stay valid while mutating the list.
            deleted = 0
            for index, value in enumerate(list(current)):
                if value is DELETE:
                    del current[index - deleted]
                    deleted += 1
                else:
                    self.cleanup(value)

    def exploded(self, collection, key):
        """Explode ``collection[key]`` in place and return the (possibly
        replaced) value; None values are left untouched."""
        if collection[key] is None:
            return None
        exploded = self.explode(collection[key])
        if exploded is not None:
            collection[key] = exploded
        return collection[key]
| 37.849315 | 103 | 0.522258 | from aws_parsecf.common import DELETE
from aws_parsecf.conditions import Conditions
from aws_parsecf.functions import Functions
class Parser:
    """Recursively resolve CloudFormation intrinsic functions and conditions.

    Walks a template tree in place; subtrees whose condition is false are
    marked with the DELETE sentinel and pruned by cleanup().
    """

    def __init__(self, root, default_region, parameters={}):
        # NOTE(review): mutable default argument; harmless here only if the
        # evaluators never mutate it -- consider a None sentinel.
        self.functions = Functions(self, root, default_region, parameters)
        self.conditions = Conditions(self, root, default_region)

    def explode(self, current):
        """Evaluate ``current`` in place; return a replacement value, DELETE
        to prune the node, or None to keep the (mutated) node."""
        if isinstance(current, dict):
            # '_exploded' marks already-visited dicts; stripped by cleanup().
            if '_exploded' in current:
                return
            current['_exploded'] = True
            # Explode children before evaluating this node.
            for key, value in current.items():
                self.exploded(current, key)
            condition_name = current.get('Condition')
            if condition_name and isinstance(condition_name, str):
                # Named condition: prune the subtree when it is false.
                if not self.conditions.evaluate(condition_name):
                    return DELETE
            elif len(current) == 2:  # single real key plus '_exploded'
                # Possibly an intrinsic function or an inline condition.
                key, value = next((key, value) for key, value in current.items() if key != '_exploded')
                try:
                    return self.functions.evaluate(key, value)
                except KeyError as e:
                    if e.args != (key,):
                        raise
                    # not an intrinsic function: fall through
                if key != 'Condition':  # a bare 'Condition' key is a name, not an expression
                    try:
                        return self.conditions.evaluate({key: value})
                    except KeyError as e:
                        if e.args != (key,):
                            raise
                        # not a condition either: keep the node as-is
        elif isinstance(current, list):
            for index, value in enumerate(current):
                self.exploded(current, index)

    def cleanup(self, current):
        """Recursively strip '_exploded' markers and drop DELETE entries."""
        if isinstance(current, dict):
            if '_exploded' in current:
                del current['_exploded']
            for key, value in list(current.items()):
                if value is DELETE:
                    del current[key]
                else:
                    self.cleanup(value)
        elif isinstance(current, list):
            # Offset by the number of deletions so indexes stay valid.
            deleted = 0
            for index, value in enumerate(list(current)):
                if value is DELETE:
                    del current[index - deleted]
                    deleted += 1
                else:
                    self.cleanup(value)

    def exploded(self, collection, key):
        """Explode ``collection[key]`` in place and return the (possibly
        replaced) value; None values are left untouched."""
        if collection[key] is None:
            return None
        exploded = self.explode(collection[key])
        if exploded is not None:
            collection[key] = exploded
        return collection[key]
| true | true |
f7f90df69d452181e45fc33f58bfab087715e8da | 5,324 | py | Python | voila/execute.py | catethos/voila | 621c4375533ddb1afd70256765474a9f9a06a0c0 | [
"BSD-3-Clause"
] | 1 | 2019-06-27T11:26:14.000Z | 2019-06-27T11:26:14.000Z | voila/execute.py | silky/voila | 74a186f745fe609271e79de0d8a0153b6f4d3438 | [
"BSD-3-Clause"
] | null | null | null | voila/execute.py | silky/voila | 74a186f745fe609271e79de0d8a0153b6f4d3438 | [
"BSD-3-Clause"
] | null | null | null | from nbconvert.preprocessors.execute import ExecutePreprocessor
from ipykernel.jsonutil import json_clean
class OutputWidget:
    """This class mimics a front end output widget.

    It receives the comm messages a browser-side ipywidgets Output widget
    would receive, accumulates the resulting outputs in ``self.outputs``,
    and mirrors that state back to the kernel-side comm (and, for
    testing, into the executor's ``widget_state``).
    """
    def __init__(self, comm_id, state, kernel_client, executor):
        # comm_id: id of the kernel-side comm this widget mirrors.
        # state: initial widget state dict; must contain an 'outputs' list.
        self.comm_id = comm_id
        self.state = state
        self.kernel_client = kernel_client
        self.executor = executor
        self.topic = ('comm-%s' % self.comm_id).encode('ascii')
        self.outputs = self.state['outputs']
        # Set when clear_output(wait=True) was seen: the actual clear is
        # deferred until the next output arrives.
        self.clear_before_next_output = False
    def clear_output(self, outs, msg, cell_index):
        """Handle a clear_output message targeted at this widget."""
        self.parent_header = msg['parent_header']
        content = msg['content']
        if content.get('wait'):
            # wait=True: postpone clearing until the next output message
            self.clear_before_next_output = True
        else:
            self.outputs = []
            # sync back the state to the kernel
            self.sync_state()
            if hasattr(self.executor, 'widget_state'):
                # sync the state to the nbconvert state as well, since that is used for testing
                self.executor.widget_state[self.comm_id]['outputs'] = self.outputs
    def sync_state(self):
        """Push this widget's current outputs back to the kernel-side comm."""
        state = {'outputs': self.outputs}
        msg = {'method': 'update', 'state': state, 'buffer_paths': []}
        self.send(msg)
    def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
        """Helper for sending a comm message on IOPub"""
        # NOTE(review): despite the docstring, this sends on the *shell*
        # channel — confirm which channel is intended.  ``buffers`` is
        # accepted but currently unused.
        data = {} if data is None else data
        metadata = {} if metadata is None else metadata
        content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))
        msg = self.kernel_client.session.msg(msg_type, content=content, parent=self.parent_header, metadata=metadata)
        self.kernel_client.shell_channel.send(msg)
    def send(self, data=None, metadata=None, buffers=None):
        """Send a 'comm_msg' carrying *data* to the kernel-side comm."""
        self._publish_msg('comm_msg', data=data, metadata=metadata, buffers=buffers)
    def output(self, outs, msg, display_id, cell_index):
        """Append a stream/display_data message to this widget's outputs."""
        if self.clear_before_next_output:
            # a clear_output(wait=True) was pending: clear now
            self.outputs = []
            self.clear_before_next_output = False
        self.parent_header = msg['parent_header']
        content = msg['content']
        if 'data' not in content:
            # stream message (stdout/stderr text)
            output = {"output_type": "stream", "text": content['text'], "name": content['name']}
        else:
            # rich display_data message
            data = content['data']
            output = {"output_type": "display_data", "data": data, "metadata": {}}
        self.outputs.append(output)
        self.sync_state()
        if hasattr(self.executor, 'widget_state'):
            # sync the state to the nbconvert state as well, since that is used for testing
            self.executor.widget_state[self.comm_id]['outputs'] = self.outputs
    def set_state(self, state):
        """React to a comm state update; (un)register as the output hook.

        A truthy 'msg_id' means the widget started capturing output for
        that execution request; a falsy one ends the capture.
        """
        if 'msg_id' in state:
            msg_id = state.get('msg_id')
            if msg_id:
                self.executor.output_hook[msg_id] = self
                self.msg_id = msg_id
            else:
                del self.executor.output_hook[self.msg_id]
                self.msg_id = msg_id
class ExecutePreprocessorWithOutputWidget(ExecutePreprocessor):
    """Execute, but respect the output widget behaviour.

    Extends nbconvert's ExecutePreprocessor so that outputs produced
    while an ipywidgets Output widget is capturing are routed into the
    matching OutputWidget mirror instead of the executing cell.
    """
    def preprocess(self, nb, resources, km=None):
        """Execute the notebook; reset per-run hook bookkeeping first."""
        # msg_id -> OutputWidget currently capturing that request's output
        self.output_hook = {}
        # comm_id -> OutputWidget mirror for each kernel-side Output model
        self.output_objects = {}
        return super(ExecutePreprocessorWithOutputWidget, self).preprocess(nb, resources=resources, km=km)
    def output(self, outs, msg, display_id, cell_index):
        """Route an output message to a capturing widget, if one is hooked."""
        parent_msg_id = msg['parent_header'].get('msg_id')
        if parent_msg_id in self.output_hook:
            self.output_hook[parent_msg_id].output(outs, msg, display_id, cell_index)
            return
        super(ExecutePreprocessorWithOutputWidget, self).output(outs, msg, display_id, cell_index)
    def handle_comm_msg(self, outs, msg, cell_index):
        """Track comm_open/comm_msg traffic to mirror Output widget models."""
        super(ExecutePreprocessorWithOutputWidget, self).handle_comm_msg(outs, msg, cell_index)
        self.log.debug('comm msg: %r', msg)
        if msg['msg_type'] == 'comm_open':
            content = msg['content']
            data = content['data']
            state = data['state']
            comm_id = msg['content']['comm_id']
            # only mirror comms whose model is an Output widget
            if state['_model_module'] == '@jupyter-widgets/output' and state['_model_name'] == 'OutputModel':
                self.output_objects[comm_id] = OutputWidget(comm_id, state, self.kc, self)
        elif msg['msg_type'] == 'comm_msg':
            content = msg['content']
            data = content['data']
            if 'state' in data:
                state = data['state']
                comm_id = msg['content']['comm_id']
                if comm_id in self.output_objects:
                    self.output_objects[comm_id].set_state(state)
    def clear_output(self, outs, msg, cell_index):
        """Route clear_output to a capturing widget, if one is hooked."""
        parent_msg_id = msg['parent_header'].get('msg_id')
        if parent_msg_id in self.output_hook:
            self.output_hook[parent_msg_id].clear_output(outs, msg, cell_index)
            return
        super(ExecutePreprocessorWithOutputWidget, self).clear_output(outs, msg, cell_index)
def executenb(nb, cwd=None, km=None, **kwargs):
    """Execute notebook *nb* in place and return the executed notebook.

    *cwd* sets the working directory for execution, *km* an optional
    pre-started kernel manager; extra keyword arguments are forwarded to
    the output-widget aware preprocessor.
    """
    resources = {}
    if cwd is not None:
        resources['metadata'] = {'path': cwd}  # pragma: no cover
    preprocessor = ExecutePreprocessorWithOutputWidget(**kwargs)
    executed_nb, _resources = preprocessor.preprocess(nb, resources, km=km)
    return executed_nb
| 44.366667 | 117 | 0.627911 | from nbconvert.preprocessors.execute import ExecutePreprocessor
from ipykernel.jsonutil import json_clean
class OutputWidget:
def __init__(self, comm_id, state, kernel_client, executor):
self.comm_id = comm_id
self.state = state
self.kernel_client = kernel_client
self.executor = executor
self.topic = ('comm-%s' % self.comm_id).encode('ascii')
self.outputs = self.state['outputs']
self.clear_before_next_output = False
def clear_output(self, outs, msg, cell_index):
self.parent_header = msg['parent_header']
content = msg['content']
if content.get('wait'):
self.clear_before_next_output = True
else:
self.outputs = []
self.sync_state()
if hasattr(self.executor, 'widget_state'):
self.executor.widget_state[self.comm_id]['outputs'] = self.outputs
def sync_state(self):
state = {'outputs': self.outputs}
msg = {'method': 'update', 'state': state, 'buffer_paths': []}
self.send(msg)
def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
data = {} if data is None else data
metadata = {} if metadata is None else metadata
content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))
msg = self.kernel_client.session.msg(msg_type, content=content, parent=self.parent_header, metadata=metadata)
self.kernel_client.shell_channel.send(msg)
def send(self, data=None, metadata=None, buffers=None):
self._publish_msg('comm_msg', data=data, metadata=metadata, buffers=buffers)
def output(self, outs, msg, display_id, cell_index):
if self.clear_before_next_output:
self.outputs = []
self.clear_before_next_output = False
self.parent_header = msg['parent_header']
content = msg['content']
if 'data' not in content:
output = {"output_type": "stream", "text": content['text'], "name": content['name']}
else:
data = content['data']
output = {"output_type": "display_data", "data": data, "metadata": {}}
self.outputs.append(output)
self.sync_state()
if hasattr(self.executor, 'widget_state'):
self.executor.widget_state[self.comm_id]['outputs'] = self.outputs
def set_state(self, state):
if 'msg_id' in state:
msg_id = state.get('msg_id')
if msg_id:
self.executor.output_hook[msg_id] = self
self.msg_id = msg_id
else:
del self.executor.output_hook[self.msg_id]
self.msg_id = msg_id
class ExecutePreprocessorWithOutputWidget(ExecutePreprocessor):
def preprocess(self, nb, resources, km=None):
self.output_hook = {}
self.output_objects = {}
return super(ExecutePreprocessorWithOutputWidget, self).preprocess(nb, resources=resources, km=km)
def output(self, outs, msg, display_id, cell_index):
parent_msg_id = msg['parent_header'].get('msg_id')
if parent_msg_id in self.output_hook:
self.output_hook[parent_msg_id].output(outs, msg, display_id, cell_index)
return
super(ExecutePreprocessorWithOutputWidget, self).output(outs, msg, display_id, cell_index)
def handle_comm_msg(self, outs, msg, cell_index):
super(ExecutePreprocessorWithOutputWidget, self).handle_comm_msg(outs, msg, cell_index)
self.log.debug('comm msg: %r', msg)
if msg['msg_type'] == 'comm_open':
content = msg['content']
data = content['data']
state = data['state']
comm_id = msg['content']['comm_id']
if state['_model_module'] == '@jupyter-widgets/output' and state['_model_name'] == 'OutputModel':
self.output_objects[comm_id] = OutputWidget(comm_id, state, self.kc, self)
elif msg['msg_type'] == 'comm_msg':
content = msg['content']
data = content['data']
if 'state' in data:
state = data['state']
comm_id = msg['content']['comm_id']
if comm_id in self.output_objects:
self.output_objects[comm_id].set_state(state)
def clear_output(self, outs, msg, cell_index):
parent_msg_id = msg['parent_header'].get('msg_id')
if parent_msg_id in self.output_hook:
self.output_hook[parent_msg_id].clear_output(outs, msg, cell_index)
return
super(ExecutePreprocessorWithOutputWidget, self).clear_output(outs, msg, cell_index)
def executenb(nb, cwd=None, km=None, **kwargs):
resources = {}
if cwd is not None:
resources['metadata'] = {'path': cwd}
ep = ExecutePreprocessorWithOutputWidget(**kwargs)
return ep.preprocess(nb, resources, km=km)[0]
| true | true |
f7f90e13b9ee05d0c14d79f6245a3073660c6bd4 | 7,337 | py | Python | mmpose/models/losses/mse_loss.py | fourierer/mmpose | 1b481a4e46cea7cf47e01669d61ac4670f361e07 | [
"Apache-2.0"
] | null | null | null | mmpose/models/losses/mse_loss.py | fourierer/mmpose | 1b481a4e46cea7cf47e01669d61ac4670f361e07 | [
"Apache-2.0"
] | null | null | null | mmpose/models/losses/mse_loss.py | fourierer/mmpose | 1b481a4e46cea7cf47e01669d61ac4670f361e07 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
from ..registry import LOSSES
@LOSSES.register_module()
class JointsMSELoss(nn.Module):
    """Mean-squared-error loss over per-joint heatmaps.

    Args:
        use_target_weight (bool): If True, each joint's prediction and
            target are scaled by ``target_weight`` before the MSE
            (different joint types may have different target weights).
    """
    def __init__(self, use_target_weight=False):
        super().__init__()
        self.criterion = nn.MSELoss()
        self.use_target_weight = use_target_weight
    def forward(self, output, target, target_weight):
        """Return the mean per-joint MSE between ``output`` and ``target``."""
        batch_size, num_joints = output.size(0), output.size(1)
        # flatten each joint's heatmap and split into per-joint tensors
        preds = output.reshape((batch_size, num_joints, -1)).split(1, 1)
        gts = target.reshape((batch_size, num_joints, -1)).split(1, 1)
        loss = 0.
        for joint_id in range(num_joints):
            pred = preds[joint_id].squeeze(1)
            gt = gts[joint_id].squeeze(1)
            if self.use_target_weight:
                weight = target_weight[:, joint_id]
                loss = loss + self.criterion(pred.mul(weight), gt.mul(weight))
            else:
                loss = loss + self.criterion(pred, gt)
        return loss / num_joints
@LOSSES.register_module()
class CombinedTargetMSELoss(nn.Module):
    """MSE loss for combined target.

    CombinedTarget: The combination of classification target
    (response map) and regression target (offset map).
    Paper ref: Huang et al. The Devil is in the Details: Delving into
    Unbiased Data Processing for Human Pose Estimation (CVPR 2020).

    Args:
        use_target_weight (bool): Option to use weighted MSE loss.
            Different joint types may have different target weights.
    """
    def __init__(self, use_target_weight):
        super().__init__()
        self.criterion = nn.MSELoss(reduction='mean')
        self.use_target_weight = use_target_weight
    def forward(self, output, target, target_weight):
        """Compute the combined heatmap + offset MSE.

        Channels come in triplets per joint:
        (response heatmap, x-offset map, y-offset map).
        """
        batch_size = output.size(0)
        num_channels = output.size(1)
        heatmaps_pred = output.reshape(
            (batch_size, num_channels, -1)).split(1, 1)
        heatmaps_gt = target.reshape(
            (batch_size, num_channels, -1)).split(1, 1)
        loss = 0.
        num_joints = num_channels // 3  # 3 channels per joint
        for idx in range(num_joints):
            # NOTE(review): bare .squeeze() also drops the batch axis when
            # batch_size == 1 — confirm batches of size 1 never occur here.
            heatmap_pred = heatmaps_pred[idx * 3].squeeze()
            heatmap_gt = heatmaps_gt[idx * 3].squeeze()
            offset_x_pred = heatmaps_pred[idx * 3 + 1].squeeze()
            offset_x_gt = heatmaps_gt[idx * 3 + 1].squeeze()
            offset_y_pred = heatmaps_pred[idx * 3 + 2].squeeze()
            offset_y_gt = heatmaps_gt[idx * 3 + 2].squeeze()
            if self.use_target_weight:
                heatmap_pred = heatmap_pred.mul(target_weight[:, idx])
                heatmap_gt = heatmap_gt.mul(target_weight[:, idx])
            # classification loss
            loss += 0.5 * self.criterion(heatmap_pred, heatmap_gt)
            # regression loss: offsets are masked by the gt response map so
            # only active locations contribute
            loss += 0.5 * self.criterion(heatmap_gt * offset_x_pred,
                                         heatmap_gt * offset_x_gt)
            loss += 0.5 * self.criterion(heatmap_gt * offset_y_pred,
                                         heatmap_gt * offset_y_gt)
        return loss / num_joints
@LOSSES.register_module()
class JointsOHKMMSELoss(nn.Module):
    """MSE loss with online hard keypoint mining.

    Args:
        use_target_weight (bool): Option to use weighted MSE loss.
            Different joint types may have different target weights.
        topk (int): Only top k joint losses are kept.
    """
    def __init__(self, use_target_weight=False, topk=8):
        super().__init__()
        assert topk > 0
        # reduction='none' keeps per-element losses so the top-k per-joint
        # losses can be selected later
        self.criterion = nn.MSELoss(reduction='none')
        self.use_target_weight = use_target_weight
        self.topk = topk
    def _ohkm(self, loss):
        """Online hard keypoint mining.

        For each sample, keep only the ``topk`` largest per-joint losses
        and average them; then average over the batch.
        """
        ohkm_loss = 0.
        N = len(loss)
        for i in range(N):
            sub_loss = loss[i]
            _, topk_idx = torch.topk(
                sub_loss, k=self.topk, dim=0, sorted=False)
            tmp_loss = torch.gather(sub_loss, 0, topk_idx)
            ohkm_loss += torch.sum(tmp_loss) / self.topk
        ohkm_loss /= N
        return ohkm_loss
    def forward(self, output, target, target_weight):
        """Forward function."""
        batch_size = output.size(0)
        num_joints = output.size(1)
        if num_joints < self.topk:
            raise ValueError(f'topk ({self.topk}) should not '
                             f'larger than num_joints ({num_joints}).')
        heatmaps_pred = output.reshape(
            (batch_size, num_joints, -1)).split(1, 1)
        heatmaps_gt = target.reshape((batch_size, num_joints, -1)).split(1, 1)
        losses = []
        for idx in range(num_joints):
            heatmap_pred = heatmaps_pred[idx].squeeze(1)
            heatmap_gt = heatmaps_gt[idx].squeeze(1)
            if self.use_target_weight:
                losses.append(
                    self.criterion(
                        heatmap_pred.mul(target_weight[:, idx]),
                        heatmap_gt.mul(target_weight[:, idx])))
            else:
                losses.append(self.criterion(heatmap_pred, heatmap_gt))
        # collapse each joint's per-pixel losses to a scalar per sample,
        # producing a (batch, num_joints) matrix for mining
        losses = [loss.mean(dim=1).unsqueeze(dim=1) for loss in losses]
        losses = torch.cat(losses, dim=1)
        return self._ohkm(losses)
if __name__=='__main__':
    # Debug walk-through (translated from the original Chinese note):
    # the trace below shows that OHKM simply keeps the top-k per-joint
    # losses of each sample and back-propagates their mean.
    TOPK = 8
    def ohkm(loss):
        """Average the TOPK largest per-joint losses for every sample."""
        ohkm_loss = 0.
        N = len(loss)  # batch size (64 here)
        for i in range(N):
            sub_loss = loss[i]
            # indices of the TOPK highest per-joint losses, e.g.
            # tensor([ 8, 16, 15,  4,  3,  5,  2, 14])
            _, topk_idx = torch.topk(
                sub_loss, k=TOPK, dim=0, sorted=False)
            tmp_loss = torch.gather(sub_loss, 0, topk_idx)  # shape (TOPK,)
            ohkm_loss += torch.sum(tmp_loss) / TOPK
        ohkm_loss /= N
        return ohkm_loss
    criterion = nn.MSELoss(reduction='none')
    output = torch.randn(64, 17, 48, 64)
    target = torch.randn(64, 17, 48, 64)
    batch_size = output.size(0)
    num_joints = output.size(1)
    if num_joints < TOPK:
        # BUG FIX: the original referenced ``self.topk`` here, but there is
        # no ``self`` at module level, so hitting this branch raised
        # NameError instead of the intended ValueError.
        raise ValueError(f'topk ({TOPK}) should not '
                         f'larger than num_joints ({num_joints}).')
    heatmaps_pred = output.reshape(
        (batch_size, num_joints, -1)).split(1, 1)
    heatmaps_gt = target.reshape((batch_size, num_joints, -1)).split(1, 1)
    # heatmaps_pred: tuple of 17 tensors, each of shape (64, 1, 3072)
    losses = []
    for idx in range(num_joints):
        heatmap_pred = heatmaps_pred[idx].squeeze(1)  # (64, 3072)
        heatmap_gt = heatmaps_gt[idx].squeeze(1)
        losses.append(criterion(heatmap_pred, heatmap_gt))
    # per-(sample, joint) mean losses -> (64, 17)
    losses = [loss.mean(dim=1).unsqueeze(dim=1) for loss in losses]
    losses = torch.cat(losses, dim=1)
    final_loss = ohkm(losses)
| 37.433673 | 88 | 0.589751 | import torch
import torch.nn as nn
from ..registry import LOSSES
@LOSSES.register_module()
class JointsMSELoss(nn.Module):
def __init__(self, use_target_weight=False):
super().__init__()
self.criterion = nn.MSELoss()
self.use_target_weight = use_target_weight
def forward(self, output, target, target_weight):
batch_size = output.size(0)
num_joints = output.size(1)
heatmaps_pred = output.reshape(
(batch_size, num_joints, -1)).split(1, 1)
heatmaps_gt = target.reshape((batch_size, num_joints, -1)).split(1, 1)
loss = 0.
for idx in range(num_joints):
heatmap_pred = heatmaps_pred[idx].squeeze(1)
heatmap_gt = heatmaps_gt[idx].squeeze(1)
if self.use_target_weight:
loss += self.criterion(
heatmap_pred.mul(target_weight[:, idx]),
heatmap_gt.mul(target_weight[:, idx]))
else:
loss += self.criterion(heatmap_pred, heatmap_gt)
return loss / num_joints
@LOSSES.register_module()
class CombinedTargetMSELoss(nn.Module):
def __init__(self, use_target_weight):
super().__init__()
self.criterion = nn.MSELoss(reduction='mean')
self.use_target_weight = use_target_weight
def forward(self, output, target, target_weight):
batch_size = output.size(0)
num_channels = output.size(1)
heatmaps_pred = output.reshape(
(batch_size, num_channels, -1)).split(1, 1)
heatmaps_gt = target.reshape(
(batch_size, num_channels, -1)).split(1, 1)
loss = 0.
num_joints = num_channels // 3
for idx in range(num_joints):
heatmap_pred = heatmaps_pred[idx * 3].squeeze()
heatmap_gt = heatmaps_gt[idx * 3].squeeze()
offset_x_pred = heatmaps_pred[idx * 3 + 1].squeeze()
offset_x_gt = heatmaps_gt[idx * 3 + 1].squeeze()
offset_y_pred = heatmaps_pred[idx * 3 + 2].squeeze()
offset_y_gt = heatmaps_gt[idx * 3 + 2].squeeze()
if self.use_target_weight:
heatmap_pred = heatmap_pred.mul(target_weight[:, idx])
heatmap_gt = heatmap_gt.mul(target_weight[:, idx])
loss += 0.5 * self.criterion(heatmap_pred, heatmap_gt)
loss += 0.5 * self.criterion(heatmap_gt * offset_x_pred,
heatmap_gt * offset_x_gt)
loss += 0.5 * self.criterion(heatmap_gt * offset_y_pred,
heatmap_gt * offset_y_gt)
return loss / num_joints
@LOSSES.register_module()
class JointsOHKMMSELoss(nn.Module):
def __init__(self, use_target_weight=False, topk=8):
super().__init__()
assert topk > 0
self.criterion = nn.MSELoss(reduction='none')
self.use_target_weight = use_target_weight
self.topk = topk
def _ohkm(self, loss):
ohkm_loss = 0.
N = len(loss)
for i in range(N):
sub_loss = loss[i]
_, topk_idx = torch.topk(
sub_loss, k=self.topk, dim=0, sorted=False)
tmp_loss = torch.gather(sub_loss, 0, topk_idx)
ohkm_loss += torch.sum(tmp_loss) / self.topk
ohkm_loss /= N
return ohkm_loss
def forward(self, output, target, target_weight):
batch_size = output.size(0)
num_joints = output.size(1)
if num_joints < self.topk:
raise ValueError(f'topk ({self.topk}) should not '
f'larger than num_joints ({num_joints}).')
heatmaps_pred = output.reshape(
(batch_size, num_joints, -1)).split(1, 1)
heatmaps_gt = target.reshape((batch_size, num_joints, -1)).split(1, 1)
losses = []
for idx in range(num_joints):
heatmap_pred = heatmaps_pred[idx].squeeze(1)
heatmap_gt = heatmaps_gt[idx].squeeze(1)
if self.use_target_weight:
losses.append(
self.criterion(
heatmap_pred.mul(target_weight[:, idx]),
heatmap_gt.mul(target_weight[:, idx])))
else:
losses.append(self.criterion(heatmap_pred, heatmap_gt))
losses = [loss.mean(dim=1).unsqueeze(dim=1) for loss in losses]
losses = torch.cat(losses, dim=1)
return self._ohkm(losses)
if __name__=='__main__':
def ohkm(loss):
ohkm_loss = 0.
N = len(loss)
for i in range(N):
sub_loss = loss[i]
_, topk_idx = torch.topk(
sub_loss, k=8, dim=0, sorted=False)
)
_loss += torch.sum(tmp_loss) / 8
ohkm_loss /= N
return ohkm_loss
criterion = nn.MSELoss(reduction='none')
output = torch.randn(64,17,48,64)
target = torch.randn(64,17,48,64)
batch_size = output.size(0)
num_joints = output.size(1)
if num_joints < 8:
raise ValueError(f'topk ({self.topk}) should not '
f'larger than num_joints ({num_joints}).')
heatmaps_pred = output.reshape(
(batch_size, num_joints, -1)).split(1, 1)
heatmaps_gt = target.reshape((batch_size, num_joints, -1)).split(1, 1)
idx in range(num_joints):
heatmap_pred = heatmaps_pred[idx].squeeze(1)
eatmaps_gt[idx].squeeze(1)
losses.append(criterion(heatmap_pred, heatmap_gt))
n(dim=1).unsqueeze(dim=1) for loss in losses]
losses = torch.cat(losses, dim=1)
= ohkm(losses)
| true | true |
f7f90e9473d82089eca1a34d6b4b53d614979092 | 8,125 | py | Python | venv/lib/python3.6/site-packages/sqlalchemy/dialects/mysql/oursql.py | tchengatcincoai/cryptocoin-prices-compare | f295fecc7213a877bf717af0eb98414e9137b554 | [
"MIT"
] | 78 | 2017-08-19T03:46:13.000Z | 2020-02-19T04:29:45.000Z | virtual/lib/python3.6/site-packages/sqlalchemy/dialects/mysql/oursql.py | annstella/blog | 1cdb7e7e7df028a84fae9b7d901116aae577589d | [
"MIT"
] | 95 | 2018-01-31T20:28:13.000Z | 2018-04-10T01:55:35.000Z | virtual/lib/python3.6/site-packages/sqlalchemy/dialects/mysql/oursql.py | annstella/blog | 1cdb7e7e7df028a84fae9b7d901116aae577589d | [
"MIT"
] | 43 | 2018-02-05T23:23:46.000Z | 2021-07-28T22:51:42.000Z | # mysql/oursql.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+oursql
:name: OurSQL
:dbapi: oursql
:connectstring: mysql+oursql://<user>:<password>@<host>[:<port>]/<dbname>
:url: http://packages.python.org/oursql/
Unicode
-------
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.
"""
import re
from .base import (BIT, MySQLDialect, MySQLExecutionContext)
from ... import types as sqltypes, util
class _oursqlBIT(BIT):
    """BIT type override that disables the base dialect's bit conversion."""
    def result_processor(self, dialect, coltype):
        """oursql already converts mysql bits, so."""
        return None
class MySQLExecutionContext_oursql(MySQLExecutionContext):
    @property
    def plain_query(self):
        # True when the statement was flagged (via execution_options) to be
        # sent as a plain, non-parameterized query.
        return self.execution_options.get('_oursql_plain_query', False)
class MySQLDialect_oursql(MySQLDialect):
    """MySQL dialect speaking to the server through the oursql DBAPI."""
    driver = 'oursql'

    if util.py2k:
        supports_unicode_binds = True
        supports_unicode_statements = True

    supports_native_decimal = True

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    execution_ctx_cls = MySQLExecutionContext_oursql

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            sqltypes.Time: sqltypes.Time,
            BIT: _oursqlBIT,
        }
    )

    @classmethod
    def dbapi(cls):
        """Import and return the oursql DBAPI module."""
        return __import__('oursql')

    def do_execute(self, cursor, statement, parameters, context=None):
        """Provide an implementation of
        *cursor.execute(statement, parameters)*."""

        if context and context.plain_query:
            cursor.execute(statement, plain_query=True)
        else:
            cursor.execute(statement, parameters)

    def do_begin(self, connection):
        """Start a transaction; BEGIN must be sent as a plain query."""
        connection.cursor().execute('BEGIN', plain_query=True)

    def _xa_query(self, connection, query, xid):
        """Run an XA statement with *xid* escaped and inlined as a literal."""
        if util.py2k:
            arg = connection.connection._escape_string(xid)
        else:
            charset = self._connection_charset
            arg = connection.connection._escape_string(
                xid.encode(charset)).decode(charset)
        arg = "'%s'" % arg
        connection.execution_options(
            _oursql_plain_query=True).execute(query % arg)

    # Because mysql is bad, these methods have to be
    # reimplemented to use _PlainQuery. Basically, some queries
    # refuse to return any data if they're run through
    # the parameterized query API, or refuse to be parameterized
    # in the first place.
    def do_begin_twophase(self, connection, xid):
        self._xa_query(connection, 'XA BEGIN %s', xid)

    def do_prepare_twophase(self, connection, xid):
        self._xa_query(connection, 'XA END %s', xid)
        self._xa_query(connection, 'XA PREPARE %s', xid)

    def do_rollback_twophase(self, connection, xid, is_prepared=True,
                             recover=False):
        if not is_prepared:
            self._xa_query(connection, 'XA END %s', xid)
        self._xa_query(connection, 'XA ROLLBACK %s', xid)

    def do_commit_twophase(self, connection, xid, is_prepared=True,
                           recover=False):
        if not is_prepared:
            self.do_prepare_twophase(connection, xid)
        self._xa_query(connection, 'XA COMMIT %s', xid)

    # Q: why didn't we need all these "plain_query" overrides earlier ?
    # am i on a newer/older version of OurSQL ?
    # The reflection entry points below all defer to the base dialect but
    # force the _oursql_plain_query execution option on the connection.
    def has_table(self, connection, table_name, schema=None):
        return MySQLDialect.has_table(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema
        )

    def get_table_options(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_table_options(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema=schema,
            **kw
        )

    def get_columns(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_columns(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema=schema,
            **kw
        )

    def get_view_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_view_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            schema=schema,
            **kw
        )

    def get_table_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_table_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            schema
        )

    def get_schema_names(self, connection, **kw):
        return MySQLDialect.get_schema_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            **kw
        )

    def initialize(self, connection):
        return MySQLDialect.initialize(
            self,
            connection.execution_options(_oursql_plain_query=True)
        )

    def _show_create_table(self, connection, table, charset=None,
                           full_name=None):
        return MySQLDialect._show_create_table(
            self,
            connection.contextual_connect(close_with_result=True).
            execution_options(_oursql_plain_query=True),
            table, charset, full_name
        )

    def is_disconnect(self, e, connection, cursor):
        """Return True if exception *e* indicates a dead connection."""
        if isinstance(e, self.dbapi.ProgrammingError):
            # oursql raises ProgrammingError with errno None for operations
            # on a closed connection (but not a closed cursor)
            return e.errno is None and 'cursor' not in e.args[1] \
                and e.args[1].endswith('closed')
        else:
            # standard MySQL "server has gone away" / connection error codes
            return e.errno in (2006, 2013, 2014, 2045, 2055)

    def create_connect_args(self, url):
        """Build the (args, kwargs) passed to oursql.connect() from *url*."""
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        util.coerce_kw_type(opts, 'port', int)
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'autoping', bool)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)

        util.coerce_kw_type(opts, 'default_charset', bool)
        if opts.pop('default_charset', False):
            opts['charset'] = None
        else:
            util.coerce_kw_type(opts, 'charset', str)
        opts['use_unicode'] = opts.get('use_unicode', True)
        util.coerce_kw_type(opts, 'use_unicode', bool)

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        opts.setdefault('found_rows', True)

        ssl = {}
        for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
                    'ssl_capath', 'ssl_cipher']:
            if key in opts:
                # strip the 'ssl_' prefix: oursql takes a nested ssl dict
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        return [[], opts]

    def _get_server_version_info(self, connection):
        """Parse the server version string into a tuple of ints/strings."""
        dbapi_con = connection.connection
        version = []
        r = re.compile(r'[.\-]')
        for n in r.split(dbapi_con.server_info):
            try:
                version.append(int(n))
            except ValueError:
                # non-numeric fragments (e.g. 'log') are kept verbatim
                version.append(n)
        return tuple(version)

    def _extract_error_code(self, exception):
        """Return the MySQL error number carried by a DBAPI exception."""
        return exception.errno

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""
        return connection.connection.charset

    def _compat_fetchall(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchall()

    def _compat_fetchone(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchone()

    def _compat_first(self, rp, charset=None):
        """Return the first result row (no MySQLdb workaround needed)."""
        return rp.first()


# Canonical dialect class for the "mysql+oursql" URL scheme.
dialect = MySQLDialect_oursql
| 31.862745 | 77 | 0.621046 |
import re
from .base import (BIT, MySQLDialect, MySQLExecutionContext)
from ... import types as sqltypes, util
class _oursqlBIT(BIT):
def result_processor(self, dialect, coltype):
return None
class MySQLExecutionContext_oursql(MySQLExecutionContext):
@property
def plain_query(self):
return self.execution_options.get('_oursql_plain_query', False)
class MySQLDialect_oursql(MySQLDialect):
driver = 'oursql'
if util.py2k:
supports_unicode_binds = True
supports_unicode_statements = True
supports_native_decimal = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
execution_ctx_cls = MySQLExecutionContext_oursql
colspecs = util.update_copy(
MySQLDialect.colspecs,
{
sqltypes.Time: sqltypes.Time,
BIT: _oursqlBIT,
}
)
@classmethod
def dbapi(cls):
return __import__('oursql')
def do_execute(self, cursor, statement, parameters, context=None):
if context and context.plain_query:
cursor.execute(statement, plain_query=True)
else:
cursor.execute(statement, parameters)
def do_begin(self, connection):
connection.cursor().execute('BEGIN', plain_query=True)
def _xa_query(self, connection, query, xid):
if util.py2k:
arg = connection.connection._escape_string(xid)
else:
charset = self._connection_charset
arg = connection.connection._escape_string(
xid.encode(charset)).decode(charset)
arg = "'%s'" % arg
connection.execution_options(
_oursql_plain_query=True).execute(query % arg)
# the parameterized query API, or refuse to be parameterized
# in the first place.
def do_begin_twophase(self, connection, xid):
self._xa_query(connection, 'XA BEGIN %s', xid)
def do_prepare_twophase(self, connection, xid):
self._xa_query(connection, 'XA END %s', xid)
self._xa_query(connection, 'XA PREPARE %s', xid)
def do_rollback_twophase(self, connection, xid, is_prepared=True,
recover=False):
if not is_prepared:
self._xa_query(connection, 'XA END %s', xid)
self._xa_query(connection, 'XA ROLLBACK %s', xid)
def do_commit_twophase(self, connection, xid, is_prepared=True,
recover=False):
if not is_prepared:
self.do_prepare_twophase(connection, xid)
self._xa_query(connection, 'XA COMMIT %s', xid)
# Q: why didn't we need all these "plain_query" overrides earlier ?
def has_table(self, connection, table_name, schema=None):
return MySQLDialect.has_table(
self,
connection.connect().execution_options(_oursql_plain_query=True),
table_name,
schema
)
def get_table_options(self, connection, table_name, schema=None, **kw):
return MySQLDialect.get_table_options(
self,
connection.connect().execution_options(_oursql_plain_query=True),
table_name,
schema=schema,
**kw
)
def get_columns(self, connection, table_name, schema=None, **kw):
return MySQLDialect.get_columns(
self,
connection.connect().execution_options(_oursql_plain_query=True),
table_name,
schema=schema,
**kw
)
def get_view_names(self, connection, schema=None, **kw):
return MySQLDialect.get_view_names(
self,
connection.connect().execution_options(_oursql_plain_query=True),
schema=schema,
**kw
)
def get_table_names(self, connection, schema=None, **kw):
return MySQLDialect.get_table_names(
self,
connection.connect().execution_options(_oursql_plain_query=True),
schema
)
def get_schema_names(self, connection, **kw):
return MySQLDialect.get_schema_names(
self,
connection.connect().execution_options(_oursql_plain_query=True),
**kw
)
def initialize(self, connection):
return MySQLDialect.initialize(
self,
connection.execution_options(_oursql_plain_query=True)
)
def _show_create_table(self, connection, table, charset=None,
full_name=None):
return MySQLDialect._show_create_table(
self,
connection.contextual_connect(close_with_result=True).
execution_options(_oursql_plain_query=True),
table, charset, full_name
)
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.ProgrammingError):
return e.errno is None and 'cursor' not in e.args[1] \
and e.args[1].endswith('closed')
else:
return e.errno in (2006, 2013, 2014, 2045, 2055)
    def create_connect_args(self, url):
        """Build (args, kwargs) for oursql's connect() from a SQLAlchemy URL.

        Translates the generic URL fields to oursql parameter names,
        coerces string query options to their proper types, and folds
        any ``ssl_*`` options into a single ``ssl`` dict.
        """
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        util.coerce_kw_type(opts, 'port', int)
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'autoping', bool)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)

        util.coerce_kw_type(opts, 'default_charset', bool)
        # default_charset=True means "use the server default": drop any
        # explicit charset; otherwise coerce the charset option to str.
        if opts.pop('default_charset', False):
            opts['charset'] = None
        else:
            util.coerce_kw_type(opts, 'charset', str)
        opts['use_unicode'] = opts.get('use_unicode', True)
        util.coerce_kw_type(opts, 'use_unicode', bool)

        # FOUND_ROWS must be enabled for SQLAlchemy's sane rowcount.
        opts.setdefault('found_rows', True)

        ssl = {}
        for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
                    'ssl_capath', 'ssl_cipher']:
            if key in opts:
                # key[4:] strips the 'ssl_' prefix for the nested dict.
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        return [[], opts]
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = []
r = re.compile(r'[.\-]')
for n in r.split(dbapi_con.server_info):
try:
version.append(int(n))
except ValueError:
version.append(n)
return tuple(version)
    def _extract_error_code(self, exception):
        """Return the numeric MySQL error code carried by *exception*."""
        return exception.errno
    def _detect_charset(self, connection):
        """Return the connection's character set as reported by oursql."""
        return connection.connection.charset
    def _compat_fetchall(self, rp, charset=None):
        """Fetch all rows; oursql returns unicode already, so no decoding."""
        return rp.fetchall()
    def _compat_fetchone(self, rp, charset=None):
        """Fetch one row; oursql returns unicode already, so no decoding."""
        return rp.fetchone()
    def _compat_first(self, rp, charset=None):
        """Return the first row; oursql returns unicode already."""
        return rp.first()
dialect = MySQLDialect_oursql
| true | true |
f7f90fbd4fb9163471ae66d5d83c92e61ae9d1d4 | 516 | py | Python | hatefull/apps/tests/models.py | MauricioDinki/hatefull | fdefd69251ea136798ff483bfa90a3b08a871ec7 | [
"BSD-3-Clause"
] | null | null | null | hatefull/apps/tests/models.py | MauricioDinki/hatefull | fdefd69251ea136798ff483bfa90a3b08a871ec7 | [
"BSD-3-Clause"
] | null | null | null | hatefull/apps/tests/models.py | MauricioDinki/hatefull | fdefd69251ea136798ff483bfa90a3b08a871ec7 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from hatefull.apps.questions.models import Question
class Test(models.Model):
    """A named test belonging to a user and composed of questions."""

    # User who owns this test.  on_delete is spelled out explicitly: it
    # is mandatory from Django 2.0 onward, and CASCADE matches the
    # implicit default of earlier Django versions, so behavior for
    # existing callers is unchanged.
    user = models.ForeignKey(
        User,
        verbose_name='User',
        blank=False,
        null=False,
        on_delete=models.CASCADE,
    )

    # Short display name for the test.
    name = models.CharField(
        blank=False,
        null=False,
        max_length=20
    )

    # Questions included in this test.
    questions = models.ManyToManyField(Question)

    def __str__(self):
        return self.name
| 20.64 | 51 | 0.631783 |
from django.contrib.auth.models import User
from django.db import models

from hatefull.apps.questions.models import Question


class Test(models.Model):
    """A named test belonging to a user and composed of questions."""

    # NOTE(review): no on_delete on this ForeignKey — that argument is
    # mandatory from Django 2.0; presumably this targets Django < 2.0
    # (implicit CASCADE).  Verify against the project's Django version.
    user = models.ForeignKey(
        User,
        verbose_name='User',
        blank=False,
        null=False,
    )

    name = models.CharField(
        blank=False,
        null=False,
        max_length=20
    )

    questions = models.ManyToManyField(Question)

    def __str__(self):
        return self.name
| true | true |
f7f9114bcce5c7358fdb6de76cbc16378aef9ff6 | 21,658 | py | Python | devices/openwrt_router.py | GerbenChenCBN/boardfarm | d83dc09ee0f4db044e4b7a7e727cd7d0704e0a3c | [
"BSD-3-Clause-Clear"
] | null | null | null | devices/openwrt_router.py | GerbenChenCBN/boardfarm | d83dc09ee0f4db044e4b7a7e727cd7d0704e0a3c | [
"BSD-3-Clause-Clear"
] | null | null | null | devices/openwrt_router.py | GerbenChenCBN/boardfarm | d83dc09ee0f4db044e4b7a7e727cd7d0704e0a3c | [
"BSD-3-Clause-Clear"
] | 1 | 2020-03-05T01:58:34.000Z | 2020-03-05T01:58:34.000Z | # Copyright (c) 2015
#
# All rights reserved.
#
# This file is distributed under the Clear BSD license.
# The full text can be found in LICENSE in the root directory.
import atexit
import os
import os.path
import random
import signal
import socket
import sys
import urllib2
import pexpect
import base
from datetime import datetime
import ipaddress
import power
import common
import connection_decider
class OpenWrtRouter(base.BaseDevice):
    '''
    Base device class for OpenWrt-style routers driven over a console.

    Args:
      model: Examples include "ap148" and "ap135".
      conn_cmd: Command to connect to device such as "ssh -p 3003 root@10.0.0.202"
      power_ip: IP Address of power unit to which this device is connected
      power_outlet: Outlet # this device is connected
    '''
    # Full list of connection commands when conn_cmd is given as a list.
    conn_list = None
    # All console sessions belonging to this device (set in __init__).
    consoles = []

    # Shell and U-Boot prompt patterns used by expect() throughout.
    prompt = ['root\\@.*:.*#', '/ # ', '@R7500:/# ']
    uprompt = ['ath>', '\(IPQ\) #', 'ar7240>', '\(IPQ40xx\)']
    # Ethernet device name as seen from within U-Boot.
    uboot_eth = "eth0"
    linux_booted = False
    # Whether 'saveenv' is safe to run in U-Boot on this board.
    saveenv_safe = True
    # Linux-side interface names.
    lan_gmac_iface = "eth1"
    lan_iface = "br-lan"
    wan_iface = "eth0"
    # TFTP server address as reachable from inside U-Boot (set later).
    tftp_server_int = None
    flash_meta_booted = False
    has_cmts = False
    cdrouter_config = None
    # Seconds to wait before running 'dhcp' in U-Boot (too soon hangs).
    uboot_net_delay = 30

    # Default LAN addressing; overridden per board as needed.
    routing = True
    lan_network = ipaddress.IPv4Network(u"192.168.1.0/24")
    lan_gateway = ipaddress.IPv4Address(u"192.168.1.1")
    def __init__(self,
                 model,
                 conn_cmd,
                 power_ip,
                 power_outlet,
                 output=sys.stdout,
                 password='bigfoot1',
                 web_proxy=None,
                 tftp_server=None,
                 tftp_username=None,
                 tftp_password=None,
                 tftp_port=None,
                 connection_type=None,
                 power_username=None,
                 power_password=None,
                 config=None,
                 **kwargs):
        '''Open the console connection and set up power/TFTP/proxy state.

        NOTE(review): kwargs['start'] is accessed unconditionally, so a
        'start' keyword argument is effectively required.
        '''
        self.config = config
        self.consoles = [self]
        self.start = kwargs['start']

        # A list of conn_cmds means multiple consoles; use the first here.
        if type(conn_cmd) is list:
            self.conn_list = conn_cmd
            conn_cmd = self.conn_list[0]

        if connection_type is None:
            print("\nWARNING: Unknown connection type using ser2net\n")
            connection_type = "ser2net"

        self.connection = connection_decider.connection(connection_type, device=self, conn_cmd=conn_cmd, **kwargs)
        self.connection.connect()
        self.logfile_read = output

        self.power = power.get_power_device(power_ip, outlet=power_outlet, username=power_username, password=power_password)
        self.model = model
        self.web_proxy = web_proxy
        if tftp_server:
            # Best effort: if DNS resolution fails the tftp_* attributes
            # are simply left unset (errors deliberately swallowed).
            try:
                self.tftp_server = socket.gethostbyname(tftp_server)
                if tftp_username:
                    self.tftp_username = tftp_username
                if tftp_password:
                    self.tftp_password = tftp_password
                if tftp_port:
                    self.tftp_port = tftp_port
            except:
                pass
        else:
            self.tftp_server = None
        atexit.register(self.kill_console_at_exit)
    def reset(self, break_into_uboot=False):
        '''Power-cycle this device, optionally stopping in U-Boot.

        With break_into_uboot=True, retry up to three times to interrupt
        autoboot and land at the U-Boot prompt; on total failure only a
        warning is printed.
        '''
        if not break_into_uboot:
            self.power.reset()
            return
        for attempt in range(3):
            try:
                self.power.reset()
                self.expect('U-Boot', timeout=30)
                self.expect('Hit any key ')
                self.sendline('\n\n\n\n\n\n\n') # try really hard
                self.expect(self.uprompt, timeout=4)
                # Confirm we are in uboot by typing any command.
                # If we weren't in uboot, we wouldn't see the command
                # that we type.
                self.sendline('echo FOO')
                self.expect('echo FOO', timeout=4)
                self.expect(self.uprompt, timeout=4)
                return
            except Exception as e:
                print(e)
        print("\nWe appeared to have failed to break into U-Boot...")
    def get_seconds_uptime(self):
        '''Return seconds since last reboot (first field of /proc/uptime).'''
        # Ctrl-C first, in case something is running on the console.
        self.sendcontrol('c')
        self.expect(self.prompt)
        self.sendline('\ncat /proc/uptime')
        self.expect('((\d+)\.(\d+)(\s)?)((\d+)\.(\d+))?((\d+)\.(\d+))?\r\n')
        seconds_up = float(self.match.group(1))
        self.expect(self.prompt)
        return seconds_up
    def get_memfree(self):
        '''Return the kB of free memory (MemFree from /proc/meminfo).'''
        # free pagecache, dentries and inodes for higher accuracy
        self.sendline('\nsync; echo 3 > /proc/sys/vm/drop_caches')
        self.expect('drop_caches')
        self.expect(self.prompt)
        self.sendline('cat /proc/meminfo | head -2')
        self.expect('MemFree:\s+(\d+) kB')
        memFree = self.match.group(1)
        self.expect(self.prompt)
        return int(memFree)
    def get_file(self, fname, lan_ip="192.168.1.1"):
        '''
        Download a file from the board via its webserver.

        OpenWrt routers have a webserver, so we use that to download
        the file via a webproxy (e.g. a device on the board's LAN).
        Returns the open urllib2 response object; raises if no web
        proxy is configured.
        '''
        if not self.web_proxy:
            raise Exception('No web proxy defined to access board.')
        url = 'http://%s/TEMP' % lan_ip
        # Make the file world-readable and expose it under the web root.
        self.sendline("\nchmod a+r %s" % fname)
        self.expect('chmod ')
        self.expect(self.prompt)
        self.sendline("ln -sf %s /www/TEMP" % fname)
        self.expect(self.prompt)
        proxy = urllib2.ProxyHandler({'http': self.web_proxy+':8080'})
        opener = urllib2.build_opener(proxy)
        urllib2.install_opener(opener)
        print("\nAttempting download of %s via proxy %s" % (url, self.web_proxy+':8080'))
        return urllib2.urlopen(url, timeout=30)
    def tftp_get_file(self, host, filename, timeout=30):
        '''Download *filename* from a tftp server onto the board.

        Runs the interactive tftp-hpa client on the board's console and
        returns the basename of the downloaded file.
        '''
        self.sendline("tftp-hpa %s" % host)
        self.expect("tftp>")
        self.sendline("get %s" % filename)
        t = timeout
        self.expect("tftp>", timeout=t)
        self.sendline("q")
        self.expect(self.prompt)
        # Verify the file landed in the current directory.
        self.sendline("ls `basename %s`" % filename)
        new_fname = os.path.basename(filename)
        self.expect("%s" % new_fname)
        self.expect(self.prompt)
        return new_fname
    def tftp_get_file_uboot(self, loadaddr, filename, timeout=60):
        '''Within u-boot, download *filename* into memory at *loadaddr*.

        Retries up to three times; returns the number of bytes
        transferred, or raises if all attempts fail.
        '''
        for attempt in range(3):
            try:
                # Probe 'help' output to learn whether this U-Boot spells
                # the command 'tftpboot' or just 'tftp'.
                self.sendline('help')
                self.expect_exact('help')
                self.expect(self.uprompt)
                if 'tftpboot' in self.before:
                    cmd = 'tftpboot'
                else:
                    cmd = 'tftp'
                self.sendline("%s %s %s" % (cmd, loadaddr, filename))
                self.expect_exact("%s %s %s" % (cmd, loadaddr, filename))
                i = self.expect(['Bytes transferred = (\d+) (.* hex)'] + self.uprompt, timeout=timeout)
                if i != 0:
                    continue
                ret = int(self.match.group(1))
                self.expect(self.uprompt)
                return ret
            except:
                print("\nTFTP failed, let us try that again")
                self.sendcontrol('c')
                self.expect(self.uprompt)
        raise Exception("TFTP failed, try rebooting the board.")
def prepare_file(self, fname, tserver=None, tusername=None, tpassword=None, tport=None):
'''Copy file to tftp server, so that it it available to tftp
to the board itself.'''
if tserver is None:
tserver = self.tftp_server
if tusername is None:
tusername = self.tftp_username
if tpassword is None:
tpassword = self.tftp_password
if tport is None:
tport = self.tftp_port
if fname.startswith("http://") or fname.startswith("https://"):
return common.download_from_web(fname, tserver, tusername, tpassword, tport)
else:
return common.scp_to_tftp_server(os.path.abspath(fname), tserver, tusername, tpassword, tport)
    def install_package(self, fname):
        '''Install an OpenWrt package (opkg) from a local path or URL.

        Stages the package on the TFTP server, pulls it onto the board,
        installs it with opkg, then removes the package file.
        '''
        target_file = fname.replace('\\', '/').split('/')[-1]
        new_fname = self.prepare_file(fname)
        local_file = self.tftp_get_file(self.tftp_server, new_fname, timeout=60)
        # opkg requires a correct file name
        self.sendline("mv %s %s" % (local_file, target_file))
        self.expect(self.prompt)
        self.sendline("opkg install --force-downgrade %s" % target_file)
        self.expect(['Installing', 'Upgrading', 'Downgrading'])
        self.expect(self.prompt, timeout=60)
        self.sendline("rm -f /%s" % target_file)
        self.expect(self.prompt)
    def check_memory_addresses(self):
        '''Check/set memory addresses and size for proper flashing.

        No-op in this base class; board subclasses override as needed.
        '''
        pass
    def flash_uboot(self, uboot):
        '''Flash a U-Boot image; must be implemented by board subclasses.'''
        raise Exception('Code not written for flash_uboot for this board type, %s' % self.model)
    def flash_rootfs(self, ROOTFS):
        '''Flash a root filesystem; must be implemented by board subclasses.'''
        raise Exception('Code not written for flash_rootfs for this board type, %s' % self.model)
    def flash_linux(self, KERNEL):
        '''Flash a kernel image; must be implemented by board subclasses.'''
        raise Exception('Code not written for flash_linux for this board type, %s.' % self.model)
    def flash_meta(self, META_BUILD, wan, lan):
        '''Flash a combined/meta image; must be implemented by board subclasses.'''
        raise Exception('Code not written for flash_meta for this board type, %s.' % self.model)
    def prepare_nfsroot(self, NFSROOT):
        '''Prepare an NFS root; must be implemented by board subclasses.'''
        raise Exception('Code not written for prepare_nfsroot for this board type, %s.' % self.model)
    def wait_for_boot(self):
        '''
        Break into U-Boot. Check memory locations and sizes, and set
        variables needed for flashing.  Finishes by saving the U-Boot
        environment so its CRC is valid for later tests.
        '''
        # Try to break into uboot
        for attempt in range(4):
            try:
                self.expect('U-Boot', timeout=30)
                i = self.expect(['Hit any key ', 'gpio 17 value 1'] + self.uprompt)
                if i == 1:
                    print("\n\nWARN: possibly need to hold down reset button to break into U-Boot\n\n")
                    self.expect('Hit any key ')

                self.sendline('\n\n\n\n\n\n\n') # try really hard
                i = self.expect(['httpd'] + self.uprompt, timeout=4)
                if i == 0:
                    self.sendcontrol('c')
                # Confirm we really have a prompt by echoing a marker.
                self.sendline('echo FOO')
                self.expect('echo FOO')
                self.expect('FOO')
                self.expect(self.uprompt, timeout=4)
                break
            except:
                print('\n\nFailed to break into uboot, try again.')
                self.reset()
        else:
            # Tried too many times without success
            print('\nUnable to break into U-Boot, test will likely fail')

        self.check_memory_addresses()

        # save env first, so CRC is OK for later tests
        self.sendline("saveenv")
        self.expect(["Writing to Nand... done", "Protected 1 sectors", "Saving Environment to NAND...", 'Saving Environment to FAT...'])
        self.expect(self.uprompt)
    def kill_console_at_exit(self):
        '''atexit hook: forcibly kill the console child process.'''
        self.kill(signal.SIGKILL)
    def wait_for_network(self):
        '''Wait until network interfaces have IP Addresses.

        Polls each of the WAN and LAN interfaces up to five times,
        checking for an assigned address and a route entry.
        '''
        for interface in [self.wan_iface, self.lan_iface]:
            for i in range(5):
                try:
                    if interface is not None:
                        ipaddr = self.get_interface_ipaddr(interface).strip()
                        if not ipaddr:
                            continue
                        self.sendline("route -n")
                        self.expect(interface, timeout=2)
                        self.expect(self.prompt)
                except pexpect.TIMEOUT:
                    print("waiting for wan/lan ipaddr")
                else:
                    break
    def network_restart(self):
        '''Restart networking via the init script and wait for addresses.'''
        self.sendline('\nifconfig')
        self.expect('HWaddr', timeout=10)
        self.expect(self.prompt)
        self.sendline('/etc/init.d/network restart')
        self.expect(self.prompt, timeout=40)
        self.sendline('ifconfig')
        self.expect(self.prompt)
        self.wait_for_network()
    def firewall_restart(self):
        '''Restart the firewall. Return how long it took (whole seconds).'''
        start = datetime.now()
        self.sendline('/etc/init.d/firewall restart')
        self.expect_exact(["Loading redirects", "* Running script '/usr/share/miniupnpd/firewall.include'", "Running script '/etc/firewall.user'"])
        # StreamBoost firmwares need extra settling time after a restart.
        if 'StreamBoost' in self.before:
            print("test_msg: Sleeping for Streamboost")
            self.expect(pexpect.TIMEOUT, timeout=45)
        else:
            self.expect(pexpect.TIMEOUT, timeout=15)
        self.expect(self.prompt, timeout=80)
        return int((datetime.now() - start).seconds)
    def get_wan_iface(self):
        '''Return name of WAN interface (from uci network.wan.ifname).'''
        self.sendline('\nuci show network.wan.ifname')
        self.expect("wan.ifname='?([a-zA-Z0-9\.-]*)'?\r\n", timeout=5)
        return self.match.group(1)
    def get_wan_proto(self):
        '''Return protocol of WAN interface, e.g. dhcp (from uci).'''
        self.sendline('\nuci show network.wan.proto')
        self.expect("wan.proto='?([a-zA-Z0-9\.-]*)'?\r\n", timeout=5)
        return self.match.group(1)
def setup_uboot_network(self, tftp_server=None):
if self.tftp_server_int is None:
if tftp_server is None:
raise Exception("Error in TFTP server configuration")
self.tftp_server_int = tftp_server
'''Within U-boot, request IP Address,
set server IP, and other networking tasks.'''
# Use standard eth1 address of wan-side computer
self.sendline('setenv autoload no')
self.expect(self.uprompt)
self.sendline('setenv ethact %s' % self.uboot_eth)
self.expect(self.uprompt)
self.expect(pexpect.TIMEOUT, timeout=self.uboot_net_delay) # running dhcp too soon causes hang
self.sendline('dhcp')
i = self.expect(['Unknown command', 'DHCP client bound to address'], timeout=60)
self.expect(self.uprompt)
if i == 0:
self.sendline('setenv ipaddr 192.168.0.2')
self.expect(self.uprompt)
self.sendline('setenv serverip %s' % self.tftp_server_int)
self.expect(self.uprompt)
if self.tftp_server_int:
#interfaces=['eth1','eth0']
passed = False
for attempt in range(5):
try:
self.sendcontrol('c')
self.expect('<INTERRUPT>')
self.expect(self.uprompt)
self.sendline("ping $serverip")
self.expect("host %s is alive" % self.tftp_server_int)
self.expect(self.uprompt)
passed = True
break
except:
print("ping failed, trying again")
# Try other interface
self.sendcontrol('c')
self.expect('<INTERRUPT>')
self.expect(self.uprompt)
#self.sendline('setenv ethact %s' % (interfaces[attempt%2]))
#self.expect(self.uprompt)
self.sendline('dhcp')
self.expect('DHCP client bound to address', timeout=60)
self.expect(self.uprompt)
self.expect(pexpect.TIMEOUT, timeout=1)
assert passed
self.sendline('setenv dumpdir crashdump')
if self.saveenv_safe:
self.expect(self.uprompt)
self.sendline('saveenv')
self.expect(self.uprompt)
    def boot_linux(self, rootfs=None, bootargs=""):
        '''Boot the board into Linux; board subclasses must implement.'''
        print("\nWARNING: We don't know how to boot this board to linux "
              "please write the code to do so.")
    def wait_for_linux(self):
        '''Verify Linux starts up, logging in if a login prompt appears.'''
        i = self.expect(['Reset Button Push down', 'Linux version', 'Booting Linux', 'Starting kernel ...', 'Kernel command line specified:'], timeout=45)
        if i == 0:
            # Stuck in failsafe/httpd mode: interrupt and boot manually.
            self.expect('httpd')
            self.sendcontrol('c')
            self.expect(self.uprompt)
            self.sendline('boot')
        i = self.expect(['U-Boot', 'login:', 'Please press Enter to activate this console'] + self.prompt, timeout=150)
        if i == 0:
            raise Exception('U-Boot came back when booting kernel')
        elif i == 1:
            self.sendline('root')
            if 0 == self.expect(['assword:'] + self.prompt):
                self.sendline('password')
                self.expect(self.prompt)

        # Give things time to start or crash on their own.
        # Some things, like wifi, take a while.
        self.expect(pexpect.TIMEOUT, timeout=40)
        self.sendline('\r')
        self.expect(self.prompt)
        self.sendline('uname -a')
        self.expect('Linux ')
        self.expect(self.prompt)
    def config_wan_proto(self, proto):
        '''Set protocol for WAN interface ("dhcp" or "pppoe") via uci.

        Only rewrites the uci config and restarts networking when the
        current protocol differs from the requested one.
        '''
        if "dhcp" in proto:
            if self.get_wan_proto() != "dhcp":
                self.sendline("uci set network.wan.proto=dhcp")
                self.sendline("uci commit")
                self.expect(self.prompt)
                self.network_restart()
                self.expect(pexpect.TIMEOUT, timeout=10)
        if "pppoe" in proto:
            # pppoe renames the WAN netdev.
            self.wan_iface = "pppoe-wan"
            if self.get_wan_proto() != "pppoe":
                self.sendline("uci set network.wan.proto=pppoe")
                self.sendline("uci commit")
                self.expect(self.prompt)
                self.network_restart()
                self.expect(pexpect.TIMEOUT, timeout=10)
    def uci_allow_wan_http(self, lan_ip="192.168.1.1"):
        '''Allow access to webgui (http, port 80) from devices on WAN interface.'''
        self.uci_forward_traffic_redirect("tcp", "80", lan_ip)
    def uci_allow_wan_ssh(self, lan_ip="192.168.1.1"):
        '''Allow ssh (port 22) access from devices on WAN interface.'''
        self.uci_forward_traffic_redirect("tcp", "22", lan_ip)
def uci_allow_wan_https(self):
'''Allow access to webgui from devices on WAN interface.'''
self.uci_forward_traffic_redirect("tcp", "443", "192.168.1.1")
    def uci_forward_traffic_redirect(self, tcp_udp, port_wan, ip_lan):
        '''Add a uci firewall redirect (DNAT) from WAN port to a LAN host.

        Commits the rule and restarts the firewall so it takes effect.
        '''
        self.sendline('uci add firewall redirect')
        self.expect(self.prompt)
        self.sendline('uci set firewall.@redirect[-1].src=wan')
        self.expect(self.prompt)
        self.sendline('uci set firewall.@redirect[-1].src_dport=%s' % port_wan)
        self.expect(self.prompt)
        self.sendline('uci set firewall.@redirect[-1].proto=%s' % tcp_udp)
        self.expect(self.prompt)
        self.sendline('uci set firewall.@redirect[-1].dest=lan')
        self.expect(self.prompt)
        self.sendline('uci set firewall.@redirect[-1].dest_ip=%s' % ip_lan)
        self.expect(self.prompt)
        self.sendline('uci commit firewall')
        self.expect(self.prompt)
        self.firewall_restart()
    def uci_forward_traffic_rule(self, tcp_udp, port, ip, target="ACCEPT"):
        '''Add a uci firewall rule allowing WAN->LAN traffic to ip:port.

        Commits the rule and restarts the firewall so it takes effect.
        '''
        self.sendline('uci add firewall rule')
        self.expect(self.prompt)
        self.sendline('uci set firewall.@rule[-1].src=wan')
        self.expect(self.prompt)
        self.sendline('uci set firewall.@rule[-1].proto=%s' % tcp_udp)
        self.expect(self.prompt)
        self.sendline('uci set firewall.@rule[-1].dest=lan')
        self.expect(self.prompt)
        self.sendline('uci set firewall.@rule[-1].dest_ip=%s' % ip)
        self.expect(self.prompt)
        self.sendline('uci set firewall.@rule[-1].dest_port=%s' % port)
        self.expect(self.prompt)
        self.sendline('uci set firewall.@rule[-1].target=%s' % target)
        self.expect(self.prompt)
        self.sendline('uci commit firewall')
        self.expect(self.prompt)
        self.firewall_restart()
def wait_for_mounts(self):
# wait for overlay to finish mounting
for i in range(5):
try:
board.sendline('mount')
board.expect_exact('overlayfs:/overlay on / type overlay', timeout=15)
board.expect(prompt)
break
except:
pass
else:
print("WARN: Overlay still not mounted")
def get_dns_server(self):
return "%s" % lan_gateway
    def get_dns_server_upstream(self):
        '''Return the first upstream nameserver from /etc/resolv.conf.'''
        self.sendline('cat /etc/resolv.conf')
        self.expect('nameserver (.*)\r\n', timeout=5)
        ret = self.match.group(1)
        self.expect(self.prompt)
        return ret
    def touch(self):
        '''Keeps consoles active, so they don't disconnect for long running activities'''
        # An empty newline is enough to keep the session alive.
        self.sendline()
def get_user_id(self, user_id):
self.sendline('cat /etc/passwd | grep -w ' + user_id)
idx = self.expect([user_id] + self.prompt)
if idx == 0:
self.expect(self.prompt)
return 0 == idx
if __name__ == '__main__':
    # Example use.
    # NOTE(review): OpenWrtRouter.__init__ reads kwargs['start']
    # unconditionally, so this demo presumably predates that change and
    # would need a start=... argument to run — verify.
    board = OpenWrtRouter('ap148-beeliner',
                          conn_cmd='telnet 10.0.0.146 6003',
                          power_ip='10.0.0.218',
                          power_outlet='9',
                          web_proxy="10.0.0.66:8080")
    board.sendline('\nuname -a')
    board.expect('Linux')
    board.expect('root@[^ ]+')
    #board.reset()
    #board.expect('U-Boot')
    # Example downloading a file from the board
    remote_fname = '/tmp/dhcp.leases'
    local_fname = '/tmp/dhcp.leases'
    with open(local_fname, 'wb') as local_file:
        local_file.write(board.get_file(remote_fname).read())
        print("\nCreated %s" % local_fname)
| 39.093863 | 154 | 0.575122 |
import atexit
import os
import os.path
import random
import signal
import socket
import sys
import urllib2
import pexpect
import base
from datetime import datetime
import ipaddress
import power
import common
import connection_decider
class OpenWrtRouter(base.BaseDevice):
'''
Args:
model: Examples include "ap148" and "ap135".
conn_cmd: Command to connect to device such as "ssh -p 3003 root@10.0.0.202"
power_ip: IP Address of power unit to which this device is connected
power_outlet: Outlet # this device is connected
'''
conn_list = None
consoles = []
prompt = ['root\\@.*:.*#', '/ # ', '@R7500:/# ']
uprompt = ['ath>', '\(IPQ\) #', 'ar7240>', '\(IPQ40xx\)']
uboot_eth = "eth0"
linux_booted = False
saveenv_safe = True
lan_gmac_iface = "eth1"
lan_iface = "br-lan"
wan_iface = "eth0"
tftp_server_int = None
flash_meta_booted = False
has_cmts = False
cdrouter_config = None
uboot_net_delay = 30
routing = True
lan_network = ipaddress.IPv4Network(u"192.168.1.0/24")
lan_gateway = ipaddress.IPv4Address(u"192.168.1.1")
def __init__(self,
model,
conn_cmd,
power_ip,
power_outlet,
output=sys.stdout,
password='bigfoot1',
web_proxy=None,
tftp_server=None,
tftp_username=None,
tftp_password=None,
tftp_port=None,
connection_type=None,
power_username=None,
power_password=None,
config=None,
**kwargs):
self.config = config
self.consoles = [self]
self.start = kwargs['start']
if type(conn_cmd) is list:
self.conn_list = conn_cmd
conn_cmd = self.conn_list[0]
if connection_type is None:
print("\nWARNING: Unknown connection type using ser2net\n")
connection_type = "ser2net"
self.connection = connection_decider.connection(connection_type, device=self, conn_cmd=conn_cmd, **kwargs)
self.connection.connect()
self.logfile_read = output
self.power = power.get_power_device(power_ip, outlet=power_outlet, username=power_username, password=power_password)
self.model = model
self.web_proxy = web_proxy
if tftp_server:
try:
self.tftp_server = socket.gethostbyname(tftp_server)
if tftp_username:
self.tftp_username = tftp_username
if tftp_password:
self.tftp_password = tftp_password
if tftp_port:
self.tftp_port = tftp_port
except:
pass
else:
self.tftp_server = None
atexit.register(self.kill_console_at_exit)
def reset(self, break_into_uboot=False):
'''Power-cycle this device.'''
if not break_into_uboot:
self.power.reset()
return
for attempt in range(3):
try:
self.power.reset()
self.expect('U-Boot', timeout=30)
self.expect('Hit any key ')
self.sendline('\n\n\n\n\n\n\n')
self.expect(self.uprompt, timeout=4)
self.sendline('echo FOO')
self.expect('echo FOO', timeout=4)
self.expect(self.uprompt, timeout=4)
return
except Exception as e:
print(e)
print("\nWe appeared to have failed to break into U-Boot...")
def get_seconds_uptime(self):
'''Return seconds since last reboot. Stored in /proc/uptime'''
self.sendcontrol('c')
self.expect(self.prompt)
self.sendline('\ncat /proc/uptime')
self.expect('((\d+)\.(\d+)(\s)?)((\d+)\.(\d+))?((\d+)\.(\d+))?\r\n')
seconds_up = float(self.match.group(1))
self.expect(self.prompt)
return seconds_up
def get_memfree(self):
'''Return the kB of free memory.'''
self.sendline('\nsync; echo 3 > /proc/sys/vm/drop_caches')
self.expect('drop_caches')
self.expect(self.prompt)
self.sendline('cat /proc/meminfo | head -2')
self.expect('MemFree:\s+(\d+) kB')
memFree = self.match.group(1)
self.expect(self.prompt)
return int(memFree)
def get_file(self, fname, lan_ip="192.168.1.1"):
'''
OpenWrt routers have a webserver, so we use that to download
the file via a webproxy (e.g. a device on the board's LAN).
'''
if not self.web_proxy:
raise Exception('No web proxy defined to access board.')
url = 'http://%s/TEMP' % lan_ip
self.sendline("\nchmod a+r %s" % fname)
self.expect('chmod ')
self.expect(self.prompt)
self.sendline("ln -sf %s /www/TEMP" % fname)
self.expect(self.prompt)
proxy = urllib2.ProxyHandler({'http': self.web_proxy+':8080'})
opener = urllib2.build_opener(proxy)
urllib2.install_opener(opener)
print("\nAttempting download of %s via proxy %s" % (url, self.web_proxy+':8080'))
return urllib2.urlopen(url, timeout=30)
def tftp_get_file(self, host, filename, timeout=30):
'''Download file from tftp server.'''
self.sendline("tftp-hpa %s" % host)
self.expect("tftp>")
self.sendline("get %s" % filename)
t = timeout
self.expect("tftp>", timeout=t)
self.sendline("q")
self.expect(self.prompt)
self.sendline("ls `basename %s`" % filename)
new_fname = os.path.basename(filename)
self.expect("%s" % new_fname)
self.expect(self.prompt)
return new_fname
def tftp_get_file_uboot(self, loadaddr, filename, timeout=60):
'''Within u-boot, download file from tftp server.'''
for attempt in range(3):
try:
self.sendline('help')
self.expect_exact('help')
self.expect(self.uprompt)
if 'tftpboot' in self.before:
cmd = 'tftpboot'
else:
cmd = 'tftp'
self.sendline("%s %s %s" % (cmd, loadaddr, filename))
self.expect_exact("%s %s %s" % (cmd, loadaddr, filename))
i = self.expect(['Bytes transferred = (\d+) (.* hex)'] + self.uprompt, timeout=timeout)
if i != 0:
continue
ret = int(self.match.group(1))
self.expect(self.uprompt)
return ret
except:
print("\nTFTP failed, let us try that again")
self.sendcontrol('c')
self.expect(self.uprompt)
raise Exception("TFTP failed, try rebooting the board.")
def prepare_file(self, fname, tserver=None, tusername=None, tpassword=None, tport=None):
'''Copy file to tftp server, so that it it available to tftp
to the board itself.'''
if tserver is None:
tserver = self.tftp_server
if tusername is None:
tusername = self.tftp_username
if tpassword is None:
tpassword = self.tftp_password
if tport is None:
tport = self.tftp_port
if fname.startswith("http://") or fname.startswith("https://"):
return common.download_from_web(fname, tserver, tusername, tpassword, tport)
else:
return common.scp_to_tftp_server(os.path.abspath(fname), tserver, tusername, tpassword, tport)
def install_package(self, fname):
'''Install OpenWrt package (opkg).'''
target_file = fname.replace('\\', '/').split('/')[-1]
new_fname = self.prepare_file(fname)
local_file = self.tftp_get_file(self.tftp_server, new_fname, timeout=60)
# opkg requires a correct file name
self.sendline("mv %s %s" % (local_file, target_file))
self.expect(self.prompt)
self.sendline("opkg install --force-downgrade %s" % target_file)
self.expect(['Installing', 'Upgrading', 'Downgrading'])
self.expect(self.prompt, timeout=60)
self.sendline("rm -f /%s" % target_file)
self.expect(self.prompt)
def check_memory_addresses(self):
'''Check/set memory addresses and size for proper flashing.'''
pass
def flash_uboot(self, uboot):
raise Exception('Code not written for flash_uboot for this board type, %s' % self.model)
def flash_rootfs(self, ROOTFS):
raise Exception('Code not written for flash_rootfs for this board type, %s' % self.model)
def flash_linux(self, KERNEL):
raise Exception('Code not written for flash_linux for this board type, %s.' % self.model)
def flash_meta(self, META_BUILD, wan, lan):
raise Exception('Code not written for flash_meta for this board type, %s.' % self.model)
def prepare_nfsroot(self, NFSROOT):
raise Exception('Code not written for prepare_nfsroot for this board type, %s.' % self.model)
def wait_for_boot(self):
'''
Break into U-Boot. Check memory locations and sizes, and set
variables needed for flashing.
'''
# Try to break into uboot
for attempt in range(4):
try:
self.expect('U-Boot', timeout=30)
i = self.expect(['Hit any key ', 'gpio 17 value 1'] + self.uprompt)
if i == 1:
print("\n\nWARN: possibly need to hold down reset button to break into U-Boot\n\n")
self.expect('Hit any key ')
self.sendline('\n\n\n\n\n\n\n') # try really hard
i = self.expect(['httpd'] + self.uprompt, timeout=4)
if i == 0:
self.sendcontrol('c')
self.sendline('echo FOO')
self.expect('echo FOO')
self.expect('FOO')
self.expect(self.uprompt, timeout=4)
break
except:
print('\n\nFailed to break into uboot, try again.')
self.reset()
else:
# Tried too many times without success
print('\nUnable to break into U-Boot, test will likely fail')
self.check_memory_addresses()
# save env first, so CRC is OK for later tests
self.sendline("saveenv")
self.expect(["Writing to Nand... done", "Protected 1 sectors", "Saving Environment to NAND...", 'Saving Environment to FAT...'])
self.expect(self.uprompt)
def kill_console_at_exit(self):
self.kill(signal.SIGKILL)
def wait_for_network(self):
'''Wait until network interfaces have IP Addresses.'''
for interface in [self.wan_iface, self.lan_iface]:
for i in range(5):
try:
if interface is not None:
ipaddr = self.get_interface_ipaddr(interface).strip()
if not ipaddr:
continue
self.sendline("route -n")
self.expect(interface, timeout=2)
self.expect(self.prompt)
except pexpect.TIMEOUT:
print("waiting for wan/lan ipaddr")
else:
break
def network_restart(self):
'''Restart networking.'''
self.sendline('\nifconfig')
self.expect('HWaddr', timeout=10)
self.expect(self.prompt)
self.sendline('/etc/init.d/network restart')
self.expect(self.prompt, timeout=40)
self.sendline('ifconfig')
self.expect(self.prompt)
self.wait_for_network()
def firewall_restart(self):
'''Restart the firewall. Return how long it took.'''
start = datetime.now()
self.sendline('/etc/init.d/firewall restart')
self.expect_exact(["Loading redirects", "* Running script '/usr/share/miniupnpd/firewall.include'", "Running script '/etc/firewall.user'"])
if 'StreamBoost' in self.before:
print("test_msg: Sleeping for Streamboost")
self.expect(pexpect.TIMEOUT, timeout=45)
else:
self.expect(pexpect.TIMEOUT, timeout=15)
self.expect(self.prompt, timeout=80)
return int((datetime.now() - start).seconds)
def get_wan_iface(self):
'''Return name of WAN interface.'''
self.sendline('\nuci show network.wan.ifname')
self.expect("wan.ifname='?([a-zA-Z0-9\.-]*)'?\r\n", timeout=5)
return self.match.group(1)
def get_wan_proto(self):
'''Return protocol of WAN interface, e.g. dhcp.'''
self.sendline('\nuci show network.wan.proto')
self.expect("wan.proto='?([a-zA-Z0-9\.-]*)'?\r\n", timeout=5)
return self.match.group(1)
def setup_uboot_network(self, tftp_server=None):
if self.tftp_server_int is None:
if tftp_server is None:
raise Exception("Error in TFTP server configuration")
self.tftp_server_int = tftp_server
'''Within U-boot, request IP Address,
set server IP, and other networking tasks.'''
# Use standard eth1 address of wan-side computer
self.sendline('setenv autoload no')
self.expect(self.uprompt)
self.sendline('setenv ethact %s' % self.uboot_eth)
self.expect(self.uprompt)
self.expect(pexpect.TIMEOUT, timeout=self.uboot_net_delay) # running dhcp too soon causes hang
self.sendline('dhcp')
i = self.expect(['Unknown command', 'DHCP client bound to address'], timeout=60)
self.expect(self.uprompt)
if i == 0:
self.sendline('setenv ipaddr 192.168.0.2')
self.expect(self.uprompt)
self.sendline('setenv serverip %s' % self.tftp_server_int)
self.expect(self.uprompt)
if self.tftp_server_int:
#interfaces=['eth1','eth0']
passed = False
for attempt in range(5):
try:
self.sendcontrol('c')
self.expect('<INTERRUPT>')
self.expect(self.uprompt)
self.sendline("ping $serverip")
self.expect("host %s is alive" % self.tftp_server_int)
self.expect(self.uprompt)
passed = True
break
except:
print("ping failed, trying again")
# Try other interface
self.sendcontrol('c')
self.expect('<INTERRUPT>')
self.expect(self.uprompt)
#self.sendline('setenv ethact %s' % (interfaces[attempt%2]))
#self.expect(self.uprompt)
self.sendline('dhcp')
self.expect('DHCP client bound to address', timeout=60)
self.expect(self.uprompt)
self.expect(pexpect.TIMEOUT, timeout=1)
assert passed
self.sendline('setenv dumpdir crashdump')
if self.saveenv_safe:
self.expect(self.uprompt)
self.sendline('saveenv')
self.expect(self.uprompt)
def boot_linux(self, rootfs=None, bootargs=""):
print("\nWARNING: We don't know how to boot this board to linux "
"please write the code to do so.")
    def wait_for_linux(self):
        '''Verify Linux starts up.

        Breaks out of failsafe mode if the "Reset Button" prompt appears,
        logs in as root when a login prompt is shown, then confirms a
        working shell by running ``uname -a``.
        '''
        i = self.expect(['Reset Button Push down', 'Linux version', 'Booting Linux', 'Starting kernel ...', 'Kernel command line specified:'], timeout=45)
        if i == 0:
            # Board dropped into failsafe mode; interrupt and reboot it.
            self.expect('httpd')
            self.sendcontrol('c')
            self.expect(self.uprompt)
            self.sendline('boot')
        i = self.expect(['U-Boot', 'login:', 'Please press Enter to activate this console'] + self.prompt, timeout=150)
        if i == 0:
            raise Exception('U-Boot came back when booting kernel')
        elif i == 1:
            # Login prompt: authenticate as root (password prompt is optional).
            self.sendline('root')
            if 0 == self.expect(['assword:'] + self.prompt):
                self.sendline('password')
                self.expect(self.prompt)
        # Give services time to come up, then poke the console for a prompt.
        self.expect(pexpect.TIMEOUT, timeout=40)
        self.sendline('\r')
        self.expect(self.prompt)
        self.sendline('uname -a')
        self.expect('Linux ')
        self.expect(self.prompt)
    def config_wan_proto(self, proto):
        '''Set protocol for WAN interface.

        :param proto: string containing "dhcp" or "pppoe"; any other value
            leaves the configuration untouched.  Networking is restarted
            only when the protocol actually changes.
        '''
        if "dhcp" in proto:
            if self.get_wan_proto() != "dhcp":
                # NOTE(review): no expect() between the two sendline()s here;
                # presumably both commands queue fine on the console.
                self.sendline("uci set network.wan.proto=dhcp")
                self.sendline("uci commit")
                self.expect(self.prompt)
                self.network_restart()
                self.expect(pexpect.TIMEOUT, timeout=10)
        if "pppoe" in proto:
            # pppoe renames the WAN-side interface.
            self.wan_iface = "pppoe-wan"
            if self.get_wan_proto() != "pppoe":
                self.sendline("uci set network.wan.proto=pppoe")
                self.sendline("uci commit")
                self.expect(self.prompt)
                self.network_restart()
                self.expect(pexpect.TIMEOUT, timeout=10)
    def uci_allow_wan_http(self, lan_ip="192.168.1.1"):
        '''Allow access to webgui from devices on WAN interface.

        :param lan_ip: LAN-side address WAN port 80 is redirected to.
        '''
        self.uci_forward_traffic_redirect("tcp", "80", lan_ip)
    def uci_allow_wan_ssh(self, lan_ip="192.168.1.1"):
        '''Allow ssh access from devices on WAN interface.

        :param lan_ip: LAN-side address WAN port 22 is redirected to.
        '''
        self.uci_forward_traffic_redirect("tcp", "22", lan_ip)
def uci_allow_wan_https(self):
'''Allow access to webgui from devices on WAN interface.'''
self.uci_forward_traffic_redirect("tcp", "443", "192.168.1.1")
def uci_forward_traffic_redirect(self, tcp_udp, port_wan, ip_lan):
self.sendline('uci add firewall redirect')
self.expect(self.prompt)
self.sendline('uci set firewall.@redirect[-1].src=wan')
self.expect(self.prompt)
self.sendline('uci set firewall.@redirect[-1].src_dport=%s' % port_wan)
self.expect(self.prompt)
self.sendline('uci set firewall.@redirect[-1].proto=%s' % tcp_udp)
self.expect(self.prompt)
self.sendline('uci set firewall.@redirect[-1].dest=lan')
self.expect(self.prompt)
self.sendline('uci set firewall.@redirect[-1].dest_ip=%s' % ip_lan)
self.expect(self.prompt)
self.sendline('uci commit firewall')
self.expect(self.prompt)
self.firewall_restart()
def uci_forward_traffic_rule(self, tcp_udp, port, ip, target="ACCEPT"):
self.sendline('uci add firewall rule')
self.expect(self.prompt)
self.sendline('uci set firewall.@rule[-1].src=wan')
self.expect(self.prompt)
self.sendline('uci set firewall.@rule[-1].proto=%s' % tcp_udp)
self.expect(self.prompt)
self.sendline('uci set firewall.@rule[-1].dest=lan')
self.expect(self.prompt)
self.sendline('uci set firewall.@rule[-1].dest_ip=%s' % ip)
self.expect(self.prompt)
self.sendline('uci set firewall.@rule[-1].dest_port=%s' % port)
self.expect(self.prompt)
self.sendline('uci set firewall.@rule[-1].target=%s' % target)
self.expect(self.prompt)
self.sendline('uci commit firewall')
self.expect(self.prompt)
self.firewall_restart()
def wait_for_mounts(self):
for i in range(5):
try:
board.sendline('mount')
board.expect_exact('overlayfs:/overlay on / type overlay', timeout=15)
board.expect(prompt)
break
except:
pass
else:
print("WARN: Overlay still not mounted")
    def get_dns_server(self):
        '''Return the DNS server address handed out to LAN clients.'''
        # NOTE(review): ``lan_gateway`` is not defined in this method nor
        # read from ``self`` -- presumably a module-level global imported
        # elsewhere in this file; verify it resolves at runtime.
        return "%s" % lan_gateway
    def get_dns_server_upstream(self):
        '''Return the first upstream nameserver listed in /etc/resolv.conf.'''
        self.sendline('cat /etc/resolv.conf')
        # Capture the address following the first "nameserver" keyword.
        self.expect('nameserver (.*)\r\n', timeout=5)
        ret = self.match.group(1)
        self.expect(self.prompt)
        return ret
    def touch(self):
        '''Keeps consoles active, so they don't disconnect for long running activities'''
        # An empty line is enough to reset any idle timeout on the console.
        self.sendline()
def get_user_id(self, user_id):
self.sendline('cat /etc/passwd | grep -w ' + user_id)
idx = self.expect([user_id] + self.prompt)
if idx == 0:
self.expect(self.prompt)
return 0 == idx
if __name__ == '__main__':
    # Example use: connect to a board through a terminal server with a
    # networked power switch, then verify we have a live root shell.
    board = OpenWrtRouter('ap148-beeliner',
                          conn_cmd='telnet 10.0.0.146 6003',
                          power_ip='10.0.0.218',
                          power_outlet='9',
                          web_proxy="10.0.0.66:8080")
    board.sendline('\nuname -a')
    board.expect('Linux')
    board.expect('root@[^ ]+')
    #board.reset()
    #board.expect('U-Boot')
    # Example downloading a file from the board
    remote_fname = '/tmp/dhcp.leases'
    local_fname = '/tmp/dhcp.leases'
    with open(local_fname, 'wb') as local_file:
        local_file.write(board.get_file(remote_fname).read())
        print("\nCreated %s" % local_fname)
| false | true |
f7f9164efff6d52516815096462e484ab74c68bb | 7,128 | py | Python | tests/test_widgets.py | KyleGW/gopro-dashboard-overlay | 0bc244dccfc0046e680876e91541de81c54f7477 | [
"MIT"
] | null | null | null | tests/test_widgets.py | KyleGW/gopro-dashboard-overlay | 0bc244dccfc0046e680876e91541de81c54f7477 | [
"MIT"
] | null | null | null | tests/test_widgets.py | KyleGW/gopro-dashboard-overlay | 0bc244dccfc0046e680876e91541de81c54f7477 | [
"MIT"
] | null | null | null | import random
from datetime import timedelta
from PIL import ImageFont
from gopro_overlay import fake
from gopro_overlay.dimensions import Dimension
from gopro_overlay.layout import BigMetric, gps_info
from gopro_overlay.layout_components import text, metric
from gopro_overlay.point import Coordinate
from gopro_overlay.timing import PoorTimer
from gopro_overlay.units import units
from gopro_overlay.widgets import simple_icon, Text, Scene, CachingText, Composite, Translate, Frame
from gopro_overlay.widgets_info import ComparativeEnergy
from tests.approval import approve_image
from tests.testenvironment import is_make
# Shared font fixtures used by every rendering test below.
font = ImageFont.truetype(font='Roboto-Medium.ttf', size=18)
title_font = font.font_variant(size=16)
# Need reproducible results for approval tests
rng = random.Random()
rng.seed(12345)
# Ten minutes of synthetic telemetry at 1 s intervals, shared by the tests.
ts = fake.fake_framemeta(timedelta(minutes=10), step=timedelta(seconds=1), rng=rng)
@approve_image
def test_render_icon():
    """Render a single inverted icon widget."""
    return time_rendering("icon", widgets=[
        simple_icon(Coordinate(50, 50), "gauge-1.png", invert=True),
    ])
@approve_image
def test_render_text():
    """Render an uncached Text widget."""
    return time_rendering("simple text", [Text(Coordinate(50, 50), lambda: "Hello", font)])
@approve_image
def test_render_text_colour():
    """Render Text with an explicit fill colour."""
    return time_rendering("simple text", [Text(Coordinate(50, 50), lambda: "Hello", font, fill=(255, 255, 0))])
@approve_image
def test_render_text_vertical():
    """Render Text drawn top-to-bottom."""
    return time_rendering("simple text", [Text(Coordinate(50, 50), lambda: "Hello", font, direction="ttb", align="lt")])
@approve_image
def test_render_caching_text_vertical():
    """Render vertical text.

    NOTE(review): despite the name this uses Text, not CachingText -- it
    duplicates test_render_text_vertical; confirm intent.
    """
    return time_rendering("simple text", [Text(Coordinate(50, 50), lambda: "Hello", font, direction="ttb", align="lt")])
@approve_image
def test_render_caching_text_small():
    """Render a small CachingText widget."""
    return time_rendering("simple text (cached)", [CachingText(Coordinate(50, 50), lambda: "Hello", font)])
@approve_image
def test_render_text_big():
    """Render large uncached text (performance-sensitive case)."""
    # 15ms target to beat
    return time_rendering("big text",
                          [Text(Coordinate(50, 50), lambda: "Hello", font.font_variant(size=160))])
@approve_image
def test_render_caching_text_big():
    """Render large cached text."""
    # Avg: 0.00014, Rate: 6,966.58
    return time_rendering("big text (cached)",
                          [CachingText(Coordinate(50, 50), lambda: "Hello", font.font_variant(size=160))])
@approve_image
def test_render_gps_info():
    """Render the GPS info layout component for the first telemetry frame."""
    # Avg: 0.00645, Rate: 155.05
    entry = ts.get(ts.min)
    return time_rendering(
        name="gps info",
        widgets=[gps_info(Coordinate(400, 10), lambda: entry, font)]
    )
@approve_image
def test_render_big_metric():
    """Render the BigMetric (small title + large value) widget."""
    # Avg: 0.00026, Rate: 3,871.63
    return time_rendering(name="big speed", widgets=[
        BigMetric(Coordinate(10, 10), title=lambda: "MPH", value=lambda: "27", font_title=font,
                  font_metric=font.font_variant(size=160))
    ])
@approve_image
def test_render_comparative_energy():
    """Render the ComparativeEnergy info widget at a fixed 25 mph."""
    # Avg: 0.00148, Rate: 676.70
    speed = units.Quantity(25, units.mph)
    return time_rendering(name="comparative energy",
                          dimensions=Dimension(x=1300, y=200),
                          widgets=[
                              Translate(
                                  Coordinate(10, 50),
                                  ComparativeEnergy(
                                      font=font,
                                      speed=lambda: speed,
                                      person=units.Quantity(60, units.kg),
                                      bike=units.Quantity(12, units.kg),
                                      car=units.Quantity(2678, units.kg),
                                      van=units.Quantity(3500, units.kg)
                                  )
                              )
                          ])
@approve_image
def test_text_component():
    """Render the text() layout component."""
    return time_rendering(name="text", widgets=[
        text(at=Coordinate(100, 100), value=lambda: "String", font=font.font_variant(size=50))
    ])
@approve_image
def test_metric_component():
    """Render the metric() layout component showing the first frame's speed."""
    return time_rendering(name="text", widgets=[
        metric(
            at=Coordinate(100, 100),
            entry=lambda: ts.get(ts.min),
            accessor=lambda e: e.speed,
            formatter=lambda v: format(v, ".1f"),
            font=font.font_variant(size=160),
        )
    ])
@approve_image
def test_composite_viewport():
    """Render a Composite of text + icon translated into the viewport."""
    return time_rendering(name="viewport", widgets=[
        Translate(
            Coordinate(330, 130),
            Composite(
                text(at=Coordinate(0, 0), cache=True, value=lambda: "String", font=font.font_variant(size=50)),
                simple_icon(Coordinate(0, 50), "gauge-1.png", invert=True),
            )
        )
    ])
@approve_image
def test_frame_border_visible_over_content():
    """Frame outline must stay visible even where the child overlaps it."""
    return time_rendering(name="viewport", widgets=[
        Translate(
            Coordinate(10, 10),
            Frame(
                dimensions=Dimension(300, 200),
                opacity=0.6,
                fill=(0, 0, 0),
                outline=(255, 255, 255),
                child=CachingText(
                    at=Coordinate(-8, 0),
                    fill=(255, 255, 0),
                    value=lambda: "Hello",
                    font=font.font_variant(size=64)
                ),
            )
        )
    ])
@approve_image
def test_frame_circular():
    """A fully-rounded Frame (corner_radius == half size) clips to a circle."""
    return time_rendering(name="viewport", widgets=[
        Composite(
            Text(at=Coordinate(100, 150), value=lambda: "Partially Visible", font=font.font_variant(size=64)),
            Translate(
                Coordinate(100, 00),
                Frame(
                    dimensions=Dimension(200, 200),
                    opacity=0.6,
                    fill=(0, 0, 0),
                    outline=(255, 255, 255),
                    corner_radius=100,
                    child=CachingText(
                        at=Coordinate(0, 000),
                        fill=(255, 255, 0),
                        value=lambda: "Hello",
                        font=font.font_variant(size=128)
                    ),
                )
            )
        )
    ])
@approve_image
def test_frame_clipping():
    """Children positioned partly outside the Frame must be clipped to it."""
    return time_rendering(name="viewport", widgets=[
        Translate(
            Coordinate(10, 10),
            Frame(
                dimensions=Dimension(300, 200),
                opacity=0.5,
                outline=(255, 255, 255),
                fill=(0, 0, 0),
                corner_radius=30,
                child=Composite(
                    CachingText(at=Coordinate(250, -20), value=lambda: "Hello", font=font.font_variant(size=48)),
                    CachingText(at=Coordinate(-30, 50), value=lambda: "World", font=font.font_variant(size=48))
                )
            )
        )
    ])
def time_rendering(name, widgets, dimensions: Dimension = Dimension(x=600, y=300), repeat=100):
    """Render *widgets* into a Scene *repeat* times, timing each draw.

    Prints the accumulated timer statistics and returns the final rendered
    image (also shown on screen when not running under ``make``).  Returns
    None if *repeat* is less than 1.
    """
    timer = PoorTimer(name)
    scene = Scene(dimensions, widgets)
    draw = None
    for _ in range(repeat):
        # Pass the bound method directly; no lambda wrapper needed.
        draw = timer.time(scene.draw)
    # Guard against repeat < 1, where draw would still be None.
    if draw is not None and not is_make():
        draw.show()
    print(timer)
    return draw
| 31.400881 | 120 | 0.575898 | import random
from datetime import timedelta
from PIL import ImageFont
from gopro_overlay import fake
from gopro_overlay.dimensions import Dimension
from gopro_overlay.layout import BigMetric, gps_info
from gopro_overlay.layout_components import text, metric
from gopro_overlay.point import Coordinate
from gopro_overlay.timing import PoorTimer
from gopro_overlay.units import units
from gopro_overlay.widgets import simple_icon, Text, Scene, CachingText, Composite, Translate, Frame
from gopro_overlay.widgets_info import ComparativeEnergy
from tests.approval import approve_image
from tests.testenvironment import is_make
font = ImageFont.truetype(font='Roboto-Medium.ttf', size=18)
title_font = font.font_variant(size=16)
rng = random.Random()
rng.seed(12345)
ts = fake.fake_framemeta(timedelta(minutes=10), step=timedelta(seconds=1), rng=rng)
@approve_image
def test_render_icon():
return time_rendering("icon", widgets=[
simple_icon(Coordinate(50, 50), "gauge-1.png", invert=True),
])
@approve_image
def test_render_text():
return time_rendering("simple text", [Text(Coordinate(50, 50), lambda: "Hello", font)])
@approve_image
def test_render_text_colour():
return time_rendering("simple text", [Text(Coordinate(50, 50), lambda: "Hello", font, fill=(255, 255, 0))])
@approve_image
def test_render_text_vertical():
return time_rendering("simple text", [Text(Coordinate(50, 50), lambda: "Hello", font, direction="ttb", align="lt")])
@approve_image
def test_render_caching_text_vertical():
return time_rendering("simple text", [Text(Coordinate(50, 50), lambda: "Hello", font, direction="ttb", align="lt")])
@approve_image
def test_render_caching_text_small():
return time_rendering("simple text (cached)", [CachingText(Coordinate(50, 50), lambda: "Hello", font)])
@approve_image
def test_render_text_big():
return time_rendering("big text",
[Text(Coordinate(50, 50), lambda: "Hello", font.font_variant(size=160))])
@approve_image
def test_render_caching_text_big():
return time_rendering("big text (cached)",
[CachingText(Coordinate(50, 50), lambda: "Hello", font.font_variant(size=160))])
@approve_image
def test_render_gps_info():
entry = ts.get(ts.min)
return time_rendering(
name="gps info",
widgets=[gps_info(Coordinate(400, 10), lambda: entry, font)]
)
@approve_image
def test_render_big_metric():
return time_rendering(name="big speed", widgets=[
BigMetric(Coordinate(10, 10), title=lambda: "MPH", value=lambda: "27", font_title=font,
font_metric=font.font_variant(size=160))
])
@approve_image
def test_render_comparative_energy():
speed = units.Quantity(25, units.mph)
return time_rendering(name="comparative energy",
dimensions=Dimension(x=1300, y=200),
widgets=[
Translate(
Coordinate(10, 50),
ComparativeEnergy(
font=font,
speed=lambda: speed,
person=units.Quantity(60, units.kg),
bike=units.Quantity(12, units.kg),
car=units.Quantity(2678, units.kg),
van=units.Quantity(3500, units.kg)
)
)
])
@approve_image
def test_text_component():
return time_rendering(name="text", widgets=[
text(at=Coordinate(100, 100), value=lambda: "String", font=font.font_variant(size=50))
])
@approve_image
def test_metric_component():
return time_rendering(name="text", widgets=[
metric(
at=Coordinate(100, 100),
entry=lambda: ts.get(ts.min),
accessor=lambda e: e.speed,
formatter=lambda v: format(v, ".1f"),
font=font.font_variant(size=160),
)
])
@approve_image
def test_composite_viewport():
return time_rendering(name="viewport", widgets=[
Translate(
Coordinate(330, 130),
Composite(
text(at=Coordinate(0, 0), cache=True, value=lambda: "String", font=font.font_variant(size=50)),
simple_icon(Coordinate(0, 50), "gauge-1.png", invert=True),
)
)
])
@approve_image
def test_frame_border_visible_over_content():
return time_rendering(name="viewport", widgets=[
Translate(
Coordinate(10, 10),
Frame(
dimensions=Dimension(300, 200),
opacity=0.6,
fill=(0, 0, 0),
outline=(255, 255, 255),
child=CachingText(
at=Coordinate(-8, 0),
fill=(255, 255, 0),
value=lambda: "Hello",
font=font.font_variant(size=64)
),
)
)
])
@approve_image
def test_frame_circular():
return time_rendering(name="viewport", widgets=[
Composite(
Text(at=Coordinate(100, 150), value=lambda: "Partially Visible", font=font.font_variant(size=64)),
Translate(
Coordinate(100, 00),
Frame(
dimensions=Dimension(200, 200),
opacity=0.6,
fill=(0, 0, 0),
outline=(255, 255, 255),
corner_radius=100,
child=CachingText(
at=Coordinate(0, 000),
fill=(255, 255, 0),
value=lambda: "Hello",
font=font.font_variant(size=128)
),
)
)
)
])
@approve_image
def test_frame_clipping():
return time_rendering(name="viewport", widgets=[
Translate(
Coordinate(10, 10),
Frame(
dimensions=Dimension(300, 200),
opacity=0.5,
outline=(255, 255, 255),
fill=(0, 0, 0),
corner_radius=30,
child=Composite(
CachingText(at=Coordinate(250, -20), value=lambda: "Hello", font=font.font_variant(size=48)),
CachingText(at=Coordinate(-30, 50), value=lambda: "World", font=font.font_variant(size=48))
)
)
)
])
def time_rendering(name, widgets, dimensions: Dimension = Dimension(x=600, y=300), repeat=100):
timer = PoorTimer(name)
scene = Scene(dimensions, widgets)
draw = None
for i in range(0, repeat):
draw = timer.time(lambda: scene.draw())
if not is_make():
draw.show()
print(timer)
return draw
| true | true |
f7f91764a81371107a333003e63f98bef4c3bace | 2,281 | py | Python | uwsgi_tasks/decorators.py | zanachka/uwsgi_tasks | cad3e72c8bd66979c6ac23a1cc68caff176435ab | [
"MIT"
] | 92 | 2015-01-21T06:25:18.000Z | 2022-02-23T22:29:05.000Z | uwsgi_tasks/decorators.py | zanachka/uwsgi_tasks | cad3e72c8bd66979c6ac23a1cc68caff176435ab | [
"MIT"
] | 17 | 2015-04-30T21:02:05.000Z | 2021-03-09T17:30:00.000Z | uwsgi_tasks/decorators.py | zanachka/uwsgi_tasks | cad3e72c8bd66979c6ac23a1cc68caff176435ab | [
"MIT"
] | 13 | 2015-04-30T20:54:36.000Z | 2022-02-12T17:06:15.000Z | # -*- coding: utf-8 -*-
from uwsgi_tasks.tasks import Task, TaskExecutor, TimerTask, CronTask
__all__ = ('task', 'timer', 'timer_lazy', 'cron', 'cron_lazy')
def task(func=None, executor=TaskExecutor.AUTO, **setup):
    """Decorator creating a :class:`Task` from the wrapped function.

    Usable bare (``@task``) or with arguments
    (``@task(executor=..., **setup)``).
    """
    def create_task(function):
        return Task(function, executor, **setup)
    if callable(func):
        # Bare usage: func is the function being decorated.
        return create_task(func)
    # Called with arguments: return the decorator itself
    # (the original wrapped it in a redundant lambda).
    return create_task
def timer(func=None, seconds=0, iterations=None,
          executor=TaskExecutor.AUTO, **setup):
    """Create and run a timer task immediately (eager variant of timer_lazy)."""
    return timer_lazy(func=func, seconds=seconds, iterations=iterations,
                      executor=executor, run=True, **setup)
def timer_lazy(func=None, seconds=0, iterations=None,
               executor=TaskExecutor.AUTO, run=False, **setup):
    """Create a timer task on execution.

    When ``run`` is true the task is invoked immediately; otherwise its
    uwsgi signal is registered and the task object is returned.
    """
    def decorate(function):
        timer_task = TimerTask(function=function, executor=executor,
                               seconds=seconds, iterations=iterations, **setup)
        if run:
            return timer_task()
        timer_task.register_signal()
        return timer_task
    if func is None:
        return decorate
    return decorate(func)
def cron(func=None, minute=-1, hour=-1, day=-1, month=-1, dayweek=-1,
         executor=TaskExecutor.AUTO, **setup):
    """Create a cron-like task that is run at definition time (eager cron_lazy)."""
    return cron_lazy(func=func, minute=minute, hour=hour, day=day,
                     month=month, dayweek=dayweek, executor=executor,
                     run=True, **setup)
def cron_lazy(func=None, minute=-1, hour=-1, day=-1, month=-1, dayweek=-1,
              executor=TaskExecutor.AUTO, run=False, **setup):
    """Create a cron-like task on execution.

    ``-1`` in any schedule field means "every" (uwsgi cron convention).
    When ``run`` is true the task is invoked immediately, otherwise the
    task object itself is returned.
    """
    def decorate(function):
        cron_task = CronTask(function=function, executor=executor,
                             minute=minute, hour=hour, day=day,
                             month=month, dayweek=dayweek, **setup)
        return cron_task() if run else cron_task
    if func is None:
        return decorate
    return decorate(func)
| 23.515464 | 74 | 0.562911 |
from uwsgi_tasks.tasks import Task, TaskExecutor, TimerTask, CronTask
__all__ = ('task', 'timer', 'timer_lazy', 'cron', 'cron_lazy')
def task(func=None, executor=TaskExecutor.AUTO, **setup):
def create_task(function):
return Task(function, executor, **setup)
if callable(func):
return create_task(func)
else:
return lambda f: create_task(f)
def timer(func=None, seconds=0, iterations=None,
executor=TaskExecutor.AUTO, **setup):
return timer_lazy(
func=func,
seconds=seconds,
iterations=iterations,
executor=executor,
run=True,
**setup
)
def timer_lazy(func=None, seconds=0, iterations=None,
executor=TaskExecutor.AUTO, run=False, **setup):
def inner(function):
timer_task = TimerTask(
function=function,
executor=executor,
seconds=seconds,
iterations=iterations,
**setup
)
if run:
return timer_task()
else:
timer_task.register_signal()
return timer_task
if func is None:
return inner
return inner(func)
def cron(func=None, minute=-1, hour=-1, day=-1, month=-1, dayweek=-1,
executor=TaskExecutor.AUTO, **setup):
return cron_lazy(
func=func,
minute=minute,
hour=hour,
day=day,
month=month,
dayweek=dayweek,
executor=executor,
run=True,
**setup
)
def cron_lazy(func=None, minute=-1, hour=-1, day=-1, month=-1, dayweek=-1,
executor=TaskExecutor.AUTO, run=False, **setup):
def inner(function):
cron_task = CronTask(
function=function,
executor=executor,
minute=minute,
hour=hour,
day=day,
month=month,
dayweek=dayweek,
**setup
)
if run:
return cron_task()
else:
return cron_task
if func is None:
return inner
return inner(func)
| true | true |
f7f9177527f4f51d7b3ba22fe9cbe8733ddaaee6 | 16,878 | py | Python | ivi/agilent/agilentBase8340.py | sacherjj/python-ivi | 6dd1ba93d65dc30a652a3a1b34c66921d94315e8 | [
"MIT"
] | 161 | 2015-01-23T17:43:01.000Z | 2022-03-29T14:42:42.000Z | ivi/agilent/agilentBase8340.py | sacherjj/python-ivi | 6dd1ba93d65dc30a652a3a1b34c66921d94315e8 | [
"MIT"
] | 45 | 2015-01-15T13:35:04.000Z | 2021-06-03T01:58:55.000Z | ivi/agilent/agilentBase8340.py | sacherjj/python-ivi | 6dd1ba93d65dc30a652a3a1b34c66921d94315e8 | [
"MIT"
] | 87 | 2015-01-31T10:55:23.000Z | 2022-03-17T08:18:47.000Z | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time

from .. import ivi
from .. import rfsiggen
from .. import extra
class agilentBase8340(rfsiggen.Base, rfsiggen.ModulateAM, rfsiggen.ModulateFM,
rfsiggen.ModulatePulse, rfsiggen.Sweep, rfsiggen.FrequencySweep, rfsiggen.PowerSweep,
extra.common.Memory,
extra.common.SystemSetup,
ivi.Driver):
"Agilent 8340A IVI RF sweep generator driver"
    def __init__(self, *args, **kwargs):
        # Default the instrument-ID check string before base classes run.
        self.__dict__.setdefault('_instrument_id', '')
        super(agilentBase8340, self).__init__(*args, **kwargs)
        self._identity_description = "Agilent 8340 IVI RF sweep generator driver"
        self._identity_identifier = ""
        self._identity_revision = ""
        self._identity_vendor = ""
        self._identity_instrument_manufacturer = "Agilent Technologies"
        self._identity_instrument_model = ""
        self._identity_instrument_firmware_revision = ""
        self._identity_specification_major_version = 2
        self._identity_specification_minor_version = 0
        self._identity_supported_instrument_models = list(['8340A','8340B', '8341A', '8341B'])
        # Output frequency limits of the 8340/8341 family (10 MHz - 26.5 GHz).
        self._frequency_low = 10e6
        self._frequency_high = 26.5e9
        # Number of front-panel save/recall registers.
        self._memory_size = 8
        self._add_property('sweep.frequency_sweep.center',
                        self._get_sweep_frequency_sweep_center,
                        self._set_sweep_frequency_sweep_center)
        self._add_property('sweep.frequency_sweep.span',
                        self._get_sweep_frequency_sweep_span,
                        self._set_sweep_frequency_sweep_span)
def _initialize(self, resource = None, id_query = False, reset = False, **keywargs):
"Opens an I/O session to the instrument."
super(agilentBase8340, self)._initialize(resource, id_query, reset, **keywargs)
# interface clear
if not self._driver_operation_simulate:
self._clear()
# check ID
if id_query and not self._driver_operation_simulate:
id = self.identity.instrument_model
id_check = self._instrument_id
id_short = id[:len(id_check)]
if id_short != id_check:
raise Exception("Instrument ID mismatch, expecting %s, got %s", id_check, id_short)
# reset
if reset:
self.utility_reset()
    def _get_identity_instrument_manufacturer(self):
        """Return the (static) manufacturer name."""
        return self._identity_instrument_manufacturer
    def _get_identity_instrument_model(self):
        """Return the model number, parsed from the 'OI' identification query."""
        if self._get_cache_valid():
            return self._identity_instrument_model
        if self._driver_operation_simulate:
            self._identity_instrument_model = "Not available while simulating"
        else:
            # 'OI' answers e.g. '8340AREVxx.x'; the model precedes 'REV'.
            self._identity_instrument_model = self._ask("OI").split('REV')[0]
            self._set_cache_valid()
        return self._identity_instrument_model
    def _get_identity_instrument_firmware_revision(self):
        """Return the firmware revision, parsed from the 'OI' identification query."""
        if self._get_cache_valid():
            return self._identity_instrument_firmware_revision
        if self._driver_operation_simulate:
            self._identity_instrument_firmware_revision = "Not available while simulating"
        else:
            # The part after 'REV' in the 'OI' response is the revision.
            self._identity_instrument_firmware_revision = self._ask("OI").split('REV')[1]
            self._set_cache_valid()
        return self._identity_instrument_firmware_revision
    def _utility_disable(self):
        # Not supported on this instrument.
        pass
    def _utility_error_query(self):
        """Return (error_code, error_message); remote querying is disabled."""
        error_code = 0
        error_message = "No error"
        #if not self._driver_operation_simulate:
        #    error_code = int(self._ask("OE"))
        #    if error_code == 0:
        #        error_code = int(self._ask("OH"))
        #    if error_code != 0:
        #        error_message = "Unknown error"
        #        if error_code in Messages:
        #            error_message = Messages[error_code]
        return (error_code, error_message)
    def _utility_lock_object(self):
        # Not supported on this instrument.
        pass
    def _utility_reset(self):
        """Reset the instrument ('IP' = instrument preset) and drop all cached state."""
        if not self._driver_operation_simulate:
            self._write("IP")
        self.driver_operation.invalidate_all_attributes()
    def _utility_reset_with_defaults(self):
        # No separate "with defaults" reset; a plain reset is equivalent here.
        self._utility_reset()
    def _utility_self_test(self):
        """Return (code, message); no remote self-test is implemented."""
        code = 0
        message = "Self test passed"
        return (code, message)
    def _utility_unlock_object(self):
        pass
def _memory_save(self, index):
index = int(index)
if index < 0 or index >= self._memory_size:
raise OutOfRangeException()
if not self._driver_operation_simulate:
self._write("SV %d" % (index+1))
def _memory_recall(self, index):
index = int(index)
if index < 0 or index >= self._memory_size:
raise OutOfRangeException()
if not self._driver_operation_simulate:
self._write("RC %d" % (index+1))
    def _system_fetch_setup(self):
        """Return the instrument's complete setup state as raw bytes ('OL?' query)."""
        if self._driver_operation_simulate:
            return b''
        self._write("OL?")
        return self._read_raw()
    def _system_load_setup(self, data):
        """Restore a setup previously fetched with _system_fetch_setup ('IL' + data)."""
        if self._driver_operation_simulate:
            return
        self._write_raw(b'IL'+data)
        # All cached settings may now be stale.
        self.driver_operation.invalidate_all_attributes()
    def _get_rf_frequency(self):
        """Return the CW frequency in Hz ('OPCW' output-interrogate query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._rf_frequency = float(self._ask("OPCW"))
            self._set_cache_valid()
        return self._rf_frequency
    def _set_rf_frequency(self, value):
        """Program the CW frequency in Hz ('CW<f>HZ')."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("CW%eHZ" % value)
        self._rf_frequency = value
        self._set_cache_valid()
    def _get_rf_level(self):
        """Return the output power level in dBm ('OPPL' query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._rf_level = float(self._ask("OPPL"))
            self._set_cache_valid()
        return self._rf_level
    def _set_rf_level(self, value):
        """Program the output power level in dBm ('PL<v>DB')."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("PL%eDB" % value)
        self._rf_level = value
        self._set_cache_valid()
    def _get_rf_output_enabled(self):
        # Instrument read-back ('OPRF') is disabled; rely on the cached value.
        #if not self._driver_operation_simulate and not self._get_cache_valid():
        #    self._rf_output_enabled = bool(int(self._ask("OPRF")))
        #    self._set_cache_valid()
        return self._rf_output_enabled
    def _set_rf_output_enabled(self, value):
        """Switch the RF output on/off ('RF1'/'RF0')."""
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("RF%d" % int(value))
        self._rf_output_enabled = value
        self._set_cache_valid()
    def _get_alc_enabled(self):
        # Software-only flag; never sent to the instrument.
        return self._alc_enabled
    def _set_alc_enabled(self, value):
        value = bool(value)
        self._alc_enabled = value
    def _rf_is_settled(self):
        """Return True when the source reports settled (status byte bit 4)."""
        if not self._driver_operation_simulate:
            return self._read_stb() & (1 << 4) != 0
        # Simulation is always "settled".
        return True
def _rf_wait_until_settled(self, maximum_time):
t = 0
while not self._rf_is_settled() and t < maximum_time:
time.sleep(0.01)
t = t + 0.01
    def _get_analog_modulation_am_enabled(self):
        # Instrument read-back ('OPAM') is disabled; rely on the cached value.
        #if not self._driver_operation_simulate and not self._get_cache_valid():
        #    self._analog_modulation_am_enabled = bool(int(self._ask("OPAM")))
        #    self._set_cache_valid()
        return self._analog_modulation_am_enabled
    def _set_analog_modulation_am_enabled(self, value):
        """Switch amplitude modulation on/off ('AM1'/'AM0')."""
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("AM%d" % int(value))
        self._analog_modulation_am_enabled = value
        self._set_cache_valid()
    # The AM/FM parameter accessors below are software-only: values are
    # cached on the driver but never written to the instrument.
    def _get_analog_modulation_am_source(self):
        return self._analog_modulation_am_source
    def _set_analog_modulation_am_source(self, value):
        value = str(value)
        self._analog_modulation_am_source = value
    def _get_analog_modulation_am_scaling(self):
        return self._analog_modulation_am_scaling
    def _set_analog_modulation_am_scaling(self, value):
        value = int(value)
        self._analog_modulation_am_scaling = value
    def _get_analog_modulation_am_external_coupling(self):
        return self._analog_modulation_am_external_coupling
    def _set_analog_modulation_am_external_coupling(self, value):
        value = int(value)
        self._analog_modulation_am_external_coupling = value
    def _get_analog_modulation_am_nominal_voltage(self):
        return self._analog_modulation_am_nominal_voltage
    def _set_analog_modulation_am_nominal_voltage(self, value):
        value = float(value)
        self._analog_modulation_am_nominal_voltage = value
    def _get_analog_modulation_am_depth(self):
        return self._analog_modulation_am_depth
    def _set_analog_modulation_am_depth(self, value):
        value = float(value)
        self._analog_modulation_am_depth = value
    def _get_analog_modulation_fm_enabled(self):
        return self._analog_modulation_fm_enabled
    def _set_analog_modulation_fm_enabled(self, value):
        """Switch frequency modulation on/off ('FM1'/'FM0')."""
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("FM%d" % int(value))
        self._analog_modulation_fm_enabled = value
        self._set_cache_valid()
    def _get_analog_modulation_fm_source(self):
        return self._analog_modulation_fm_source
    def _set_analog_modulation_fm_source(self, value):
        value = str(value)
        self._analog_modulation_fm_source = value
    def _get_analog_modulation_fm_external_coupling(self):
        return self._analog_modulation_fm_external_coupling
    def _set_analog_modulation_fm_external_coupling(self, value):
        value = int(value)
        self._analog_modulation_fm_external_coupling = value
    def _get_analog_modulation_fm_nominal_voltage(self):
        return self._analog_modulation_fm_nominal_voltage
    def _set_analog_modulation_fm_nominal_voltage(self, value):
        value = float(value)
        self._analog_modulation_fm_nominal_voltage = value
    def _get_analog_modulation_fm_deviation(self):
        return self._analog_modulation_fm_deviation
    def _set_analog_modulation_fm_deviation(self, value):
        value = float(value)
        # Instrument write is disabled; deviation is cached only.
        #if not self._driver_operation_simulate:
        #    self._write("FM %e HZ" % value)
        self._analog_modulation_fm_deviation = value
        #self._set_cache_valid()
    def _get_pulse_modulation_enabled(self):
        return self._pulse_modulation_enabled
    def _set_pulse_modulation_enabled(self, value):
        """Switch pulse modulation on/off ('PM1'/'PM0')."""
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("PM%d" % int(value))
        self._pulse_modulation_enabled = value
        self._set_cache_valid()
    def _get_pulse_modulation_source(self):
        # Software-only setting.
        return self._pulse_modulation_source
    def _set_pulse_modulation_source(self, value):
        value = str(value)
        self._pulse_modulation_source = value
    def _get_pulse_modulation_external_polarity(self):
        return self._pulse_modulation_external_polarity
    def _set_pulse_modulation_external_polarity(self, value):
        value = int(value)
        self._pulse_modulation_external_polarity = value
    def _get_sweep_mode(self):
        # Software-only setting; not sent to the instrument.
        return self._sweep_mode
    def _set_sweep_mode(self, value):
        value = int(value)
        self._sweep_mode = value
    def _get_sweep_trigger_source(self):
        # Software-only setting; not sent to the instrument.
        return self._sweep_trigger_source
    def _set_sweep_trigger_source(self, value):
        value = str(value)
        self._sweep_trigger_source = value
    def _get_sweep_frequency_sweep_start(self):
        """Return the sweep start frequency in Hz ('OPFA' query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_start = float(self._ask("OPFA"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_start
    def _set_sweep_frequency_sweep_start(self, value):
        """Set the sweep start frequency in Hz ('FA<f>HZ')."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("FA%fHZ" % value)
        self._sweep_frequency_sweep_start = value
        self._set_cache_valid()
        # start/stop and center/span describe the same range, so changing
        # one pair invalidates the cached values of the other pair.
        self._set_cache_valid(False, 'sweep_frequency_sweep_center')
        self._set_cache_valid(False, 'sweep_frequency_sweep_span')
    def _get_sweep_frequency_sweep_stop(self):
        """Return the sweep stop frequency in Hz ('OPFB' query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_stop = float(self._ask("OPFB"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_stop
    def _set_sweep_frequency_sweep_stop(self, value):
        """Set the sweep stop frequency in Hz ('FB<f>HZ')."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("FB%fHZ" % value)
        self._sweep_frequency_sweep_stop = value
        self._set_cache_valid()
        self._set_cache_valid(False, 'sweep_frequency_sweep_center')
        self._set_cache_valid(False, 'sweep_frequency_sweep_span')
    def _get_sweep_frequency_sweep_center(self):
        """Return the sweep center frequency in Hz ('OPCF' query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_center = float(self._ask("OPCF"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_center
    def _set_sweep_frequency_sweep_center(self, value):
        """Set the sweep center frequency in Hz ('CF<f>HZ')."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("CF%fHZ" % value)
        self._sweep_frequency_sweep_center = value
        self._set_cache_valid()
        # Conversely, changing center/span invalidates cached start/stop.
        self._set_cache_valid(False, 'sweep_frequency_sweep_start')
        self._set_cache_valid(False, 'sweep_frequency_sweep_stop')
    def _get_sweep_frequency_sweep_span(self):
        """Return the sweep frequency span in Hz ('OPDF' query)."""
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_span = float(self._ask("OPDF"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_span
    def _set_sweep_frequency_sweep_span(self, value):
        """Set the sweep frequency span in Hz ('DF<f>HZ')."""
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("DF%fHZ" % value)
        self._sweep_frequency_sweep_span = value
        self._set_cache_valid()
        self._set_cache_valid(False, 'sweep_frequency_sweep_start')
        self._set_cache_valid(False, 'sweep_frequency_sweep_stop')
def _get_sweep_frequency_sweep_time(self):
if not self._driver_operation_simulate and not self._get_cache_valid():
self._sweep_frequency_sweep_time = float(self._ask("OPST"))
self._set_cache_valid()
return self._sweep_frequency_sweep_time
def _set_sweep_frequency_sweep_time(self, value):
value = float(value)
if not self._driver_operation_simulate:
self._write("ST%fSC" % value)
self._sweep_frequency_sweep_time = value
    # --- power-sweep accessors ---
    def _get_sweep_power_sweep_start(self):
        # A power sweep starts from the current RF output level.
        return self._get_rf_level()
    def _set_sweep_power_sweep_start(self, value):
        self._set_rf_level(value)
    def _get_sweep_power_sweep_stop(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_power_sweep_stop = float(self._ask("OPPS"))
            self._set_cache_valid()
        return self._sweep_power_sweep_stop
    def _set_sweep_power_sweep_stop(self, value):
        # Stop level in dB ("PS<level>DB").
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("PS%fDB" % value)
        self._sweep_power_sweep_stop = value
        self._set_cache_valid()
    def _get_sweep_power_sweep_time(self):
        # The instrument has a single sweep time shared with the frequency sweep.
        return self._get_sweep_frequency_sweep_time()
    def _set_sweep_power_sweep_time(self, value):
        self._set_sweep_frequency_sweep_time(value)
| 36.851528 | 99 | 0.691077 |
from .. import ivi
from .. import rfsiggen
from .. import extra
class agilentBase8340(rfsiggen.Base, rfsiggen.ModulateAM, rfsiggen.ModulateFM,
                rfsiggen.ModulatePulse, rfsiggen.Sweep, rfsiggen.FrequencySweep, rfsiggen.PowerSweep,
                extra.common.Memory,
                extra.common.SystemSetup,
                ivi.Driver):
    """Agilent 8340/8341 IVI RF sweep generator driver.

    Combines the IVI rfsiggen capability groups with memory and
    system-setup extras on top of the common ivi.Driver plumbing.
    Instrument I/O goes through self._write/self._ask using the 834x
    mnemonic command set (e.g. "CW<f>HZ", "PL<l>DB", "IP").
    """
    def __init__(self, *args, **kwargs):
        self.__dict__.setdefault('_instrument_id', '')
        super(agilentBase8340, self).__init__(*args, **kwargs)
        # IVI identity metadata.
        self._identity_description = "Agilent 8340 IVI RF sweep generator driver"
        self._identity_identifier = ""
        self._identity_revision = ""
        self._identity_vendor = ""
        self._identity_instrument_manufacturer = "Agilent Technologies"
        self._identity_instrument_model = ""
        self._identity_instrument_firmware_revision = ""
        self._identity_specification_major_version = 2
        self._identity_specification_minor_version = 0
        self._identity_supported_instrument_models = list(['8340A','8340B', '8341A', '8341B'])
        # Instrument frequency range: 10 MHz to 26.5 GHz.
        self._frequency_low = 10e6
        self._frequency_high = 26.5e9
        # Number of save/recall setup registers.
        self._memory_size = 8
        # Expose center/span as IVI properties alongside start/stop.
        self._add_property('sweep.frequency_sweep.center',
                        self._get_sweep_frequency_sweep_center,
                        self._set_sweep_frequency_sweep_center)
        self._add_property('sweep.frequency_sweep.span',
                        self._get_sweep_frequency_sweep_span,
                        self._set_sweep_frequency_sweep_span)
    def _initialize(self, resource = None, id_query = False, reset = False, **keywargs):
        """Open the session; optionally verify the instrument ID and reset."""
        super(agilentBase8340, self)._initialize(resource, id_query, reset, **keywargs)
        if not self._driver_operation_simulate:
            self._clear()
        if id_query and not self._driver_operation_simulate:
            id = self.identity.instrument_model
            id_check = self._instrument_id
            id_short = id[:len(id_check)]
            if id_short != id_check:
                # NOTE(review): message uses %-placeholders but is never
                # formatted — the args are passed as extra Exception args.
                raise Exception("Instrument ID mismatch, expecting %s, got %s", id_check, id_short)
        if reset:
            self.utility_reset()
    def _get_identity_instrument_manufacturer(self):
        return self._identity_instrument_manufacturer
    def _get_identity_instrument_model(self):
        # "OI" returns a string like "<model>REV<firmware>"; model is the prefix.
        if self._get_cache_valid():
            return self._identity_instrument_model
        if self._driver_operation_simulate:
            self._identity_instrument_model = "Not available while simulating"
        else:
            self._identity_instrument_model = self._ask("OI").split('REV')[0]
            self._set_cache_valid()
        return self._identity_instrument_model
    def _get_identity_instrument_firmware_revision(self):
        # Firmware revision is the suffix of the "OI" reply after 'REV'.
        if self._get_cache_valid():
            return self._identity_instrument_firmware_revision
        if self._driver_operation_simulate:
            self._identity_instrument_firmware_revision = "Not available while simulating"
        else:
            self._identity_instrument_firmware_revision = self._ask("OI").split('REV')[1]
            self._set_cache_valid()
        return self._identity_instrument_firmware_revision
    def _utility_disable(self):
        pass
    def _utility_error_query(self):
        # No error queue readout implemented; always reports success.
        error_code = 0
        error_message = "No error"
        return (error_code, error_message)
    def _utility_lock_object(self):
        pass
    def _utility_reset(self):
        # "IP" = instrument preset; all cached attributes become stale.
        if not self._driver_operation_simulate:
            self._write("IP")
            self.driver_operation.invalidate_all_attributes()
    def _utility_reset_with_defaults(self):
        self._utility_reset()
    def _utility_self_test(self):
        # No self-test command issued; always reports success.
        code = 0
        message = "Self test passed"
        return (code, message)
    def _utility_unlock_object(self):
        pass
    def _memory_save(self, index):
        """Save the current state to register index (0-based; instrument is 1-based)."""
        index = int(index)
        if index < 0 or index >= self._memory_size:
            # NOTE(review): OutOfRangeException is not imported in the visible
            # chunk — presumably provided by the ivi package; confirm.
            raise OutOfRangeException()
        if not self._driver_operation_simulate:
            self._write("SV %d" % (index+1))
    def _memory_recall(self, index):
        """Recall register index (0-based; instrument is 1-based)."""
        index = int(index)
        if index < 0 or index >= self._memory_size:
            raise OutOfRangeException()
        if not self._driver_operation_simulate:
            self._write("RC %d" % (index+1))
    def _system_fetch_setup(self):
        # "OL?" dumps the learn string (binary instrument state).
        if self._driver_operation_simulate:
            return b''
        self._write("OL?")
        return self._read_raw()
    def _system_load_setup(self, data):
        # "IL" + learn string restores a previously fetched state.
        if self._driver_operation_simulate:
            return
        self._write_raw(b'IL'+data)
        self.driver_operation.invalidate_all_attributes()
    def _get_rf_frequency(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._rf_frequency = float(self._ask("OPCW"))
            self._set_cache_valid()
        return self._rf_frequency
    def _set_rf_frequency(self, value):
        # CW frequency in Hz.
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("CW%eHZ" % value)
        self._rf_frequency = value
        self._set_cache_valid()
    def _get_rf_level(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._rf_level = float(self._ask("OPPL"))
            self._set_cache_valid()
        return self._rf_level
    def _set_rf_level(self, value):
        # Output power level in dB.
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("PL%eDB" % value)
        self._rf_level = value
        self._set_cache_valid()
    def _get_rf_output_enabled(self):
        return self._rf_output_enabled
    def _set_rf_output_enabled(self, value):
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("RF%d" % int(value))
        self._rf_output_enabled = value
        self._set_cache_valid()
    def _get_alc_enabled(self):
        return self._alc_enabled
    def _set_alc_enabled(self, value):
        # Software-only; no command is sent for ALC.
        value = bool(value)
        self._alc_enabled = value
    def _rf_is_settled(self):
        # Status byte bit 4 — presumably the "settled" flag; confirm with manual.
        if not self._driver_operation_simulate:
            return self._read_stb() & (1 << 4) != 0
        return True
    def _rf_wait_until_settled(self, maximum_time):
        # Poll every 10 ms until settled or maximum_time (seconds) elapses.
        # NOTE(review): relies on a module-level `import time` not visible here.
        t = 0
        while not self._rf_is_settled() and t < maximum_time:
            time.sleep(0.01)
            t = t + 0.01
    def _get_analog_modulation_am_enabled(self):
        return self._analog_modulation_am_enabled
    def _set_analog_modulation_am_enabled(self, value):
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("AM%d" % int(value))
        self._analog_modulation_am_enabled = value
        self._set_cache_valid()
    def _get_analog_modulation_am_source(self):
        return self._analog_modulation_am_source
    def _set_analog_modulation_am_source(self, value):
        value = str(value)
        self._analog_modulation_am_source = value
    def _get_analog_modulation_am_scaling(self):
        return self._analog_modulation_am_scaling
    def _set_analog_modulation_am_scaling(self, value):
        value = int(value)
        self._analog_modulation_am_scaling = value
    def _get_analog_modulation_am_external_coupling(self):
        return self._analog_modulation_am_external_coupling
    def _set_analog_modulation_am_external_coupling(self, value):
        value = int(value)
        self._analog_modulation_am_external_coupling = value
    def _get_analog_modulation_am_nominal_voltage(self):
        return self._analog_modulation_am_nominal_voltage
    def _set_analog_modulation_am_nominal_voltage(self, value):
        value = float(value)
        self._analog_modulation_am_nominal_voltage = value
    def _get_analog_modulation_am_depth(self):
        return self._analog_modulation_am_depth
    def _set_analog_modulation_am_depth(self, value):
        value = float(value)
        self._analog_modulation_am_depth = value
    def _get_analog_modulation_fm_enabled(self):
        return self._analog_modulation_fm_enabled
    def _set_analog_modulation_fm_enabled(self, value):
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("FM%d" % int(value))
        self._analog_modulation_fm_enabled = value
        self._set_cache_valid()
    def _get_analog_modulation_fm_source(self):
        return self._analog_modulation_fm_source
    def _set_analog_modulation_fm_source(self, value):
        value = str(value)
        self._analog_modulation_fm_source = value
    def _get_analog_modulation_fm_external_coupling(self):
        return self._analog_modulation_fm_external_coupling
    def _set_analog_modulation_fm_external_coupling(self, value):
        value = int(value)
        self._analog_modulation_fm_external_coupling = value
    def _get_analog_modulation_fm_nominal_voltage(self):
        return self._analog_modulation_fm_nominal_voltage
    def _set_analog_modulation_fm_nominal_voltage(self, value):
        value = float(value)
        self._analog_modulation_fm_nominal_voltage = value
    def _get_analog_modulation_fm_deviation(self):
        return self._analog_modulation_fm_deviation
    def _set_analog_modulation_fm_deviation(self, value):
        value = float(value)
        self._analog_modulation_fm_deviation = value
    def _get_pulse_modulation_enabled(self):
        return self._pulse_modulation_enabled
    def _set_pulse_modulation_enabled(self, value):
        value = bool(value)
        if not self._driver_operation_simulate:
            self._write("PM%d" % int(value))
        self._pulse_modulation_enabled = value
        self._set_cache_valid()
    def _get_pulse_modulation_source(self):
        return self._pulse_modulation_source
    def _set_pulse_modulation_source(self, value):
        value = str(value)
        self._pulse_modulation_source = value
    def _get_pulse_modulation_external_polarity(self):
        return self._pulse_modulation_external_polarity
    def _set_pulse_modulation_external_polarity(self, value):
        value = int(value)
        self._pulse_modulation_external_polarity = value
    def _get_sweep_mode(self):
        return self._sweep_mode
    def _set_sweep_mode(self, value):
        value = int(value)
        self._sweep_mode = value
    def _get_sweep_trigger_source(self):
        return self._sweep_trigger_source
    def _set_sweep_trigger_source(self, value):
        value = str(value)
        self._sweep_trigger_source = value
    # Frequency-sweep accessors: setters invalidate the cached values of the
    # other representation, since start/stop and center/span are linked.
    def _get_sweep_frequency_sweep_start(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_start = float(self._ask("OPFA"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_start
    def _set_sweep_frequency_sweep_start(self, value):
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("FA%fHZ" % value)
        self._sweep_frequency_sweep_start = value
        self._set_cache_valid()
        self._set_cache_valid(False, 'sweep_frequency_sweep_center')
        self._set_cache_valid(False, 'sweep_frequency_sweep_span')
    def _get_sweep_frequency_sweep_stop(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_stop = float(self._ask("OPFB"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_stop
    def _set_sweep_frequency_sweep_stop(self, value):
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("FB%fHZ" % value)
        self._sweep_frequency_sweep_stop = value
        self._set_cache_valid()
        self._set_cache_valid(False, 'sweep_frequency_sweep_center')
        self._set_cache_valid(False, 'sweep_frequency_sweep_span')
    def _get_sweep_frequency_sweep_center(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_center = float(self._ask("OPCF"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_center
    def _set_sweep_frequency_sweep_center(self, value):
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("CF%fHZ" % value)
        self._sweep_frequency_sweep_center = value
        self._set_cache_valid()
        self._set_cache_valid(False, 'sweep_frequency_sweep_start')
        self._set_cache_valid(False, 'sweep_frequency_sweep_stop')
    def _get_sweep_frequency_sweep_span(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_span = float(self._ask("OPDF"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_span
    def _set_sweep_frequency_sweep_span(self, value):
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("DF%fHZ" % value)
        self._sweep_frequency_sweep_span = value
        self._set_cache_valid()
        self._set_cache_valid(False, 'sweep_frequency_sweep_start')
        self._set_cache_valid(False, 'sweep_frequency_sweep_stop')
    def _get_sweep_frequency_sweep_time(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_frequency_sweep_time = float(self._ask("OPST"))
            self._set_cache_valid()
        return self._sweep_frequency_sweep_time
    def _set_sweep_frequency_sweep_time(self, value):
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("ST%fSC" % value)
        self._sweep_frequency_sweep_time = value
    def _get_sweep_power_sweep_start(self):
        # Power sweeps start from the current RF level.
        return self._get_rf_level()
    def _set_sweep_power_sweep_start(self, value):
        self._set_rf_level(value)
    def _get_sweep_power_sweep_stop(self):
        if not self._driver_operation_simulate and not self._get_cache_valid():
            self._sweep_power_sweep_stop = float(self._ask("OPPS"))
            self._set_cache_valid()
        return self._sweep_power_sweep_stop
    def _set_sweep_power_sweep_stop(self, value):
        value = float(value)
        if not self._driver_operation_simulate:
            self._write("PS%fDB" % value)
        self._sweep_power_sweep_stop = value
        self._set_cache_valid()
    def _get_sweep_power_sweep_time(self):
        # Single sweep time shared with the frequency sweep.
        return self._get_sweep_frequency_sweep_time()
    def _set_sweep_power_sweep_time(self, value):
        self._set_sweep_frequency_sweep_time(value)
| true | true |
f7f917cf3dbc3931977727128198cdddbd382e8a | 4,623 | py | Python | keras_retinanet/callbacks/eval.py | hunglvfimo/keras-retinanet | af5948a1d52edf30f2eb50d7332a6a3a9971ad66 | [
"Apache-2.0"
] | null | null | null | keras_retinanet/callbacks/eval.py | hunglvfimo/keras-retinanet | af5948a1d52edf30f2eb50d7332a6a3a9971ad66 | [
"Apache-2.0"
] | null | null | null | keras_retinanet/callbacks/eval.py | hunglvfimo/keras-retinanet | af5948a1d52edf30f2eb50d7332a6a3a9971ad66 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import keras
from ..utils.eval import evaluate
import os
import csv
class Evaluate(keras.callbacks.Callback):
    """ Evaluation callback for arbitrary datasets.

    Runs mAP evaluation at the end of every epoch and optionally logs the
    result to TensorBoard and/or appends it to a CSV file.
    """

    def __init__(
        self,
        generator,
        iou_threshold=0.5,
        score_threshold=0.05,
        max_detections=100,
        save_path=None,
        tensorboard=None,
        csv_logger=None,
        weighted_average=False,
        verbose=1
    ):
        """ Evaluate a given dataset using a given model at the end of every epoch during training.

        # Arguments
            generator        : The generator that represents the dataset to evaluate.
            iou_threshold    : The threshold used to consider when a detection is positive or negative.
            score_threshold  : The score confidence threshold to use for detections.
            max_detections   : The maximum number of detections to use per image.
            save_path        : The path to save images with visualized detections to.
            tensorboard      : Instance of keras.callbacks.TensorBoard used to log the mAP value.
            csv_logger       : Path of a CSV file that per-epoch mAP and per-class AP rows are appended to.
            weighted_average : Compute the mAP using the weighted average of precisions among classes.
            verbose          : Set the verbosity level, by default this is set to 1.
        """
        self.generator        = generator
        self.iou_threshold    = iou_threshold
        self.score_threshold  = score_threshold
        self.max_detections   = max_detections
        self.save_path        = save_path
        self.tensorboard      = tensorboard
        self.csv_logger       = csv_logger
        self.weighted_average = weighted_average
        self.verbose          = verbose

        super(Evaluate, self).__init__()

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}

        # run evaluation
        average_precisions = evaluate(
            self.generator,
            self.model,
            iou_threshold=self.iou_threshold,
            score_threshold=self.score_threshold,
            max_detections=self.max_detections,
            save_path=self.save_path
        )

        # compute per class average precision
        total_instances = []
        precisions = []
        label_names = []
        for label, (average_precision, num_annotations) in average_precisions.items():
            label_names.append(self.generator.label_to_name(label))
            if self.verbose == 1:
                print('{:.0f} instances of class'.format(num_annotations),
                      self.generator.label_to_name(label), 'with average precision: {:.4f}'.format(average_precision))
            total_instances.append(num_annotations)
            precisions.append(average_precision)
        if self.weighted_average:
            self.mean_ap = sum([a * b for a, b in zip(total_instances, precisions)]) / sum(total_instances)
        else:
            # average only over classes that actually have annotations
            self.mean_ap = sum(precisions) / sum(x > 0 for x in total_instances)

        self._log_to_tensorboard(epoch)
        self._log_to_csv(epoch, label_names, precisions)

        logs['mAP'] = self.mean_ap

        if self.verbose == 1:
            print('mAP: {:.4f}'.format(self.mean_ap))

    def _log_to_tensorboard(self, epoch):
        """ Write the current mAP as a scalar summary if a TensorBoard writer is attached. """
        if self.tensorboard is not None and self.tensorboard.writer is not None:
            import tensorflow as tf
            summary = tf.Summary()
            summary_value = summary.value.add()
            summary_value.simple_value = self.mean_ap
            summary_value.tag = "mAP"
            self.tensorboard.writer.add_summary(summary, epoch)

    def _log_to_csv(self, epoch, label_names, precisions):
        """ Append one CSV row per epoch; write the header first when the file is new.

        Fix: the original duplicated the open/writer logic across the
        file-exists and file-missing branches; opening in append mode also
        creates a missing file, so a single branch suffices.
        """
        if self.csv_logger is None:
            return
        write_header = not os.path.isfile(self.csv_logger)
        with open(self.csv_logger, mode='a', newline='') as csv_f:
            writer = csv.writer(csv_f)
            if write_header:
                writer.writerow(["epoch", "mAP"] + label_names)
            writer.writerow([epoch, self.mean_ap] + precisions)
| 39.853448 | 118 | 0.625784 |
import keras
from ..utils.eval import evaluate
import os
import csv
class Evaluate(keras.callbacks.Callback):
    """Evaluation callback: computes mAP over `generator` at the end of each
    epoch; optionally logs to TensorBoard and appends rows to a CSV file."""
    def __init__(
        self,
        generator,
        iou_threshold=0.5,
        score_threshold=0.05,
        max_detections=100,
        save_path=None,
        tensorboard=None,
        csv_logger=None,
        weighted_average=False,
        verbose=1
    ):
        """Store evaluation settings; `csv_logger` is a file path, the rest
        mirror the arguments of utils.eval.evaluate."""
        self.generator = generator
        self.iou_threshold = iou_threshold
        self.score_threshold = score_threshold
        self.max_detections = max_detections
        self.save_path = save_path
        self.tensorboard = tensorboard
        self.csv_logger = csv_logger
        self.weighted_average = weighted_average
        self.verbose = verbose
        super(Evaluate, self).__init__()
    def on_epoch_end(self, epoch, logs=None):
        """Run evaluation, compute mAP, and log it (stdout/TensorBoard/CSV)."""
        logs = logs or {}
        # run evaluation over the whole generator with the current model
        average_precisions = evaluate(
            self.generator,
            self.model,
            iou_threshold=self.iou_threshold,
            score_threshold=self.score_threshold,
            max_detections=self.max_detections,
            save_path=self.save_path
        )
        # collect per-class AP and instance counts
        total_instances = []
        precisions = []
        label_names = []
        for label, (average_precision, num_annotations ) in average_precisions.items():
            label_names.append(self.generator.label_to_name(label))
            if self.verbose == 1:
                print('{:.0f} instances of class'.format(num_annotations),
                      self.generator.label_to_name(label), 'with average precision: {:.4f}'.format(average_precision))
            total_instances.append(num_annotations)
            precisions.append(average_precision)
        if self.weighted_average:
            # weight each class AP by its number of annotations
            self.mean_ap = sum([a * b for a, b in zip(total_instances, precisions)]) / sum(total_instances)
        else:
            # plain mean over classes that have at least one annotation
            self.mean_ap = sum(precisions) / sum(x > 0 for x in total_instances)
        if self.tensorboard is not None and self.tensorboard.writer is not None:
            # lazy import: tensorflow only needed when a TensorBoard writer exists
            import tensorflow as tf
            summary = tf.Summary()
            summary_value = summary.value.add()
            summary_value.simple_value = self.mean_ap
            summary_value.tag = "mAP"
            self.tensorboard.writer.add_summary(summary, epoch)
        if self.csv_logger is not None:
            # append a row per epoch; write a header row when creating the file
            if os.path.isfile (self.csv_logger):
                with open(self.csv_logger, mode='a', newline='') as csv_f:
                    writer = csv.writer(csv_f)
                    writer.writerow([epoch, self.mean_ap] + precisions)
            else:
                with open(self.csv_logger, mode='w', newline='') as csv_f:
                    writer = csv.writer(csv_f)
                    # write header
                    writer.writerow(["epoch", "mAP"] + label_names)
                    writer.writerow([epoch, self.mean_ap] + precisions)
        logs['mAP'] = self.mean_ap
        if self.verbose == 1:
            print('mAP: {:.4f}'.format(self.mean_ap))
| true | true |
f7f919252b012f3c42c96d0559257130df289fc9 | 5,508 | py | Python | pypureclient/flasharray/FA_2_3/models/protection_group_snapshot_transfer_get_response.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 14 | 2018-12-07T18:30:27.000Z | 2022-02-22T09:12:33.000Z | pypureclient/flasharray/FA_2_3/models/protection_group_snapshot_transfer_get_response.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 28 | 2019-09-17T21:03:52.000Z | 2022-03-29T22:07:35.000Z | pypureclient/flasharray/FA_2_3/models/protection_group_snapshot_transfer_get_response.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 15 | 2020-06-11T15:50:08.000Z | 2022-03-21T09:27:25.000Z | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_3 import models
class ProtectionGroupSnapshotTransferGetResponse(object):
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Swagger-generated model: attribute name -> declared type string.
    swagger_types = {
        'more_items_remaining': 'bool',
        'total_item_count': 'int',
        'continuation_token': 'str',
        'items': 'list[ProtectionGroupSnapshotTransfer]',
        'total': 'list[ProtectionGroupSnapshotTransfer]'
    }
    # Attribute name -> JSON key (identical for this model).
    attribute_map = {
        'more_items_remaining': 'more_items_remaining',
        'total_item_count': 'total_item_count',
        'continuation_token': 'continuation_token',
        'items': 'items',
        'total': 'total'
    }
    required_args = {
    }
    def __init__(
        self,
        more_items_remaining=None,  # type: bool
        total_item_count=None,  # type: int
        continuation_token=None,  # type: str
        items=None,  # type: List[models.ProtectionGroupSnapshotTransfer]
        total=None,  # type: List[models.ProtectionGroupSnapshotTransfer]
    ):
        """
        Keyword args:
            more_items_remaining (bool): Returns a value of `true` if subsequent items can be retrieved.
            total_item_count (int): The total number of records after applying all filter query parameters. The `total_item_count` will be calculated if and only if the corresponding query parameter `total_item_count` is set to `true`. If this query parameter is not set or set to `false`, a value of `null` will be returned.
            continuation_token (str): Continuation token that can be provided in the `continuation_token` query param to get the next page of data. If you use the continuation token to page through data you are guaranteed to get all items exactly once regardless of how items are modified. If an item is added or deleted during the pagination then it may or may not be returned. The continuation token is generated if the limit is less than the remaining number of items, and the default sort is used (no sort is specified).
            items (list[ProtectionGroupSnapshotTransfer]): Returns a list of all items after filtering. The values are displayed for each name where meaningful.
            total (list[ProtectionGroupSnapshotTransfer]): The aggregate value of all items after filtering. Where it makes more sense, the average value is displayed instead. The values are displayed for each field where meaningful.
        """
        # Only explicitly supplied values are stored; unset attributes stay
        # absent so __getattribute__/hasattr can detect them.
        if more_items_remaining is not None:
            self.more_items_remaining = more_items_remaining
        if total_item_count is not None:
            self.total_item_count = total_item_count
        if continuation_token is not None:
            self.continuation_token = continuation_token
        if items is not None:
            self.items = items
        if total is not None:
            self.total = total
    def __setattr__(self, key, value):
        # Reject attribute names outside the swagger model.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ProtectionGroupSnapshotTransferGetResponse`".format(key))
        self.__dict__[key] = value
    def __getattribute__(self, item):
        # Property sentinel values represent unset fields; report them as missing.
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            raise AttributeError
        else:
            return value
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    # convert one level of dict values that are swagger models
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        # generated boilerplate: only relevant if the model subclassed dict
        if issubclass(ProtectionGroupSnapshotTransferGetResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ProtectionGroupSnapshotTransferGetResponse):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 40.5 | 524 | 0.629085 |
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_3 import models
class ProtectionGroupSnapshotTransferGetResponse(object):
    """Paged GET response model for protection-group snapshot transfers.

    swagger_types maps attribute name -> declared type string;
    attribute_map maps attribute name -> JSON key.
    """
    swagger_types = {
        'more_items_remaining': 'bool',
        'total_item_count': 'int',
        'continuation_token': 'str',
        'items': 'list[ProtectionGroupSnapshotTransfer]',
        'total': 'list[ProtectionGroupSnapshotTransfer]'
    }
    attribute_map = {
        'more_items_remaining': 'more_items_remaining',
        'total_item_count': 'total_item_count',
        'continuation_token': 'continuation_token',
        'items': 'items',
        'total': 'total'
    }
    required_args = {
    }
    def __init__(
        self,
        more_items_remaining=None,
        total_item_count=None,
        continuation_token=None,
        items=None,
        total=None,
    ):
        # Store only the explicitly supplied values; __setattr__ validates keys.
        supplied = (
            ('more_items_remaining', more_items_remaining),
            ('total_item_count', total_item_count),
            ('continuation_token', continuation_token),
            ('items', items),
            ('total', total),
        )
        for attr_name, attr_value in supplied:
            if attr_value is not None:
                setattr(self, attr_name, attr_value)
    def __setattr__(self, key, value):
        # Only attribute names declared in the swagger model are accepted.
        if key in self.attribute_map:
            self.__dict__[key] = value
        else:
            raise KeyError("Invalid key `{}` for `ProtectionGroupSnapshotTransferGetResponse`".format(key))
    def __getattribute__(self, item):
        # Property sentinels mark unset fields; surface them as missing attributes.
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            raise AttributeError
        return value
    def to_dict(self):
        """Returns the model properties as a dict"""
        def _plain(v):
            # Convert nested swagger models (one level inside lists/dicts).
            if isinstance(v, list):
                return [e.to_dict() if hasattr(e, "to_dict") else e for e in v]
            if hasattr(v, "to_dict"):
                return v.to_dict()
            if isinstance(v, dict):
                return {k: (e.to_dict() if hasattr(e, "to_dict") else e)
                        for k, e in v.items()}
            return v
        result = {}
        for attr_name in self.swagger_types:
            if hasattr(self, attr_name):
                result[attr_name] = _plain(getattr(self, attr_name))
        # Generated boilerplate: only relevant if the model subclassed dict.
        if issubclass(ProtectionGroupSnapshotTransferGetResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, ProtectionGroupSnapshotTransferGetResponse)
                and self.__dict__ == other.__dict__)
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| true | true |
f7f91a28ad3a3759a0901289e41eb13277be61e7 | 3,072 | py | Python | data_preprocess.py | alexandrumeterez/talkwithmete | dc027a35b6b8b6dfdb9a86ccd5a3284e332a54b7 | [
"MIT"
] | null | null | null | data_preprocess.py | alexandrumeterez/talkwithmete | dc027a35b6b8b6dfdb9a86ccd5a3284e332a54b7 | [
"MIT"
] | null | null | null | data_preprocess.py | alexandrumeterez/talkwithmete | dc027a35b6b8b6dfdb9a86ccd5a3284e332a54b7 | [
"MIT"
] | null | null | null | import unicodedata
import re
from voc import Voc
MAX_LENGTH = 50  # Maximum sentence length (in words) to consider; longer pairs are filtered out
# Turn a Unicode string to plain ASCII, thanks to
# http://stackoverflow.com/a/518232/2809427
def unicodeToAscii(s):
    """Approximate ASCII form of *s*: NFD-decompose, then drop combining marks (Mn)."""
    decomposed = unicodedata.normalize('NFD', s)
    return ''.join(ch for ch in decomposed if unicodedata.category(ch) != 'Mn')
# Lowercase, trim, and remove non-letter characters
def normalizeString(s):
    """Lowercase/trim *s*, pad .!? with a space, drop other non-letters, squeeze whitespace."""
    text = unicodeToAscii(s.lower().strip())
    text = re.sub(r"([.!?])", r" \1", text)
    text = re.sub(r"[^a-zA-Z.!?]+", r" ", text)
    return re.sub(r"\s+", r" ", text).strip()
# Read query/response pairs and return a voc object
def readVocs(datafile, corpus_name):
    """Read tab-separated query/response pairs from *datafile*; return (Voc, pairs).

    Each line is split on tabs and every field is normalized.
    Fix: the original leaked the file handle; use a context manager.
    """
    print("Reading lines...")
    # Read the file and split into lines
    with open(datafile, encoding='utf-8') as f:
        lines = f.read().strip().split('\n')
    # Split every line into pairs and normalize
    pairs = [[normalizeString(s) for s in l.split('\t')] for l in lines]
    voc = Voc(corpus_name)
    return voc, pairs
# Returns True iff both sentences in a pair 'p' are under the MAX_LENGTH threshold
def filterPair(p, max_length=None):
    """Return True iff both sentences of pair *p* have a word count strictly between 1 and the limit.

    max_length generalizes the hard-coded module constant; the default (None)
    falls back to MAX_LENGTH, preserving the original call signature.
    """
    limit = MAX_LENGTH if max_length is None else max_length
    # Input sequences need to preserve the last word for EOS token
    return limit > len(p[0].split(' ')) > 1 and limit > len(p[1].split(' ')) > 1
# Filter pairs using filterPair condition
def filterPairs(pairs):
    """Keep only the pairs accepted by filterPair."""
    return list(filter(filterPair, pairs))
# Using the functions defined above, return a populated voc object and pairs list
def loadPrepareData(corpus_name, datafile):
    """Build a populated Voc and the filtered sentence pairs from *datafile*."""
    print("Start preparing training data ...")
    voc, pairs = readVocs(datafile, corpus_name)
    print("Read {!s} sentence pairs".format(len(pairs)))
    # Drop malformed lines, then length-filter the well-formed pairs.
    well_formed = [pair for pair in pairs if len(pair) == 2]
    pairs = filterPairs(well_formed)
    print("Trimmed to {!s} sentence pairs".format(len(pairs)))
    print("Counting words...")
    for question, answer in pairs:
        voc.addSentence(question)
        voc.addSentence(answer)
    print("Counted words:", voc.num_words)
    return voc, pairs
def trimRareWords(voc, pairs, MIN_COUNT):
    """Trim words used under MIN_COUNT from *voc* and drop pairs using them.

    voc       : Voc instance; trimmed in place via voc.trim(MIN_COUNT).
    pairs     : list of [input_sentence, output_sentence] pairs.
    MIN_COUNT : minimum word frequency kept by the vocabulary.
    Returns the filtered list of pairs (idiom fix: the manual flag/break
    loops are replaced with all()).
    """
    # Trim words used under the MIN_COUNT from the voc
    voc.trim(MIN_COUNT)
    def _all_kept(sentence):
        # True when every word of the sentence survived the trim.
        return all(word in voc.word2index for word in sentence.split(' '))
    # Only keep pairs that do not contain trimmed word(s) in their input or output sentence
    keep_pairs = [pair for pair in pairs if _all_kept(pair[0]) and _all_kept(pair[1])]
    print("Trimmed from {} pairs to {}, {:.4f} of total".format(len(pairs), len(keep_pairs),
                                                                len(keep_pairs) / len(pairs)))
    return keep_pairs
| 33.391304 | 95 | 0.629557 | import unicodedata
import re
from voc import Voc
MAX_LENGTH = 50  # Maximum sentence length (in words) to consider
def unicodeToAscii(s):
    """Turn a Unicode string into plain ASCII by dropping combining marks."""
    return ''.join(
        c for c in unicodedata.normalize('NFD', s)
        if unicodedata.category(c) != 'Mn'
    )
def normalizeString(s):
    """Lowercase, trim, pad .!? with a space, strip other non-letters, squeeze spaces."""
    s = unicodeToAscii(s.lower().strip())
    s = re.sub(r"([.!?])", r" \1", s)
    s = re.sub(r"[^a-zA-Z.!?]+", r" ", s)
    s = re.sub(r"\s+", r" ", s).strip()
    return s
def readVocs(datafile, corpus_name):
    """Read tab-separated query/response pairs and return (Voc, normalized pairs)."""
    print("Reading lines...")
    # NOTE(review): the file handle from open() is never closed here.
    lines = open(datafile, encoding='utf-8').read().strip().split('\n')
    pairs = [[normalizeString(s) for s in l.split('\t')] for l in lines]
    voc = Voc(corpus_name)
    return voc, pairs
def filterPair(p):
    """True when both sentences contain more than 1 and fewer than MAX_LENGTH words."""
    left_words = len(p[0].split(' '))
    right_words = len(p[1].split(' '))
    return 1 < left_words < MAX_LENGTH and 1 < right_words < MAX_LENGTH
def filterPairs(pairs):
    """Keep only the pairs accepted by filterPair."""
    return list(filter(filterPair, pairs))
def loadPrepareData(corpus_name, datafile):
    """Read sentence pairs, filter them, and build the vocabulary.

    Returns (voc, pairs) where voc counts every word of the kept pairs.
    """
    print("Start preparing training data ...")
    voc, pairs = readVocs(datafile, corpus_name)
    print("Read {!s} sentence pairs".format(len(pairs)))
    # Drop malformed lines (not exactly two fields) before the length filter.
    pairs = list(filter(lambda x: len(x) == 2, pairs))
    pairs = filterPairs(pairs)
    print("Trimmed to {!s} sentence pairs".format(len(pairs)))
    print("Counting words...")
    # Count words of both sides of every surviving pair.
    for pair in pairs:
        voc.addSentence(pair[0])
        voc.addSentence(pair[1])
    print("Counted words:", voc.num_words)
    return voc, pairs
def trimRareWords(voc, pairs, MIN_COUNT):
    """Trim rare words (< MIN_COUNT uses) from voc, then drop every pair
    containing a trimmed word on either side.

    ``voc`` is modified in place; the filtered pair list is returned.
    """
    voc.trim(MIN_COUNT)
    keep_pairs = []
    for pair in pairs:
        input_sentence = pair[0]
        output_sentence = pair[1]
        # Keep the pair only when all words of both sentences survived the trim.
        keep_input = all(word in voc.word2index for word in input_sentence.split(' '))
        keep_output = all(word in voc.word2index for word in output_sentence.split(' '))
        if keep_input and keep_output:
            keep_pairs.append(pair)
    # Avoid ZeroDivisionError in the report when pairs is empty.
    total = len(pairs) if pairs else 1
    print("Trimmed from {} pairs to {}, {:.4f} of total".format(len(pairs), len(keep_pairs),
                                                                len(keep_pairs) / total))
    return keep_pairs
| true | true |
f7f91be39c48179d45fa6f9c2acb5819e494b0c2 | 1,863 | py | Python | warGame.py | f-forte-carpio/PyCards | 9b0c1a9fab068c1597209d81a4c50ee349b21342 | [
"Unlicense"
] | null | null | null | warGame.py | f-forte-carpio/PyCards | 9b0c1a9fab068c1597209d81a4c50ee349b21342 | [
"Unlicense"
] | null | null | null | warGame.py | f-forte-carpio/PyCards | 9b0c1a9fab068c1597209d81a4c50ee349b21342 | [
"Unlicense"
] | null | null | null | import pygame as Pg
import CardGameBasicFrame as Cg
players = Cg.give_cards(2, 26)
print('Welcome to the card game War')
Pg.init()
surface=Pg.display.set_mode((100,100))
def main():
    """Play War interactively until one player holds the whole deck."""

    def check_cards(index):
        """Compare the cards at `index`; the winner takes all staked cards.

        On a tie ("WAR") recurse four cards deeper (three face-down plus one
        face-up); when a player cannot complete the war, the larger hand
        takes the pot.
        """
        p1 = players[0][index][1]
        p2 = players[1][index][1]
        print("player 1's draw:", Cg.CardsAceTop[p1], "|| player 2's draw:", Cg.CardsAceTop[p2], '\n')
        if p1 > p2:
            # Player 1 wins: player 2's staked cards move to player 1.
            temp = players[1][slice(0, index + 1, 1)]
            players[0].extend(temp)
            for i in range(index + 1):
                players[1].pop(0)
            print('Player 1 wins that draw\n')
        elif p1 == p2:
            print('WAR')
            if len(players[0]) > 4 and len(players[1]) > 4:
                check_cards(index + 4)
            else:
                # A player cannot stake enough cards for the war:
                # the player with more cards takes the pot.
                if len(players[0]) > len(players[1]):
                    for i in range(index + 1):
                        players[0].append(players[1].pop(0))
                else:
                    # BUG FIX: this branch previously also moved cards from
                    # player 2 to player 1 (copy-paste); player 2 must win here.
                    for i in range(index + 1):
                        players[1].append(players[0].pop(0))
        else:
            # Player 2 wins: player 1's staked cards move to player 2.
            temp = players[0][slice(0, index + 1, 1)]
            players[1].extend(temp)
            for i in range(index + 1):
                players[0].pop(0)
            print('Player 2 wins that draw\n')

    win = False
    while not win:
        input('Press enter to draw:\n')
        # The game ends once either player holds all 52 cards.
        if len(players[0]) < 52 and len(players[1]) < 52:
            check_cards(0)
            print('card amounts:\nplayer 1:', len(players[0]), '|| player 2:', len(players[1]), '\n')
        else:
            win = True
            if len(players[0]) > len(players[1]):
                print('\nplayer 1 wins!!!')
            else:
                print('\nplayer 2 wins!!!')
main() | 31.576271 | 103 | 0.457327 | import pygame as Pg
import CardGameBasicFrame as Cg
players = Cg.give_cards(2, 26)
print('Welcome to the card game War')
Pg.init()
surface=Pg.display.set_mode((100,100))
def main():
    """Play War interactively until one player holds the whole deck."""

    def check_cards(index):
        """Resolve the face-off at `index`; winner takes the staked cards.

        Ties recurse four cards deeper; an incomplete war goes to the
        player with more cards.
        """
        p1 = players[0][index][1]
        p2 = players[1][index][1]
        print("player 1's draw:", Cg.CardsAceTop[p1], "|| player 2's draw:", Cg.CardsAceTop[p2], '\n')
        if p1 > p2:
            temp = players[1][slice(0, index + 1, 1)]
            players[0].extend(temp)
            for i in range(index + 1):
                players[1].pop(0)
            print('Player 1 wins that draw\n')
        elif p1 == p2:
            print('WAR')
            if len(players[0]) > 4 and len(players[1]) > 4:
                check_cards(index + 4)
            else:
                if len(players[0]) > len(players[1]):
                    for i in range(index + 1):
                        players[0].append(players[1].pop(0))
                else:
                    # BUG FIX: both branches used to award the pot to
                    # player 1; player 2 must take the cards here.
                    for i in range(index + 1):
                        players[1].append(players[0].pop(0))
        else:
            temp = players[0][slice(0, index + 1, 1)]
            players[1].extend(temp)
            for i in range(index + 1):
                players[0].pop(0)
            print('Player 2 wins that draw\n')

    win = False
    while not win:
        input('Press enter to draw:\n')
        if len(players[0]) < 52 and len(players[1]) < 52:
            check_cards(0)
            print('card amounts:\nplayer 1:', len(players[0]), '|| player 2:', len(players[1]), '\n')
        else:
            win = True
            if len(players[0]) > len(players[1]):
                print('\nplayer 1 wins!!!')
            else:
                print('\nplayer 2 wins!!!')
main() | true | true |
f7f91e3af56179c30d291b22ea147491cd6e61e3 | 1,287 | py | Python | 107 Binary Tree Level Order Traversal II.py | scorpionpd/LeetCode-all | 0d65494f37d093d650b83b93409e874c041f3abe | [
"MIT"
] | null | null | null | 107 Binary Tree Level Order Traversal II.py | scorpionpd/LeetCode-all | 0d65494f37d093d650b83b93409e874c041f3abe | [
"MIT"
] | null | null | null | 107 Binary Tree Level Order Traversal II.py | scorpionpd/LeetCode-all | 0d65494f37d093d650b83b93409e874c041f3abe | [
"MIT"
] | null | null | null | """
Given a binary tree, return the bottom-up level order traversal of its nodes' values. (ie, from left to right, level by
level from leaf to root).
For example:
Given binary tree {3,9,20,#,#,15,7},
3
/ \
9 20
/ \
15 7
return its bottom-up level order traversal as:
[
[15,7],
[9,20],
[3]
]
"""
__author__ = 'Danyang'
# Definition for a binary tree node
class TreeNode:
    """Binary-tree node: a value plus optional left/right children."""
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution:
    def levelOrderBottom(self, root):
        """
        Bottom-up level order traversal (BFS), deepest level first.
        :param root: TreeNode
        :return: list of lists of node values
        """
        if not root:
            return []
        result = []
        next_level = [root]
        while next_level:
            current_level = next_level
            # BUG FIX: on Python 3 map() returns a lazy iterator, so the
            # original inserted map objects instead of value lists.
            result.insert(0, [node.val for node in current_level])
            next_level = []
            for element in current_level:
                if element.left:
                    next_level.append(element.left)
                if element.right:
                    next_level.append(element.right)
        return result
if __name__=="__main__":
    # Smoke test: traverse a single-node tree.
    Solution().levelOrderBottom(TreeNode(1))
| 20.758065 | 140 | 0.566434 | __author__ = 'Danyang'
class TreeNode:
    """Binary-tree node: a value plus optional left/right children."""
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution:
    def levelOrderBottom(self, root):
        """BFS level-order traversal, returned bottom-up (deepest first)."""
        if not root:
            return []
        result = []
        next_level = [root]
        while next_level:
            current_level = next_level
            # BUG FIX: map() is lazy on Python 3; materialize the values.
            result.insert(0, [node.val for node in current_level])
            next_level = []
            for element in current_level:
                if element.left:
                    next_level.append(element.left)
                if element.right:
                    next_level.append(element.right)
        return result
if __name__=="__main__":
Solution().levelOrderBottom(TreeNode(1))
| true | true |
f7f91e8d1f21cacff6ca3536401360aa17ebea6b | 8,373 | py | Python | breathe-easy.py | shuvrag/breathe-easy | 0d20e5f7a81669e1e2f298fb8999837310c0593c | [
"MIT"
] | null | null | null | breathe-easy.py | shuvrag/breathe-easy | 0d20e5f7a81669e1e2f298fb8999837310c0593c | [
"MIT"
] | null | null | null | breathe-easy.py | shuvrag/breathe-easy | 0d20e5f7a81669e1e2f298fb8999837310c0593c | [
"MIT"
] | null | null | null | import streamlit as st
import pandas as pd
import numpy as np
import datetime
from datetime import timedelta
import plotly.graph_objects as go
timechoice = [' ', '00:00:00', '01:00:00', '02:00:00', '03:00:00',
'04:00:00', '05:00:00', '06:00:00', '07:00:00',
'08:00:00', '09:00:00', '10:00:00', '11:00:00',
'12:00:00', '13:00:00', '14:00:00', '15:00:00',
'16:00:00', '17:00:00', '18:00:00', '19:00:00',
'20:00:00', '21:00:00', '22:00:00', '23:00:00']
# Hour labels for the next 24 hours, starting at 10:00.
# (The earlier pandas/timedelta construction of `time` was dead code: its
# result was immediately overwritten by this literal list, so it is removed.)
time = ['10:00:00', '11:00:00',
        '12:00:00', '13:00:00', '14:00:00', '15:00:00',
        '16:00:00', '17:00:00', '18:00:00', '19:00:00',
        '20:00:00', '21:00:00', '22:00:00', '23:00:00',
        '00:00:00', '01:00:00', '02:00:00', '03:00:00',
        '04:00:00', '05:00:00', '06:00:00', '07:00:00',
        '08:00:00', '09:00:00']
# Forecast PM2.5 concentrations (micrograms/m^3), one per hour.
# NOTE(review): pm has 25 entries while time has 24 -- the last value never
# maps to an hour label; confirm which series is off by one.
pm = [166, 140, 113, 90, 87, 93, 90, 77, 76, 87, 116, 135, 144, 132, 105, 103, 143, 131, 154, 182, 184, 187, 157, 129, 118]
#st.header("Beta Distribution Tutorial")
st.title('Breathe-Easy PM\u2082\u22C5\u2085 Forecast')
#input_city = st.selectbox("What city are you in?", ["Kolkata"])
#input_date = st.text_input('What is the date and time you are thinking of going out?', '2020-01-11 15:00:00')
# NOTE(review): input_date is assigned but never used below -- dead value.
input_date = '2020-02-18 15:00:00'
input_date = pd.to_datetime(input_date)
#input_day = st.date_input('Choose the date when you want to go outside:', input_date)
# User selections: date and hour of the planned outing.
input_day = st.date_input('Please choose the date when you want to go outside:')
#st.write(input_day)
#input_time = st.text_input('Choose the time when you want to go outside?', '10:00:00')
input_time = st.selectbox('Please choose the time when you want to go outside:', timechoice)
#in_date = pd.to_datetime(prediction, format = '%j')
#in_date = in_date.replace(year = 2020)
#input_datetime = pd.to_datetime(input_date)
#st.write(input_datetime)
# Combine the two widgets into a single timestamp for display.
input_date_time = str(input_day) + ' ' + input_time
#st.write('The particulate matter and weather forecast in Kolkata at', input_date_time, 'is:')
input_date_time = pd.to_datetime(input_date_time)
# ' ' is the placeholder first entry of timechoice; skip it.
if input_time != ' ':
    st.write('The particulate matter forecast in Kolkata at', input_date_time, 'is:', pm[time.index(input_time)])
# st.write('The particulate matter and weather forecast in Kolkata for the next 24 hours is as follows:')
# st.write('The best particulate matter forecast in Kolkata is at:')
fig1 = go.Figure()
# Add scatter trace for line
fig1.add_trace(go.Scatter(
x = time,
y = pm,
mode="lines",
name="pollutant concentration",
hovertext=["Temp 82, Hmdty 82, PM2.5 166", "Temp 81, Hmdty 81, PM2.5 140", "Temp 79, Hmdty 79, PM2.5 113",
"Temp 77, Hmdty 77, PM2.5 90", "Temp 73, Hmdty 73, PM2.5 87", "Temp 72, Hmdty 72, PM2.5 93",
"Temp 70, Hmdty 70, PM2.5 90", "Temp 66, Hmdty 72, PM2.5 77", "Temp 64, Hmdty 70, PM2.5 76",
"Temp 63, Hmdty 78, PM2.5 87", "Temp 63, Hmdty 83, PM2.5 116", "Temp 61, Hmdty 88, PM2.5 135",
"Temp 61, Hmdty 82, PM2.5 144",
"Temp 63, Hmdty 82, PM2.5 132", "Temp 68, Hmdty 88, PM2.5 105", "Temp 72, Hmdty 88, PM2.5 103",
"Temp 77, Hmdty 82, PM2.5 143", "Temp 81, Hmdty 73, PM2.5 131", "Temp 82, Hmdty 64, PM2.5 154",
"Temp 84, Hmdty 57, PM2.5 182", "Temp 84, Hmdty 51, PM2.5 184", "Temp 84, Hmdty 48, PM2.5 187",
"Temp 82, Hmdty 42, PM2.5 157", "Temp 81, Hmdty 48, PM2.5 129", "Temp 77, Hmdty 42, PM2.5 118"
],
hoverinfo="text",
marker=dict(color="green"),
showlegend=False
))
fig1.update_layout(
title="Pollution for the next 24 hours",
xaxis_title="Time",
yaxis_title="Conc. of PM 2.5 in micrograms/m^3",
font=dict(
family="Gravitas One, monospace",
size=18,
color="#7f7f7f"
),
shapes=[
go.layout.Shape(
type="rect",
# x-reference is assigned to the x-values
xref="paper",
# y-reference is assigned to the plot paper [0,1]
yref="y",
x0=0,
y0=0,
x1=1,
y1=50,
fillcolor="Green",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=50,
x1=1,
y1=100,
fillcolor="Yellow",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=100,
x1=1,
y1=150,
fillcolor="Orange",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=150,
x1=1,
y1=200,
fillcolor="Red",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=201,
x1=1,
y1=300,
fillcolor="Purple",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=300,
x1=1,
y1=500,
fillcolor="Purple",
opacity=0.4,
layer="below",
line_width=0,
),
# dict(
go.layout.Shape(
type="rect",
xref="x",
yref="y",
x0=7,
y0=0,
x1=8,
y1=pm[7],
fillcolor="Blue",
opacity=0.5,
layer="below",
line_width=0,
)
]
)
st.write(fig1)
#fig.show()
# Temperature line chart over the same 24-hour window.
fig2 = go.Figure()
# Add scatter trace for line
fig2.add_trace(go.Scatter(
    #x=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24],
    x = time,
    # y=[77, 81, 82, 82, 84, 82, 82, 79, 73, 75, 72, 72, 72, 68, 66, 64, 64, 64, 64, 63, 64, 63, 61, 61, 61],
    # NOTE(review): y holds 25 values but x (time) holds 24 -- confirm alignment.
    y = [82, 81, 79, 77, 73, 72, 70, 72, 70, 66, 64, 63, 63, 61, 61, 63, 68, 72, 77, 81, 82, 84, 84, 84, 82],
    mode="lines",
    name="temperature"
))
fig2.update_layout(
    title="Temperature for the next 24 hours",
    xaxis_title="Time",
    yaxis_title="Temperature (in F)",
    font=dict(
        family="Gravitas One, monospace",
        size=18,
        color="#7f7f7f"
    )
)
st.write(fig2)
# Humidity line chart over the same 24-hour window.
fig3 = go.Figure()
# Add scatter trace for line
fig3.add_trace(go.Scatter(
    # x=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24],
    x = time,
    # y=[57, 51, 48, 45, 42, 42, 39, 44, 50, 57, 57, 60, 60, 68, 73, 77, 73, 73, 77, 77, 77, 82, 88, 88, 94],
    # NOTE(review): 25 values vs 24 x labels, and the first entries mirror the
    # temperature series above -- verify this is really humidity data.
    y = [82, 81, 79, 77, 73, 72, 70, 72, 70, 78, 83, 88, 82, 82, 88, 88, 82, 73, 64, 57, 51, 48, 42, 48, 42],
    mode="lines",
    name="humidity"
))
fig3.update_layout(
    title="Humidity for the next 24 hours",
    xaxis_title="Time",
    yaxis_title="Humidity %",
    font=dict(
        family="Gravitas One, monospace",
        size=18,
        color="#7f7f7f"
    )
)
st.write(fig3)
#fig.show()
| 31.958015 | 123 | 0.482862 | import streamlit as st
import pandas as pd
import numpy as np
import datetime
from datetime import timedelta
import plotly.graph_objects as go
timechoice = [' ', '00:00:00', '01:00:00', '02:00:00', '03:00:00',
'04:00:00', '05:00:00', '06:00:00', '07:00:00',
'08:00:00', '09:00:00', '10:00:00', '11:00:00',
'12:00:00', '13:00:00', '14:00:00', '15:00:00',
'16:00:00', '17:00:00', '18:00:00', '19:00:00',
'20:00:00', '21:00:00', '22:00:00', '23:00:00']
t1 = '10:00:00'
t1 = pd.to_datetime(t1)
time=[]
time.append(t1)
for i in range(1,25):
time.append(t1 + timedelta(hours=i))
time = ['10:00:00', '11:00:00',
'12:00:00', '13:00:00', '14:00:00', '15:00:00',
'16:00:00', '17:00:00', '18:00:00', '19:00:00',
'20:00:00', '21:00:00', '22:00:00', '23:00:00',
'00:00:00', '01:00:00', '02:00:00', '03:00:00',
'04:00:00', '05:00:00', '06:00:00', '07:00:00',
'08:00:00', '09:00:00']
pm = [166, 140, 113, 90, 87, 93, 90, 77, 76, 87, 116, 135, 144, 132, 105, 103, 143, 131, 154, 182, 184, 187, 157, 129, 118]
st.title('Breathe-Easy PM\u2082\u22C5\u2085 Forecast')
input_date = '2020-02-18 15:00:00'
input_date = pd.to_datetime(input_date)
input_day = st.date_input('Please choose the date when you want to go outside:')
input_time = st.selectbox('Please choose the time when you want to go outside:', timechoice)
input_date_time = str(input_day) + ' ' + input_time
input_date_time = pd.to_datetime(input_date_time)
if input_time != ' ':
st.write('The particulate matter forecast in Kolkata at', input_date_time, 'is:', pm[time.index(input_time)])
fig1 = go.Figure()
fig1.add_trace(go.Scatter(
x = time,
y = pm,
mode="lines",
name="pollutant concentration",
hovertext=["Temp 82, Hmdty 82, PM2.5 166", "Temp 81, Hmdty 81, PM2.5 140", "Temp 79, Hmdty 79, PM2.5 113",
"Temp 77, Hmdty 77, PM2.5 90", "Temp 73, Hmdty 73, PM2.5 87", "Temp 72, Hmdty 72, PM2.5 93",
"Temp 70, Hmdty 70, PM2.5 90", "Temp 66, Hmdty 72, PM2.5 77", "Temp 64, Hmdty 70, PM2.5 76",
"Temp 63, Hmdty 78, PM2.5 87", "Temp 63, Hmdty 83, PM2.5 116", "Temp 61, Hmdty 88, PM2.5 135",
"Temp 61, Hmdty 82, PM2.5 144",
"Temp 63, Hmdty 82, PM2.5 132", "Temp 68, Hmdty 88, PM2.5 105", "Temp 72, Hmdty 88, PM2.5 103",
"Temp 77, Hmdty 82, PM2.5 143", "Temp 81, Hmdty 73, PM2.5 131", "Temp 82, Hmdty 64, PM2.5 154",
"Temp 84, Hmdty 57, PM2.5 182", "Temp 84, Hmdty 51, PM2.5 184", "Temp 84, Hmdty 48, PM2.5 187",
"Temp 82, Hmdty 42, PM2.5 157", "Temp 81, Hmdty 48, PM2.5 129", "Temp 77, Hmdty 42, PM2.5 118"
],
hoverinfo="text",
marker=dict(color="green"),
showlegend=False
))
fig1.update_layout(
title="Pollution for the next 24 hours",
xaxis_title="Time",
yaxis_title="Conc. of PM 2.5 in micrograms/m^3",
font=dict(
family="Gravitas One, monospace",
size=18,
color="#7f7f7f"
),
shapes=[
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=0,
x1=1,
y1=50,
fillcolor="Green",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=50,
x1=1,
y1=100,
fillcolor="Yellow",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=100,
x1=1,
y1=150,
fillcolor="Orange",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=150,
x1=1,
y1=200,
fillcolor="Red",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=201,
x1=1,
y1=300,
fillcolor="Purple",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="paper",
yref="y",
x0=0,
y0=300,
x1=1,
y1=500,
fillcolor="Purple",
opacity=0.4,
layer="below",
line_width=0,
),
go.layout.Shape(
type="rect",
xref="x",
yref="y",
x0=7,
y0=0,
x1=8,
y1=pm[7],
fillcolor="Blue",
opacity=0.5,
layer="below",
line_width=0,
)
]
)
st.write(fig1)
fig2 = go.Figure()
fig2.add_trace(go.Scatter(
x = time,
y = [82, 81, 79, 77, 73, 72, 70, 72, 70, 66, 64, 63, 63, 61, 61, 63, 68, 72, 77, 81, 82, 84, 84, 84, 82],
mode="lines",
name="temperature"
))
fig2.update_layout(
title="Temperature for the next 24 hours",
xaxis_title="Time",
yaxis_title="Temperature (in F)",
font=dict(
family="Gravitas One, monospace",
size=18,
color="#7f7f7f"
)
)
st.write(fig2)
fig3 = go.Figure()
fig3.add_trace(go.Scatter(
x = time,
y = [82, 81, 79, 77, 73, 72, 70, 72, 70, 78, 83, 88, 82, 82, 88, 88, 82, 73, 64, 57, 51, 48, 42, 48, 42],
mode="lines",
name="humidity"
))
fig3.update_layout(
title="Humidity for the next 24 hours",
xaxis_title="Time",
yaxis_title="Humidity %",
font=dict(
family="Gravitas One, monospace",
size=18,
color="#7f7f7f"
)
)
st.write(fig3)
| true | true |
f7f91ea58a510a63d8feeffbe35f9df98ff60938 | 784 | py | Python | sample_code/chains.py | Nov05/Lambda-Blockchain | 6aee033969c0ba587198daa2bedbc89f75d87c0c | [
"MIT"
] | null | null | null | sample_code/chains.py | Nov05/Lambda-Blockchain | 6aee033969c0ba587198daa2bedbc89f75d87c0c | [
"MIT"
] | null | null | null | sample_code/chains.py | Nov05/Lambda-Blockchain | 6aee033969c0ba587198daa2bedbc89f75d87c0c | [
"MIT"
] | null | null | null | import random
def longest_linked_list_chain(keys, buckets, loops=10):
    """Hash `keys` random keys into `buckets` buckets, `loops` times,
    printing the longest chain (maximum bucket occupancy) of each run.
    """
    for _ in range(loops):
        # Start every bucket at zero so empty buckets still appear.
        key_counts = dict.fromkeys(range(buckets), 0)
        for _ in range(keys):
            bucket = hash(str(random.random())) % buckets
            key_counts[bucket] += 1
        largest_n = max(key_counts.values())
        print(f"Longest Linked List Chain for {keys} keys in {buckets} buckets (Load Factor: {keys/buckets:.2f}): {largest_n}")
longest_linked_list_chain(500, 1000, 5)
| 28 | 127 | 0.59949 | import random
def longest_linked_list_chain(keys, buckets, loops=10):
for i in range(loops):
key_counts = {}
for i in range(buckets):
key_counts[i] = 0
for i in range(keys):
random_key = str(random.random())
hash_index = hash(random_key) % buckets
key_counts[hash_index] += 1
largest_n = 0
for key in key_counts:
if key_counts[key] > largest_n:
largest_n = key_counts[key]
print(f"Longest Linked List Chain for {keys} keys in {buckets} buckets (Load Factor: {keys/buckets:.2f}): {largest_n}")
longest_linked_list_chain(500, 1000, 5)
| true | true |
f7f91f033bd6cc73c04ba7bc1b09a753616f98d4 | 697 | py | Python | tests/check_style.py | flysky2008/autogluon | 7ad9e5601cf17e616950ae7ef2e84d77b04832e4 | [
"Apache-2.0"
] | null | null | null | tests/check_style.py | flysky2008/autogluon | 7ad9e5601cf17e616950ae7ef2e84d77b04832e4 | [
"Apache-2.0"
] | null | null | null | tests/check_style.py | flysky2008/autogluon | 7ad9e5601cf17e616950ae7ef2e84d77b04832e4 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Check style"""
import sys
from subprocess import Popen, PIPE
import logging
def main():
    """Run flake8 and fail (return 1) if warnings exceed the accepted baseline."""
    logging.getLogger().setLevel(logging.INFO)
    logging.info("PEP8 Style check")
    # flake8 --count prints the total warning count as its final line.
    proc = Popen(['flake8', '--count'], stdout=PIPE)
    stdout = proc.communicate()[0]
    count = int(stdout.splitlines()[-1].decode())
    if count > 0:
        logging.warning("%d PEP8 warnings remaining", count)
    # 3438 is the accepted baseline; only *new* warnings fail the check.
    if count > 3438:
        logging.warning("Additional PEP8 warnings were introducing, style check fails")
        return 1
    logging.info("Passed")
    return 0
if __name__ == '__main__':
sys.exit(main())
| 25.814815 | 87 | 0.652798 |
import sys
from subprocess import Popen, PIPE
import logging
def main():
logging.getLogger().setLevel(logging.INFO)
logging.info("PEP8 Style check")
flake8_proc = Popen(['flake8', '--count'], stdout=PIPE)
flake8_out = flake8_proc.communicate()[0]
lines = flake8_out.splitlines()
count = int(lines[-1].decode())
if count > 0:
logging.warning("%d PEP8 warnings remaining", count)
if count > 3438:
logging.warning("Additional PEP8 warnings were introducing, style check fails")
return 1
logging.info("Passed")
return 0
if __name__ == '__main__':
sys.exit(main())
| true | true |
f7f91f4ce0a2b0628f58e9cbecebbecd93ec5caf | 6,584 | py | Python | src/grammar_learner/learner.py | akolonin/language-learning | 022c34a3066aa97ea0d007419e026247a4f78dd5 | [
"MIT"
] | null | null | null | src/grammar_learner/learner.py | akolonin/language-learning | 022c34a3066aa97ea0d007419e026247a4f78dd5 | [
"MIT"
] | null | null | null | src/grammar_learner/learner.py | akolonin/language-learning | 022c34a3066aa97ea0d007419e026247a4f78dd5 | [
"MIT"
] | null | null | null | # language-learning/src/learner.py # 81213
import os, time # pickle, numpy as np, pandas as pd
from copy import deepcopy
from shutil import copy2 as copy
from collections import OrderedDict, Counter
from .utl import UTC, kwa, sec2string
from .read_files import check_dir, check_mst_files
from .pparser import files2links
from .category_learner import learn_categories, cats2list #, add_disjuncts
from .grammar_inducer import induce_grammar, check_cats, prune_cats, add_disjuncts
from .generalization import generalize_categories, generalize_rules, \
generalise_rules, add_upper_level # 81122
from .write_files import list2file, save_link_grammar, save_cat_tree
__all__ = ['learn_grammar']
def learn(**kwargs):
    """Run the full grammar-learning pipeline.

    Reads parse files, clusters words into categories, induces grammar
    rules, optionally generalizes categories and rules, then saves the
    category tree and the Link Grammar dictionary.

    :param kwargs:  configuration (input_parses, output_grammar, clustering,
                    grammar_rules, verbose, ...)
    :return:        (rules, log) -- induced rule set and an ordered log of
                    every pipeline stage.
    """
    start = time.time()
    log = OrderedDict({'start': str(UTC()), 'learn_grammar': 'v.0.7.81109'})
    # Resolve input/output locations.
    input_parses = kwargs['input_parses']
    output_grammar = kwargs['output_grammar']
    output_categories = kwa('', 'output_categories', **kwargs)
    output_statistics = kwa('', 'output_statistics', **kwargs)
    temp_dir = kwa('', 'temp_dir', **kwargs)
    if os.path.isdir(output_grammar):
        prj_dir = output_grammar
    else:
        prj_dir = os.path.dirname(output_grammar)
    log.update({'project_directory': prj_dir})
    if output_categories == '':
        output_categories = prj_dir
    if output_statistics != '':  # TODO: check path: filename/dir?
        corpus_stats_file = output_statistics
    else:
        corpus_stats_file = prj_dir + '/corpus_stats.txt'
    if temp_dir != '':
        if os.path.isdir(temp_dir):
            kwargs['tmpath'] = temp_dir
    context = kwa(1, 'context', **kwargs)
    clustering = kwa('kmeans', 'clustering', **kwargs)  # TODO: update
    cats_gen = kwa('off', 'categories_generalization', **kwargs)
    grammar_rules = kwa(1, 'grammar_rules', **kwargs)
    verbose = kwa('none', 'verbose', **kwargs)

    # Parse input files into word links and save corpus statistics.
    files, re01 = check_mst_files(input_parses, verbose)
    log.update(re01)
    kwargs['input_files'] = files
    links, re02 = files2links(**kwargs)
    log.update(re02)
    list2file(re02['corpus_stats'], corpus_stats_file)
    log.update({'corpus_stats_file': corpus_stats_file})

    # Cluster words into categories.
    categories, re03 = learn_categories(links, **kwargs)
    log.update(re03)
    if 'corpus_stats' in log and 'cleaned_words' in re03:  # 81213
        log['corpus_stats'].extend([
            ['Number of unique words after cleanup', re03['cleaned_words']],
            ['Number of unique features after cleanup', re03['clean_features']]])
        list2file(log['corpus_stats'], corpus_stats_file)

    '''Generalize word categories'''
    if cats_gen == 'jaccard' or (cats_gen == 'auto' and clustering == 'group'):
        categories, re04 = generalize_categories(categories, **kwargs)
        log.update(re04)
    elif cats_gen == 'cosine' or (cats_gen == 'auto' and clustering == 'kmeans'):
        log.update({'generalization': 'none: vector-similarity based - maybe some day...'})
    else:
        log.update({'generalization': 'none: ' + str(cats_gen)})

    '''Learn grammar'''
    if grammar_rules != context:
        # Re-parse with the rule-level context, then restore the original.
        context = kwargs['context']
        kwargs['context'] = kwargs['grammar_rules']
        links, re06 = files2links(**kwargs)
        kwargs['context'] = context
        # BUG FIX: was `'disjuncts' not in 'categories'` -- a substring test
        # against the string literal, which is always True.  Test the dict.
        if 'disjuncts' not in categories:  # k-means, sparse agglomerative clustering
            categories = add_disjuncts(categories, links, **kwargs)

    # "fully connected rules": every cluster connected to all clusters  # 80825
    if kwargs['grammar_rules'] < 0:
        rules = deepcopy(categories)
        clusters = [i for i, x in enumerate(rules['cluster']) if i > 0 and x is not None]
        rule_list = [tuple([-x]) for x in clusters] + [tuple([x]) for x in clusters]
        for cluster in clusters:
            rules['disjuncts'][cluster] = set(rule_list)
    else:
        rules, re07 = induce_grammar(categories, **kwargs)
    lengths = [len(x) for x in rules['disjuncts']]
    if verbose in ['max', 'debug']:
        print('N clusters = len(rules[disjuncts]-1):', len(rules['disjuncts']) - 1)
        print('Rule set lengths:', lengths)

    '''Generalize grammar rules'''  # 81121
    if 'rules_generalization' in kwargs:
        if kwargs['rules_generalization'] in ['jaccard', 'legacy']:
            rules, re08 = generalize_rules(rules, **kwargs)
            log.update(re08)
        elif kwargs['rules_generalization'] in ['hierarchical', 'fast', 'updated', 'new']:
            rules, re08 = generalise_rules(rules, **kwargs)  # 81121
            log.update(re08)
    if 'log+' in verbose:
        log['rule_sizes'] = dict(Counter(
            [len(x) for i, x in enumerate(rules['words']) if rules['parent'][i] == 0]))

    '''Save word category tree, Link Grammar files: cat_tree.txt, dict...dict'''
    if 'top_level' in kwargs and kwargs['top_level'] > -1:  # 81126 3rd hierarchy level over rules
        tree, _ = add_upper_level(rules, **kwargs)
        re09 = save_cat_tree(tree, output_categories, verbose='none')
    else:
        re09 = save_cat_tree(rules, output_categories, verbose='none')
    # TODO: check file save error?
    log.update(re09)
    re10 = save_link_grammar(rules, output_grammar, grammar_rules)
    log.update(re10)
    log.update({'finish': str(UTC())})
    log.update({'grammar_learn_time': sec2string(time.time() - start)})
    return rules, log  # 81126 FIXME?
def learn_grammar(**kwargs):  # Backwards compatibility with legacy calls
    """Legacy wrapper around learn(): discard the rules, return only the log."""
    _, log = learn(**kwargs)
    return log
# Notes:
# 80802: poc05.py/category_learner ⇒ category_learner.py/learn_categories
# 80825: random clusters, interconnected ⇒ cleanup, commit 80828
# 81021 cleanup: Grammar Learner 0.6
# 81102 sparse wordspace agglomerative clustering
# 81126 def learn_grammar ⇒ def learn + decorator
# 81204-07 test and block (snooze) data pruning with max_disjuncts, etc... | 41.670886 | 98 | 0.652491 | os, time
from copy import deepcopy
from shutil import copy2 as copy
from collections import OrderedDict, Counter
from .utl import UTC, kwa, sec2string
from .read_files import check_dir, check_mst_files
from .pparser import files2links
from .category_learner import learn_categories, cats2list
from .grammar_inducer import induce_grammar, check_cats, prune_cats, add_disjuncts
from .generalization import generalize_categories, generalize_rules, \
generalise_rules, add_upper_level
from .write_files import list2file, save_link_grammar, save_cat_tree
__all__ = ['learn_grammar']
def learn(**kwargs):
start = time.time()
log = OrderedDict({'start': str(UTC()), 'learn_grammar': 'v.0.7.81109'})
input_parses = kwargs['input_parses']
output_grammar = kwargs['output_grammar']
output_categories = kwa('', 'output_categories', **kwargs)
output_statistics = kwa('', 'output_statistics', **kwargs)
temp_dir = kwa('', 'temp_dir', **kwargs)
if os.path.isdir(output_grammar):
prj_dir = output_grammar
else:
prj_dir = os.path.dirname(output_grammar)
log.update({'project_directory': prj_dir})
if output_categories == '':
output_categories = prj_dir
if output_statistics != '':
corpus_stats_file = output_statistics
else:
corpus_stats_file = prj_dir + '/corpus_stats.txt'
if temp_dir != '':
if os.path.isdir(temp_dir):
kwargs['tmpath'] = temp_dir
context = kwa(1, 'context', **kwargs)
clustering = kwa('kmeans', 'clustering', **kwargs)
cats_gen = kwa('off', 'categories_generalization', **kwargs)
grammar_rules = kwa(1, 'grammar_rules', **kwargs)
verbose = kwa('none', 'verbose', **kwargs)
files, re01 = check_mst_files(input_parses, verbose)
log.update(re01)
kwargs['input_files'] = files
links, re02 = files2links(**kwargs)
log.update(re02)
list2file(re02['corpus_stats'], corpus_stats_file)
log.update({'corpus_stats_file': corpus_stats_file})
categories, re03 = learn_categories(links, **kwargs)
log.update(re03)
if 'corpus_stats' in log and 'cleaned_words' in re03:
log['corpus_stats'].extend([
['Number of unique words after cleanup', re03['cleaned_words']],
['Number of unique features after cleanup', re03['clean_features']]])
list2file(log['corpus_stats'], corpus_stats_file)
if cats_gen == 'jaccard' or (cats_gen == 'auto' and clustering == 'group'):
categories, re04 = generalize_categories(categories, **kwargs)
log.update(re04)
elif cats_gen == 'cosine' or (cats_gen == 'auto' and clustering == 'kmeans'):
log.update({'generalization': 'none: vector-similarity based - maybe some day...'})
else:
log.update({'generalization': 'none: ' + str(cats_gen)})
if grammar_rules != context:
context = kwargs['context']
kwargs['context'] = kwargs['grammar_rules']
links, re06 = files2links(**kwargs)
kwargs['context'] = context
if 'disjuncts' not in 'categories':
categories = add_disjuncts(categories, links, **kwargs)
es = deepcopy(categories)
clusters = [i for i, x in enumerate(rules['cluster']) if i > 0 and x is not None]
rule_list = [tuple([-x]) for x in clusters] + [tuple([x]) for x in clusters]
for cluster in clusters:
rules['disjuncts'][cluster] = set(rule_list)
else:
rules, re07 = induce_grammar(categories, **kwargs)
lengths = [len(x) for x in rules['disjuncts']]
if verbose in ['max', 'debug']:
print('N clusters = len(rules[disjuncts]-1):', len(rules['disjuncts']) - 1)
print('Rule set lengths:', lengths)
if 'rules_generalization' in kwargs:
if kwargs['rules_generalization'] in ['jaccard', 'legacy']:
rules, re08 = generalize_rules(rules, **kwargs)
log.update(re08)
elif kwargs['rules_generalization'] in ['hierarchical', 'fast', 'updated', 'new']:
rules, re08 = generalise_rules(rules, **kwargs)
log.update(re08)
if 'log+' in verbose:
log['rule_sizes'] = dict(Counter(
[len(x) for i, x in enumerate(rules['words']) if rules['parent'][i] == 0]))
if 'top_level' in kwargs and kwargs['top_level'] > -1:
tree, _ = add_upper_level(rules, **kwargs)
re09 = save_cat_tree(tree, output_categories, verbose='none')
else:
re09 = save_cat_tree(rules, output_categories, verbose='none')
log.update(re09)
re10 = save_link_grammar(rules, output_grammar, grammar_rules)
log.update(re10)
log.update({'finish': str(UTC())})
log.update({'grammar_learn_time': sec2string(time.time() - start)})
return rules, log
def learn_grammar(**kwargs):
rules, log = learn(**kwargs)
return log
| true | true |
f7f91f67c6600a226adbc4ca765b9b3d3d9badbe | 2,725 | py | Python | py_iex/stats.py | jxzym25/py_iex | 0808dff2fd31f3a30a7e1fd1adc38e9ac486d5fd | [
"MIT"
] | 1 | 2018-03-19T06:30:11.000Z | 2018-03-19T06:30:11.000Z | py_iex/stats.py | jxzym25/py_iex | 0808dff2fd31f3a30a7e1fd1adc38e9ac486d5fd | [
"MIT"
] | null | null | null | py_iex/stats.py | jxzym25/py_iex | 0808dff2fd31f3a30a7e1fd1adc38e9ac486d5fd | [
"MIT"
] | null | null | null | from .iex import IEX
class Stats(IEX):
    """Accessors for the IEX ``/stats`` API family.

    Each method returns the URL path components for one endpoint; the
    ``IEX._call_api_on_func`` decorator turns those components into the
    actual HTTP request.
    """

    @IEX._call_api_on_func
    def get_intraday(self):
        """Return intraday statistics."""
        return ("stats", "intraday")

    @IEX._call_api_on_func
    def get_recent(self):
        """Return stats for a minimum of the last five trading days, up to
        all trading days of the current month."""
        return ("stats", "recent")

    @IEX._call_api_on_func
    def get_records(self):
        """Return records statistics."""
        return ("stats", "records")

    @IEX._call_api_on_func
    def get_historical_summary(self, date=None, format=None):
        """Return historical summary stats; without parameters this is the
        current month's stats.

        :param date: optional four-digit-year/two-digit-month value (YYYYMM,
            e.g. January 2017 is 201701). Historical data is available only
            for prior months, starting with January 2014; when omitted, the
            prior month's data is returned.
        :param format: optional; the only accepted value is ``csv`` — when
            omitted the response defaults to JSON.
        """
        return ("stats", "historical")

    @IEX._call_api_on_func
    def get_historical_daily(self, date=None, last=None, format=None):
        """Return daily stats for a given month or day.

        :param date: optional; either YYYYMM (e.g. 201701) or YYYYMMDD
            (e.g. 20170121). Historical data is available only for prior
            months, starting with January 2014.
        :param last: optional; used in place of ``date`` to retrieve the
            last ``n`` trading days — any number up to 90.
        :param format: optional; the only accepted value is ``csv`` — when
            omitted the response defaults to JSON.
        """
        return ("stats", "historical", "daily")
| 40.671642 | 170 | 0.602202 | from .iex import IEX
class Stats(IEX):
    """IEX ``/stats`` endpoint accessors.

    Each method returns the URL path components for its endpoint; the
    ``IEX._call_api_on_func`` decorator converts them into the request.
    """

    @IEX._call_api_on_func
    def get_intraday(self):
        """Return intraday statistics."""
        _FUNCTION_KEYS = ("stats", "intraday")
        return _FUNCTION_KEYS
    @IEX._call_api_on_func
    def get_recent(self):
        """Return stats for recent trading days of the current month."""
        _FUNCTION_KEYS = ("stats", "recent")
        return _FUNCTION_KEYS
    @IEX._call_api_on_func
    def get_records(self):
        """Return records statistics."""
        _FUNCTION_KEYS = ("stats", "records")
        return _FUNCTION_KEYS
    @IEX._call_api_on_func
    def get_historical_summary(self, date=None, format=None):
        """Return historical summary stats.

        ``date`` is an optional YYYYMM month; ``format`` optionally
        selects ``csv`` output (JSON otherwise).
        """
        _FUNCTION_KEYS = ("stats", "historical")
        return _FUNCTION_KEYS
    @IEX._call_api_on_func
    def get_historical_daily(self, date=None, last=None, format=None):
        """Return daily stats for a given month or day.

        ``date`` is YYYYMM or YYYYMMDD; ``last`` fetches the last n
        trading days instead; ``format`` optionally selects ``csv``.
        """
        _FUNCTION_KEYS = ("stats", "historical", "daily")
        return _FUNCTION_KEYS
| true | true |
f7f91f8630c3f524d35f2a305adc8ffc5c0ff0cb | 380 | py | Python | accounts/serializers.py | akahard2dj/DjangoBlackberry | 4e732d8650dc7d61cd79033d5072d836707b411f | [
"MIT"
] | null | null | null | accounts/serializers.py | akahard2dj/DjangoBlackberry | 4e732d8650dc7d61cd79033d5072d836707b411f | [
"MIT"
] | null | null | null | accounts/serializers.py | akahard2dj/DjangoBlackberry | 4e732d8650dc7d61cd79033d5072d836707b411f | [
"MIT"
] | null | null | null | from rest_framework import serializers
from accounts.models import Berry
class BerrySerializer(serializers.ModelSerializer):
    """Serializes a single Berry account via its public fields."""

    class Meta:
        model = Berry
        fields = ('id', 'email', 'nickname', 'created_at')
class BerryListSerializer(serializers.ModelSerializer):
    """Serializer for Berry list views; exposes the same public fields
    as the single-object serializer."""

    class Meta:
        model = Berry
        fields = ('id', 'email', 'nickname', 'created_at')
| 23.75 | 58 | 0.684211 | from rest_framework import serializers
from accounts.models import Berry
class BerrySerializer(serializers.ModelSerializer):
    """Serializes a single Berry account via its public fields."""

    class Meta:
        model = Berry
        fields = ('id', 'email', 'nickname', 'created_at')
class BerryListSerializer(serializers.ModelSerializer):
    """Serializer for Berry list views; exposes the same public fields
    as the single-object serializer."""

    class Meta:
        model = Berry
        fields = ('id', 'email', 'nickname', 'created_at')
| true | true |
f7f91ff5720693ef2b071e117d6f40ac1d0ca390 | 171 | py | Python | tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_ConstantTrend_BestCycle_LSTM.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_ConstantTrend_BestCycle_LSTM.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | 1 | 2019-11-30T23:39:38.000Z | 2019-12-01T04:34:35.000Z | tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_ConstantTrend_BestCycle_LSTM.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['ConstantTrend'] , ['BestCycle'] , ['LSTM'] ); | 42.75 | 93 | 0.766082 | import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['ConstantTrend'] , ['BestCycle'] , ['LSTM'] ); | true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.