"""Test Environment Canada diagnostics."""
from datetime import datetime, timezone
import json
from unittest.mock import AsyncMock, MagicMock, patch
from homeassistant.components.environment_canada.const import (
CONF_LANGUAGE,
CONF_STATION,
DOMAIN,
)
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, load_fixture
from tests.components.diagnostics import get_diagnostics_for_config_entry
FIXTURE_USER_INPUT = {
CONF_LATITUDE: 55.55,
CONF_LONGITUDE: 42.42,
CONF_STATION: "XX/1234567",
CONF_LANGUAGE: "Gibberish",
}
async def init_integration(hass: HomeAssistant) -> MockConfigEntry:
"""Set up the Environment Canada integration in Home Assistant."""
def mock_ec():
ec_mock = MagicMock()
ec_mock.station_id = FIXTURE_USER_INPUT[CONF_STATION]
ec_mock.lat = FIXTURE_USER_INPUT[CONF_LATITUDE]
ec_mock.lon = FIXTURE_USER_INPUT[CONF_LONGITUDE]
ec_mock.language = FIXTURE_USER_INPUT[CONF_LANGUAGE]
ec_mock.update = AsyncMock()
return ec_mock
config_entry = MockConfigEntry(domain=DOMAIN, data=FIXTURE_USER_INPUT)
config_entry.add_to_hass(hass)
ec_data = json.loads(
load_fixture("environment_canada/current_conditions_data.json")
)
weather_mock = mock_ec()
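    # Pin the metadata timestamp so the diagnostics snapshot compared below is
    # deterministic and matches the stored fixture.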
ec_data["metadata"]["timestamp"] = datetime(2022, 10, 4, tzinfo=timezone.utc)
weather_mock.conditions = ec_data["conditions"]
weather_mock.alerts = ec_data["alerts"]
weather_mock.daily_forecasts = ec_data["daily_forecasts"]
weather_mock.metadata = ec_data["metadata"]
radar_mock = mock_ec()
radar_mock.image = b"GIF..."
radar_mock.timestamp = datetime(2022, 10, 4, tzinfo=timezone.utc)
with patch(
"homeassistant.components.environment_canada.ECWeather",
return_value=weather_mock,
), patch(
"homeassistant.components.environment_canada.ECAirQuality",
return_value=mock_ec(),
), patch(
"homeassistant.components.environment_canada.ECRadar", return_value=radar_mock
), patch(
"homeassistant.components.environment_canada.config_flow.ECWeather",
return_value=weather_mock,
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return config_entry
async def test_entry_diagnostics(hass, hass_client):
"""Test config entry diagnostics."""
config_entry = await init_integration(hass)
diagnostics = await get_diagnostics_for_config_entry(
hass, hass_client, config_entry
)
redacted_entry = json.loads(
load_fixture("environment_canada/config_entry_data.json")
)
assert diagnostics == redacted_entry
|
{
"content_hash": "0acd535993fbeff6fa03cd13286c0cfc",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 86,
"avg_line_length": 32.89411764705882,
"alnum_prop": 0.6963519313304721,
"repo_name": "mezz64/home-assistant",
"id": "a1f3539a5e4d940df5a9214cbf073237fb5b67ce",
"size": "2796",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/components/environment_canada/test_diagnostics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52481895"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
import datetime
import uuid
from oslo.config import cfg
import webob
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import config_drive
from nova.api.openstack.compute.plugins.v3 import servers
from nova.compute import api as compute_api
from nova.compute import flavors
from nova import db
from nova import exception
from nova.network import manager
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests.image import fake
CONF = cfg.CONF
FAKE_UUID = fakes.FAKE_UUID
def fake_gen_uuid():
return FAKE_UUID
def return_security_group(context, instance_id, security_group_id):
pass
class ConfigDriveTest(test.TestCase):
def setUp(self):
super(ConfigDriveTest, self).setUp()
fakes.stub_out_networking(self.stubs)
fakes.stub_out_rate_limiting(self.stubs)
fake.stub_out_image_service(self.stubs)
def test_show(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get())
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get())
req = webob.Request.blank('/v3/servers/1')
req.headers['Content-Type'] = 'application/json'
response = req.get_response(fakes.wsgi_app_v3(
init_only=('servers', 'os-config-drive')))
self.assertEqual(response.status_int, 200)
res_dict = jsonutils.loads(response.body)
self.assertIn(config_drive.ATTRIBUTE_NAME, res_dict['server'])
def test_detail_servers(self):
self.stubs.Set(db, 'instance_get_all_by_filters',
fakes.fake_instance_get_all_by_filters())
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get())
req = fakes.HTTPRequestV3.blank('/v3/servers/detail')
res = req.get_response(fakes.wsgi_app_v3(
init_only=('servers', 'os-config-drive')))
server_dicts = jsonutils.loads(res.body)['servers']
self.assertNotEqual(len(server_dicts), 0)
for server_dict in server_dicts:
self.assertIn(config_drive.ATTRIBUTE_NAME, server_dict)
class ServersControllerCreateTest(test.TestCase):
def setUp(self):
"""Shared implementation for tests below that create instance."""
super(ServersControllerCreateTest, self).setUp()
self.flags(verbose=True,
enable_instance_password=True)
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
ext_info = plugins.LoadedExtensionInfo()
self.controller = servers.ServersController(extension_info=ext_info)
CONF.set_override('extensions_blacklist', 'os-config-drive',
'osapi_v3')
self.no_config_drive_controller = servers.ServersController(
extension_info=ext_info)
def instance_create(context, inst):
inst_type = flavors.get_flavor_by_flavor_id(3)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def_image_ref = 'http://localhost/images/%s' % image_uuid
self.instance_cache_num += 1
instance = fake_instance.fake_db_instance(**{
'id': self.instance_cache_num,
'display_name': inst['display_name'] or 'test',
'uuid': FAKE_UUID,
'instance_type': dict(inst_type),
'access_ip_v4': '1.2.3.4',
'access_ip_v6': 'fead::1234',
'image_ref': inst.get('image_ref', def_image_ref),
'user_id': 'fake',
'project_id': 'fake',
'reservation_id': inst['reservation_id'],
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
"config_drive": None,
"progress": 0,
"fixed_ips": [],
"task_state": "",
"vm_state": "",
"root_device_name": inst.get('root_device_name', 'vda'),
})
self.instance_cache_by_id[instance['id']] = instance
self.instance_cache_by_uuid[instance['uuid']] = instance
return instance
def instance_get(context, instance_id):
"""Stub for compute/api create() pulling in instance after
scheduling
"""
return self.instance_cache_by_id[instance_id]
def instance_update(context, uuid, values):
instance = self.instance_cache_by_uuid[uuid]
instance.update(values)
return instance
def server_update(context, instance_uuid, params):
inst = self.instance_cache_by_uuid[instance_uuid]
inst.update(params)
return (inst, inst)
def fake_method(*args, **kwargs):
pass
def project_get_networks(context, user_id):
return dict(id='1', host='localhost')
def queue_get_for(context, *args):
return 'network_topic'
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fake.stub_out_image_service(self.stubs)
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'project_get_networks',
project_get_networks)
self.stubs.Set(db, 'instance_create', instance_create)
self.stubs.Set(db, 'instance_system_metadata_update',
fake_method)
self.stubs.Set(db, 'instance_get', instance_get)
self.stubs.Set(db, 'instance_update', instance_update)
self.stubs.Set(db, 'instance_update_and_get_original',
server_update)
self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
fake_method)
def _test_create_extra(self, params, no_image=False,
override_controller=None):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
server = dict(name='server_test', image_ref=image_uuid, flavor_ref=2)
if no_image:
server.pop('image_ref', None)
server.update(params)
body = dict(server=server)
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
if override_controller:
server = override_controller.create(req, body=body).obj['server']
else:
server = self.controller.create(req, body=body).obj['server']
def test_create_instance_with_config_drive_disabled(self):
params = {config_drive.ATTRIBUTE_NAME: "False"}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('config_drive', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params,
override_controller=self.no_config_drive_controller)
def _create_instance_body_of_config_drive(self, param):
def create(*args, **kwargs):
self.assertIn('config_drive', kwargs)
return old_create(*args, **kwargs)
old_create = compute_api.API.create
self.stubs.Set(compute_api.API, 'create', create)
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v3/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'image_ref': image_href,
'flavor_ref': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
config_drive.ATTRIBUTE_NAME: param,
},
}
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
return req, body
def test_create_instance_with_config_drive(self):
param = True
req, body = self._create_instance_body_of_config_drive(param)
res = self.controller.create(req, body=body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_config_drive_as_boolean_string(self):
param = 'false'
req, body = self._create_instance_body_of_config_drive(param)
res = self.controller.create(req, body=body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_bad_config_drive(self):
param = 12345
req, body = self._create_instance_body_of_config_drive(param)
self.assertRaises(exception.ValidationError,
self.controller.create, req, body=body)
def test_create_instance_without_config_drive(self):
param = True
req, body = self._create_instance_body_of_config_drive(param)
del body['server'][config_drive.ATTRIBUTE_NAME]
res = self.controller.create(req, body=body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_empty_config_drive(self):
param = ''
req, body = self._create_instance_body_of_config_drive(param)
self.assertRaises(exception.ValidationError,
self.controller.create, req, body=body)
|
{
"content_hash": "060e645f2ef7672de92b4a748e489436",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 77,
"avg_line_length": 38.8531746031746,
"alnum_prop": 0.5956490654682872,
"repo_name": "viggates/nova",
"id": "3ab3cf70186e5e9ad3c5a5d4690d2d4c496a5a4c",
"size": "10427",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/api/openstack/compute/plugins/v3/test_config_drive.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14822788"
},
{
"name": "Shell",
"bytes": "18352"
}
],
"symlink_target": ""
}
|
import pandas as pd
from collections import OrderedDict
from pickle import dump, load
from backend.portfolio_model import PortfolioModels
from backend.robinhood_data import RobinhoodData
from backend.market_data import MarketData
class BackendClass(object):
"""
Backend wrapper class, provides wrappers to donwload robinhood and market
data, provides access to portfolio models. Mostly deals with UI logic.
--------
datadir: path to data directory
"""
def __init__(self, datadir, userfile):
self.datadir = datadir
self.userfile = userfile
self.df_returns = None
self.daily_returns = None
self.user = {}
self.portfolio = {}
self.trades = {}
self.stock = {}
self.markowitz = {}
self._date_fmt = '{:%d-%m-%Y}'
# read dataframe
self._df_ord = pd.read_pickle(self.datadir+'orders.pkl')
self._df_div = pd.read_pickle(self.datadir+'dividends.pkl')
self._market = pd.read_pickle(self.datadir+'market.pkl')
# handle user dictionary
self._init_user_dict()
self._validate_user_dict()
def _init_user_dict(self):
# try to load the user dict or re-initiate if all fails
try:
with open(self.userfile, 'rb') as f:
self.user = load(f)
self.user["today"] = pd.Timestamp("today")
        except Exception:
# check the dates for robinhood data
self.user['rb_dates'] = [
self._df_ord.date.min(),
max(self._df_ord.date.max(), self._df_div.date.max())]
'''
# check the dates for market data
self.user['mkt_dates'] = [
self._market.major_axis.min(),
self._market.major_axis.max()]
'''
# use the fresh start for market when pickle is deleted
self.update_market_data(fresh_start=True)
# get today date
self.user['today'] = pd.Timestamp("today")
# pickle the dictionary
self._pickle_user_dict()
return self
def _validate_user_dict(self):
# check the consistency of the dates
if (
(self.user['mkt_dates'][0] > self.user['rb_dates'][0]) or
(self.user['mkt_dates'][1] < self.user['rb_dates'][1])
):
print('Market data is not consistent with Robinhood data')
self.update_market_data(fresh_start=True)
# check if dates match the actual data
elif (
(self.user['mkt_dates'][0] != self._market.major_axis.min()) or
(self.user['mkt_dates'][1] != self._market.major_axis.max())
):
print('Dates are not consistent with dataset')
self.update_market_data(fresh_start=True)
return self
def _pickle_user_dict(self):
# dump dates and user config
with open(self.userfile, 'wb') as f:
dump(self.user, f)
def update_market_data(self, **keyword_parameters):
"""
        If 'fresh_start' is passed, then use the rb_dates from the
        user dict to download the entire history from scratch; otherwise
        download only the new dates in addition to the existing set.
"""
md = MarketData(self.datadir)
# check if symbols match
s1 = list(self._df_ord.symbol.unique())
s1.sort()
s2 = list(self._market.minor_axis)
s2.sort()
if ('fresh_start' in keyword_parameters) or (s1 != s2):
min_date = self.user['rb_dates'][0]
max_date = pd.Timestamp("today")
self._market = md.download_save_market_data(
self._df_ord.symbol.unique(), min_date, max_date)
self.user['mkt_dates'] = [min_date, max_date]
else:
min_date = self.user['mkt_dates'][1]
max_date = pd.Timestamp("today")
self._market = md.download_save_market_data(
self._df_ord.symbol.unique(), min_date, max_date,
update_existing=True)
self.user['mkt_dates'] = [self.user['mkt_dates'][0], max_date]
self._pickle_user_dict()
self.calculate_all()
return self
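    # Illustrative usage (not in the original module): a fresh start rebuilds
    # the full market history, while a bare call only appends newer dates:
    #   backend.update_market_data(fresh_start=True)  # full re-download
    #   backend.update_market_data()                  # incremental update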
def update_robinhood_data(self, user, password):
rd = RobinhoodData(self.datadir)
self._df_div, self._df_ord, _, _ =\
rd.download_robinhood_data(user, password)
self.user['rb_dates'] = [
self._df_ord.date.min(), pd.Timestamp("today")]
'''
destroy RobinhoodData instance after download
to protect user information and delete token
'''
rd = 0
self._pickle_user_dict()
self.update_market_data()
self.calculate_all()
return self
def _get_daily_portfolio_panel(self):
"""
Generate a panelframe with daily portfolio changes
"""
self._ptfm = PortfolioModels(self.datadir)
self._panel = self._ptfm.daily_portfolio_changes().panelframe
return self
def _get_latest_portfolio_snapshot(self):
# use only the last row
df = self._panel.iloc[:, -1, :-1]
columns_to_names = OrderedDict([
('cum_size', ['Shares', '{:,.0f}']),
('current_weight', ['Portfolio weight', '{:.2f}%']),
('cum_cost_basis', ['Current cost basis', '{:,.2f}']),
('cum_value_close', ['Current value', '{:,.2f}']),
('cum_realized_gain', ['Realized gain', '{:,.2f}']),
('cum_dividends', ['Dividends', '{:,.2f}']),
('cum_unrealized_gain', ['Unrealized gain', '{:,.2f}']),
('cum_total_return', ['Total return', '{:,.2f}']),
('current_return_rate', ['Total return rate', '{:,.2f}%'])
])
# convert ratios to percent
df['current_weight'] = df['current_weight'] * 100
# add total row
df = df.copy() # avoid chained assignment warning
df.loc['Portfolio', :] = df.sum(axis=0)
df.loc['Portfolio', 'current_return_rate'] =\
df.loc['Portfolio', 'cum_total_return'] /\
df.loc['Portfolio', 'cum_cost_basis'] * 100
# re-order
df = df[list(columns_to_names.keys())]
# format
df = df.apply(
lambda x: x.map(columns_to_names[x.name][1].format), axis=0)
# rename for HTML
df.columns =\
df.columns.to_series().apply(lambda x: columns_to_names[x][0])
# apply styles
self.portfolio['returns'] = df.style.\
set_table_attributes('border=1 class="dataframe"').\
render()
return self
def _get_sell_orders(self):
"""
Get three best/worst closed positions by realized gains
"""
        # assuming the closed positions are pickled as 'closed.pkl' in datadir
        df = pd.read_pickle(self.datadir + 'closed.pkl')
df1 = df.nlargest(min(3, df.shape[0]), 'realized_gains')
df2 = df.nsmallest(min(3, df.shape[0]), 'realized_gains')
df = pd.concat([df1, df2]).sort_values(by='realized_gains')
df['buy_price'] = df['current_cost_basis'] / df['signed_size']
# create unique index to avoid issues with styler
df.reset_index(inplace=True)
columns_to_names = OrderedDict([
('date', 'Date'),
('symbol', 'Security'),
('current_size', 'Shares'),
('buy_price', 'Average buy price'),
('average_price', 'Average sell price'),
('realized_gains', 'Realized gain')
])
self.trades['closed'] = pd.DataFrame()
for k in columns_to_names:
self.trades['closed'][columns_to_names[k]] = df[k]
# apply styles
self.trades['closed'] = self.trades['closed'].style.\
set_table_attributes('border=1 class="dataframe"').\
bar(subset=['Realized gain'], align='mid',
color=['#fc8d59', '#91bfdb']).\
set_table_styles([
{'selector': '.row_heading',
'props': [('display', 'none')]},
{'selector': '.blank.level0',
'props': [('display', 'none')]}]).\
format({
'Date': self._date_fmt,
'Shares': '{:,.0f}',
'Average buy price': '{:,.2f}',
'Average sell price': '{:,.2f}',
'Realized gain': "{:+,.2f}"
}).\
render()
return self
def _get_buy_orders(self):
"""
Get three best/worst open positions by unrealized gains
"""
market_prices = self._panel['close'].iloc[-1]
        # assuming the open positions are pickled as 'open.pkl' in datadir
        df = pd.read_pickle(self.datadir + 'open.pkl')
df['current_price'] =\
df.apply(lambda x: market_prices[x.symbol], axis=1)
df['unrealized_gains'] =\
(df['current_price'] - df['average_price']) * df['final_size']
df1 = df.nlargest(min(3, df.shape[0]), 'unrealized_gains').copy()
df2 = df.nsmallest(min(3, df.shape[0]), 'unrealized_gains').copy()
df = pd.concat([df1, df2]).sort_values(by='unrealized_gains')
# create unique index to avoid issues with styler
df.reset_index(inplace=True)
columns_to_names = OrderedDict([
('date', 'Date'),
('symbol', 'Security'),
('final_size', 'Shares'),
('average_price', 'Average buy price'),
('current_price', 'Current market price'),
('unrealized_gains', 'Unrealized gain')
])
self.trades['open'] = pd.DataFrame()
for k in columns_to_names:
self.trades['open'][columns_to_names[k]] = df[k]
# apply styles
self.trades['open'] = self.trades['open'].style.\
set_table_attributes('border=1 class="dataframe"').\
bar(subset=['Unrealized gain'], align='mid',
color=['#fc8d59', '#91bfdb']).\
set_table_styles([
{'selector': '.row_heading',
'props': [('display', 'none')]},
{'selector': '.blank.level0',
'props': [('display', 'none')]}]).\
format({
'Date': self._date_fmt,
'Shares': '{:,.0f}',
'Average buy price': '{:,.2f}',
'Current market price': '{:,.2f}',
'Unrealized gain': "{:+,.2f}"
}).\
render()
return self
def _get_all_orders(self):
        # assuming the same 'closed.pkl'/'open.pkl' naming convention as above
        cl = pd.read_pickle(self.datadir + 'closed.pkl')
        op = pd.read_pickle(self.datadir + 'open.pkl')
mkt = self._panel['close'].iloc[-1]
cl['average_buy_price'] = cl['current_cost_basis'] / cl['signed_size']
cl.rename(columns={'average_price': 'average_sell_price'},
inplace=True)
op['current_price'] =\
op.apply(lambda x: mkt[x.symbol], axis=1)
op['unrealized_gains'] =\
(op['current_price'] - op['average_price']) * op['final_size']
op.rename(columns={'average_price': 'average_buy_price'},
inplace=True)
        orders = pd.concat([cl, op]).sort_values(by='date')
columns_to_names = OrderedDict([
('date', 'Date'),
('symbol', 'Security'),
('signed_size', 'Order size'),
('final_size', 'Current size'),
('current_price', 'Current market price'),
('average_buy_price', 'Average buy price'),
('average_sell_price', 'Average sell price'),
('realized_gains', 'Realized gain'),
('unrealized_gains', 'Unrealized gain')
])
self.trades['all'] = pd.DataFrame()
for k in columns_to_names:
            self.trades['all'][columns_to_names[k]] = orders[k]
self.trades['all'] = self.trades['all'].style.\
set_table_attributes(
'border=1 class="dataframe orders"' +
'style="display:none"').\
set_table_styles([
{'selector': '.row_heading',
'props': [('display', 'none')]},
{'selector': '.blank.level0',
'props': [('display', 'none')]}]).\
format({
'Date': self._date_fmt,
                'Order size': '{:,.0f}',
                'Current size': '{:,.0f}',
'Average buy price': '{:,.2f}',
'Average sell price': '{:,.2f}',
'Current market price': '{:,.2f}',
'Realized gain': '{:,.2f}',
'Unrealized gain': '{:,.2f}'
}).\
render()
        self.trades['total_orders'] = orders.shape[0]
self.trades['open_orders'] = op.shape[0]
self.trades['closed_orders'] = cl.shape[0]
        self.trades['fees'] = round(orders['fees'].sum(), 2)
return self
def _format_portfolio_stats_series(self, df, horizontal=True):
pct_val = [
"Total return",
"Market return",
"Annual return",
"Annual volatility",
"Max drawdown",
"Daily value at risk",
"Alpha"
]
        for col in pct_val:
            try:
                df[col] = df[col] * 100
            except KeyError:
                # stat not present in this series; leave it as-is
                pass
# format values
df = df.to_frame().apply(
lambda x:
'{:.2f}'.format(x[0]) if x.name not in pct_val
else '{:.2f}%'.format(x[0]), axis=1)
# apply styles
if horizontal:
df = df.to_frame().transpose().style
else:
df = df.to_frame().style
df = df.set_table_attributes('border=1 class="dataframe"')
if horizontal:
df = df.set_table_styles([
{'selector': '.row_heading',
'props': [('display', 'none')]},
{'selector': '.blank.level0',
'props': [('display', 'none')]}])
else:
df = df.set_table_styles([
{'selector': '.col_heading',
'props': [('display', 'none')]},
{'selector': '.blank.level0',
'props': [('display', 'none')]}])
return df.render()
def _format_stock_stats_frame(self, df):
# apply styles
pct_val = [
"Annual return",
"Annual volatility",
"Max drawdown",
"Daily value at risk",
"Alpha"
]
for col in pct_val:
df[col] = df[col] * 100
# parse percentage values to strings
for i in df.columns:
df[i] = df[i].apply(
lambda x: '{:.2f}'.format(x) if i not in pct_val
else '{:.2f}%'.format(x))
def color_values(val):
if val[0] == '-':
return 'background-color: {}'.format('#fc8d59')
else:
return 'background-color: {}'.format('#91bfdb')
# apply styles
res = df.style.\
set_table_attributes('border=1 class="dataframe"').\
applymap(lambda x: color_values(x),
subset=['Alpha', 'Annual return', 'Sharpe ratio']).\
render()
return res
def _get_portfolio_stats(self):
"""
Get actual portfolio stats
"""
self.portfolio['daily'] = self._panel['cum_total_return'].sum(axis=1)
pf_stats = self._ptfm.actual_portfolio_stats()
self.portfolio['stats'] = self._format_portfolio_stats_series(pf_stats)
self.portfolio['total_return'] =\
'{:.2f}%'.format(pf_stats['Total return'])
self.portfolio['annual_return'] =\
'{:.2f}%'.format(pf_stats['Annual return'])
self.portfolio['market_return'] =\
'{:.2f}%'.format(pf_stats['Market return'])
return self
def _get_stock_stats(self):
df = self._ptfm.stock_risk_analysis(False)
self.stock['risk'] = self._format_stock_stats_frame(df)
self.stock['corr'], _ = self._ptfm.stock_correlation_matrix()
return self
def _get_markowitz(self):
mrk = self._ptfm.markowitz_portfolios()
for c in mrk:
c['stats'] = self._format_portfolio_stats_series(c['stats'], False)
c['weights'] = c['weights'].apply(
lambda x: '{:.2f}%'.format(x*100))
c['weights'] = c['weights'].to_frame().style.\
set_table_attributes('border=1 class="dataframe"').\
set_table_styles([
{'selector': '.col_heading',
'props': [('display', 'none')]},
{'selector': '.blank.level0',
'props': [('display', 'none')]}]).\
render()
self.markowitz = mrk
return self
def calculate_all(self):
"""
Run all calculations and save values to internal class props
--------
Returns:
"""
self._get_daily_portfolio_panel()
self._get_latest_portfolio_snapshot()
self._get_portfolio_stats()
self._get_stock_stats()
self._get_buy_orders()
self._get_sell_orders()
self._get_all_orders()
self._get_markowitz()
return self
if __name__ == '__main__':
bc = BackendClass('data/data.h5', 'user.pkl')
bc = bc.calculate_all()
|
{
"content_hash": "95d2946cde80d343948ed5dedc488a9e",
"timestamp": "",
"source": "github",
"line_count": 487,
"max_line_length": 79,
"avg_line_length": 35.59753593429158,
"alnum_prop": 0.5062874942316566,
"repo_name": "omdv/robinhood-portfolio",
"id": "edc46132c53b0d5034ac0bc223fb0c4afdefbc8d",
"size": "17336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/backend.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1795"
},
{
"name": "Dockerfile",
"bytes": "685"
},
{
"name": "HTML",
"bytes": "9607"
},
{
"name": "Jupyter Notebook",
"bytes": "522952"
},
{
"name": "Python",
"bytes": "53063"
}
],
"symlink_target": ""
}
|
from tempest import clients
from tempest.common.utils import data_utils
from tempest import config
from tempest.openstack.common import log as logging
import tempest.test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class BaseVolumeTest(tempest.test.BaseTestCase):
"""Base test case class for all Cinder API tests."""
@classmethod
def setUpClass(cls):
cls.set_network_resources()
super(BaseVolumeTest, cls).setUpClass()
if not CONF.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
cls.os = cls.get_client_manager()
cls.servers_client = cls.os.servers_client
cls.image_ref = CONF.compute.image_ref
cls.flavor_ref = CONF.compute.flavor_ref
cls.build_interval = CONF.volume.build_interval
cls.build_timeout = CONF.volume.build_timeout
cls.snapshots = []
cls.volumes = []
@classmethod
def tearDownClass(cls):
cls.clear_snapshots()
cls.clear_volumes()
cls.clear_isolated_creds()
super(BaseVolumeTest, cls).tearDownClass()
@classmethod
def create_snapshot(cls, volume_id=1, **kwargs):
"""Wrapper utility that returns a test snapshot."""
resp, snapshot = cls.snapshots_client.create_snapshot(volume_id,
**kwargs)
assert 200 == resp.status
cls.snapshots.append(snapshot)
cls.snapshots_client.wait_for_snapshot_status(snapshot['id'],
'available')
return snapshot
# NOTE(afazekas): these create_* and clean_* could be defined
# only in a single location in the source, and could be more general.
@classmethod
def create_volume(cls, size=1, **kwargs):
"""Wrapper utility that returns a test volume."""
vol_name = data_utils.rand_name('Volume')
resp, volume = cls.volumes_client.create_volume(size,
display_name=vol_name,
**kwargs)
assert 200 == resp.status
cls.volumes.append(volume)
cls.volumes_client.wait_for_volume_status(volume['id'], 'available')
return volume
@classmethod
def clear_volumes(cls):
for volume in cls.volumes:
try:
cls.volumes_client.delete_volume(volume['id'])
except Exception:
pass
for volume in cls.volumes:
try:
cls.volumes_client.wait_for_resource_deletion(volume['id'])
except Exception:
pass
@classmethod
def clear_snapshots(cls):
for snapshot in cls.snapshots:
try:
cls.snapshots_client.delete_snapshot(snapshot['id'])
except Exception:
pass
for snapshot in cls.snapshots:
try:
cls.snapshots_client.wait_for_resource_deletion(snapshot['id'])
except Exception:
pass
class BaseVolumeV1Test(BaseVolumeTest):
@classmethod
def setUpClass(cls):
if not CONF.volume_feature_enabled.api_v1:
msg = "Volume API v1 not supported"
raise cls.skipException(msg)
super(BaseVolumeV1Test, cls).setUpClass()
cls.snapshots_client = cls.os.snapshots_client
cls.volumes_client = cls.os.volumes_client
cls.volumes_extension_client = cls.os.volumes_extension_client
class BaseVolumeV1AdminTest(BaseVolumeV1Test):
"""Base test case class for all Volume Admin API tests."""
@classmethod
def setUpClass(cls):
super(BaseVolumeV1AdminTest, cls).setUpClass()
cls.adm_user = CONF.identity.admin_username
cls.adm_pass = CONF.identity.admin_password
cls.adm_tenant = CONF.identity.admin_tenant_name
if not all((cls.adm_user, cls.adm_pass, cls.adm_tenant)):
msg = ("Missing Volume Admin API credentials "
"in configuration.")
raise cls.skipException(msg)
if CONF.compute.allow_tenant_isolation:
creds = cls.isolated_creds.get_admin_creds()
admin_username, admin_tenant_name, admin_password = creds
cls.os_adm = clients.Manager(username=admin_username,
password=admin_password,
tenant_name=admin_tenant_name,
interface=cls._interface)
else:
cls.os_adm = clients.AdminManager(interface=cls._interface)
cls.client = cls.os_adm.volume_types_client
cls.hosts_client = cls.os_adm.volume_hosts_client
|
{
"content_hash": "1807dddb0dbc0b59be81023a688731fa",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 79,
"avg_line_length": 37.01526717557252,
"alnum_prop": 0.5889874200866158,
"repo_name": "ntymtsiv/tempest",
"id": "de2b2403a89d3b6943b78f45c00154001976dde8",
"size": "5485",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tempest/api/volume/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2312198"
},
{
"name": "Shell",
"bytes": "9160"
}
],
"symlink_target": ""
}
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from fobi.base import BaseFormFieldPluginForm, get_theme
from fobi.helpers import validate_initial_for_multiple_choices
__title__ = 'fobi.contrib.plugins.form_elements.fields.select_multiple.forms'
__author__ = 'Artur Barseghyan <artur.barseghyan@gmail.com>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('SelectMultipleInputForm',)
theme = get_theme(request=None, as_instance=True)
class SelectMultipleInputForm(forms.Form, BaseFormFieldPluginForm):
"""Form for ``SelectMultipleInputPlugin``."""
plugin_data_fields = [
("label", ""),
("name", ""),
("choices", ""),
("help_text", ""),
("initial", ""),
("required", False)
]
label = forms.CharField(
label=_("Label"),
required=True,
widget=forms.widgets.TextInput(
attrs={'class': theme.form_element_html_class}
)
)
name = forms.CharField(
label=_("Name"),
required=True,
widget=forms.widgets.TextInput(
attrs={'class': theme.form_element_html_class}
)
)
choices = forms.CharField(
label=_("Choices"),
required=False,
help_text=_("Enter single values/pairs per line. Example:<code><br/>"
" 1<br/>"
" 2<br/>"
" alpha, Alpha<br/>"
" beta, Beta<br/>"
" omega"
"</code><br/>"
"It finally transforms into the following HTML "
"code:<code><br/>"
' '
'<select id="id_NAME_OF_THE_ELEMENT" '
'name="NAME_OF_THE_ELEMENT"><br/>'
' '
'<option value="1">1</option><br/>'
' '
'<option value="2">2</option><br/>'
' '
'<option value="alpha">Alpha</option><br/>'
' '
'<option value="beta">Beta</option><br/>'
' '
'<option value="omega">omega</option><br/>'
' </select>'
"</code>"),
widget=forms.widgets.Textarea(
attrs={'class': theme.form_element_html_class}
)
)
help_text = forms.CharField(
label=_("Help text"),
required=False,
widget=forms.widgets.Textarea(
attrs={'class': theme.form_element_html_class}
)
)
initial = forms.CharField(
label=_("Initial"),
required=False,
widget=forms.widgets.TextInput(
attrs={'class': theme.form_element_html_class}
)
)
required = forms.BooleanField(
label=_("Required"),
required=False,
widget=forms.widgets.CheckboxInput(
attrs={'class': theme.form_element_checkbox_html_class}
)
)
def clean_initial(self):
"""Validating the initial value."""
return validate_initial_for_multiple_choices(self,
'choices',
'initial')
|
{
"content_hash": "0a4fbafc5ca03fe22eb0741aae5dd768",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 77,
"avg_line_length": 37.92929292929293,
"alnum_prop": 0.5158455392809588,
"repo_name": "mansonul/events",
"id": "c2307f1c03c59315225707488e354e3774a87e4d",
"size": "3755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "events/contrib/plugins/form_elements/fields/select_multiple/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "90251"
},
{
"name": "HTML",
"bytes": "186225"
},
{
"name": "JavaScript",
"bytes": "43221"
},
{
"name": "Python",
"bytes": "804726"
},
{
"name": "Shell",
"bytes": "4196"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('linkcheck', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='url',
name='redirect_to',
field=models.CharField(default='', max_length=255),
),
]
|
{
"content_hash": "19bfd09b7e9f9cd18a2e55a4c2907da9",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 63,
"avg_line_length": 21.22222222222222,
"alnum_prop": 0.5837696335078534,
"repo_name": "claudep/django-linkcheck",
"id": "560dd73243cd9297fffbb1c3f92a5eec8d3a7313",
"size": "406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linkcheck/migrations/0002_url_redirect_to.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "11357"
},
{
"name": "Python",
"bytes": "81297"
}
],
"symlink_target": ""
}
|
"""Integration tests for TensorBoard.
These tests start up a full-fledged TensorBoard server.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import gzip
import json
import numbers
import os
import shutil
import socket
import tempfile
import threading
from six import BytesIO
from six.moves import http_client
from six.moves import xrange # pylint: disable=redefined-builtin
from werkzeug import serving
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import summary_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.util import event_pb2
from tensorflow.python.platform import test
from tensorflow.python.summary.writer import writer as writer_lib
from tensorflow.tensorboard import tensorboard
from tensorflow.tensorboard.backend import application
from tensorflow.tensorboard.backend.event_processing import event_multiplexer
from tensorflow.tensorboard.plugins import base_plugin
class FakePlugin(base_plugin.TBPlugin):
"""A plugin with no functionality."""
def __init__(self, plugin_name, is_active_value):
"""Constructs a fake plugin.
Args:
plugin_name: The name of this plugin.
is_active_value: Whether the plugin is active.
"""
self.plugin_name = plugin_name
self._is_active_value = is_active_value
def get_plugin_apps(self, multiplexer, logdir):
"""Returns a mapping from routes to handlers offered by this plugin.
Args:
multiplexer: The event multiplexer.
logdir: The path to the directory containing logs.
Returns:
An empty dict. This plugin offers no routes.
"""
return {}
def is_active(self):
"""Returns whether this plugin is active.
Returns:
A boolean. Whether this plugin is active.
"""
return self._is_active_value
class TensorboardServerTest(test.TestCase):
_only_use_meta_graph = False # Server data contains only a GraphDef
# Number of scalar-containing events to make.
_SCALAR_COUNT = 99
def setUp(self):
self.temp_dir = self._GenerateTestData()
multiplexer = event_multiplexer.EventMultiplexer(
size_guidance=application.DEFAULT_SIZE_GUIDANCE,
purge_orphaned_data=True)
plugins = [
FakePlugin(plugin_name='foo', is_active_value=True),
FakePlugin(plugin_name='bar', is_active_value=False)
]
app = application.TensorBoardWSGIApp(
self.temp_dir, plugins, multiplexer, reload_interval=0)
try:
self._server = serving.BaseWSGIServer('localhost', 0, app)
# 0 to pick an unused port.
except IOError:
# BaseWSGIServer has a preference for IPv4. If that didn't work, try again
# with an explicit IPv6 address.
self._server = serving.BaseWSGIServer('::1', 0, app)
self._server_thread = threading.Thread(target=self._server.serve_forever)
self._server_thread.daemon = True
self._server_thread.start()
self._connection = http_client.HTTPConnection(
'localhost', self._server.server_address[1])
def tearDown(self):
self._connection.close()
self._server.shutdown()
self._server.server_close()
def _get(self, path, headers=None):
"""Perform a GET request for the given path."""
if headers is None:
headers = {}
self._connection.request('GET', path, None, headers)
return self._connection.getresponse()
def _getJson(self, path):
"""Perform a GET request and decode the result as JSON."""
self._connection.request('GET', path)
response = self._connection.getresponse()
self.assertEqual(response.status, 200)
data = response.read()
if response.getheader('Content-Encoding') == 'gzip':
data = gzip.GzipFile('', 'rb', 9, BytesIO(data)).read()
return json.loads(data.decode('utf-8'))
def testBasicStartup(self):
"""Start the server up and then shut it down immediately."""
pass
def testRequestMainPage(self):
"""Navigate to the main page and verify that it returns a 200."""
response = self._get('/')
self.assertEqual(response.status, 200)
def testRequestNonexistentPage(self):
"""Request a page that doesn't exist; it should 404."""
response = self._get('/asdf')
self.assertEqual(response.status, 404)
def testDirectoryTraversal(self):
"""Attempt a directory traversal attack."""
response = self._get('/..' * 30 + '/etc/passwd')
self.assertEqual(response.status, 400)
def testLogdir(self):
"""Test the format of the data/logdir endpoint."""
parsed_object = self._getJson('/data/logdir')
self.assertEqual(parsed_object, {'logdir': self.temp_dir})
def testPluginsListing(self):
"""Test the format of the data/plugins_listing endpoint."""
parsed_object = self._getJson('/data/plugins_listing')
# Plugin foo is active. Plugin bar is not.
self.assertEqual(parsed_object, {'foo': True, 'bar': False})
def testRuns(self):
"""Test the format of the /data/runs endpoint."""
run_json = self._getJson('/data/runs')
# Don't check the actual timestamp since it's time-dependent.
self.assertTrue(
isinstance(run_json['run1']['firstEventTimestamp'], numbers.Number))
del run_json['run1']['firstEventTimestamp']
self.assertEqual(
run_json,
{
'run1': {
'compressedHistograms': ['histogram'],
'scalars': ['simple_values'],
'histograms': ['histogram'],
'images': ['image'],
'audio': ['audio'],
# if only_use_meta_graph, the graph is from the metagraph
'graph': True,
'meta_graph': self._only_use_meta_graph,
'run_metadata': ['test run'],
'tensors': [],
}
})
def testApplicationPaths_getCached(self):
"""Test the format of the /data/runs endpoint."""
for path in ('/',): # TODO(jart): '/app.js' in open source
connection = http_client.HTTPConnection('localhost',
self._server.server_address[1])
connection.request('GET', path)
response = connection.getresponse()
self.assertEqual(response.status, 200, msg=path)
self.assertEqual(
response.getheader('Cache-Control'),
'private, max-age=3600',
msg=path)
connection.close()
def testDataPaths_disableAllCaching(self):
"""Test the format of the /data/runs endpoint."""
for path in ('/data/runs', '/data/logdir',
'/data/scalars?run=run1&tag=simple_values',
'/data/scalars?run=run1&tag=simple_values&format=csv',
'/data/images?run=run1&tag=image',
'/data/individualImage?run=run1&tag=image&index=0',
'/data/audio?run=run1&tag=audio',
'/data/run_metadata?run=run1&tag=test%20run'):
connection = http_client.HTTPConnection('localhost',
self._server.server_address[1])
connection.request('GET', path)
response = connection.getresponse()
self.assertEqual(response.status, 200, msg=path)
self.assertEqual(response.getheader('Expires'), '0', msg=path)
response.read()
connection.close()
def testHistograms(self):
"""Test the format of /data/histograms."""
self.assertEqual(
self._getJson('/data/histograms?tag=histogram&run=run1'),
[[0, 0, [0, 2.0, 3.0, 6.0, 5.0, [0.0, 1.0, 2.0], [1.0, 1.0, 1.0]]]])
def testImages(self):
"""Test listing images and retrieving an individual image."""
image_json = self._getJson('/data/images?tag=image&run=run1')
image_query = image_json[0]['query']
# We don't care about the format of the image query.
del image_json[0]['query']
self.assertEqual(image_json, [{
'wall_time': 0,
'step': 0,
'height': 1,
'width': 1
}])
response = self._get('/data/individualImage?%s' % image_query)
self.assertEqual(response.status, 200)
def testAudio(self):
"""Test listing audio and retrieving an individual audio clip."""
audio_json = self._getJson('/data/audio?tag=audio&run=run1')
audio_query = audio_json[0]['query']
# We don't care about the format of the audio query.
del audio_json[0]['query']
self.assertEqual(audio_json, [{
'wall_time': 0,
'step': 0,
'content_type': 'audio/wav'
}])
response = self._get('/data/individualAudio?%s' % audio_query)
self.assertEqual(response.status, 200)
def testGraph(self):
"""Test retrieving the graph definition."""
response = self._get('/data/graph?run=run1&limit_attr_size=1024'
'&large_attrs_key=_very_large_attrs')
self.assertEqual(response.status, 200)
graph_pbtxt = response.read()
# Parse the graph from pbtxt into a graph message.
graph = graph_pb2.GraphDef()
graph = text_format.Parse(graph_pbtxt, graph)
self.assertEqual(len(graph.node), 2)
self.assertEqual(graph.node[0].name, 'a')
self.assertEqual(graph.node[1].name, 'b')
# Make sure the second node has an attribute that was filtered out because
# it was too large and was added to the "too large" attributes list.
self.assertEqual(list(graph.node[1].attr.keys()), ['_very_large_attrs'])
self.assertEqual(graph.node[1].attr['_very_large_attrs'].list.s,
[b'very_large_attr'])
def testAcceptGzip_compressesResponse(self):
response = self._get('/data/graph?run=run1&limit_attr_size=1024'
'&large_attrs_key=_very_large_attrs',
{'Accept-Encoding': 'gzip'})
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader('Content-Encoding'), 'gzip')
pbtxt = gzip.GzipFile('', 'rb', 9, BytesIO(response.read())).read()
graph = text_format.Parse(pbtxt, graph_pb2.GraphDef())
self.assertEqual(len(graph.node), 2)
def testAcceptAnyEncoding_compressesResponse(self):
response = self._get('/data/graph?run=run1&limit_attr_size=1024'
'&large_attrs_key=_very_large_attrs',
{'Accept-Encoding': '*'})
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader('Content-Encoding'), 'gzip')
pbtxt = gzip.GzipFile('', 'rb', 9, BytesIO(response.read())).read()
graph = text_format.Parse(pbtxt, graph_pb2.GraphDef())
self.assertEqual(len(graph.node), 2)
def testAcceptDoodleEncoding_doesNotCompressResponse(self):
response = self._get('/data/graph?run=run1&limit_attr_size=1024'
'&large_attrs_key=_very_large_attrs',
{'Accept-Encoding': 'doodle'})
self.assertEqual(response.status, 200)
self.assertIsNone(response.getheader('Content-Encoding'))
graph = text_format.Parse(response.read(), graph_pb2.GraphDef())
self.assertEqual(len(graph.node), 2)
def testAcceptGzip_doesNotCompressImage(self):
response = self._get('/data/individualImage?run=run1&tag=image&index=0',
{'Accept-Encoding': 'gzip'})
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader('Content-Encoding'), None)
def testRunMetadata(self):
"""Test retrieving the run metadata information."""
response = self._get('/data/run_metadata?run=run1&tag=test%20run')
self.assertEqual(response.status, 200)
run_metadata_pbtxt = response.read()
# Parse from pbtxt into a message.
run_metadata = config_pb2.RunMetadata()
text_format.Parse(run_metadata_pbtxt, run_metadata)
self.assertEqual(len(run_metadata.step_stats.dev_stats), 1)
self.assertEqual(run_metadata.step_stats.dev_stats[0].device, 'test device')
def _GenerateTestData(self):
"""Generates the test data directory.
The test data has a single run named run1 which contains:
- a histogram
- an image at timestamp and step 0
- scalar events containing the value i at step 10 * i and wall time
        100 * i, for i in [1, _SCALAR_COUNT].
- a graph definition
Returns:
temp_dir: The directory the test data is generated under.
"""
temp_dir = tempfile.mkdtemp(prefix=self.get_temp_dir())
self.addCleanup(shutil.rmtree, temp_dir)
run1_path = os.path.join(temp_dir, 'run1')
os.makedirs(run1_path)
writer = writer_lib.FileWriter(run1_path)
histogram_value = summary_pb2.HistogramProto(
min=0,
max=2,
num=3,
sum=6,
sum_squares=5,
bucket_limit=[0, 1, 2],
bucket=[1, 1, 1])
# Add a simple graph event.
graph_def = graph_pb2.GraphDef()
node1 = graph_def.node.add()
node1.name = 'a'
node2 = graph_def.node.add()
node2.name = 'b'
node2.attr['very_large_attr'].s = b'a' * 2048 # 2 KB attribute
meta_graph_def = meta_graph_pb2.MetaGraphDef(graph_def=graph_def)
if self._only_use_meta_graph:
writer.add_meta_graph(meta_graph_def)
else:
writer.add_graph(graph_def)
# Add a simple run metadata event.
run_metadata = config_pb2.RunMetadata()
device_stats = run_metadata.step_stats.dev_stats.add()
device_stats.device = 'test device'
writer.add_run_metadata(run_metadata, 'test run')
# 1x1 transparent GIF.
encoded_image = base64.b64decode(
'R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7')
image_value = summary_pb2.Summary.Image(
height=1, width=1, colorspace=1, encoded_image_string=encoded_image)
audio_value = summary_pb2.Summary.Audio(
sample_rate=44100,
length_frames=22050,
num_channels=2,
encoded_audio_string=b'',
content_type='audio/wav')
writer.add_event(
event_pb2.Event(
wall_time=0,
step=0,
summary=summary_pb2.Summary(value=[
summary_pb2.Summary.Value(
tag='histogram', histo=histogram_value),
summary_pb2.Summary.Value(
tag='image', image=image_value), summary_pb2.Summary.Value(
tag='audio', audio=audio_value)
])))
    # Write _SCALAR_COUNT simple values.
for i in xrange(1, self._SCALAR_COUNT + 1):
writer.add_event(
event_pb2.Event(
# We use different values for wall time, step, and the value so we
# can tell them apart.
wall_time=100 * i,
step=10 * i,
summary=summary_pb2.Summary(value=[
summary_pb2.Summary.Value(
tag='simple_values', simple_value=i)
])))
writer.flush()
writer.close()
return temp_dir
class TensorboardServerUsingMetagraphOnlyTest(TensorboardServerTest):
# Tests new ability to use only the MetaGraphDef
_only_use_meta_graph = True # Server data contains only a MetaGraphDef
class ParseEventFilesSpecTest(test.TestCase):
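  # As the tests below demonstrate, the logdir spec is a comma-separated list
  # of [run_name:]path entries; a colon appearing before the first slash is
  # treated as a run name, and GCS/HDFS paths are passed through unmodified.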
def testRunName(self):
logdir = 'lol:/cat'
expected = {'/cat': 'lol'}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testPathWithColonThatComesAfterASlash_isNotConsideredARunName(self):
logdir = '/lol:/cat'
expected = {'/lol:/cat': None}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testMultipleDirectories(self):
logdir = '/a,/b'
expected = {'/a': None, '/b': None}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testNormalizesPaths(self):
logdir = '/lol/.//cat/../cat'
expected = {'/lol/cat': None}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testAbsolutifies(self):
logdir = 'lol/cat'
expected = {os.path.realpath('lol/cat'): None}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testRespectsGCSPath(self):
logdir = 'gs://foo/path'
expected = {'gs://foo/path': None}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testRespectsHDFSPath(self):
logdir = 'hdfs://foo/path'
expected = {'hdfs://foo/path': None}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testDoesNotExpandUserInGCSPath(self):
logdir = 'gs://~/foo/path'
expected = {'gs://~/foo/path': None}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testDoesNotNormalizeGCSPath(self):
logdir = 'gs://foo/./path//..'
expected = {'gs://foo/./path//..': None}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
def testRunNameWithGCSPath(self):
logdir = 'lol:gs://foo/path'
expected = {'gs://foo/path': 'lol'}
self.assertEqual(application.parse_event_files_spec(logdir), expected)
class TensorBoardAssetsTest(test.TestCase):
def testTagFound(self):
tag = application.get_tensorboard_tag()
self.assertTrue(tag)
app = application.standard_tensorboard_wsgi('', True, 60)
self.assertEqual(app.tag, tag)
class TensorboardSimpleServerConstructionTest(test.TestCase):
"""Tests that the default HTTP server is constructed without error.
Mostly useful for IPv4/IPv6 testing. This test should run with only IPv4, only
IPv6, and both IPv4 and IPv6 enabled.
"""
class _StubApplication(object):
tag = ''
def testMakeServerBlankHost(self):
# Test that we can bind to all interfaces without throwing an error
server, url = tensorboard.make_simple_server(
self._StubApplication(),
host='',
port=0) # Grab any available port
self.assertTrue(server)
self.assertTrue(url)
def testSpecifiedHost(self):
one_passed = False
try:
_, url = tensorboard.make_simple_server(
self._StubApplication(),
host='127.0.0.1',
port=0)
self.assertStartsWith(actual=url, expected_start='http://127.0.0.1:')
one_passed = True
except socket.error:
# IPv4 is not supported
pass
try:
_, url = tensorboard.make_simple_server(
self._StubApplication(),
host='::1',
port=0)
self.assertStartsWith(actual=url, expected_start='http://[::1]:')
one_passed = True
except socket.error:
# IPv6 is not supported
pass
self.assertTrue(one_passed) # We expect either IPv4 or IPv6 to be supported
class TensorBoardApplicationConstructionTest(test.TestCase):
def testExceptions(self):
logdir = '/fake/foo'
multiplexer = event_multiplexer.EventMultiplexer()
# Fails if there is an unnamed plugin
with self.assertRaises(ValueError):
# This plugin lacks a name.
plugins = [FakePlugin(plugin_name=None, is_active_value=True)]
application.TensorBoardWSGIApp(logdir, plugins, multiplexer, 0)
# Fails if there are two plugins with same name
with self.assertRaises(ValueError):
plugins = [
FakePlugin(plugin_name='foo', is_active_value=True),
FakePlugin(plugin_name='foo', is_active_value=True),
]
application.TensorBoardWSGIApp(logdir, plugins, multiplexer, 0)
if __name__ == '__main__':
test.main()
|
{
"content_hash": "bb9e4b832326b202bebbfbf2612d4710",
"timestamp": "",
"source": "github",
"line_count": 535,
"max_line_length": 80,
"avg_line_length": 36.106542056074765,
"alnum_prop": 0.6493244292592018,
"repo_name": "abhitopia/tensorflow",
"id": "002709cd5b09ca4a2232b067af8fe9ed29573712",
"size": "20006",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/tensorboard/backend/application_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7481"
},
{
"name": "C",
"bytes": "177254"
},
{
"name": "C++",
"bytes": "22804170"
},
{
"name": "CMake",
"bytes": "140337"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "794578"
},
{
"name": "HTML",
"bytes": "593171"
},
{
"name": "Java",
"bytes": "286562"
},
{
"name": "JavaScript",
"bytes": "13906"
},
{
"name": "Jupyter Notebook",
"bytes": "1833654"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37240"
},
{
"name": "Objective-C",
"bytes": "7037"
},
{
"name": "Objective-C++",
"bytes": "64166"
},
{
"name": "Protocol Buffer",
"bytes": "209604"
},
{
"name": "Python",
"bytes": "20006785"
},
{
"name": "Shell",
"bytes": "331908"
},
{
"name": "TypeScript",
"bytes": "789019"
}
],
"symlink_target": ""
}
|
from .rpmsection import Section
class RpmPrep(Section):
'''
Try to simplify to %setup -q when possible.
Replace %patch with %patch0
'''
def add(self, line):
line = self._complete_cleanup(line)
if not self.minimal:
line = self._cleanup_setup(line)
line = self._prepare_patch(line)
Section.add(self, line)
def _cleanup_setup(self, line):
"""
        Remove the useless options from the %setup line
"""
# NOTE: not using regexp as this covers 99% cases for now
if line.startswith('%setup'):
line = line.replace(' -qn', ' -q -n')
line = line.replace(' -q', '')
line = self.reg.re_setup.sub(' ', line)
line = self.strip_useless_spaces(line)
line = line.replace('%setup', '%setup -q')
return line
def _prepare_patch(self, line):
"""
Convert patchlines to something pretty
"""
# -p0 is default
line = line.replace('-p0', '')
# %patch0 is desired
if (line.startswith('%patch ') or line == '%patch') and '-P' not in line:
line = line.replace('%patch', '%patch0')
# convert the %patch -P 50 -p10 to %patch50 -p10
        # this applies only if there is ONE -P on the line, not multiple ones
if self.reg.re_patch_prep.match(line):
match = self.reg.re_patch_prep.match(line)
line = self.strip_useless_spaces('%%patch%s %s %s' % (match.group(2), match.group(1), match.group(3)))
return line
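        # Illustrative example (hypothetical input, not from the original
        # source): given '%patch -P 50 -p10', re_patch_prep is expected to
        # capture the option '-p10' and the number '50', so the line is
        # rewritten to '%patch50 -p10'.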
|
{
"content_hash": "98570d4a1680d3017ee347eb5d331321",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 114,
"avg_line_length": 32.895833333333336,
"alnum_prop": 0.5465484483850538,
"repo_name": "plusky/spec-cleaner",
"id": "2551da553676dab80d7919ef338525e86d2e1ba9",
"size": "1618",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec_cleaner/rpmprep.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "4027"
},
{
"name": "Makefile",
"bytes": "932"
},
{
"name": "Perl",
"bytes": "442"
},
{
"name": "Python",
"bytes": "102220"
},
{
"name": "Ruby",
"bytes": "952"
},
{
"name": "Shell",
"bytes": "5740"
}
],
"symlink_target": ""
}
|
import os
import sys
import time
import json
import ephem
import random
import socket
import httplib
import httplib2
import datetime
import calendar
from timerasp import gmail, flickr
from apiclient.discovery import build
from oauth2client.file import Storage
from apiclient.errors import HttpError
from apiclient.http import MediaFileUpload
from oauth2client.tools import argparser, run_flow
from oauth2client.client import flow_from_clientsecrets
# ephem, httplib2, flickrapi, exifread, poster, apiclient, urllib3
# Explicitly tell the underlying HTTP transport library not to retry, since
# we are handling retry logic ourselves.
httplib2.RETRIES = 1
# Maximum number of times to retry before giving up.
MAX_RETRIES = 10
# Always retry when these exceptions are raised.
RETRIABLE_EXCEPTIONS = (httplib2.HttpLib2Error, IOError, httplib.NotConnected,
httplib.IncompleteRead, httplib.ImproperConnectionState,
httplib.CannotSendRequest, httplib.CannotSendHeader,
httplib.ResponseNotReady, httplib.BadStatusLine)
# Always retry when an apiclient.errors.HttpError with one of these status
# codes is raised.
RETRIABLE_STATUS_CODES = [500, 502, 503, 504]
# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret. You can acquire an OAuth 2.0 client ID and client secret from
# the Google Developers Console at
# https://cloud.google.com/console.
# Please ensure that you have enabled the YouTube Data API for your project.
# For more information about using OAuth2 to access the YouTube Data API, see:
# https://developers.google.com/youtube/v3/guides/authentication
# For more information about the client_secrets.json file format, see:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
CLIENT_SECRETS_FILE = os.path.expanduser("~/.limited/client_secrets.json")
# This OAuth 2.0 access scope allows an application to upload files to the
# authenticated user's YouTube channel, but doesn't allow other types of access.
YOUTUBE_UPLOAD_SCOPE = "https://www.googleapis.com/auth/youtube.upload"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# This variable defines a message to display if the CLIENT_SECRETS_FILE is
# missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the Developers Console
https://cloud.google.com/console
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
CLIENT_SECRETS_FILE))
VALID_PRIVACY_STATUSES = ("public", "private", "unlisted")
H264_FILENAME = os.path.expanduser("~/tmp/timelapse/todays_video.h264")
MP4_FILENAME = os.path.expanduser("~/tmp/timelapse/todays_video.mp4")
DAILY_FILENAME = os.path.expanduser("~/tmp/timelapse/%d.mp4"%calendar.timegm(time.gmtime()))
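# DAILY_FILENAME embeds the current UTC epoch time so each day's archive gets
# a unique name, e.g. ~/tmp/timelapse/1400000000.mp4 (illustrative value).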
def get_authenticated_service(args):
flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
scope=YOUTUBE_UPLOAD_SCOPE,
message=MISSING_CLIENT_SECRETS_MESSAGE)
storage = Storage(os.path.expanduser('~/.limited/youtube-oauth2.json'))
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = run_flow(flow, storage, args)
return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
http=credentials.authorize(httplib2.Http()))
def initialize_upload(youtube, options, title, description, private=True):
    if private:
        privacy = VALID_PRIVACY_STATUSES[1]
    else:
        privacy = VALID_PRIVACY_STATUSES[0]
body=dict(
snippet=dict(
title=title,
description=description,
tags="timerasp,timelapse,maryland,baltimore,JHU,raspberry pi",
categoryId="1"
),
status=dict(
privacyStatus=privacy,
)
)
# Call the API's videos.insert method to create and upload the video.
insert_request = youtube.videos().insert(
part=",".join(body.keys()),
body=body,
# The chunksize parameter specifies the size of each chunk of data, in
# bytes, that will be uploaded at a time. Set a higher value for
# reliable connections as fewer chunks lead to faster uploads. Set a lower
# value for better recovery on less reliable connections.
#
# Setting "chunksize" equal to -1 in the code below means that the entire
# file will be uploaded in a single HTTP request. (If the upload fails,
# it will still be retried where it left off.) This is usually a best
# practice, but if you're using Python older than 2.6 or if you're
# running on App Engine, you should set the chunksize to something like
# 1024 * 1024 (1 megabyte).
media_body=MediaFileUpload(MP4_FILENAME, chunksize=-1, resumable=True)
)
return resumable_upload(insert_request)
# This method implements an exponential backoff strategy to resume a
# failed upload.
def resumable_upload(insert_request):
response = None
error = None
retry = 0
while response is None:
try:
status, response = insert_request.next_chunk()
if 'id' in response:
print "Video id '%s' was successfully uploaded." % response['id']
return response['id']
else:
# exit("The upload failed with an unexpected response: %s" % response)
raise ValueError("The upload failed with an unexpected response: %s" % response)
except HttpError, e:
if e.resp.status in RETRIABLE_STATUS_CODES:
error = "A retriable HTTP error %d occurred:\n%s" % (e.resp.status,
e.content)
else:
raise
except RETRIABLE_EXCEPTIONS, e:
error = "A retriable error occurred: %s" % e
if error is not None:
print error
retry += 1
if retry > MAX_RETRIES:
# exit("No longer attempting to retry.")
raise ValueError('No longer attempting to retry.')
max_sleep = 2 ** retry
sleep_seconds = random.random() * max_sleep
print "Sleeping %f seconds and then retrying..." % sleep_seconds
time.sleep(sleep_seconds)
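# Worst-case backoff for the loop above: each attempt sleeps a random
# fraction of 2**retry seconds, i.e. caps of 2, 4, 8, ... seconds:
#   >>> [2 ** retry for retry in range(1, MAX_RETRIES + 1)]
#   [2, 4, 8, 16, 32, 64, 128, 256, 512, 1024]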
openmax_dir = os.path.expanduser('~/tmp/timelapse/rpi-openmax-demos-master')
def utc_mktime(utc_tuple):
"""Returns number of seconds elapsed since epoch
Note that no timezone are taken into consideration.
utc tuple must be: (year, month, day, hour, minute, second)
"""
if len(utc_tuple) == 6:
utc_tuple += (0, 0, 0)
return time.mktime(utc_tuple) - time.mktime((1970, 1, 1, 0, 0, 0, 0, 0, 0))
def datetime_to_timestamp(dt):
"""Converts a datetime object to UTC timestamp"""
return int(utc_mktime(dt.timetuple()))
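# Commented example (kept inert so the script stays import-safe); results can
# shift by an hour in locales where the two mktime() calls in utc_mktime()
# disagree about DST:
#   >>> datetime_to_timestamp(datetime.datetime(1970, 1, 1))
#   0
#   >>> datetime_to_timestamp(datetime.datetime(2014, 6, 1))
#   1401580800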
# if __name__ == '__main__':
# # exiftool exifread flickrapi
# from scintillate.api import Upload
# flickr = Upload()
# now = datetime.datetime.today()
# flickr.push(filename='output.mp4',
# title='{:d} : {}'.format(datetime_to_timestamp(now), now.strftime("%Y-%m-%d")),
# description='Raspberry Pi IR camera',
# tags =['IR','timelapse', 'movie'],
# ispublic=True)
def getarg(item):
ISITEM = (item in sys.argv)
if ISITEM:
sys.argv.pop(sys.argv.index(item))
return ISITEM
if __name__ == '__main__':
START = getarg('start')
DEBUG = getarg('debug')
OFFLINE = getarg('offline')
HOUR = getarg('hour')
FRAME = getarg('frame')
NIGHT = getarg('night')
NORUN = getarg('norun')
private = False
if NIGHT:
H264_FILENAME = H264_FILENAME.replace('todays','night')
MP4_FILENAME = MP4_FILENAME.replace('todays','night')
# private = True
# Parse everything else
sys.argv.append('--noauth_local_webserver')
args = argparser.parse_args()
now = datetime.datetime.now()
here = ephem.Observer()
here.lon, here.lat = '-76.623434', '39.331628'
sunrise = here.next_rising(ephem.Sun())
sunset = here.next_setting(ephem.Sun())
time_before = datetime.timedelta(minutes=60)
time_after = datetime.timedelta(minutes=60)
sunrise = ephem.localtime(sunrise) - time_before
if START:
sunrise = now
else:
if sunrise > ephem.localtime(sunset):
sunrise = now
sunset = ephem.localtime(sunset) + time_after
if HOUR:
sunset = sunrise + datetime.timedelta(minutes=60)
if NIGHT:
sunrise = datetime.datetime(now.year, now.month, now.day, 22)
sunset = sunrise + datetime.timedelta(hours=6)
if sunrise < now:
sunrise = now
video_length = (sunset - sunrise).total_seconds() * 1000
total_frames = 5 * 60 * 24
if NIGHT:
total_frames = 2 * 60 * 24
frame_time = video_length / total_frames
if FRAME:
frame_time = 12 * 1000
# ensure some sort of sanity
min_frame_time = int(5.0*1000)
if frame_time < min_frame_time:
frame_time = min_frame_time
sleep_time = (sunrise - now).total_seconds()
waveband = 'IR'
if NIGHT:
waveband = 'Night IR'
title = waveband+' '+datetime.datetime.today().strftime("%Y-%m-%d")
hostname = socket.gethostname()
description='''{waveband} time-lapse video from a Raspberry PI
Hostname: {hostname}
Run Time: {runtime:d}
Sunrise: {sunrise}
Sunset: {sunset}
delta: {frame_time:0.2f} seconds
'''.format(waveband=waveband, sunrise=sunrise, sunset=sunset, frame_time=frame_time/1000,
hostname=hostname, runtime=calendar.timegm(time.gmtime()))
if NORUN:
print(" Video title: {}".format(title))
print(" Video Description: {}".format(description))
sys.exit()
output = dict(
sunrise='%s'%sunrise,
sunset='%s'%sunset,
runtime=calendar.timegm(time.gmtime()),
frame_time=frame_time/1000.0,
video_length=video_length,
sleep_time=sleep_time,
title=title,
description=description,
hostname=hostname,
)
json.dump(output, open(DAILY_FILENAME.replace('.mp4','.json'),'a'), indent=2)
if DEBUG:
sleep_time = 2
        frame_time = 3 * 1000 # every 3 seconds -- simple and quick (frame_time is in milliseconds)
video_length = 16*1000 # for 13 seconds
if NIGHT:
extra = '-ss 10000000 -ISO 1600'
else:
# extra = '-awb off -ex verylong'
extra = '-awb on -ex verylong'
# backup if something went wrong.
# if os.path.exists(H264_FILENAME):
# tmp = '.{:d}.h264'.format(calendar.timegm(time.gmtime()))
# cmd = 'rsync -ravpP {} {}'.format(H264_FILENAME, H264_FILENAME.replace('.h264',tmp))
# os.system(cmd)
#-awb auto -ex verylong
RECORD_COMMAND = "raspiyuv %(extra)s -h 1072 -w 1920 -t %(length)d -tl %(slice)d -o - | %(dir)s/rpi-encode-yuv > %(file)s"
cmd = RECORD_COMMAND % {"length": video_length,
"slice": frame_time,
"file": H264_FILENAME,
'dir':openmax_dir,
"extra":extra}
CONVERT_COMMAND = "MP4Box -fps 24 -add %(in_file)s %(out_file)s"
cmd2 = CONVERT_COMMAND % {"in_file": H264_FILENAME, "out_file": MP4_FILENAME}
# RSYNC_COMMAND = 'rsync -ravpP %(in_file)s %(out_file)s'
# cmd3 = RSYNC_COMMAND % {'in_file':MP4_FILENAME, 'out_file':DAILY_FILENAME}
print('Record Command:\n {}'.format(cmd))
print(" Sleeping for %d seconds" % sleep_time)
print(" Video Description: {}".format(description))
if not OFFLINE:
try:
gmail.send_email(hostname+' : Start in {} : {:0.1f}hr'.format(sleep_time,sleep_time/3600.),
'Time-lapse \n {}'.format(description))
        except Exception:
            print 'Failed to email'
time.sleep(sleep_time)
if not OFFLINE:
try:
gmail.send_email(hostname+' : Time-lapse Start!',
'Starting time-lapse \n {}'.format(description))
        except Exception:
            print 'failed to email'
os.system(cmd)
description += '\n Captured Time: {}'.format(datetime.datetime.now())
os.system(cmd2)
# os.system(cmd3)
if DEBUG:
sys.exit()
if not OFFLINE:
        if not os.path.exists(MP4_FILENAME):
            gmail.send_email(hostname+' : Time-lapse Failure', 'Failed to find Mp4')
            sys.exit("No video to upload")
# youtube upload
try:
youtube = get_authenticated_service(args)
youtube_id = initialize_upload(youtube, args, title, description, private)
if youtube_id is not None:
description += '\n Youtube: http://youtu.be/{} (higher resolution and nicer playback)'.format(youtube_id)
except HttpError as e:
print "An HTTP error %d occurred:\n%s" % (e.resp.status, e.content)
gmail.send_email(hostname+' : Time-lapse Failure',
'Failed to upload to youtube:\n{}'.format(e))
# flickr upload
try:
print flickr.upload(MP4_FILENAME, title, description,
'"Raspberry Pi" IR timelapse timerasp JHU Baltimore Maryland',
public=True)
except Exception as e:
gmail.send_email(hostname+' : Time-lapse Failure',
'Failed to upload to flickr:\n{}'.format(e))
# clean up
os.remove(H264_FILENAME)
# os.remove(MP4_FILENAME)
if NIGHT:
os.rename(MP4_FILENAME, MP4_FILENAME.replace('night','previous_night'))
else:
os.rename(MP4_FILENAME, MP4_FILENAME.replace('todays','previous'))
if not OFFLINE:
gmail.send_email(hostname+' : Time-lapse Finished!','Everything is good? \n\n{}'.format(description))
|
{
"content_hash": "c011afa8dc0edd3d0b4dfb264682453d",
"timestamp": "",
"source": "github",
"line_count": 422,
"max_line_length": 124,
"avg_line_length": 32.44549763033175,
"alnum_prop": 0.6570260005842828,
"repo_name": "ajmendez/timerasp",
"id": "d1d3381ec242938a8194fec01807a07a6e80d195",
"size": "13711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/time_lapse_upload.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1455324"
},
{
"name": "Python",
"bytes": "110306"
},
{
"name": "Shell",
"bytes": "4477"
}
],
"symlink_target": ""
}
|
import json
import math
import pdb
import random
import numpy as np
import sys
import time
import Orange
sys.path.extend(['.', '..'])
from itertools import chain, repeat
from collections import defaultdict
from rtree.index import Index as RTree
from rtree.index import Property as RProp
from operator import mul, and_, or_
from sklearn.neighbors import NearestNeighbors, DistanceMetric
from ..util import rm_attr_from_domain, get_logger
from ..util.table import *
from ..bottomup.bounding_box import *
from ..bottomup.cluster import *
_logger = get_logger()
class FeatureMapper(object):
"""
For discrete features
Maps discrete attribute values to a numerical ID
"""
def __init__(self, domain, cont_dists):
self.feature_mappers = {}
self.cont_dists = cont_dists
self.ranges = { col: cont_dists[col].max - cont_dists[col].min for col in cont_dists.keys() if cont_dists[col] }
for attr in domain:
if attr.var_type == Orange.feature.Type.Discrete:
self.feature_mappers[attr.name] = self.get_feature_mapper(attr)
def get_feature_mapper(self, attr):
name = attr.name
val2idx = {v:idx for idx,v in enumerate(attr.values)}
return val2idx
def __attrs__(self):
return self.feature_mappers.keys()
attrs = property(__attrs__)
def nvals(self, name):
return len(self.feature_mappers.get(name, []))
def __call__(self, cluster, name):
vals = cluster.discretes.get(name, None)
mapping = self.feature_mappers.get(name, {})
if vals is None:
return np.ones(len(mapping))
vect = np.zeros(len(mapping)).astype(int)
for v in vals:
vect[v] = 1
return vect
class AdjacencyGraph(object):
"""
Stores versions of adjacency graphs and manages insert buffering
"""
def __init__(self, clusters, domain, cont_dists):
"""
Args
domain: orange table domain
"""
self.feature_mapper = FeatureMapper(domain, cont_dists)
self.versions = []
self.insert_bufs = defaultdict(set)
if clusters:
self.insert(clusters)
self.sync()
def sync(self):
"""
apply all of the pending inserts and deletes to the existing
versions
"""
for idx, v in enumerate(self.versions):
buf = self.insert_bufs[idx]
if not buf: continue
buf.update(v.clusters)
self.versions[idx] = AdjacencyVersion(self.feature_mapper)
self.versions[idx].bulk_init(list(buf))
self.insert_bufs[idx] = set()
    def new_version(self, clusters=None):
        clusters = clusters or []
        v = AdjacencyVersion(self.feature_mapper)
        if clusters:
            v.bulk_init(list(clusters))
        self.versions.append(v)
def ensure_version(self, version):
if version is None: return
while len(self.versions) <= version:
self.new_version()
return self.versions[version]
def __len__(self):
return len(self.versions)
def insert(self, clusters, version=0):
self.ensure_version(version)
if not isinstance(clusters, list):
clusters = list(clusters)
v = self.versions[version]
clusters = [c for c in clusters if not v.contains(c)]
self.insert_bufs[version].update(clusters)
return len(clusters)
def remove(self, clusters, version=0):
self.ensure_version(version)
if not isinstance(clusters, list):
clusters = list(clusters)
v = self.versions[version]
rms = [v.remove(cluster) for cluster in clusters]
return rms
def neighbors(self, cluster, version=None):
self.ensure_version(version)
if version is not None:
return self.versions[version].neighbors(cluster)
ret = []
for v in self.versions:
ret.extend(v.neighbors(cluster))
return ret
class AdjacencyVersion(object):
def __init__(self, feature_mapper):
#self.partitions_complete = partitions_complete
self.cid = 0
self.disc_idxs = {}
self.feature_mapper = feature_mapper
self.radius = .15
self.metric = 'hamming'
self._rtree = None # internal datastructure
self._ndim = None
self.clusters = []
self.id2c = dict()
self.c2id = dict()
def to_json(self):
data = {
'clusters' : [c and c.__dict__ or None for c in self.clusters],
'id2c' : [(key, c.__dict__) for key, c in self.id2c.items()],
'c2id' : [(c.__dict__, val) for c, val in self.c2id.items()],
'cid' : self.cid,
'_ndim' : self._ndim,
'_rtreename' : 'BLAH'
}
return json.dumps(data)
def from_json(self, encoded):
data = json.loads(encoded)
self.clusters = [c and Cluster.from_dict(c) or None for c in data['clusters']]
self.id2c = dict([(key, Cluster.from_dict(val)) for key, val in data['id2c']])
self.c2id = dict([(Cluster.from_dict(key), val) for key, val in data['c2id']])
self.cid = data['cid']
self._ndim = data['_ndim']
self._rtree = None
def setup_rtree(self, ndim, clusters=None):
if self._rtree:
return self._rtree
self._ndim = ndim
if not ndim:
class k(object):
def __init__(self, graph):
self.graph = graph
def insert(self, *args, **kwargs):
pass
def delete(self, *args, **kwargs):
pass
def intersection(self, *args, **kwargs):
return xrange(len(self.graph.clusters))
self._rtree = k(self)
return self._rtree
p = RProp()
p.dimension = max(2, ndim)
p.dat_extension = 'data'
p.idx_extension = 'index'
if clusters:
gen_func = ((i, self.bbox_rtree(c, enlarge=0.005), None) for i, c in enumerate(clusters))
self._rtree = RTree(gen_func, properties=p)
else:
self._rtree = RTree(properties=p)
return self._rtree
def bbox_rtree(self, cluster, enlarge=0.):
cols = cluster.cols
bbox = cluster.bbox
lower, higher = map(list, bbox)
if self._ndim == 1:
lower.append(0)
higher.append(1)
if enlarge != 0:
for idx, col in enumerate(cols):
rng = enlarge * self.feature_mapper.ranges[col]
lower[idx] -= rng
higher[idx] += rng
bbox = lower + higher
return bbox
def insert_rtree(self, idx, cluster):
self.setup_rtree(len(cluster.bbox[0]))
self._rtree.insert(idx,self.bbox_rtree(cluster))
return cluster
def remove_rtree(self, idx, cluster):
self.setup_rtree(len(cluster.bbox[0]))
self._rtree.delete(idx, self.bbox_rtree(cluster))
return cluster
    def search_rtree(self, cluster):
        self.setup_rtree(len(cluster.bbox[0]))
        bbox = self.bbox_rtree(cluster, enlarge=0.01)
        # return raw rtree ids; callers map them back to clusters
        return self._rtree.intersection(bbox)
def bulk_init(self, clusters):
if not clusters: return
self.setup_rtree(len(clusters[0].bbox[0]), clusters)
self.clusters = clusters
for cid, c in enumerate(clusters):
self.id2c[cid] = c
self.c2id[c] = cid
for dim in self.feature_mapper.attrs:
Xs = []
for cidx, c in enumerate(clusters):
Xs.append(self.feature_mapper(c, dim))
idx = NearestNeighbors(
radius=self.radius,
algorithm='ball_tree',
metric=self.metric
)
self.disc_idxs[dim] = idx
self.disc_idxs[dim].fit(np.array(Xs))
def contains(self, cluster):
return cluster in self.c2id
def remove(self, cluster):
if cluster in self.c2id:
cid = self.c2id[cluster]
self.remove_rtree(cid, cluster)
del self.c2id[cluster]
del self.id2c[cid]
self.clusters[cid] = None
return True
return False
def neighbors(self, cluster):
ret = None
for name, vals in cluster.discretes.iteritems():
if name not in self.disc_idxs:
return []
vect = self.feature_mapper(cluster, name)
index = self.disc_idxs[name]
dists, idxs = index.radius_neighbors(vect, radius=self.radius)
idxs = set(idxs[0].tolist())
if ret is None:
ret = idxs
else:
ret.intersection_update(idxs)
#ret.update(idxs)
        if ret is not None and not ret: return []
idxs = self.search_rtree(cluster)
if ret is None:
ret = set(idxs)
else:
ret.intersection_update(set(idxs))
return filter(bool, [self.clusters[idx] for idx in ret])
"""
def neighbors(self, cluster):
if not self.partitions_complete:
return filter(bool, self.clusters)
if cluster in self.graph:
return self.graph[cluster]
ret = set()
intersects = self.search_rtree(cluster)
for key in filter(cluster.adjacent, intersects):
if box_completely_contained(key.bbox, cluster.bbox):
continue
ret.update(self.graph[key])
return ret
"""
|
{
"content_hash": "36eb70225f93f21be0ebc4526a5dd625",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 116,
"avg_line_length": 27.639240506329113,
"alnum_prop": 0.62651705976643,
"repo_name": "sirrice/scorpion",
"id": "ed07d18c08c0438db39c2f27e74ddb950ccccfe7",
"size": "8734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scorpion/sigmod/adjgraph.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "389180"
},
{
"name": "R",
"bytes": "3041"
},
{
"name": "Shell",
"bytes": "1322"
}
],
"symlink_target": ""
}
|
from pyx import *
DEFAULT_LINE_HEIGHT = 200
DEFAULT_LINE_SPACE = 200
class TimeLine(object):
def __init__(self, name, start, end):
self.name = name
assert start <= end
self.global_start = start
self.global_end = end
self.lines = list()
self.canvas = canvas.canvas()
self.line_height = DEFAULT_LINE_HEIGHT
self.line_space = DEFAULT_LINE_SPACE
def add_line(self, name):
idx = len(self.lines)
self.lines.append(Line(name))
return idx
def add_instance(self, line, name, start, end, color):
assert line < len(self.lines)
assert start >= self.global_start
assert end <= self.global_end
self.lines[line].add_instance(name, start, end, color)
def add_axis(self, splits):
pass
def write_pdf(self, xpixels):
xscale = (self.global_end - self.global_start)/xpixels
line_offset = 0
        line_size = self.line_height + self.line_space
for line in self.lines:
line.draw(self.canvas, self.global_start, xscale, line_offset, line_size)
line_offset = line_offset + line_size
self.canvas.writePDFfile(self.name)
class Line(object):
def __init__(self, name):
self.name = name
self.instances = list()
def add_instance(self, name, start, end, color):
self.instances.append(LineInstance(name, start, end, color))
def draw(self, canvas, xstart, xscale, ystart, ysize):
for inst in self.instances:
inst.draw(canvas, xstart, xscale, ystart, ysize)
class LineInstance(object):
def __init__(self, name, start, end, color):
self.name = name
self.start = start
self.end = end
self.color = color
    def draw(self, canvas, xstart, xscale, ystart, ysize):
        canvas.fill(path.rect((self.start - xstart)/xscale, ystart,
                              (self.end - self.start)/xscale, ysize),
                    [self.color])
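# Minimal usage sketch (assumes PyX is installed; names, times, and sizes
# below are illustrative only):
if __name__ == '__main__':
    from pyx import color
    tl = TimeLine('demo_timeline', start=0, end=1000)
    row = tl.add_line('tasks')
    tl.add_instance(row, 'task-a', 0, 400, color.rgb.red)
    tl.add_instance(row, 'task-b', 500, 900, color.rgb.blue)
    tl.write_pdf(xpixels=100.0)  # writes demo_timeline.pdf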
|
{
"content_hash": "e9b0a2ab6d2431c4867491809a852dde",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 106,
"avg_line_length": 31.126984126984127,
"alnum_prop": 0.604793472718001,
"repo_name": "StanfordLegion/legion",
"id": "d4bbdc7c47c8a945bcdb3f288dfea91f94442094",
"size": "2571",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "deprecated/tools/spy_timeline.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "335761"
},
{
"name": "C++",
"bytes": "17156793"
},
{
"name": "CMake",
"bytes": "240564"
},
{
"name": "Cuda",
"bytes": "29542"
},
{
"name": "Dockerfile",
"bytes": "612"
},
{
"name": "Fortran",
"bytes": "346250"
},
{
"name": "HTML",
"bytes": "3653"
},
{
"name": "JavaScript",
"bytes": "94778"
},
{
"name": "Makefile",
"bytes": "119231"
},
{
"name": "Perl",
"bytes": "145756"
},
{
"name": "Python",
"bytes": "1661733"
},
{
"name": "Raku",
"bytes": "34306"
},
{
"name": "Rouge",
"bytes": "2303312"
},
{
"name": "Rust",
"bytes": "222951"
},
{
"name": "Shell",
"bytes": "12892"
},
{
"name": "Terra",
"bytes": "1709732"
}
],
"symlink_target": ""
}
|
import numpy as np
from scipy.misc import imread, imresize, toimage
import cv2
from keras.preprocessing.image import img_to_array, load_img
def read_images(img_paths):
"""
    Use the scipy imread function to read each image into a numpy array.
    :param img_paths: Numpy array of image paths to read
    :return: 4d Numpy array containing all the images from img_paths.
    """
imgs = np.empty([len(img_paths), 160, 320, 3])
for i, path in enumerate(img_paths):
imgs[i] = imread(path)
#image = load_img(path, target_size=(160, 320))
#imgs[i] = img_to_array(image)
return imgs
def crop_and_resize(imgs, shape=(32, 16, 3)):
"""
Crop and Resize images to given shape.
"""
height, width, channels = shape
imgs_resized = np.empty([len(imgs), height, width, channels])
for i, img in enumerate(imgs):
cropped = img[55:135, :, :]
imgs_resized[i] = imresize(cropped, shape)
#imgs_resized[i] = cv2.resize(img, (16, 32))
return imgs_resized
def rgb2gray(imgs):
"""
Convert images to grayscale.
"""
return np.mean(imgs, axis=3, keepdims=True)
def rgb2hsv(imgs):
"""
Convert RGB images array into HSV and zero-out all but the V dimension!
"""
hsv_imgs = np.empty_like(imgs)
for i, image in enumerate(imgs):
hsv = cv2.cvtColor(image.astype("uint8"), cv2.COLOR_RGB2HSV)
hsv[:, :, 0] = hsv[:, :, 0] * 0
hsv[:, :, 1] = hsv[:, :, 1] * 0
hsv_imgs[i] = hsv
return hsv_imgs
def normalize(imgs):
"""
    Normalize images to [-1, 1]; zero-centered inputs generally train more
    stably than [0, 1] inputs.
"""
return imgs / (255.0 / 2) - 1
def preprocess(imgs):
"""
Pre-process the images. Note that pre-processing must be applied
for training and predictions.
:param imgs: Numpy array of images
:return: Numpy array of pre-processed images
"""
imgs_processed = crop_and_resize(imgs)
imgs_processed = normalize(imgs_processed)
return imgs_processed
def random_flip(imgs, angles):
"""
Augment the data by randomly flipping some images/angles horizontally.
"""
new_imgs = np.empty_like(imgs)
new_angles = np.empty_like(angles)
for i, (img, angle) in enumerate(zip(imgs, angles)):
        if np.random.rand() > 0.5:  # flip with 50% probability
new_imgs[i] = np.fliplr(img)
new_angles[i] = angle * -1
else:
new_imgs[i] = img
new_angles[i] = angle
return new_imgs, new_angles
def augment_brightness(images):
"""
Randomly adjust brightness of provided images.
:param images: Numpy array of images
    :return: Numpy array of brightness adjusted images
"""
new_imgs = np.empty_like(images)
for i, image in enumerate(images):
        # convert to HSV so that it's easy to adjust brightness
hsv = cv2.cvtColor(image.astype("uint8"), cv2.COLOR_RGB2HSV)
# randomly generate the brightness reduction factor
# Add a constant so that it prevents the image from being completely dark
random_bright = .25+np.random.uniform()
# Apply the brightness reduction to the V channel
hsv[:,:,2] = hsv[:,:,2]*random_bright
        # convert back to RGB
new_imgs[i] = cv2.cvtColor(hsv, cv2.COLOR_HSV2RGB)
return new_imgs
def augment(imgs, angles):
"""
    Perform dynamic image augmentation by randomly adjusting the provided
    images for brightness and flipping horizontally.
:param imgs: Numpy array of images
:param angles: Numpy array of angles
:return: The augmented images and angles as a tuple
"""
augmented_brightness_imgs = augment_brightness(imgs)
imgs_augmented, angles_augmented = random_flip(augmented_brightness_imgs, angles)
return imgs_augmented, angles_augmented
def batch_generator(image_filenames, angles, batch_size):
"""
Generates random batches of the input data by randomly selecting indices into
the provided data; reading the raw images files, augmenting and pre-processing.
    :param image_filenames: Numpy array of image filenames
:param angles: Numpy array of the steering values associated with each image
:param batch_size: The size of each minibatch
:yield: A tuple (images, angles), where both images and angles have batch_size elements.
"""
while True:
indices = np.random.choice(len(image_filenames), batch_size)
batch_imgs_raw, angles_raw = read_images(image_filenames[indices]), angles[indices].astype(float)
batch_imgs, batch_angles = augment(batch_imgs_raw, angles_raw)
batch_imgs = preprocess(batch_imgs)
yield batch_imgs, batch_angles
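# Usage sketch (commented; 'driving_log.csv', its columns, and the Keras 1.x
# fit_generator argument names are hypothetical stand-ins):
#     import pandas as pd
#     log = pd.read_csv('driving_log.csv')
#     gen = batch_generator(log['center'].values, log['steering'].values, 64)
#     model.fit_generator(gen, samples_per_epoch=20000, nb_epoch=5)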
|
{
"content_hash": "f170d42763dba48e76f00c0d9813afc3",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 105,
"avg_line_length": 29.810126582278482,
"alnum_prop": 0.648619957537155,
"repo_name": "js1972/CarND-Behavioral-Cloning",
"id": "9b169b0d00745afa2d8019b7f7609f07895a8539",
"size": "4710",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "956637"
},
{
"name": "Python",
"bytes": "15749"
}
],
"symlink_target": ""
}
|
"""Utilities to support packages."""
from functools import singledispatch as simplegeneric
import importlib
import importlib.util
import importlib.machinery
import os
import os.path
import sys
from types import ModuleType
import warnings
__all__ = [
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
'walk_packages', 'iter_modules', 'get_data',
'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
]
def _get_spec(finder, name):
"""Return the finder-specific module spec."""
# Works with legacy finders.
try:
find_spec = finder.find_spec
except AttributeError:
loader = finder.find_module(name)
if loader is None:
return None
return importlib.util.spec_from_loader(name, loader)
else:
return find_spec(name)
def read_code(stream):
# This helper is needed in order for the PEP 302 emulation to
# correctly handle compiled files
import marshal
magic = stream.read(4)
if (magic != importlib.util.MAGIC_NUMBER
# Issue #29537: handle issue27286 bytecode incompatibility
# See Lib/importlib/_bootstrap_external.py
and magic != importlib.util._BACKCOMPAT_MAGIC_NUMBER):
return None
stream.read(8) # Skip timestamp and size
return marshal.load(stream)
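# Illustrative only -- not part of the public pkgutil API.
def _read_code_demo(source_path):
    """Sketch: byte-compile a module, then load its code object via read_code.
    Assumes the pre-3.7 pyc layout this helper targets (a 4-byte magic number
    followed by 8 bytes of timestamp and size); source_path is any .py file.
    """
    import py_compile
    pyc_path = py_compile.compile(source_path)
    with open(pyc_path, 'rb') as stream:
        return read_code(stream)  # a code object, or None on magic mismatch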
def walk_packages(path=None, prefix='', onerror=None):
"""Yields (module_finder, name, ispkg) for all modules recursively
on path, or, if path is None, all accessible modules.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
Note that this function must import all *packages* (NOT all
modules!) on the given path, in order to access the __path__
attribute to find submodules.
'onerror' is a function which gets called with one argument (the
name of the package which was being imported) if any exception
occurs while trying to import a package. If no onerror function is
supplied, ImportErrors are caught and ignored, while all other
exceptions are propagated, terminating the search.
Examples:
# list all modules python can access
walk_packages()
# list all submodules of ctypes
walk_packages(ctypes.__path__, ctypes.__name__+'.')
"""
def seen(p, m={}):
if p in m:
return True
m[p] = True
for importer, name, ispkg in iter_modules(path, prefix):
yield importer, name, ispkg
if ispkg:
try:
__import__(name)
except ImportError:
if onerror is not None:
onerror(name)
except Exception:
if onerror is not None:
onerror(name)
else:
raise
else:
path = getattr(sys.modules[name], '__path__', None) or []
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
yield from walk_packages(path, name+'.', onerror)
def iter_modules(path=None, prefix=''):
"""Yields (module_finder, name, ispkg) for all submodules on path,
or, if path is None, all top-level modules on sys.path.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
"""
if path is None:
importers = iter_importers()
else:
importers = map(get_importer, path)
yielded = {}
for i in importers:
for name, ispkg in iter_importer_modules(i, prefix):
if name not in yielded:
yielded[name] = 1
yield i, name, ispkg
@simplegeneric
def iter_importer_modules(importer, prefix=''):
if not hasattr(importer, 'iter_modules'):
return []
return importer.iter_modules(prefix)
# Implement a file walker for the normal importlib path hook
def _iter_file_finder_modules(importer, prefix=''):
if importer.path is None or not os.path.isdir(importer.path):
return
yielded = {}
import inspect
try:
filenames = os.listdir(importer.path)
except OSError:
# ignore unreadable directories like import does
filenames = []
filenames.sort() # handle packages before same-named modules
for fn in filenames:
modname = inspect.getmodulename(fn)
if modname=='__init__' or modname in yielded:
continue
path = os.path.join(importer.path, fn)
ispkg = False
if not modname and os.path.isdir(path) and '.' not in fn:
modname = fn
try:
dircontents = os.listdir(path)
except OSError:
# ignore unreadable directories like import does
dircontents = []
for fn in dircontents:
subname = inspect.getmodulename(fn)
if subname=='__init__':
ispkg = True
break
else:
continue # not a package
if modname and '.' not in modname:
yielded[modname] = 1
yield prefix + modname, ispkg
iter_importer_modules.register(
importlib.machinery.FileFinder, _iter_file_finder_modules)
def _import_imp():
global imp
with warnings.catch_warnings():
warnings.simplefilter('ignore', PendingDeprecationWarning)
imp = importlib.import_module('imp')
class ImpImporter:
"""PEP 302 Finder that wraps Python's "classic" import algorithm
ImpImporter(dirname) produces a PEP 302 finder that searches that
directory. ImpImporter(None) produces a PEP 302 finder that searches
the current sys.path, plus any modules that are frozen or built-in.
Note that ImpImporter does not currently support being used by placement
on sys.meta_path.
"""
def __init__(self, path=None):
global imp
warnings.warn("This emulation is deprecated, use 'importlib' instead",
DeprecationWarning)
_import_imp()
self.path = path
def find_module(self, fullname, path=None):
# Note: we ignore 'path' argument since it is only used via meta_path
subname = fullname.split(".")[-1]
if subname != fullname and self.path is None:
return None
if self.path is None:
path = None
else:
path = [os.path.realpath(self.path)]
try:
file, filename, etc = imp.find_module(subname, path)
except ImportError:
return None
return ImpLoader(fullname, file, filename, etc)
def iter_modules(self, prefix=''):
if self.path is None or not os.path.isdir(self.path):
return
yielded = {}
import inspect
try:
filenames = os.listdir(self.path)
except OSError:
# ignore unreadable directories like import does
filenames = []
filenames.sort() # handle packages before same-named modules
for fn in filenames:
modname = inspect.getmodulename(fn)
if modname=='__init__' or modname in yielded:
continue
path = os.path.join(self.path, fn)
ispkg = False
if not modname and os.path.isdir(path) and '.' not in fn:
modname = fn
try:
dircontents = os.listdir(path)
except OSError:
# ignore unreadable directories like import does
dircontents = []
for fn in dircontents:
subname = inspect.getmodulename(fn)
if subname=='__init__':
ispkg = True
break
else:
continue # not a package
if modname and '.' not in modname:
yielded[modname] = 1
yield prefix + modname, ispkg
class ImpLoader:
"""PEP 302 Loader that wraps Python's "classic" import algorithm
"""
code = source = None
def __init__(self, fullname, file, filename, etc):
warnings.warn("This emulation is deprecated, use 'importlib' instead",
DeprecationWarning)
_import_imp()
self.file = file
self.filename = filename
self.fullname = fullname
self.etc = etc
def load_module(self, fullname):
self._reopen()
try:
mod = imp.load_module(fullname, self.file, self.filename, self.etc)
finally:
if self.file:
self.file.close()
# Note: we don't set __loader__ because we want the module to look
# normal; i.e. this is just a wrapper for standard import machinery
return mod
def get_data(self, pathname):
with open(pathname, "rb") as file:
return file.read()
def _reopen(self):
if self.file and self.file.closed:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self.file = open(self.filename, 'r')
elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
self.file = open(self.filename, 'rb')
def _fix_name(self, fullname):
if fullname is None:
fullname = self.fullname
elif fullname != self.fullname:
raise ImportError("Loader for module %s cannot handle "
"module %s" % (self.fullname, fullname))
return fullname
def is_package(self, fullname):
fullname = self._fix_name(fullname)
return self.etc[2]==imp.PKG_DIRECTORY
def get_code(self, fullname=None):
fullname = self._fix_name(fullname)
if self.code is None:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
source = self.get_source(fullname)
self.code = compile(source, self.filename, 'exec')
elif mod_type==imp.PY_COMPILED:
self._reopen()
try:
self.code = read_code(self.file)
finally:
self.file.close()
elif mod_type==imp.PKG_DIRECTORY:
self.code = self._get_delegate().get_code()
return self.code
def get_source(self, fullname=None):
fullname = self._fix_name(fullname)
if self.source is None:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self._reopen()
try:
self.source = self.file.read()
finally:
self.file.close()
elif mod_type==imp.PY_COMPILED:
if os.path.exists(self.filename[:-1]):
with open(self.filename[:-1], 'r') as f:
self.source = f.read()
elif mod_type==imp.PKG_DIRECTORY:
self.source = self._get_delegate().get_source()
return self.source
def _get_delegate(self):
finder = ImpImporter(self.filename)
spec = _get_spec(finder, '__init__')
return spec.loader
def get_filename(self, fullname=None):
fullname = self._fix_name(fullname)
mod_type = self.etc[2]
if mod_type==imp.PKG_DIRECTORY:
return self._get_delegate().get_filename()
elif mod_type in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
return self.filename
return None
try:
import zipimport
from zipimport import zipimporter
def iter_zipimport_modules(importer, prefix=''):
dirlist = sorted(zipimport._zip_directory_cache[importer.archive])
_prefix = importer.prefix
plen = len(_prefix)
yielded = {}
import inspect
for fn in dirlist:
if not fn.startswith(_prefix):
continue
fn = fn[plen:].split(os.sep)
if len(fn)==2 and fn[1].startswith('__init__.py'):
if fn[0] not in yielded:
yielded[fn[0]] = 1
yield prefix + fn[0], True
if len(fn)!=1:
continue
modname = inspect.getmodulename(fn[0])
if modname=='__init__':
continue
if modname and '.' not in modname and modname not in yielded:
yielded[modname] = 1
yield prefix + modname, False
iter_importer_modules.register(zipimporter, iter_zipimport_modules)
except ImportError:
pass
def get_importer(path_item):
"""Retrieve a finder for the given path item
The returned finder is cached in sys.path_importer_cache
if it was newly created by a path hook.
The cache (or part of it) can be cleared manually if a
rescan of sys.path_hooks is necessary.
"""
try:
importer = sys.path_importer_cache[path_item]
except KeyError:
for path_hook in sys.path_hooks:
try:
importer = path_hook(path_item)
sys.path_importer_cache.setdefault(path_item, importer)
break
except ImportError:
pass
else:
importer = None
return importer
def iter_importers(fullname=""):
"""Yield finders for the given module name
If fullname contains a '.', the finders will be for the package
containing fullname, otherwise they will be all registered top level
finders (i.e. those on both sys.meta_path and sys.path_hooks).
If the named module is in a package, that package is imported as a side
effect of invoking this function.
If no module name is specified, all top level finders are produced.
"""
if fullname.startswith('.'):
msg = "Relative module name {!r} not supported".format(fullname)
raise ImportError(msg)
if '.' in fullname:
# Get the containing package's __path__
pkg_name = fullname.rpartition(".")[0]
pkg = importlib.import_module(pkg_name)
path = getattr(pkg, '__path__', None)
if path is None:
return
else:
yield from sys.meta_path
path = sys.path
for item in path:
yield get_importer(item)
def get_loader(module_or_name):
"""Get a "loader" object for module_or_name
Returns None if the module cannot be found or imported.
If the named module is not already imported, its containing package
(if any) is imported, in order to establish the package __path__.
"""
if module_or_name in sys.modules:
module_or_name = sys.modules[module_or_name]
if module_or_name is None:
return None
if isinstance(module_or_name, ModuleType):
module = module_or_name
loader = getattr(module, '__loader__', None)
if loader is not None:
return loader
if getattr(module, '__spec__', None) is None:
return None
fullname = module.__name__
else:
fullname = module_or_name
return find_loader(fullname)
def find_loader(fullname):
"""Find a "loader" object for fullname
This is a backwards compatibility wrapper around
importlib.util.find_spec that converts most failures to ImportError
and only returns the loader rather than the full spec
"""
if fullname.startswith('.'):
msg = "Relative module name {!r} not supported".format(fullname)
raise ImportError(msg)
try:
spec = importlib.util.find_spec(fullname)
except (ImportError, AttributeError, TypeError, ValueError) as ex:
# This hack fixes an impedance mismatch between pkgutil and
# importlib, where the latter raises other errors for cases where
# pkgutil previously raised ImportError
msg = "Error while finding loader for {!r} ({}: {})"
raise ImportError(msg.format(fullname, type(ex), ex)) from ex
return spec.loader if spec is not None else None
def extend_path(path, name):
"""Extend a package's path.
Intended use is to place the following code in a package's __init__.py:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
This will add to the package's __path__ all subdirectories of
directories on sys.path named after the package. This is useful
if one wants to distribute different parts of a single logical
package as multiple directories.
It also looks for *.pkg files beginning where * matches the name
argument. This feature is similar to *.pth files (see site.py),
except that it doesn't special-case lines starting with 'import'.
A *.pkg file is trusted at face value: apart from checking for
duplicates, all entries found in a *.pkg file are added to the
    path, regardless of whether they exist on the filesystem. (This
is a feature.)
If the input path is not a list (as is the case for frozen
packages) it is returned unchanged. The input path is not
modified; an extended copy is returned. Items are only appended
to the copy at the end.
It is assumed that sys.path is a sequence. Items of sys.path that
are not (unicode or 8-bit) strings referring to existing
directories are ignored. Unicode items of sys.path that cause
errors when used as filenames may cause this function to raise an
exception (in line with os.path.isdir() behavior).
"""
if not isinstance(path, list):
# This could happen e.g. when this is called from inside a
# frozen package. Return the path unchanged in that case.
return path
sname_pkg = name + ".pkg"
path = path[:] # Start with a copy of the existing path
parent_package, _, final_name = name.rpartition('.')
if parent_package:
try:
search_path = sys.modules[parent_package].__path__
except (KeyError, AttributeError):
# We can't do anything: find_loader() returns None when
# passed a dotted name.
return path
else:
search_path = sys.path
for dir in search_path:
if not isinstance(dir, str):
continue
finder = get_importer(dir)
if finder is not None:
portions = []
if hasattr(finder, 'find_spec'):
spec = finder.find_spec(final_name)
if spec is not None:
portions = spec.submodule_search_locations or []
# Is this finder PEP 420 compliant?
elif hasattr(finder, 'find_loader'):
_, portions = finder.find_loader(final_name)
for portion in portions:
# XXX This may still add duplicate entries to path on
# case-insensitive filesystems
if portion not in path:
path.append(portion)
# XXX Is this the right thing for subpackages like zope.app?
# It looks for a file named "zope.app.pkg"
pkgfile = os.path.join(dir, sname_pkg)
if os.path.isfile(pkgfile):
try:
f = open(pkgfile)
except OSError as msg:
sys.stderr.write("Can't open %s: %s\n" %
(pkgfile, msg))
else:
with f:
for line in f:
line = line.rstrip('\n')
if not line or line.startswith('#'):
continue
path.append(line) # Don't check for existence!
return path
def get_data(package, resource):
"""Get a resource from a package.
This is a wrapper round the PEP 302 loader get_data API. The package
argument should be the name of a package, in standard module format
(foo.bar). The resource argument should be in the form of a relative
filename, using '/' as the path separator. The parent directory name '..'
is not allowed, and nor is a rooted name (starting with a '/').
The function returns a binary string, which is the contents of the
specified resource.
For packages located in the filesystem, which have already been imported,
this is the rough equivalent of
d = os.path.dirname(sys.modules[package].__file__)
data = open(os.path.join(d, resource), 'rb').read()
If the package cannot be located or loaded, or it uses a PEP 302 loader
which does not support get_data(), then None is returned.
"""
spec = importlib.util.find_spec(package)
if spec is None:
return None
loader = spec.loader
if loader is None or not hasattr(loader, 'get_data'):
return None
# XXX needs test
mod = (sys.modules.get(package) or
importlib._bootstrap._load(spec))
if mod is None or not hasattr(mod, '__file__'):
return None
# Modify the resource name to be compatible with the loader.get_data
# signature - an os.path format "filename" starting with the dirname of
# the package's __file__
parts = resource.split('/')
parts.insert(0, os.path.dirname(mod.__file__))
resource_name = os.path.join(*parts)
return loader.get_data(resource_name)
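# Example (commented; 'yourpackage' and the resource path are hypothetical):
#     import pkgutil
#     data = pkgutil.get_data('yourpackage', 'templates/default.tmpl')
#     # 'data' is a bytes object, or None if the package/loader can't serve it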
|
{
"content_hash": "cee70384a22799250b58527af2bdae6a",
"timestamp": "",
"source": "github",
"line_count": 632,
"max_line_length": 79,
"avg_line_length": 33.789556962025316,
"alnum_prop": 0.5918520252868181,
"repo_name": "batermj/algorithm-challenger",
"id": "9d1879a20c08d84ca0c6d259bd127dbc4e2f270c",
"size": "21355",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "code-analysis/programming_anguage/python/source_codes/Python3.5.9/Python-3.5.9/Lib/pkgutil.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "655185"
},
{
"name": "Batchfile",
"bytes": "127416"
},
{
"name": "C",
"bytes": "33127630"
},
{
"name": "C++",
"bytes": "1364796"
},
{
"name": "CSS",
"bytes": "3163"
},
{
"name": "Common Lisp",
"bytes": "48962"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "DTrace",
"bytes": "2196"
},
{
"name": "Go",
"bytes": "26248"
},
{
"name": "HTML",
"bytes": "385719"
},
{
"name": "Haskell",
"bytes": "33612"
},
{
"name": "Java",
"bytes": "1084"
},
{
"name": "JavaScript",
"bytes": "20754"
},
{
"name": "M4",
"bytes": "403992"
},
{
"name": "Makefile",
"bytes": "238185"
},
{
"name": "Objective-C",
"bytes": "4934684"
},
{
"name": "PHP",
"bytes": "3513"
},
{
"name": "PLSQL",
"bytes": "45772"
},
{
"name": "Perl",
"bytes": "649"
},
{
"name": "PostScript",
"bytes": "27606"
},
{
"name": "PowerShell",
"bytes": "21737"
},
{
"name": "Python",
"bytes": "55270625"
},
{
"name": "R",
"bytes": "29951"
},
{
"name": "Rich Text Format",
"bytes": "14551"
},
{
"name": "Roff",
"bytes": "292490"
},
{
"name": "Ruby",
"bytes": "519"
},
{
"name": "Scala",
"bytes": "846446"
},
{
"name": "Shell",
"bytes": "491113"
},
{
"name": "Swift",
"bytes": "881"
},
{
"name": "TeX",
"bytes": "337654"
},
{
"name": "VBScript",
"bytes": "140"
},
{
"name": "XSLT",
"bytes": "153"
}
],
"symlink_target": ""
}
|
"""
Zotero OAuth1 backends, docs at:
https://python-social-auth.readthedocs.io/en/latest/backends/zotero.html
"""
from .oauth import BaseOAuth1
class ZoteroOAuth(BaseOAuth1):
"""Zotero OAuth authorization mechanism"""
name = 'zotero'
AUTHORIZATION_URL = 'https://www.zotero.org/oauth/authorize'
REQUEST_TOKEN_URL = 'https://www.zotero.org/oauth/request'
ACCESS_TOKEN_URL = 'https://www.zotero.org/oauth/access'
def get_user_id(self, details, response):
"""
        Return the unique user id provided by the service. Zotero includes
        a numeric userID with each access token.
"""
return details['userID']
def get_user_details(self, response):
"""Return user details from Zotero API account"""
access_token = response.get('access_token', {})
return {
'username': access_token.get('username', ''),
'userID': access_token.get('userID', '')
}
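# Shape of the token payload the methods above consume (values illustrative):
#     response = {'access_token': {'username': 'jane', 'userID': '12345'}}
#     # get_user_details(response) -> {'username': 'jane', 'userID': '12345'}
#     # get_user_id(details, response) -> '12345'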
|
{
"content_hash": "335452ecc6c1e2f0317fbe566849dc0a",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 76,
"avg_line_length": 32.55172413793103,
"alnum_prop": 0.6334745762711864,
"repo_name": "IKholopov/HackUPC2017",
"id": "1ee2be386a3274ac14dcd1fbdcaa97c0a153b5b1",
"size": "944",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "hackupc/env/lib/python3.5/site-packages/social_core/backends/zotero.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "63043"
},
{
"name": "HTML",
"bytes": "40996"
},
{
"name": "JavaScript",
"bytes": "272171"
},
{
"name": "Python",
"bytes": "40280"
}
],
"symlink_target": ""
}
|
a.b
# OPEN_PAREN CLOSE_PAREN
x()
# OPEN_PAREN argument CLOSE_PAREN
x(a)
# OPEN_PAREN argument COMMA argument COMMA CLOSE_PAREN
x(a, b,)
# OPEN_PAREN argument COMMA argument COMMA argument CLOSE_PAREN
x(a, b, c)
# OPEN_BRACKET subscript CLOSE_BRACKET
x[a]
# OPEN_BRACKET subscript COMMA subscript COMMA CLOSE_BRACKET
x[a, b,]
# OPEN_BRACKET subscript COMMA subscript COMMA subscript CLOSE_BRACKET
x[a, b, c]
|
{
"content_hash": "15a6680590d14624640d7943bc8ce34b",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 70,
"avg_line_length": 18.772727272727273,
"alnum_prop": 0.7457627118644068,
"repo_name": "antlr/grammars-v4",
"id": "1bc19351550ba356aa1bb9af32e2d97ff2a0e9a5",
"size": "599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/python/examples/trailer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "5095731"
},
{
"name": "Batchfile",
"bytes": "192"
},
{
"name": "C",
"bytes": "120"
},
{
"name": "C#",
"bytes": "170748"
},
{
"name": "C++",
"bytes": "38535"
},
{
"name": "CMake",
"bytes": "18961"
},
{
"name": "Dart",
"bytes": "9629"
},
{
"name": "Go",
"bytes": "27044"
},
{
"name": "Java",
"bytes": "1327950"
},
{
"name": "JavaScript",
"bytes": "38007"
},
{
"name": "Lex",
"bytes": "4090"
},
{
"name": "Makefile",
"bytes": "5781"
},
{
"name": "PHP",
"bytes": "3233"
},
{
"name": "PowerShell",
"bytes": "32374"
},
{
"name": "Python",
"bytes": "56074"
},
{
"name": "Shell",
"bytes": "29048"
},
{
"name": "Swift",
"bytes": "9106"
},
{
"name": "TypeScript",
"bytes": "13569"
},
{
"name": "Yacc",
"bytes": "5177"
},
{
"name": "sed",
"bytes": "718"
}
],
"symlink_target": ""
}
|
"""
web2.dav interfaces.
"""
__all__ = [ "IDAVResource" ]
from twisted.web2.iweb import IResource
class IDAVResource(IResource):
"""
WebDAV resource.
"""
def isCollection():
"""
Checks whether this resource is a collection resource.
@return: C{True} if this resource is a collection resource, C{False}
otherwise.
"""
def findChildren(depth):
"""
Returns an iterable of child resources for the given depth.
        Because resources do not know their request URIs, children are returned
as tuples C{(resource, uri)}, where C{resource} is the child resource
and C{uri} is a URL path relative to this resource.
@param depth: the search depth (one of C{"0"}, C{"1"}, or C{"infinity"})
@return: an iterable of tuples C{(resource, uri)}.
"""
def hasProperty(property, request):
"""
Checks whether the given property is defined on this resource.
@param property: an empty L{davxml.WebDAVElement} instance or a qname
tuple.
@param request: the request being processed.
@return: a deferred value of C{True} if the given property is set on
this resource, or C{False} otherwise.
"""
def readProperty(property, request):
"""
Reads the given property on this resource.
@param property: an empty L{davxml.WebDAVElement} class or instance, or
a qname tuple.
@param request: the request being processed.
@return: a deferred L{davxml.WebDAVElement} instance
containing the value of the given property.
@raise HTTPError: (containing a response with a status code of
L{responsecode.CONFLICT}) if C{property} is not set on this
resource.
"""
def writeProperty(property, request):
"""
Writes the given property on this resource.
@param property: a L{davxml.WebDAVElement} instance.
@param request: the request being processed.
@return: an empty deferred which fires when the operation is completed.
@raise HTTPError: (containing a response with a status code of
L{responsecode.CONFLICT}) if C{property} is a read-only property.
"""
def removeProperty(property, request):
"""
Removes the given property from this resource.
@param property: a L{davxml.WebDAVElement} instance or a qname tuple.
@param request: the request being processed.
@return: an empty deferred which fires when the operation is completed.
@raise HTTPError: (containing a response with a status code of
L{responsecode.CONFLICT}) if C{property} is a read-only property or
if the property does not exist.
"""
def listProperties(request):
"""
@param request: the request being processed.
@return: a deferred iterable of qnames for all properties defined for
this resource.
"""
def principalCollections():
"""
Provides the URIs of collection resources which contain principal
resources which may be used in access control entries on this resource.
(RFC 3744, section 5.8)
@return: a sequence of URIs referring to collection resources which
implement the C{DAV:principal-property-search} C{REPORT}.
"""
def accessControlList():
"""
@return: the L{davxml.ACL} element containing the access control list
for this resource.
"""
def supportedPrivileges():
"""
@return: a sequence of the access control privileges which are
supported by this resource.
"""
class IDAVPrincipalResource (IDAVResource):
"""
WebDAV principal resource. (RFC 3744, section 2)
"""
def alternateURIs():
"""
Provides the URIs of network resources with additional descriptive
information about the principal, for example, a URI to an LDAP record.
(RFC 3744, section 4.1)
        @return: an iterable of URIs.
"""
def principalURL():
"""
Provides the URL which must be used to identify this principal in ACL
requests. (RFC 3744, section 4.2)
@return: a URL.
"""
def groupMembers():
"""
Provides the principal URLs of principals that are direct members of
this (group) principal. (RFC 3744, section 4.3)
        @return: an iterable of principal URLs.
"""
def groupMemberships():
"""
Provides the URLs of the group principals in which the principal is
directly a member. (RFC 3744, section 4.4)
        @return: an iterable of group principal URLs.
"""
|
{
"content_hash": "0389c4474d14ad70a23b50c26ca54c11",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 80,
"avg_line_length": 36.10526315789474,
"alnum_prop": 0.6226572261557685,
"repo_name": "santisiri/popego",
"id": "fccdc1d0581b6d5f3b7a4d6676c9d634409dc4c2",
"size": "5977",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/web2/dav/idav.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1246"
},
{
"name": "C",
"bytes": "504141"
},
{
"name": "C++",
"bytes": "26125"
},
{
"name": "CSS",
"bytes": "342653"
},
{
"name": "FORTRAN",
"bytes": "4872"
},
{
"name": "GAP",
"bytes": "13267"
},
{
"name": "Genshi",
"bytes": "407"
},
{
"name": "Groff",
"bytes": "17116"
},
{
"name": "HTML",
"bytes": "383181"
},
{
"name": "JavaScript",
"bytes": "1090769"
},
{
"name": "Makefile",
"bytes": "2441"
},
{
"name": "Mako",
"bytes": "376944"
},
{
"name": "Python",
"bytes": "20895618"
},
{
"name": "Ruby",
"bytes": "3380"
},
{
"name": "Shell",
"bytes": "23581"
},
{
"name": "Smarty",
"bytes": "522"
},
{
"name": "TeX",
"bytes": "35712"
}
],
"symlink_target": ""
}
|
import datetime
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._container_registry_management_client_enums import *
class ActivationProperties(msrest.serialization.Model):
"""The activation properties of the connected registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar status: The activation status of the connected registry. Possible values include:
"Active", "Inactive".
:vartype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ActivationStatus
"""
_validation = {
'status': {'readonly': True},
}
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
"""
super(ActivationProperties, self).__init__(**kwargs)
self.status = None
class ActiveDirectoryObject(msrest.serialization.Model):
"""The Active Directory Object that will be used for authenticating the token of a container registry.
:ivar object_id: The user/group/application object ID for Active Directory Object that will be
used for authenticating the token of a container registry.
:vartype object_id: str
:ivar tenant_id: The tenant ID of user/group/application object Active Directory Object that
will be used for authenticating the token of a container registry.
:vartype tenant_id: str
"""
_attribute_map = {
'object_id': {'key': 'objectId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
}
def __init__(
self,
*,
object_id: Optional[str] = None,
tenant_id: Optional[str] = None,
**kwargs
):
"""
:keyword object_id: The user/group/application object ID for Active Directory Object that will
be used for authenticating the token of a container registry.
:paramtype object_id: str
:keyword tenant_id: The tenant ID of user/group/application object Active Directory Object that
will be used for authenticating the token of a container registry.
:paramtype tenant_id: str
"""
super(ActiveDirectoryObject, self).__init__(**kwargs)
self.object_id = object_id
self.tenant_id = tenant_id
class Actor(msrest.serialization.Model):
"""The agent that initiated the event. For most situations, this could be from the authorization context of the request.
:ivar name: The subject or username associated with the request context that generated the
event.
:vartype name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
**kwargs
):
"""
:keyword name: The subject or username associated with the request context that generated the
event.
:paramtype name: str
"""
super(Actor, self).__init__(**kwargs)
self.name = name
class CallbackConfig(msrest.serialization.Model):
"""The configuration of service URI and custom headers for the webhook.
All required parameters must be populated in order to send to Azure.
:ivar service_uri: Required. The service URI for the webhook to post notifications.
:vartype service_uri: str
:ivar custom_headers: Custom headers that will be added to the webhook notifications.
:vartype custom_headers: dict[str, str]
"""
_validation = {
'service_uri': {'required': True},
}
_attribute_map = {
'service_uri': {'key': 'serviceUri', 'type': 'str'},
'custom_headers': {'key': 'customHeaders', 'type': '{str}'},
}
def __init__(
self,
*,
service_uri: str,
custom_headers: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword service_uri: Required. The service URI for the webhook to post notifications.
:paramtype service_uri: str
:keyword custom_headers: Custom headers that will be added to the webhook notifications.
:paramtype custom_headers: dict[str, str]
"""
super(CallbackConfig, self).__init__(**kwargs)
self.service_uri = service_uri
self.custom_headers = custom_headers
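# Minimal construction sketch for the model above (values illustrative):
#     config = CallbackConfig(
#         service_uri="https://example.com/acr/webhook",
#         custom_headers={"Authorization": "Bearer <token>"},
#     )
#     config.serialize()  # -> {'serviceUri': '...', 'customHeaders': {...}}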
class ProxyResource(msrest.serialization.Model):
"""The resource model definition for a ARM proxy resource. It will have everything other than required location and tags.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
}
def __init__(
self,
**kwargs
):
"""
"""
super(ProxyResource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.system_data = None
class ConnectedRegistry(ProxyResource):
"""An object that represents a connected registry for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar provisioning_state: Provisioning state of the resource. Possible values include:
"Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
:ivar mode: The mode of the connected registry resource that indicates the permissions of the
registry. Possible values include: "ReadWrite", "ReadOnly", "Registry", "Mirror".
:vartype mode: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ConnectedRegistryMode
:ivar version: The current version of ACR runtime on the connected registry.
:vartype version: str
:ivar connection_state: The current connection state of the connected registry. Possible values
include: "Online", "Offline", "Syncing", "Unhealthy".
:vartype connection_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ConnectionState
:ivar last_activity_time: The last activity time of the connected registry.
:vartype last_activity_time: ~datetime.datetime
:ivar activation: The activation properties of the connected registry.
:vartype activation:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ActivationProperties
:ivar parent: The parent of the connected registry.
:vartype parent: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ParentProperties
:ivar client_token_ids: The list of the ACR token resource IDs used to authenticate clients to
the connected registry.
:vartype client_token_ids: list[str]
:ivar login_server: The login server properties of the connected registry.
:vartype login_server:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.LoginServerProperties
:ivar logging: The logging properties of the connected registry.
:vartype logging: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.LoggingProperties
:ivar status_details: The list of current statuses of the connected registry.
:vartype status_details:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.StatusDetailProperties]
:ivar notifications_list: The list of notifications subscription information for the connected
registry.
:vartype notifications_list: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'version': {'readonly': True},
'connection_state': {'readonly': True},
'last_activity_time': {'readonly': True},
'activation': {'readonly': True},
'status_details': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'mode': {'key': 'properties.mode', 'type': 'str'},
'version': {'key': 'properties.version', 'type': 'str'},
'connection_state': {'key': 'properties.connectionState', 'type': 'str'},
'last_activity_time': {'key': 'properties.lastActivityTime', 'type': 'iso-8601'},
'activation': {'key': 'properties.activation', 'type': 'ActivationProperties'},
'parent': {'key': 'properties.parent', 'type': 'ParentProperties'},
'client_token_ids': {'key': 'properties.clientTokenIds', 'type': '[str]'},
'login_server': {'key': 'properties.loginServer', 'type': 'LoginServerProperties'},
'logging': {'key': 'properties.logging', 'type': 'LoggingProperties'},
'status_details': {'key': 'properties.statusDetails', 'type': '[StatusDetailProperties]'},
'notifications_list': {'key': 'properties.notificationsList', 'type': '[str]'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "ConnectedRegistryMode"]] = None,
parent: Optional["ParentProperties"] = None,
client_token_ids: Optional[List[str]] = None,
login_server: Optional["LoginServerProperties"] = None,
logging: Optional["LoggingProperties"] = None,
notifications_list: Optional[List[str]] = None,
**kwargs
):
"""
:keyword mode: The mode of the connected registry resource that indicates the permissions of
the registry. Possible values include: "ReadWrite", "ReadOnly", "Registry", "Mirror".
:paramtype mode: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ConnectedRegistryMode
:keyword parent: The parent of the connected registry.
:paramtype parent: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ParentProperties
:keyword client_token_ids: The list of the ACR token resource IDs used to authenticate clients
to the connected registry.
:paramtype client_token_ids: list[str]
:keyword login_server: The login server properties of the connected registry.
:paramtype login_server:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.LoginServerProperties
:keyword logging: The logging properties of the connected registry.
:paramtype logging: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.LoggingProperties
:keyword notifications_list: The list of notifications subscription information for the
connected registry.
:paramtype notifications_list: list[str]
"""
super(ConnectedRegistry, self).__init__(**kwargs)
self.provisioning_state = None
self.mode = mode
self.version = None
self.connection_state = None
self.last_activity_time = None
self.activation = None
self.parent = parent
self.client_token_ids = client_token_ids
self.login_server = login_server
self.logging = logging
self.status_details = None
self.notifications_list = notifications_list
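# Example (illustrative sketch): defining a read-only connected registry. The
# token resource ID below is hypothetical; server-populated fields such as
# provisioning_state, version, connection_state, activation and status_details
# are read-only and therefore not passed here.
#
#     registry = ConnectedRegistry(
#         mode="ReadOnly",
#         client_token_ids=[
#             "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
#             "Microsoft.ContainerRegistry/registries/<acr>/tokens/<token>"
#         ],
#         logging=LoggingProperties(log_level="Information"),
#     )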
class ConnectedRegistryListResult(msrest.serialization.Model):
"""The result of a request to list connected registries for a container registry.
:ivar value: The list of connected registries. Since this list may be incomplete, the nextLink
field should be used to request the next list of connected registries.
:vartype value:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.ConnectedRegistry]
:ivar next_link: The URI that can be used to request the next list of connected registries.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ConnectedRegistry]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ConnectedRegistry"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of connected registries. Since this list may be incomplete, the
nextLink field should be used to request the next list of connected registries.
:paramtype value:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.ConnectedRegistry]
:keyword next_link: The URI that can be used to request the next list of connected registries.
:paramtype next_link: str
"""
super(ConnectedRegistryListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class ConnectedRegistryUpdateParameters(msrest.serialization.Model):
"""The parameters for updating a connected registry.
:ivar sync_properties: The sync properties of the connected registry with its parent.
:vartype sync_properties:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.SyncUpdateProperties
:ivar logging: The logging properties of the connected registry.
:vartype logging: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.LoggingProperties
:ivar client_token_ids: The list of the ACR token resource IDs used to authenticate clients to
the connected registry.
:vartype client_token_ids: list[str]
:ivar notifications_list: The list of notifications subscription information for the connected
registry.
:vartype notifications_list: list[str]
"""
_attribute_map = {
'sync_properties': {'key': 'properties.syncProperties', 'type': 'SyncUpdateProperties'},
'logging': {'key': 'properties.logging', 'type': 'LoggingProperties'},
'client_token_ids': {'key': 'properties.clientTokenIds', 'type': '[str]'},
'notifications_list': {'key': 'properties.notificationsList', 'type': '[str]'},
}
def __init__(
self,
*,
sync_properties: Optional["SyncUpdateProperties"] = None,
logging: Optional["LoggingProperties"] = None,
client_token_ids: Optional[List[str]] = None,
notifications_list: Optional[List[str]] = None,
**kwargs
):
"""
:keyword sync_properties: The sync properties of the connected registry with its parent.
:paramtype sync_properties:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.SyncUpdateProperties
:keyword logging: The logging properties of the connected registry.
:paramtype logging: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.LoggingProperties
:keyword client_token_ids: The list of the ACR token resource IDs used to authenticate clients
to the connected registry.
:paramtype client_token_ids: list[str]
:keyword notifications_list: The list of notifications subscription information for the
connected registry.
:paramtype notifications_list: list[str]
"""
super(ConnectedRegistryUpdateParameters, self).__init__(**kwargs)
self.sync_properties = sync_properties
self.logging = logging
self.client_token_ids = client_token_ids
self.notifications_list = notifications_list
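# Example (illustrative sketch): update parameters that raise the log level and
# enable audit logging on an existing connected registry; properties that are
# omitted (sync_properties, client_token_ids, notifications_list) are left
# unchanged.
#
#     update = ConnectedRegistryUpdateParameters(
#         logging=LoggingProperties(log_level="Debug", audit_log_status="Enabled"),
#     )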
class EncryptionProperty(msrest.serialization.Model):
"""EncryptionProperty.
:ivar status: Indicates whether or not the encryption is enabled for container registry.
Possible values include: "enabled", "disabled".
:vartype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EncryptionStatus
:ivar key_vault_properties: Key vault properties.
:vartype key_vault_properties:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.KeyVaultProperties
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
}
def __init__(
self,
*,
status: Optional[Union[str, "EncryptionStatus"]] = None,
key_vault_properties: Optional["KeyVaultProperties"] = None,
**kwargs
):
"""
:keyword status: Indicates whether or not the encryption is enabled for container registry.
Possible values include: "enabled", "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EncryptionStatus
:keyword key_vault_properties: Key vault properties.
:paramtype key_vault_properties:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.KeyVaultProperties
"""
super(EncryptionProperty, self).__init__(**kwargs)
self.status = status
self.key_vault_properties = key_vault_properties
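# Example (illustrative sketch): enabling customer-managed key encryption with
# a key vault key. The vault URI and client ID are hypothetical; the versioned
# key identifier and rotation fields of KeyVaultProperties are populated by the
# server.
#
#     encryption = EncryptionProperty(
#         status="enabled",
#         key_vault_properties=KeyVaultProperties(
#             key_identifier="https://myvault.vault.azure.net/keys/acr-cmk",
#             identity="<client-id-of-user-assigned-identity>",
#         ),
#     )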
class ErrorResponse(msrest.serialization.Model):
"""An error response from the Azure Container Registry service.
:ivar error: Azure container registry build API error body.
:vartype error: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ErrorResponseBody
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorResponseBody'},
}
def __init__(
self,
*,
error: Optional["ErrorResponseBody"] = None,
**kwargs
):
"""
:keyword error: Azure container registry build API error body.
:paramtype error: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ErrorResponseBody
"""
super(ErrorResponse, self).__init__(**kwargs)
self.error = error
class ErrorResponseBody(msrest.serialization.Model):
"""An error response from the Azure Container Registry service.
All required parameters must be populated in order to send to Azure.
:ivar code: Required. error code.
:vartype code: str
:ivar message: Required. error message.
:vartype message: str
:ivar target: target of the particular error.
:vartype target: str
:ivar details: an array of additional nested error response info objects, as described by this
contract.
:vartype details:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.InnerErrorDescription]
"""
_validation = {
'code': {'required': True},
'message': {'required': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[InnerErrorDescription]'},
}
def __init__(
self,
*,
code: str,
message: str,
target: Optional[str] = None,
details: Optional[List["InnerErrorDescription"]] = None,
**kwargs
):
"""
:keyword code: Required. error code.
:paramtype code: str
:keyword message: Required. error message.
:paramtype message: str
:keyword target: target of the particular error.
:paramtype target: str
:keyword details: an array of additional nested error response info objects, as described by
this contract.
:paramtype details:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.InnerErrorDescription]
"""
super(ErrorResponseBody, self).__init__(**kwargs)
self.code = code
self.message = message
self.target = target
self.details = details
class EventInfo(msrest.serialization.Model):
"""The basic information of an event.
:ivar id: The event ID.
:vartype id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
"""
:keyword id: The event ID.
:paramtype id: str
"""
super(EventInfo, self).__init__(**kwargs)
self.id = id
class Event(EventInfo):
"""The event for a webhook.
:ivar id: The event ID.
:vartype id: str
:ivar event_request_message: The event request message sent to the service URI.
:vartype event_request_message:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EventRequestMessage
:ivar event_response_message: The event response message received from the service URI.
:vartype event_response_message:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EventResponseMessage
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'event_request_message': {'key': 'eventRequestMessage', 'type': 'EventRequestMessage'},
'event_response_message': {'key': 'eventResponseMessage', 'type': 'EventResponseMessage'},
}
def __init__(
self,
*,
id: Optional[str] = None,
event_request_message: Optional["EventRequestMessage"] = None,
event_response_message: Optional["EventResponseMessage"] = None,
**kwargs
):
"""
:keyword id: The event ID.
:paramtype id: str
:keyword event_request_message: The event request message sent to the service URI.
:paramtype event_request_message:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EventRequestMessage
:keyword event_response_message: The event response message received from the service URI.
:paramtype event_response_message:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EventResponseMessage
"""
super(Event, self).__init__(id=id, **kwargs)
self.event_request_message = event_request_message
self.event_response_message = event_response_message
class EventContent(msrest.serialization.Model):
"""The content of the event request message.
:ivar id: The event ID.
:vartype id: str
:ivar timestamp: The time at which the event occurred.
:vartype timestamp: ~datetime.datetime
:ivar action: The action that encompasses the provided event.
:vartype action: str
:ivar target: The target of the event.
:vartype target: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Target
:ivar request: The request that generated the event.
:vartype request: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Request
:ivar actor: The agent that initiated the event. For most situations, this could be from the
authorization context of the request.
:vartype actor: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Actor
:ivar source: The registry node that generated the event. Put differently, while the actor
initiates the event, the source generates it.
:vartype source: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Source
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'action': {'key': 'action', 'type': 'str'},
'target': {'key': 'target', 'type': 'Target'},
'request': {'key': 'request', 'type': 'Request'},
'actor': {'key': 'actor', 'type': 'Actor'},
'source': {'key': 'source', 'type': 'Source'},
}
def __init__(
self,
*,
id: Optional[str] = None,
timestamp: Optional[datetime.datetime] = None,
action: Optional[str] = None,
target: Optional["Target"] = None,
request: Optional["Request"] = None,
actor: Optional["Actor"] = None,
source: Optional["Source"] = None,
**kwargs
):
"""
:keyword id: The event ID.
:paramtype id: str
:keyword timestamp: The time at which the event occurred.
:paramtype timestamp: ~datetime.datetime
:keyword action: The action that encompasses the provided event.
:paramtype action: str
:keyword target: The target of the event.
:paramtype target: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Target
:keyword request: The request that generated the event.
:paramtype request: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Request
:keyword actor: The agent that initiated the event. For most situations, this could be from the
authorization context of the request.
:paramtype actor: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Actor
:keyword source: The registry node that generated the event. Put differently, while the actor
initiates the event, the source generates it.
:paramtype source: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Source
"""
super(EventContent, self).__init__(**kwargs)
self.id = id
self.timestamp = timestamp
self.action = action
self.target = target
self.request = request
self.actor = actor
self.source = source
class EventListResult(msrest.serialization.Model):
"""The result of a request to list events for a webhook.
:ivar value: The list of events. Since this list may be incomplete, the nextLink field should
be used to request the next list of events.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Event]
:ivar next_link: The URI that can be used to request the next list of events.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Event]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Event"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of events. Since this list may be incomplete, the nextLink field
should be used to request the next list of events.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Event]
:keyword next_link: The URI that can be used to request the next list of events.
:paramtype next_link: str
"""
super(EventListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class EventRequestMessage(msrest.serialization.Model):
"""The event request message sent to the service URI.
:ivar content: The content of the event request message.
:vartype content: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.EventContent
:ivar headers: The headers of the event request message.
:vartype headers: dict[str, str]
:ivar method: The HTTP method used to send the event request message.
:vartype method: str
:ivar request_uri: The URI used to send the event request message.
:vartype request_uri: str
:ivar version: The HTTP message version.
:vartype version: str
"""
_attribute_map = {
'content': {'key': 'content', 'type': 'EventContent'},
'headers': {'key': 'headers', 'type': '{str}'},
'method': {'key': 'method', 'type': 'str'},
'request_uri': {'key': 'requestUri', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
content: Optional["EventContent"] = None,
headers: Optional[Dict[str, str]] = None,
method: Optional[str] = None,
request_uri: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword content: The content of the event request message.
:paramtype content: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.EventContent
:keyword headers: The headers of the event request message.
:paramtype headers: dict[str, str]
:keyword method: The HTTP method used to send the event request message.
:paramtype method: str
:keyword request_uri: The URI used to send the event request message.
:paramtype request_uri: str
:keyword version: The HTTP message version.
:paramtype version: str
"""
super(EventRequestMessage, self).__init__(**kwargs)
self.content = content
self.headers = headers
self.method = method
self.request_uri = request_uri
self.version = version
class EventResponseMessage(msrest.serialization.Model):
"""The event response message received from the service URI.
:ivar content: The content of the event response message.
:vartype content: str
:ivar headers: The headers of the event response message.
:vartype headers: dict[str, str]
:ivar reason_phrase: The reason phrase of the event response message.
:vartype reason_phrase: str
:ivar status_code: The status code of the event response message.
:vartype status_code: str
:ivar version: The HTTP message version.
:vartype version: str
"""
_attribute_map = {
'content': {'key': 'content', 'type': 'str'},
'headers': {'key': 'headers', 'type': '{str}'},
'reason_phrase': {'key': 'reasonPhrase', 'type': 'str'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
content: Optional[str] = None,
headers: Optional[Dict[str, str]] = None,
reason_phrase: Optional[str] = None,
status_code: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword content: The content of the event response message.
:paramtype content: str
:keyword headers: The headers of the event response message.
:paramtype headers: dict[str, str]
:keyword reason_phrase: The reason phrase of the event response message.
:paramtype reason_phrase: str
:keyword status_code: The status code of the event response message.
:paramtype status_code: str
:keyword version: The HTTP message version.
:paramtype version: str
"""
super(EventResponseMessage, self).__init__(**kwargs)
self.content = content
self.headers = headers
self.reason_phrase = reason_phrase
self.status_code = status_code
self.version = version
class ExportPipeline(ProxyResource):
"""An object that represents an export pipeline for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar location: The location of the export pipeline.
:vartype location: str
:ivar identity: The identity of the export pipeline.
:vartype identity: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.IdentityProperties
:ivar target: The target properties of the export pipeline.
:vartype target:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPipelineTargetProperties
:ivar options: The list of all options configured for the pipeline.
:vartype options: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineOptions]
:ivar provisioning_state: The provisioning state of the pipeline at the time the operation was
called. Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed",
"Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'location': {'key': 'location', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'IdentityProperties'},
'target': {'key': 'properties.target', 'type': 'ExportPipelineTargetProperties'},
'options': {'key': 'properties.options', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
location: Optional[str] = None,
identity: Optional["IdentityProperties"] = None,
target: Optional["ExportPipelineTargetProperties"] = None,
options: Optional[List[Union[str, "PipelineOptions"]]] = None,
**kwargs
):
"""
:keyword location: The location of the export pipeline.
:paramtype location: str
:keyword identity: The identity of the export pipeline.
:paramtype identity:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.IdentityProperties
:keyword target: The target properties of the export pipeline.
:paramtype target:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPipelineTargetProperties
:keyword options: The list of all options configured for the pipeline.
:paramtype options: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineOptions]
"""
super(ExportPipeline, self).__init__(**kwargs)
self.location = location
self.identity = identity
self.target = target
self.options = options
self.provisioning_state = None
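# Example (illustrative sketch): an export pipeline that writes artifacts to a
# storage container, reading the container SAS token from a key vault secret.
# All URIs are hypothetical, and the system-assigned identity must be granted
# access to the vault.
#
#     pipeline = ExportPipeline(
#         location="eastus",
#         identity=IdentityProperties(type="SystemAssigned"),
#         target=ExportPipelineTargetProperties(
#             type="AzureStorageBlobContainer",
#             uri="https://myaccount.blob.core.windows.net/exports",
#             key_vault_uri="https://myvault.vault.azure.net/secrets/sas-token",
#         ),
#     )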
class ExportPipelineListResult(msrest.serialization.Model):
"""The result of a request to list export pipelines for a container registry.
:ivar value: The list of export pipelines. Since this list may be incomplete, the nextLink
field should be used to request the next list of export pipelines.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPipeline]
:ivar next_link: The URI that can be used to request the next list of pipeline runs.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExportPipeline]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ExportPipeline"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of export pipelines. Since this list may be incomplete, the nextLink
field should be used to request the next list of export pipelines.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPipeline]
:keyword next_link: The URI that can be used to request the next list of pipeline runs.
:paramtype next_link: str
"""
super(ExportPipelineListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class ExportPipelineTargetProperties(msrest.serialization.Model):
"""The properties of the export pipeline target.
All required parameters must be populated in order to send to Azure.
:ivar type: The type of target for the export pipeline.
:vartype type: str
:ivar uri: The target uri of the export pipeline.
When 'AzureStorageBlob': "https://accountName.blob.core.windows.net/containerName/blobName"
When 'AzureStorageBlobContainer': "https://accountName.blob.core.windows.net/containerName".
:vartype uri: str
:ivar key_vault_uri: Required. The key vault secret uri to obtain the target storage SAS
token.
:vartype key_vault_uri: str
"""
_validation = {
'key_vault_uri': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'},
}
def __init__(
self,
*,
key_vault_uri: str,
type: Optional[str] = None,
uri: Optional[str] = None,
**kwargs
):
"""
:keyword type: The type of target for the export pipeline.
:paramtype type: str
:keyword uri: The target uri of the export pipeline.
When 'AzureStorageBlob': "https://accountName.blob.core.windows.net/containerName/blobName"
When 'AzureStorageBlobContainer': "https://accountName.blob.core.windows.net/containerName".
:paramtype uri: str
:keyword key_vault_uri: Required. The key vault secret uri to obtain the target storage SAS
token.
:paramtype key_vault_uri: str
"""
super(ExportPipelineTargetProperties, self).__init__(**kwargs)
self.type = type
self.uri = uri
self.key_vault_uri = key_vault_uri
class ExportPolicy(msrest.serialization.Model):
"""The export policy for a container registry.
:ivar status: The value that indicates whether the policy is enabled or not. Possible values
include: "enabled", "disabled". Default value: "enabled".
:vartype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPolicyStatus
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[Union[str, "ExportPolicyStatus"]] = "enabled",
**kwargs
):
"""
:keyword status: The value that indicates whether the policy is enabled or not. Possible values
include: "enabled", "disabled". Default value: "enabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPolicyStatus
"""
super(ExportPolicy, self).__init__(**kwargs)
self.status = status
class GenerateCredentialsParameters(msrest.serialization.Model):
"""The parameters used to generate credentials for a specified token or user of a container registry.
:ivar token_id: The resource ID of the token for which credentials have to be generated.
:vartype token_id: str
:ivar expiry: The expiry date of the generated credentials after which the credentials become
invalid.
:vartype expiry: ~datetime.datetime
:ivar name: Specifies the name of the password which should be regenerated, if any --
password1 or password2. Possible values include: "password1", "password2".
:vartype name: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenPasswordName
"""
_attribute_map = {
'token_id': {'key': 'tokenId', 'type': 'str'},
'expiry': {'key': 'expiry', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
token_id: Optional[str] = None,
expiry: Optional[datetime.datetime] = None,
name: Optional[Union[str, "TokenPasswordName"]] = None,
**kwargs
):
"""
:keyword token_id: The resource ID of the token for which credentials have to be generated.
:paramtype token_id: str
:keyword expiry: The expiry date of the generated credentials after which the credentials
become invalid.
:paramtype expiry: ~datetime.datetime
:keyword name: Specifies the name of the password which should be regenerated, if any --
password1 or password2. Possible values include: "password1", "password2".
:paramtype name: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenPasswordName
"""
super(GenerateCredentialsParameters, self).__init__(**kwargs)
self.token_id = token_id
self.expiry = expiry
self.name = name
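# Example (illustrative sketch): request regeneration of a token's second
# password with a fixed expiry; the token resource ID is hypothetical, and the
# name accepts the plain strings of the TokenPasswordName enum.
#
#     params = GenerateCredentialsParameters(
#         token_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
#                  "Microsoft.ContainerRegistry/registries/<acr>/tokens/<token>",
#         expiry=datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc),
#         name="password2",
#     )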
class GenerateCredentialsResult(msrest.serialization.Model):
"""The response from the GenerateCredentials operation.
:ivar username: The username for a container registry.
:vartype username: str
:ivar passwords: The list of passwords for a container registry.
:vartype passwords:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenPassword]
"""
_attribute_map = {
'username': {'key': 'username', 'type': 'str'},
'passwords': {'key': 'passwords', 'type': '[TokenPassword]'},
}
def __init__(
self,
*,
username: Optional[str] = None,
passwords: Optional[List["TokenPassword"]] = None,
**kwargs
):
"""
:keyword username: The username for a container registry.
:paramtype username: str
:keyword passwords: The list of passwords for a container registry.
:paramtype passwords:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenPassword]
"""
super(GenerateCredentialsResult, self).__init__(**kwargs)
self.username = username
self.passwords = passwords
class IdentityProperties(msrest.serialization.Model):
"""Managed identity for the resource.
:ivar principal_id: The principal ID of resource identity.
:vartype principal_id: str
:ivar tenant_id: The tenant ID of resource.
:vartype tenant_id: str
:ivar type: The identity type. Possible values include: "SystemAssigned", "UserAssigned",
"SystemAssigned, UserAssigned", "None".
:vartype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ResourceIdentityType
:ivar user_assigned_identities: The list of user identities associated with the resource. The
user identity
dictionary key references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/
providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
:vartype user_assigned_identities: dict[str,
~azure.mgmt.containerregistry.v2021_08_01_preview.models.UserIdentityProperties]
"""
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserIdentityProperties}'},
}
def __init__(
self,
*,
principal_id: Optional[str] = None,
tenant_id: Optional[str] = None,
type: Optional[Union[str, "ResourceIdentityType"]] = None,
user_assigned_identities: Optional[Dict[str, "UserIdentityProperties"]] = None,
**kwargs
):
"""
:keyword principal_id: The principal ID of resource identity.
:paramtype principal_id: str
:keyword tenant_id: The tenant ID of resource.
:paramtype tenant_id: str
:keyword type: The identity type. Possible values include: "SystemAssigned", "UserAssigned",
"SystemAssigned, UserAssigned", "None".
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ResourceIdentityType
:keyword user_assigned_identities: The list of user identities associated with the resource.
The user identity
dictionary key references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/
providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
:paramtype user_assigned_identities: dict[str,
~azure.mgmt.containerregistry.v2021_08_01_preview.models.UserIdentityProperties]
"""
super(IdentityProperties, self).__init__(**kwargs)
self.principal_id = principal_id
self.tenant_id = tenant_id
self.type = type
self.user_assigned_identities = user_assigned_identities
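# Example (illustrative sketch): requesting a system-assigned managed identity
# for the resource. For user-assigned identities, type would be "UserAssigned"
# and user_assigned_identities a dict keyed by the identity's ARM resource ID.
#
#     identity = IdentityProperties(type="SystemAssigned")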
class ImportImageParameters(msrest.serialization.Model):
"""ImportImageParameters.
All required parameters must be populated in order to send to Azure.
:ivar source: Required. The source of the image.
:vartype source: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportSource
:ivar target_tags: List of strings of the form repo[:tag]. When tag is omitted the source will
be used (or 'latest' if source tag is also omitted).
:vartype target_tags: list[str]
:ivar untagged_target_repositories: List of strings of repository names to do a manifest only
copy. No tag will be created.
:vartype untagged_target_repositories: list[str]
:ivar mode: When Force, any existing target tags will be overwritten. When NoForce, any
existing target tags will fail the operation before any copying begins. Possible values
include: "NoForce", "Force". Default value: "NoForce".
:vartype mode: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportMode
"""
_validation = {
'source': {'required': True},
}
_attribute_map = {
'source': {'key': 'source', 'type': 'ImportSource'},
'target_tags': {'key': 'targetTags', 'type': '[str]'},
'untagged_target_repositories': {'key': 'untaggedTargetRepositories', 'type': '[str]'},
'mode': {'key': 'mode', 'type': 'str'},
}
def __init__(
self,
*,
source: "ImportSource",
target_tags: Optional[List[str]] = None,
untagged_target_repositories: Optional[List[str]] = None,
mode: Optional[Union[str, "ImportMode"]] = "NoForce",
**kwargs
):
"""
:keyword source: Required. The source of the image.
:paramtype source: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportSource
:keyword target_tags: List of strings of the form repo[:tag]. When tag is omitted the source
will be used (or 'latest' if source tag is also omitted).
:paramtype target_tags: list[str]
:keyword untagged_target_repositories: List of strings of repository names to do a manifest
only copy. No tag will be created.
:paramtype untagged_target_repositories: list[str]
:keyword mode: When Force, any existing target tags will be overwritten. When NoForce, any
existing target tags will fail the operation before any copying begins. Possible values
include: "NoForce", "Force". Default value: "NoForce".
:paramtype mode: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportMode
"""
super(ImportImageParameters, self).__init__(**kwargs)
self.source = source
self.target_tags = target_tags
self.untagged_target_repositories = untagged_target_repositories
self.mode = mode
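# Example (illustrative sketch): import 'hello-world:latest' from the public
# MCR registry into this registry, failing before any copy begins if the target
# tag already exists (the default "NoForce" mode).
#
#     params = ImportImageParameters(
#         source=ImportSource(
#             registry_uri="mcr.microsoft.com",
#             source_image="hello-world:latest",
#         ),
#         target_tags=["hello-world:latest"],
#         mode="NoForce",
#     )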
class ImportPipeline(ProxyResource):
"""An object that represents an import pipeline for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar location: The location of the import pipeline.
:vartype location: str
:ivar identity: The identity of the import pipeline.
:vartype identity: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.IdentityProperties
:ivar source: The source properties of the import pipeline.
:vartype source:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportPipelineSourceProperties
:ivar trigger: The properties that describe the trigger of the import pipeline.
:vartype trigger:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineTriggerProperties
:ivar options: The list of all options configured for the pipeline.
:vartype options: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineOptions]
:ivar provisioning_state: The provisioning state of the pipeline at the time the operation was
called. Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed",
"Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'location': {'key': 'location', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'IdentityProperties'},
'source': {'key': 'properties.source', 'type': 'ImportPipelineSourceProperties'},
'trigger': {'key': 'properties.trigger', 'type': 'PipelineTriggerProperties'},
'options': {'key': 'properties.options', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
location: Optional[str] = None,
identity: Optional["IdentityProperties"] = None,
source: Optional["ImportPipelineSourceProperties"] = None,
trigger: Optional["PipelineTriggerProperties"] = None,
options: Optional[List[Union[str, "PipelineOptions"]]] = None,
**kwargs
):
"""
:keyword location: The location of the import pipeline.
:paramtype location: str
:keyword identity: The identity of the import pipeline.
:paramtype identity:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.IdentityProperties
:keyword source: The source properties of the import pipeline.
:paramtype source:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportPipelineSourceProperties
:keyword trigger: The properties that describe the trigger of the import pipeline.
:paramtype trigger:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineTriggerProperties
:keyword options: The list of all options configured for the pipeline.
:paramtype options: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineOptions]
"""
super(ImportPipeline, self).__init__(**kwargs)
self.location = location
self.identity = identity
self.source = source
self.trigger = trigger
self.options = options
self.provisioning_state = None
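# Example (illustrative sketch): an import pipeline mirroring the export
# pipeline example above, reading artifacts from a storage container whose SAS
# token is stored as a key vault secret; URIs are hypothetical. A
# PipelineTriggerProperties value could additionally be passed as trigger= to
# control when imports run.
#
#     pipeline = ImportPipeline(
#         location="eastus",
#         identity=IdentityProperties(type="SystemAssigned"),
#         source=ImportPipelineSourceProperties(
#             uri="https://myaccount.blob.core.windows.net/imports",
#             key_vault_uri="https://myvault.vault.azure.net/secrets/sas-token",
#         ),
#     )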
class ImportPipelineListResult(msrest.serialization.Model):
"""The result of a request to list import pipelines for a container registry.
:ivar value: The list of import pipelines. Since this list may be incomplete, the nextLink
field should be used to request the next list of import pipelines.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportPipeline]
:ivar next_link: The URI that can be used to request the next list of pipeline runs.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ImportPipeline]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ImportPipeline"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of import pipelines. Since this list may be incomplete, the nextLink
field should be used to request the next list of import pipelines.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportPipeline]
:keyword next_link: The URI that can be used to request the next list of pipeline runs.
:paramtype next_link: str
"""
super(ImportPipelineListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class ImportPipelineSourceProperties(msrest.serialization.Model):
"""The properties of the import pipeline source.
All required parameters must be populated in order to send to Azure.
:ivar type: The type of source for the import pipeline. Possible values include:
"AzureStorageBlobContainer". Default value: "AzureStorageBlobContainer".
:vartype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineSourceType
:ivar uri: The source uri of the import pipeline.
When 'AzureStorageBlob': "https://accountName.blob.core.windows.net/containerName/blobName"
When 'AzureStorageBlobContainer': "https://accountName.blob.core.windows.net/containerName".
:vartype uri: str
:ivar key_vault_uri: Required. The key vault secret uri to obtain the source storage SAS
token.
:vartype key_vault_uri: str
"""
_validation = {
'key_vault_uri': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'},
}
def __init__(
self,
*,
key_vault_uri: str,
type: Optional[Union[str, "PipelineSourceType"]] = "AzureStorageBlobContainer",
uri: Optional[str] = None,
**kwargs
):
"""
:keyword type: The type of source for the import pipeline. Possible values include:
"AzureStorageBlobContainer". Default value: "AzureStorageBlobContainer".
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineSourceType
:keyword uri: The source uri of the import pipeline.
When 'AzureStorageBlob': "https://accountName.blob.core.windows.net/containerName/blobName"
When 'AzureStorageBlobContainer': "https://accountName.blob.core.windows.net/containerName".
:paramtype uri: str
:keyword key_vault_uri: Required. The key vault secret uri to obtain the source storage SAS
token.
:paramtype key_vault_uri: str
"""
super(ImportPipelineSourceProperties, self).__init__(**kwargs)
self.type = type
self.uri = uri
self.key_vault_uri = key_vault_uri
class ImportSource(msrest.serialization.Model):
"""ImportSource.
All required parameters must be populated in order to send to Azure.
:ivar resource_id: The resource identifier of the source Azure Container Registry.
:vartype resource_id: str
:ivar registry_uri: The address of the source registry (e.g. 'mcr.microsoft.com').
:vartype registry_uri: str
:ivar credentials: Credentials used when importing from a registry uri.
:vartype credentials:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportSourceCredentials
:ivar source_image: Required. Repository name of the source image.
Specify an image by repository ('hello-world'). This will use the 'latest' tag.
Specify an image by tag ('hello-world:latest').
Specify an image by sha256-based manifest digest ('hello-world@sha256:abc123').
:vartype source_image: str
"""
_validation = {
'source_image': {'required': True},
}
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'registry_uri': {'key': 'registryUri', 'type': 'str'},
'credentials': {'key': 'credentials', 'type': 'ImportSourceCredentials'},
'source_image': {'key': 'sourceImage', 'type': 'str'},
}
def __init__(
self,
*,
source_image: str,
resource_id: Optional[str] = None,
registry_uri: Optional[str] = None,
credentials: Optional["ImportSourceCredentials"] = None,
**kwargs
):
"""
:keyword resource_id: The resource identifier of the source Azure Container Registry.
:paramtype resource_id: str
:keyword registry_uri: The address of the source registry (e.g. 'mcr.microsoft.com').
:paramtype registry_uri: str
:keyword credentials: Credentials used when importing from a registry uri.
:paramtype credentials:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportSourceCredentials
:keyword source_image: Required. Repository name of the source image.
Specify an image by repository ('hello-world'). This will use the 'latest' tag.
Specify an image by tag ('hello-world:latest').
Specify an image by sha256-based manifest digest ('hello-world@sha256:abc123').
:paramtype source_image: str
"""
super(ImportSource, self).__init__(**kwargs)
self.resource_id = resource_id
self.registry_uri = registry_uri
self.credentials = credentials
self.source_image = source_image
class ImportSourceCredentials(msrest.serialization.Model):
"""ImportSourceCredentials.
All required parameters must be populated in order to send to Azure.
:ivar username: The username to authenticate with the source registry.
:vartype username: str
:ivar password: Required. The password used to authenticate with the source registry.
:vartype password: str
"""
_validation = {
'password': {'required': True},
}
_attribute_map = {
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
}
def __init__(
self,
*,
password: str,
username: Optional[str] = None,
**kwargs
):
"""
:keyword username: The username to authenticate with the source registry.
:paramtype username: str
:keyword password: Required. The password used to authenticate with the source registry.
:paramtype password: str
"""
super(ImportSourceCredentials, self).__init__(**kwargs)
self.username = username
self.password = password
class InnerErrorDescription(msrest.serialization.Model):
"""inner error.
All required parameters must be populated in order to send to Azure.
:ivar code: Required. error code.
:vartype code: str
:ivar message: Required. error message.
:vartype message: str
:ivar target: target of the particular error.
:vartype target: str
"""
_validation = {
'code': {'required': True},
'message': {'required': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
*,
code: str,
message: str,
target: Optional[str] = None,
**kwargs
):
"""
:keyword code: Required. error code.
:paramtype code: str
:keyword message: Required. error message.
:paramtype message: str
:keyword target: target of the particular error.
:paramtype target: str
"""
super(InnerErrorDescription, self).__init__(**kwargs)
self.code = code
self.message = message
self.target = target
class IPRule(msrest.serialization.Model):
"""IP rule with specific IP or IP range in CIDR format.
All required parameters must be populated in order to send to Azure.
:ivar action: The action of IP ACL rule. Possible values include: "Allow".
:vartype action: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Action
:ivar ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only an
IPv4 address is allowed.
:vartype ip_address_or_range: str
"""
_validation = {
'ip_address_or_range': {'required': True},
}
_attribute_map = {
'action': {'key': 'action', 'type': 'str'},
'ip_address_or_range': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
ip_address_or_range: str,
action: Optional[Union[str, "Action"]] = None,
**kwargs
):
"""
:keyword action: The action of IP ACL rule. Possible values include: "Allow".
:paramtype action: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Action
:keyword ip_address_or_range: Required. Specifies the IP or IP range in CIDR format. Only an
IPv4 address is allowed.
:paramtype ip_address_or_range: str
"""
super(IPRule, self).__init__(**kwargs)
self.action = action
self.ip_address_or_range = ip_address_or_range
class KeyVaultProperties(msrest.serialization.Model):
"""KeyVaultProperties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar key_identifier: Key vault uri to access the encryption key.
:vartype key_identifier: str
:ivar versioned_key_identifier: The fully qualified key identifier that includes the version of
the key that is actually used for encryption.
:vartype versioned_key_identifier: str
:ivar identity: The client ID of the identity which will be used to access the key vault.
:vartype identity: str
:ivar key_rotation_enabled: Auto key rotation status for a CMK enabled registry.
:vartype key_rotation_enabled: bool
:ivar last_key_rotation_timestamp: Timestamp of the last successful key rotation.
:vartype last_key_rotation_timestamp: ~datetime.datetime
"""
_validation = {
'versioned_key_identifier': {'readonly': True},
'key_rotation_enabled': {'readonly': True},
'last_key_rotation_timestamp': {'readonly': True},
}
_attribute_map = {
'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
'versioned_key_identifier': {'key': 'versionedKeyIdentifier', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'key_rotation_enabled': {'key': 'keyRotationEnabled', 'type': 'bool'},
'last_key_rotation_timestamp': {'key': 'lastKeyRotationTimestamp', 'type': 'iso-8601'},
}
def __init__(
self,
*,
key_identifier: Optional[str] = None,
identity: Optional[str] = None,
**kwargs
):
"""
:keyword key_identifier: Key vault uri to access the encryption key.
:paramtype key_identifier: str
:keyword identity: The client ID of the identity which will be used to access the key vault.
:paramtype identity: str
"""
super(KeyVaultProperties, self).__init__(**kwargs)
self.key_identifier = key_identifier
self.versioned_key_identifier = None
self.identity = identity
self.key_rotation_enabled = None
self.last_key_rotation_timestamp = None
class LoggingProperties(msrest.serialization.Model):
"""The logging properties of the connected registry.
:ivar log_level: The verbosity of logs persisted on the connected registry. Possible values
include: "Debug", "Information", "Warning", "Error", "None". Default value: "Information".
:vartype log_level: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.LogLevel
:ivar audit_log_status: Indicates whether audit logs are enabled on the connected registry.
Possible values include: "Enabled", "Disabled". Default value: "Disabled".
:vartype audit_log_status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.AuditLogStatus
"""
_attribute_map = {
'log_level': {'key': 'logLevel', 'type': 'str'},
'audit_log_status': {'key': 'auditLogStatus', 'type': 'str'},
}
def __init__(
self,
*,
log_level: Optional[Union[str, "LogLevel"]] = "Information",
audit_log_status: Optional[Union[str, "AuditLogStatus"]] = "Disabled",
**kwargs
):
"""
:keyword log_level: The verbosity of logs persisted on the connected registry. Possible values
include: "Debug", "Information", "Warning", "Error", "None". Default value: "Information".
:paramtype log_level: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.LogLevel
:keyword audit_log_status: Indicates whether audit logs are enabled on the connected registry.
Possible values include: "Enabled", "Disabled". Default value: "Disabled".
:paramtype audit_log_status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.AuditLogStatus
"""
super(LoggingProperties, self).__init__(**kwargs)
self.log_level = log_level
self.audit_log_status = audit_log_status
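# Example (illustrative sketch): verbose logging with audit logs enabled; both
# values are plain strings matching the LogLevel and AuditLogStatus enums.
#
#     logging_props = LoggingProperties(log_level="Debug", audit_log_status="Enabled")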
class LoginServerProperties(msrest.serialization.Model):
"""The login server properties of the connected registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar host: The host of the connected registry. Can be FQDN or IP.
:vartype host: str
:ivar tls: The TLS properties of the connected registry login server.
:vartype tls: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TlsProperties
"""
_validation = {
'host': {'readonly': True},
'tls': {'readonly': True},
}
_attribute_map = {
'host': {'key': 'host', 'type': 'str'},
'tls': {'key': 'tls', 'type': 'TlsProperties'},
}
def __init__(
self,
**kwargs
):
"""
"""
super(LoginServerProperties, self).__init__(**kwargs)
self.host = None
self.tls = None
class NetworkRuleSet(msrest.serialization.Model):
"""The network rule set for a container registry.
All required parameters must be populated in order to send to Azure.
:ivar default_action: Required. The default action of allow or deny when no other rules match.
Possible values include: "Allow", "Deny". Default value: "Allow".
:vartype default_action: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.DefaultAction
:ivar virtual_network_rules: The virtual network rules.
:vartype virtual_network_rules:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.VirtualNetworkRule]
:ivar ip_rules: The IP ACL rules.
:vartype ip_rules: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.IPRule]
"""
_validation = {
'default_action': {'required': True},
}
_attribute_map = {
'default_action': {'key': 'defaultAction', 'type': 'str'},
'virtual_network_rules': {'key': 'virtualNetworkRules', 'type': '[VirtualNetworkRule]'},
'ip_rules': {'key': 'ipRules', 'type': '[IPRule]'},
}
def __init__(
self,
*,
default_action: Union[str, "DefaultAction"] = "Allow",
virtual_network_rules: Optional[List["VirtualNetworkRule"]] = None,
ip_rules: Optional[List["IPRule"]] = None,
**kwargs
):
"""
:keyword default_action: Required. The default action of allow or deny when no other rules
match. Possible values include: "Allow", "Deny". Default value: "Allow".
:paramtype default_action: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.DefaultAction
:keyword virtual_network_rules: The virtual network rules.
:paramtype virtual_network_rules:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.VirtualNetworkRule]
:keyword ip_rules: The IP ACL rules.
:paramtype ip_rules: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.IPRule]
"""
super(NetworkRuleSet, self).__init__(**kwargs)
self.default_action = default_action
self.virtual_network_rules = virtual_network_rules
self.ip_rules = ip_rules
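# Example (illustrative sketch): deny all network traffic by default while
# allowing a single hypothetical IPv4 range through an IP ACL rule.
#
#     rules = NetworkRuleSet(
#         default_action="Deny",
#         ip_rules=[IPRule(ip_address_or_range="203.0.113.0/24", action="Allow")],
#     )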
class OperationDefinition(msrest.serialization.Model):
"""The definition of a container registry operation.
:ivar origin: The origin information of the container registry operation.
:vartype origin: str
:ivar name: Operation name: {provider}/{resource}/{operation}.
:vartype name: str
:ivar display: The display information for the container registry operation.
:vartype display:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationDisplayDefinition
:ivar is_data_action: This property indicates if the operation is an action or a data action;
see
https://docs.microsoft.com/en-us/azure/role-based-access-control/role-definitions#management-and-data-operations.
:vartype is_data_action: bool
:ivar service_specification: The definition of Azure Monitoring service.
:vartype service_specification:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationServiceSpecificationDefinition
"""
_attribute_map = {
'origin': {'key': 'origin', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplayDefinition'},
'is_data_action': {'key': 'isDataAction', 'type': 'bool'},
'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecificationDefinition'},
}
def __init__(
self,
*,
origin: Optional[str] = None,
name: Optional[str] = None,
display: Optional["OperationDisplayDefinition"] = None,
is_data_action: Optional[bool] = None,
service_specification: Optional["OperationServiceSpecificationDefinition"] = None,
**kwargs
):
"""
:keyword origin: The origin information of the container registry operation.
:paramtype origin: str
:keyword name: Operation name: {provider}/{resource}/{operation}.
:paramtype name: str
:keyword display: The display information for the container registry operation.
:paramtype display:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationDisplayDefinition
:keyword is_data_action: This property indicates if the operation is an action or a data
action; see
https://docs.microsoft.com/en-us/azure/role-based-access-control/role-definitions#management-and-data-operations.
:paramtype is_data_action: bool
:keyword service_specification: The definition of Azure Monitoring service.
:paramtype service_specification:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationServiceSpecificationDefinition
"""
super(OperationDefinition, self).__init__(**kwargs)
self.origin = origin
self.name = name
self.display = display
self.is_data_action = is_data_action
self.service_specification = service_specification
class OperationDisplayDefinition(msrest.serialization.Model):
"""The display information for a container registry operation.
:ivar provider: The resource provider name: Microsoft.ContainerRegistry.
:vartype provider: str
:ivar resource: The resource on which the operation is performed.
:vartype resource: str
:ivar operation: The operation that users can perform.
:vartype operation: str
:ivar description: The description for the operation.
:vartype description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
provider: Optional[str] = None,
resource: Optional[str] = None,
operation: Optional[str] = None,
description: Optional[str] = None,
**kwargs
):
"""
:keyword provider: The resource provider name: Microsoft.ContainerRegistry.
:paramtype provider: str
:keyword resource: The resource on which the operation is performed.
:paramtype resource: str
:keyword operation: The operation that users can perform.
:paramtype operation: str
:keyword description: The description for the operation.
:paramtype description: str
"""
super(OperationDisplayDefinition, self).__init__(**kwargs)
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
class OperationListResult(msrest.serialization.Model):
"""The result of a request to list container registry operations.
:ivar value: The list of container registry operations. Since this list may be incomplete, the
nextLink field should be used to request the next list of operations.
:vartype value:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationDefinition]
:ivar next_link: The URI that can be used to request the next list of container registry
operations.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[OperationDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["OperationDefinition"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of container registry operations. Since this list may be incomplete,
the nextLink field should be used to request the next list of operations.
:paramtype value:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationDefinition]
:keyword next_link: The URI that can be used to request the next list of container registry
operations.
:paramtype next_link: str
"""
super(OperationListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class OperationLogSpecificationDefinition(msrest.serialization.Model):
"""The definition of Azure Monitoring log.
:ivar name: Log name.
:vartype name: str
:ivar display_name: Log display name.
:vartype display_name: str
:ivar blob_duration: Log blob duration.
:vartype blob_duration: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'blob_duration': {'key': 'blobDuration', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
blob_duration: Optional[str] = None,
**kwargs
):
"""
:keyword name: Log name.
:paramtype name: str
:keyword display_name: Log display name.
:paramtype display_name: str
:keyword blob_duration: Log blob duration.
:paramtype blob_duration: str
"""
super(OperationLogSpecificationDefinition, self).__init__(**kwargs)
self.name = name
self.display_name = display_name
self.blob_duration = blob_duration
class OperationMetricSpecificationDefinition(msrest.serialization.Model):
"""The definition of Azure Monitoring metric.
:ivar name: Metric name.
:vartype name: str
:ivar display_name: Metric display name.
:vartype display_name: str
:ivar display_description: Metric description.
:vartype display_description: str
:ivar unit: Metric unit.
:vartype unit: str
:ivar aggregation_type: Metric aggregation type.
:vartype aggregation_type: str
:ivar internal_metric_name: Internal metric name.
:vartype internal_metric_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'display_description': {'key': 'displayDescription', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
'internal_metric_name': {'key': 'internalMetricName', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
display_description: Optional[str] = None,
unit: Optional[str] = None,
aggregation_type: Optional[str] = None,
internal_metric_name: Optional[str] = None,
**kwargs
):
"""
:keyword name: Metric name.
:paramtype name: str
:keyword display_name: Metric display name.
:paramtype display_name: str
:keyword display_description: Metric description.
:paramtype display_description: str
:keyword unit: Metric unit.
:paramtype unit: str
:keyword aggregation_type: Metric aggregation type.
:paramtype aggregation_type: str
:keyword internal_metric_name: Internal metric name.
:paramtype internal_metric_name: str
"""
super(OperationMetricSpecificationDefinition, self).__init__(**kwargs)
self.name = name
self.display_name = display_name
self.display_description = display_description
self.unit = unit
self.aggregation_type = aggregation_type
self.internal_metric_name = internal_metric_name
class OperationServiceSpecificationDefinition(msrest.serialization.Model):
"""The definition of Azure Monitoring list.
:ivar metric_specifications: A list of Azure Monitoring metrics definition.
:vartype metric_specifications:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationMetricSpecificationDefinition]
:ivar log_specifications: A list of Azure Monitoring log definitions.
:vartype log_specifications:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationLogSpecificationDefinition]
"""
_attribute_map = {
'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecificationDefinition]'},
'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecificationDefinition]'},
}
def __init__(
self,
*,
metric_specifications: Optional[List["OperationMetricSpecificationDefinition"]] = None,
log_specifications: Optional[List["OperationLogSpecificationDefinition"]] = None,
**kwargs
):
"""
        :keyword metric_specifications: A list of Azure Monitoring metric definitions.
:paramtype metric_specifications:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationMetricSpecificationDefinition]
:keyword log_specifications: A list of Azure Monitoring log definitions.
:paramtype log_specifications:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.OperationLogSpecificationDefinition]
"""
super(OperationServiceSpecificationDefinition, self).__init__(**kwargs)
self.metric_specifications = metric_specifications
self.log_specifications = log_specifications
class ParentProperties(msrest.serialization.Model):
"""The properties of the connected registry parent.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID of the parent to which the connected registry will be associated.
:vartype id: str
:ivar sync_properties: Required. The sync properties of the connected registry with its parent.
:vartype sync_properties:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.SyncProperties
"""
_validation = {
'sync_properties': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'sync_properties': {'key': 'syncProperties', 'type': 'SyncProperties'},
}
def __init__(
self,
*,
sync_properties: "SyncProperties",
id: Optional[str] = None,
**kwargs
):
"""
:keyword id: The resource ID of the parent to which the connected registry will be associated.
:paramtype id: str
:keyword sync_properties: Required. The sync properties of the connected registry with its
parent.
:paramtype sync_properties:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.SyncProperties
"""
super(ParentProperties, self).__init__(**kwargs)
self.id = id
self.sync_properties = sync_properties
class PipelineRun(ProxyResource):
"""An object that represents a pipeline run for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar provisioning_state: The provisioning state of a pipeline run. Possible values include:
"Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
:ivar request: The request parameters for a pipeline run.
:vartype request: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunRequest
:ivar response: The response of a pipeline run.
:vartype response: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunResponse
    :ivar force_update_tag: A value that, when changed, forces the pipeline run to be recreated
     even if the pipeline run configuration has not changed.
:vartype force_update_tag: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'response': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'request': {'key': 'properties.request', 'type': 'PipelineRunRequest'},
'response': {'key': 'properties.response', 'type': 'PipelineRunResponse'},
'force_update_tag': {'key': 'properties.forceUpdateTag', 'type': 'str'},
}
def __init__(
self,
*,
request: Optional["PipelineRunRequest"] = None,
force_update_tag: Optional[str] = None,
**kwargs
):
"""
:keyword request: The request parameters for a pipeline run.
:paramtype request: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunRequest
        :keyword force_update_tag: A value that, when changed, forces the pipeline run to be
         recreated even if the pipeline run configuration has not changed.
:paramtype force_update_tag: str
"""
super(PipelineRun, self).__init__(**kwargs)
self.provisioning_state = None
self.request = request
self.response = None
self.force_update_tag = force_update_tag
class PipelineRunListResult(msrest.serialization.Model):
"""The result of a request to list pipeline runs for a container registry.
:ivar value: The list of pipeline runs. Since this list may be incomplete, the nextLink field
should be used to request the next list of pipeline runs.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRun]
:ivar next_link: The URI that can be used to request the next list of pipeline runs.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineRun]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["PipelineRun"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of pipeline runs. Since this list may be incomplete, the nextLink
field should be used to request the next list of pipeline runs.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRun]
:keyword next_link: The URI that can be used to request the next list of pipeline runs.
:paramtype next_link: str
"""
super(PipelineRunListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class PipelineRunRequest(msrest.serialization.Model):
"""The request properties provided for a pipeline run.
:ivar pipeline_resource_id: The resource ID of the pipeline to run.
:vartype pipeline_resource_id: str
:ivar artifacts: List of source artifacts to be transferred by the pipeline.
Specify an image by repository ('hello-world'). This will use the 'latest' tag.
Specify an image by tag ('hello-world:latest').
Specify an image by sha256-based manifest digest ('hello-world@sha256:abc123').
:vartype artifacts: list[str]
:ivar source: The source properties of the pipeline run.
:vartype source:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunSourceProperties
:ivar target: The target properties of the pipeline run.
:vartype target:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunTargetProperties
:ivar catalog_digest: The digest of the tar used to transfer the artifacts.
:vartype catalog_digest: str
"""
_attribute_map = {
'pipeline_resource_id': {'key': 'pipelineResourceId', 'type': 'str'},
'artifacts': {'key': 'artifacts', 'type': '[str]'},
'source': {'key': 'source', 'type': 'PipelineRunSourceProperties'},
'target': {'key': 'target', 'type': 'PipelineRunTargetProperties'},
'catalog_digest': {'key': 'catalogDigest', 'type': 'str'},
}
def __init__(
self,
*,
pipeline_resource_id: Optional[str] = None,
artifacts: Optional[List[str]] = None,
source: Optional["PipelineRunSourceProperties"] = None,
target: Optional["PipelineRunTargetProperties"] = None,
catalog_digest: Optional[str] = None,
**kwargs
):
"""
:keyword pipeline_resource_id: The resource ID of the pipeline to run.
:paramtype pipeline_resource_id: str
:keyword artifacts: List of source artifacts to be transferred by the pipeline.
Specify an image by repository ('hello-world'). This will use the 'latest' tag.
Specify an image by tag ('hello-world:latest').
Specify an image by sha256-based manifest digest ('hello-world@sha256:abc123').
:paramtype artifacts: list[str]
:keyword source: The source properties of the pipeline run.
:paramtype source:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunSourceProperties
:keyword target: The target properties of the pipeline run.
:paramtype target:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunTargetProperties
:keyword catalog_digest: The digest of the tar used to transfer the artifacts.
:paramtype catalog_digest: str
"""
super(PipelineRunRequest, self).__init__(**kwargs)
self.pipeline_resource_id = pipeline_resource_id
self.artifacts = artifacts
self.source = source
self.target = target
self.catalog_digest = catalog_digest
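# Illustrative sketch (not part of the generated SDK): a pipeline run request
# that imports two artifacts from an Azure Storage blob source. The resource ID
# and blob name below are hypothetical placeholders.
def _example_pipeline_run_request():
    """Build a request that transfers a tagged image and a digest-pinned image."""
    return PipelineRunRequest(
        pipeline_resource_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRg"
            "/providers/Microsoft.ContainerRegistry/registries/myRegistry"
            "/importPipelines/myPipeline"
        ),
        artifacts=["hello-world:latest", "hello-world@sha256:abc123"],
        source=PipelineRunSourceProperties(name="myblob.tar.gz"),
    )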
class PipelineRunResponse(msrest.serialization.Model):
"""The response properties returned for a pipeline run.
:ivar status: The current status of the pipeline run.
:vartype status: str
:ivar imported_artifacts: The artifacts imported in the pipeline run.
:vartype imported_artifacts: list[str]
:ivar progress: The current progress of the copy operation.
:vartype progress: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProgressProperties
:ivar start_time: The time the pipeline run started.
:vartype start_time: ~datetime.datetime
:ivar finish_time: The time the pipeline run finished.
:vartype finish_time: ~datetime.datetime
:ivar source: The source of the pipeline run.
:vartype source:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportPipelineSourceProperties
:ivar target: The target of the pipeline run.
:vartype target:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPipelineTargetProperties
:ivar catalog_digest: The digest of the tar used to transfer the artifacts.
:vartype catalog_digest: str
:ivar trigger: The trigger that caused the pipeline run.
:vartype trigger:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineTriggerDescriptor
:ivar pipeline_run_error_message: The detailed error message for the pipeline run in the case
of failure.
:vartype pipeline_run_error_message: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'imported_artifacts': {'key': 'importedArtifacts', 'type': '[str]'},
'progress': {'key': 'progress', 'type': 'ProgressProperties'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'finish_time': {'key': 'finishTime', 'type': 'iso-8601'},
'source': {'key': 'source', 'type': 'ImportPipelineSourceProperties'},
'target': {'key': 'target', 'type': 'ExportPipelineTargetProperties'},
'catalog_digest': {'key': 'catalogDigest', 'type': 'str'},
'trigger': {'key': 'trigger', 'type': 'PipelineTriggerDescriptor'},
'pipeline_run_error_message': {'key': 'pipelineRunErrorMessage', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[str] = None,
imported_artifacts: Optional[List[str]] = None,
progress: Optional["ProgressProperties"] = None,
start_time: Optional[datetime.datetime] = None,
finish_time: Optional[datetime.datetime] = None,
source: Optional["ImportPipelineSourceProperties"] = None,
target: Optional["ExportPipelineTargetProperties"] = None,
catalog_digest: Optional[str] = None,
trigger: Optional["PipelineTriggerDescriptor"] = None,
pipeline_run_error_message: Optional[str] = None,
**kwargs
):
"""
:keyword status: The current status of the pipeline run.
:paramtype status: str
:keyword imported_artifacts: The artifacts imported in the pipeline run.
:paramtype imported_artifacts: list[str]
:keyword progress: The current progress of the copy operation.
:paramtype progress:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProgressProperties
:keyword start_time: The time the pipeline run started.
:paramtype start_time: ~datetime.datetime
:keyword finish_time: The time the pipeline run finished.
:paramtype finish_time: ~datetime.datetime
:keyword source: The source of the pipeline run.
:paramtype source:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ImportPipelineSourceProperties
:keyword target: The target of the pipeline run.
:paramtype target:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPipelineTargetProperties
:keyword catalog_digest: The digest of the tar used to transfer the artifacts.
:paramtype catalog_digest: str
:keyword trigger: The trigger that caused the pipeline run.
:paramtype trigger:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineTriggerDescriptor
:keyword pipeline_run_error_message: The detailed error message for the pipeline run in the
case of failure.
:paramtype pipeline_run_error_message: str
"""
super(PipelineRunResponse, self).__init__(**kwargs)
self.status = status
self.imported_artifacts = imported_artifacts
self.progress = progress
self.start_time = start_time
self.finish_time = finish_time
self.source = source
self.target = target
self.catalog_digest = catalog_digest
self.trigger = trigger
self.pipeline_run_error_message = pipeline_run_error_message
class PipelineRunSourceProperties(msrest.serialization.Model):
"""PipelineRunSourceProperties.
:ivar type: The type of the source. Possible values include: "AzureStorageBlob". Default value:
"AzureStorageBlob".
:vartype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunSourceType
:ivar name: The name of the source.
:vartype name: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "PipelineRunSourceType"]] = "AzureStorageBlob",
name: Optional[str] = None,
**kwargs
):
"""
:keyword type: The type of the source. Possible values include: "AzureStorageBlob". Default
value: "AzureStorageBlob".
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunSourceType
:keyword name: The name of the source.
:paramtype name: str
"""
super(PipelineRunSourceProperties, self).__init__(**kwargs)
self.type = type
self.name = name
class PipelineRunTargetProperties(msrest.serialization.Model):
"""PipelineRunTargetProperties.
:ivar type: The type of the target. Possible values include: "AzureStorageBlob". Default value:
"AzureStorageBlob".
:vartype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunTargetType
:ivar name: The name of the target.
:vartype name: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "PipelineRunTargetType"]] = "AzureStorageBlob",
name: Optional[str] = None,
**kwargs
):
"""
:keyword type: The type of the target. Possible values include: "AzureStorageBlob". Default
value: "AzureStorageBlob".
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineRunTargetType
:keyword name: The name of the target.
:paramtype name: str
"""
super(PipelineRunTargetProperties, self).__init__(**kwargs)
self.type = type
self.name = name
class PipelineSourceTriggerDescriptor(msrest.serialization.Model):
"""PipelineSourceTriggerDescriptor.
:ivar timestamp: The timestamp when the source update happened.
:vartype timestamp: ~datetime.datetime
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
}
def __init__(
self,
*,
timestamp: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword timestamp: The timestamp when the source update happened.
:paramtype timestamp: ~datetime.datetime
"""
super(PipelineSourceTriggerDescriptor, self).__init__(**kwargs)
self.timestamp = timestamp
class PipelineSourceTriggerProperties(msrest.serialization.Model):
"""PipelineSourceTriggerProperties.
All required parameters must be populated in order to send to Azure.
:ivar status: Required. The current status of the source trigger. Possible values include:
"Enabled", "Disabled". Default value: "Enabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TriggerStatus
"""
_validation = {
'status': {'required': True},
}
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
*,
status: Union[str, "TriggerStatus"] = "Enabled",
**kwargs
):
"""
:keyword status: Required. The current status of the source trigger. Possible values include:
"Enabled", "Disabled". Default value: "Enabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TriggerStatus
"""
super(PipelineSourceTriggerProperties, self).__init__(**kwargs)
self.status = status
class PipelineTriggerDescriptor(msrest.serialization.Model):
"""PipelineTriggerDescriptor.
:ivar source_trigger: The source trigger that caused the pipeline run.
:vartype source_trigger:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineSourceTriggerDescriptor
"""
_attribute_map = {
'source_trigger': {'key': 'sourceTrigger', 'type': 'PipelineSourceTriggerDescriptor'},
}
def __init__(
self,
*,
source_trigger: Optional["PipelineSourceTriggerDescriptor"] = None,
**kwargs
):
"""
:keyword source_trigger: The source trigger that caused the pipeline run.
:paramtype source_trigger:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineSourceTriggerDescriptor
"""
super(PipelineTriggerDescriptor, self).__init__(**kwargs)
self.source_trigger = source_trigger
class PipelineTriggerProperties(msrest.serialization.Model):
"""PipelineTriggerProperties.
:ivar source_trigger: The source trigger properties of the pipeline.
:vartype source_trigger:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineSourceTriggerProperties
"""
_attribute_map = {
'source_trigger': {'key': 'sourceTrigger', 'type': 'PipelineSourceTriggerProperties'},
}
def __init__(
self,
*,
source_trigger: Optional["PipelineSourceTriggerProperties"] = None,
**kwargs
):
"""
:keyword source_trigger: The source trigger properties of the pipeline.
:paramtype source_trigger:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PipelineSourceTriggerProperties
"""
super(PipelineTriggerProperties, self).__init__(**kwargs)
self.source_trigger = source_trigger
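# Illustrative sketch (not part of the generated SDK): trigger properties that
# enable source-triggered pipeline runs, using the "Enabled" status value
# documented above.
def _example_pipeline_trigger_properties():
    """Build trigger properties with an enabled source trigger."""
    return PipelineTriggerProperties(
        source_trigger=PipelineSourceTriggerProperties(status="Enabled"),
    )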
class Policies(msrest.serialization.Model):
"""The policies for a container registry.
:ivar quarantine_policy: The quarantine policy for a container registry.
:vartype quarantine_policy:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.QuarantinePolicy
:ivar trust_policy: The content trust policy for a container registry.
:vartype trust_policy: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TrustPolicy
:ivar retention_policy: The retention policy for a container registry.
:vartype retention_policy:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.RetentionPolicy
:ivar export_policy: The export policy for a container registry.
:vartype export_policy: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPolicy
"""
_attribute_map = {
'quarantine_policy': {'key': 'quarantinePolicy', 'type': 'QuarantinePolicy'},
'trust_policy': {'key': 'trustPolicy', 'type': 'TrustPolicy'},
'retention_policy': {'key': 'retentionPolicy', 'type': 'RetentionPolicy'},
'export_policy': {'key': 'exportPolicy', 'type': 'ExportPolicy'},
}
def __init__(
self,
*,
quarantine_policy: Optional["QuarantinePolicy"] = None,
trust_policy: Optional["TrustPolicy"] = None,
retention_policy: Optional["RetentionPolicy"] = None,
export_policy: Optional["ExportPolicy"] = None,
**kwargs
):
"""
:keyword quarantine_policy: The quarantine policy for a container registry.
:paramtype quarantine_policy:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.QuarantinePolicy
:keyword trust_policy: The content trust policy for a container registry.
:paramtype trust_policy: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TrustPolicy
:keyword retention_policy: The retention policy for a container registry.
:paramtype retention_policy:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.RetentionPolicy
:keyword export_policy: The export policy for a container registry.
:paramtype export_policy: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.ExportPolicy
"""
super(Policies, self).__init__(**kwargs)
self.quarantine_policy = quarantine_policy
self.trust_policy = trust_policy
self.retention_policy = retention_policy
self.export_policy = export_policy
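# Illustrative sketch (not part of the generated SDK): a policy set that turns
# on image quarantine. ``QuarantinePolicy`` is defined later in this module;
# "enabled" is one of its documented status values.
def _example_policies():
    """Build a Policies object with the quarantine policy enabled."""
    return Policies(quarantine_policy=QuarantinePolicy(status="enabled"))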
class PrivateEndpoint(msrest.serialization.Model):
"""The Private Endpoint resource.
    :ivar id: The resource ID of the private endpoint created by the Microsoft.Network resource
     provider.
:vartype id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
"""
        :keyword id: The resource ID of the private endpoint created by the Microsoft.Network
         resource provider.
:paramtype id: str
"""
super(PrivateEndpoint, self).__init__(**kwargs)
self.id = id
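# Illustrative sketch (not part of the generated SDK): referencing an existing
# private endpoint by its resource ID (a hypothetical placeholder).
def _example_private_endpoint():
    """Build a reference to a Microsoft.Network private endpoint."""
    return PrivateEndpoint(
        id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRg"
            "/providers/Microsoft.Network/privateEndpoints/myEndpoint"
        ),
    )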
class PrivateEndpointConnection(ProxyResource):
"""An object that represents a private endpoint connection for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar private_endpoint: The resource of private endpoint.
:vartype private_endpoint:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateEndpoint
:ivar private_link_service_connection_state: A collection of information about the state of the
connection between service consumer and provider.
:vartype private_link_service_connection_state:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateLinkServiceConnectionState
:ivar provisioning_state: The provisioning state of private endpoint connection resource.
Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
private_endpoint: Optional["PrivateEndpoint"] = None,
private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None,
**kwargs
):
"""
:keyword private_endpoint: The resource of private endpoint.
:paramtype private_endpoint:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateEndpoint
:keyword private_link_service_connection_state: A collection of information about the state of
the connection between service consumer and provider.
:paramtype private_link_service_connection_state:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateLinkServiceConnectionState
"""
super(PrivateEndpointConnection, self).__init__(**kwargs)
self.private_endpoint = private_endpoint
self.private_link_service_connection_state = private_link_service_connection_state
self.provisioning_state = None
class PrivateEndpointConnectionListResult(msrest.serialization.Model):
"""The result of a request to list private endpoint connections for a container registry.
:ivar value: The list of private endpoint connections. Since this list may be incomplete, the
nextLink field should be used to request the next list of private endpoint connections.
:vartype value:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateEndpointConnection]
:ivar next_link: The URI that can be used to request the next list of private endpoint
connections.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["PrivateEndpointConnection"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of private endpoint connections. Since this list may be incomplete,
the nextLink field should be used to request the next list of private endpoint connections.
:paramtype value:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateEndpointConnection]
:keyword next_link: The URI that can be used to request the next list of private endpoint
connections.
:paramtype next_link: str
"""
super(PrivateEndpointConnectionListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class PrivateLinkResource(msrest.serialization.Model):
"""A resource that supports private link capabilities.
Variables are only populated by the server, and will be ignored when sending a request.
    :ivar type: The resource type of the private link resource.
:vartype type: str
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
    :ivar group_id: The private link resource group ID.
:vartype group_id: str
:ivar required_members: The private link resource required member names.
:vartype required_members: list[str]
    :ivar required_zone_names: The private link DNS zone names for the private link resource.
:vartype required_zone_names: list[str]
"""
_validation = {
'type': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'group_id': {'key': 'properties.groupId', 'type': 'str'},
'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
group_id: Optional[str] = None,
required_members: Optional[List[str]] = None,
required_zone_names: Optional[List[str]] = None,
**kwargs
):
"""
:keyword id: The resource ID.
:paramtype id: str
:keyword name: The name of the resource.
:paramtype name: str
        :keyword group_id: The private link resource group ID.
:paramtype group_id: str
:keyword required_members: The private link resource required member names.
:paramtype required_members: list[str]
        :keyword required_zone_names: The private link DNS zone names for the private link
         resource.
:paramtype required_zone_names: list[str]
"""
super(PrivateLinkResource, self).__init__(**kwargs)
self.type = None
self.id = id
self.name = name
self.group_id = group_id
self.required_members = required_members
self.required_zone_names = required_zone_names
class PrivateLinkResourceListResult(msrest.serialization.Model):
"""The result of a request to list private link resources for a container registry.
:ivar value: The list of private link resources. Since this list may be incomplete, the
nextLink field should be used to request the next list of private link resources.
:vartype value:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateLinkResource]
:ivar next_link: The URI that can be used to request the next list of private link resources.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["PrivateLinkResource"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of private link resources. Since this list may be incomplete, the
nextLink field should be used to request the next list of private link resources.
:paramtype value:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateLinkResource]
:keyword next_link: The URI that can be used to request the next list of private link
resources.
:paramtype next_link: str
"""
super(PrivateLinkResourceListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class PrivateLinkServiceConnectionState(msrest.serialization.Model):
"""The state of a private link service connection.
:ivar status: The private link service connection status. Possible values include: "Approved",
"Pending", "Rejected", "Disconnected".
:vartype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ConnectionStatus
    :ivar description: A description of the connection status. For example, if the connection is
     rejected, it can indicate the reason for rejection.
:vartype description: str
:ivar actions_required: A message indicating if changes on the service provider require any
updates on the consumer. Possible values include: "None", "Recreate".
:vartype actions_required: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ActionsRequired
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'actions_required': {'key': 'actionsRequired', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[Union[str, "ConnectionStatus"]] = None,
description: Optional[str] = None,
actions_required: Optional[Union[str, "ActionsRequired"]] = None,
**kwargs
):
"""
:keyword status: The private link service connection status. Possible values include:
"Approved", "Pending", "Rejected", "Disconnected".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ConnectionStatus
        :keyword description: A description of the connection status. For example, if the
         connection is rejected, it can indicate the reason for rejection.
:paramtype description: str
:keyword actions_required: A message indicating if changes on the service provider require any
updates on the consumer. Possible values include: "None", "Recreate".
:paramtype actions_required: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ActionsRequired
"""
super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
self.status = status
self.description = description
self.actions_required = actions_required
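# Illustrative sketch (not part of the generated SDK): the connection-state
# payload an administrator would send to approve a pending private endpoint
# connection, using the documented "Approved" status value.
def _example_approved_connection_state():
    """Build an approval state with a short justification."""
    return PrivateLinkServiceConnectionState(
        status="Approved",
        description="Approved by the registry administrator.",
    )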
class ProgressProperties(msrest.serialization.Model):
"""ProgressProperties.
:ivar percentage: The percentage complete of the copy operation.
:vartype percentage: str
"""
_attribute_map = {
'percentage': {'key': 'percentage', 'type': 'str'},
}
def __init__(
self,
*,
percentage: Optional[str] = None,
**kwargs
):
"""
:keyword percentage: The percentage complete of the copy operation.
:paramtype percentage: str
"""
super(ProgressProperties, self).__init__(**kwargs)
self.percentage = percentage
class QuarantinePolicy(msrest.serialization.Model):
"""The quarantine policy for a container registry.
:ivar status: The value that indicates whether the policy is enabled or not. Possible values
include: "enabled", "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PolicyStatus
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[Union[str, "PolicyStatus"]] = None,
**kwargs
):
"""
:keyword status: The value that indicates whether the policy is enabled or not. Possible values
include: "enabled", "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PolicyStatus
"""
super(QuarantinePolicy, self).__init__(**kwargs)
self.status = status
class RegenerateCredentialParameters(msrest.serialization.Model):
"""The parameters used to regenerate the login credential.
All required parameters must be populated in order to send to Azure.
    :ivar name: Required. Specifies the name of the password to be regenerated -- password or
     password2. Possible values include: "password", "password2".
:vartype name: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PasswordName
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
name: Union[str, "PasswordName"],
**kwargs
):
"""
        :keyword name: Required. Specifies the name of the password to be regenerated --
         password or password2. Possible values include: "password", "password2".
:paramtype name: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PasswordName
"""
super(RegenerateCredentialParameters, self).__init__(**kwargs)
self.name = name
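# Illustrative sketch (not part of the generated SDK): parameters that rotate
# the second admin password while the first remains valid, using the
# documented "password2" value.
def _example_regenerate_second_password():
    """Build parameters that regenerate only ``password2``."""
    return RegenerateCredentialParameters(name="password2")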
class Resource(msrest.serialization.Model):
"""An Azure resource.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: Required. The location of the resource. This cannot be changed after the
resource is created.
:vartype location: str
:ivar tags: A set of tags. The tags of the resource.
:vartype tags: dict[str, str]
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'system_data': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword location: Required. The location of the resource. This cannot be changed after the
resource is created.
:paramtype location: str
:keyword tags: A set of tags. The tags of the resource.
:paramtype tags: dict[str, str]
"""
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.location = location
self.tags = tags
self.system_data = None
class Registry(Resource):
"""An object that represents a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: Required. The location of the resource. This cannot be changed after the
resource is created.
:vartype location: str
:ivar tags: A set of tags. The tags of the resource.
:vartype tags: dict[str, str]
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar sku: Required. The SKU of the container registry.
:vartype sku: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Sku
:ivar identity: The identity of the container registry.
:vartype identity: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.IdentityProperties
:ivar login_server: The URL that can be used to log into the container registry.
:vartype login_server: str
:ivar creation_date: The creation date of the container registry in ISO8601 format.
:vartype creation_date: ~datetime.datetime
:ivar provisioning_state: The provisioning state of the container registry at the time the
operation was called. Possible values include: "Creating", "Updating", "Deleting", "Succeeded",
"Failed", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
:ivar status: The status of the container registry at the time the operation was called.
:vartype status: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Status
:ivar admin_user_enabled: The value that indicates whether the admin user is enabled.
:vartype admin_user_enabled: bool
:ivar network_rule_set: The network rule set for a container registry.
:vartype network_rule_set:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.NetworkRuleSet
:ivar policies: The policies for a container registry.
:vartype policies: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Policies
:ivar encryption: The encryption settings of container registry.
:vartype encryption:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EncryptionProperty
:ivar data_endpoint_enabled: Enable a single data endpoint per region for serving data.
:vartype data_endpoint_enabled: bool
:ivar data_endpoint_host_names: List of host names that will serve data when
dataEndpointEnabled is true.
:vartype data_endpoint_host_names: list[str]
:ivar private_endpoint_connections: List of private endpoint connections for a container
registry.
:vartype private_endpoint_connections:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.PrivateEndpointConnection]
:ivar public_network_access: Whether or not public network access is allowed for the container
registry. Possible values include: "Enabled", "Disabled".
:vartype public_network_access: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PublicNetworkAccess
:ivar network_rule_bypass_options: Whether to allow trusted Azure services to access a network
restricted registry. Possible values include: "AzureServices", "None".
:vartype network_rule_bypass_options: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.NetworkRuleBypassOptions
:ivar zone_redundancy: Whether or not zone redundancy is enabled for this container registry.
Possible values include: "Enabled", "Disabled".
:vartype zone_redundancy: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ZoneRedundancy
:ivar anonymous_pull_enabled: Enables registry-wide pull from unauthenticated clients.
:vartype anonymous_pull_enabled: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'system_data': {'readonly': True},
'sku': {'required': True},
'login_server': {'readonly': True},
'creation_date': {'readonly': True},
'provisioning_state': {'readonly': True},
'status': {'readonly': True},
'data_endpoint_host_names': {'readonly': True},
'private_endpoint_connections': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'sku': {'key': 'sku', 'type': 'Sku'},
'identity': {'key': 'identity', 'type': 'IdentityProperties'},
'login_server': {'key': 'properties.loginServer', 'type': 'str'},
'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'Status'},
'admin_user_enabled': {'key': 'properties.adminUserEnabled', 'type': 'bool'},
'network_rule_set': {'key': 'properties.networkRuleSet', 'type': 'NetworkRuleSet'},
'policies': {'key': 'properties.policies', 'type': 'Policies'},
'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
'data_endpoint_enabled': {'key': 'properties.dataEndpointEnabled', 'type': 'bool'},
'data_endpoint_host_names': {'key': 'properties.dataEndpointHostNames', 'type': '[str]'},
'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'},
'network_rule_bypass_options': {'key': 'properties.networkRuleBypassOptions', 'type': 'str'},
'zone_redundancy': {'key': 'properties.zoneRedundancy', 'type': 'str'},
'anonymous_pull_enabled': {'key': 'properties.anonymousPullEnabled', 'type': 'bool'},
}
def __init__(
self,
*,
location: str,
sku: "Sku",
tags: Optional[Dict[str, str]] = None,
identity: Optional["IdentityProperties"] = None,
admin_user_enabled: Optional[bool] = False,
network_rule_set: Optional["NetworkRuleSet"] = None,
policies: Optional["Policies"] = None,
encryption: Optional["EncryptionProperty"] = None,
data_endpoint_enabled: Optional[bool] = None,
public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None,
network_rule_bypass_options: Optional[Union[str, "NetworkRuleBypassOptions"]] = None,
zone_redundancy: Optional[Union[str, "ZoneRedundancy"]] = None,
anonymous_pull_enabled: Optional[bool] = False,
**kwargs
):
"""
:keyword location: Required. The location of the resource. This cannot be changed after the
resource is created.
:paramtype location: str
:keyword tags: A set of tags. The tags of the resource.
:paramtype tags: dict[str, str]
:keyword sku: Required. The SKU of the container registry.
:paramtype sku: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Sku
:keyword identity: The identity of the container registry.
:paramtype identity:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.IdentityProperties
:keyword admin_user_enabled: The value that indicates whether the admin user is enabled.
:paramtype admin_user_enabled: bool
:keyword network_rule_set: The network rule set for a container registry.
:paramtype network_rule_set:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.NetworkRuleSet
:keyword policies: The policies for a container registry.
:paramtype policies: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Policies
:keyword encryption: The encryption settings of container registry.
:paramtype encryption:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EncryptionProperty
:keyword data_endpoint_enabled: Enable a single data endpoint per region for serving data.
:paramtype data_endpoint_enabled: bool
:keyword public_network_access: Whether or not public network access is allowed for the
container registry. Possible values include: "Enabled", "Disabled".
:paramtype public_network_access: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PublicNetworkAccess
:keyword network_rule_bypass_options: Whether to allow trusted Azure services to access a
network restricted registry. Possible values include: "AzureServices", "None".
:paramtype network_rule_bypass_options: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.NetworkRuleBypassOptions
:keyword zone_redundancy: Whether or not zone redundancy is enabled for this container
registry. Possible values include: "Enabled", "Disabled".
:paramtype zone_redundancy: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ZoneRedundancy
:keyword anonymous_pull_enabled: Enables registry-wide pull from unauthenticated clients.
:paramtype anonymous_pull_enabled: bool
"""
super(Registry, self).__init__(location=location, tags=tags, **kwargs)
self.sku = sku
self.identity = identity
self.login_server = None
self.creation_date = None
self.provisioning_state = None
self.status = None
self.admin_user_enabled = admin_user_enabled
self.network_rule_set = network_rule_set
self.policies = policies
self.encryption = encryption
self.data_endpoint_enabled = data_endpoint_enabled
self.data_endpoint_host_names = None
self.private_endpoint_connections = None
self.public_network_access = public_network_access
self.network_rule_bypass_options = network_rule_bypass_options
self.zone_redundancy = zone_redundancy
self.anonymous_pull_enabled = anonymous_pull_enabled
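# Illustrative sketch (not part of the generated SDK): the minimal Registry
# body with the two required parameters. ``Sku`` is defined elsewhere in this
# module; its required ``name`` keyword and the "Premium" value are assumptions
# based on the SKUs documented for this API version.
def _example_registry():
    """Build a minimal registry model in the westus region."""
    return Registry(location="westus", sku=Sku(name="Premium"))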
class RegistryListCredentialsResult(msrest.serialization.Model):
"""The response from the ListCredentials operation.
:ivar username: The username for a container registry.
:vartype username: str
:ivar passwords: The list of passwords for a container registry.
:vartype passwords:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.RegistryPassword]
"""
_attribute_map = {
'username': {'key': 'username', 'type': 'str'},
'passwords': {'key': 'passwords', 'type': '[RegistryPassword]'},
}
def __init__(
self,
*,
username: Optional[str] = None,
passwords: Optional[List["RegistryPassword"]] = None,
**kwargs
):
"""
:keyword username: The username for a container registry.
:paramtype username: str
:keyword passwords: The list of passwords for a container registry.
:paramtype passwords:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.RegistryPassword]
"""
super(RegistryListCredentialsResult, self).__init__(**kwargs)
self.username = username
self.passwords = passwords
class RegistryListResult(msrest.serialization.Model):
"""The result of a request to list container registries.
:ivar value: The list of container registries. Since this list may be incomplete, the nextLink
field should be used to request the next list of container registries.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Registry]
:ivar next_link: The URI that can be used to request the next list of container registries.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Registry]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Registry"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of container registries. Since this list may be incomplete, the
nextLink field should be used to request the next list of container registries.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Registry]
:keyword next_link: The URI that can be used to request the next list of container registries.
:paramtype next_link: str
"""
super(RegistryListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class RegistryNameCheckRequest(msrest.serialization.Model):
"""A request to check whether a container registry name is available.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar name: Required. The name of the container registry.
:vartype name: str
:ivar type: The resource type of the container registry. This field must be set to
'Microsoft.ContainerRegistry/registries'. Has constant value:
"Microsoft.ContainerRegistry/registries".
:vartype type: str
"""
_validation = {
'name': {'required': True, 'max_length': 50, 'min_length': 5, 'pattern': r'^[a-zA-Z0-9]*$'},
'type': {'required': True, 'constant': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
type = "Microsoft.ContainerRegistry/registries"
def __init__(
self,
*,
name: str,
**kwargs
):
"""
:keyword name: Required. The name of the container registry.
:paramtype name: str
"""
super(RegistryNameCheckRequest, self).__init__(**kwargs)
self.name = name
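# Illustrative sketch (not part of the generated SDK): a name-availability
# request. Per the validation above, the name must be 5-50 alphanumeric
# characters; the constant ``type`` is supplied by the class itself.
def _example_name_check_request():
    """Build a request that checks whether ``myregistry123`` is available."""
    return RegistryNameCheckRequest(name="myregistry123")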
class RegistryNameStatus(msrest.serialization.Model):
"""The result of a request to check the availability of a container registry name.
:ivar name_available: The value that indicates whether the name is available.
:vartype name_available: bool
:ivar reason: If any, the reason that the name is not available.
:vartype reason: str
:ivar message: If any, the error message that provides more detail for the reason that the name
is not available.
:vartype message: str
"""
_attribute_map = {
'name_available': {'key': 'nameAvailable', 'type': 'bool'},
'reason': {'key': 'reason', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
*,
name_available: Optional[bool] = None,
reason: Optional[str] = None,
message: Optional[str] = None,
**kwargs
):
"""
:keyword name_available: The value that indicates whether the name is available.
:paramtype name_available: bool
:keyword reason: If any, the reason that the name is not available.
:paramtype reason: str
:keyword message: If any, the error message that provides more detail for the reason that the
name is not available.
:paramtype message: str
"""
super(RegistryNameStatus, self).__init__(**kwargs)
self.name_available = name_available
self.reason = reason
self.message = message
class RegistryPassword(msrest.serialization.Model):
"""The login password for the container registry.
:ivar name: The password name. Possible values include: "password", "password2".
:vartype name: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PasswordName
:ivar value: The password value.
:vartype value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[Union[str, "PasswordName"]] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword name: The password name. Possible values include: "password", "password2".
:paramtype name: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PasswordName
:keyword value: The password value.
:paramtype value: str
"""
super(RegistryPassword, self).__init__(**kwargs)
self.name = name
self.value = value
class RegistryUpdateParameters(msrest.serialization.Model):
"""The parameters for updating a container registry.
:ivar identity: The identity of the container registry.
:vartype identity: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.IdentityProperties
:ivar tags: A set of tags. The tags for the container registry.
:vartype tags: dict[str, str]
:ivar sku: The SKU of the container registry.
:vartype sku: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Sku
:ivar admin_user_enabled: The value that indicates whether the admin user is enabled.
:vartype admin_user_enabled: bool
:ivar network_rule_set: The network rule set for a container registry.
:vartype network_rule_set:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.NetworkRuleSet
:ivar policies: The policies for a container registry.
:vartype policies: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Policies
:ivar encryption: The encryption settings of container registry.
:vartype encryption:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EncryptionProperty
:ivar data_endpoint_enabled: Enable a single data endpoint per region for serving data.
:vartype data_endpoint_enabled: bool
:ivar public_network_access: Whether or not public network access is allowed for the container
registry. Possible values include: "Enabled", "Disabled".
:vartype public_network_access: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PublicNetworkAccess
:ivar network_rule_bypass_options: Whether to allow trusted Azure services to access a network
restricted registry. Possible values include: "AzureServices", "None".
:vartype network_rule_bypass_options: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.NetworkRuleBypassOptions
:ivar anonymous_pull_enabled: Enables registry-wide pull from unauthenticated clients.
:vartype anonymous_pull_enabled: bool
"""
_attribute_map = {
'identity': {'key': 'identity', 'type': 'IdentityProperties'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'Sku'},
'admin_user_enabled': {'key': 'properties.adminUserEnabled', 'type': 'bool'},
'network_rule_set': {'key': 'properties.networkRuleSet', 'type': 'NetworkRuleSet'},
'policies': {'key': 'properties.policies', 'type': 'Policies'},
'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
'data_endpoint_enabled': {'key': 'properties.dataEndpointEnabled', 'type': 'bool'},
'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'},
'network_rule_bypass_options': {'key': 'properties.networkRuleBypassOptions', 'type': 'str'},
'anonymous_pull_enabled': {'key': 'properties.anonymousPullEnabled', 'type': 'bool'},
}
def __init__(
self,
*,
identity: Optional["IdentityProperties"] = None,
tags: Optional[Dict[str, str]] = None,
sku: Optional["Sku"] = None,
admin_user_enabled: Optional[bool] = None,
network_rule_set: Optional["NetworkRuleSet"] = None,
policies: Optional["Policies"] = None,
encryption: Optional["EncryptionProperty"] = None,
data_endpoint_enabled: Optional[bool] = None,
public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None,
network_rule_bypass_options: Optional[Union[str, "NetworkRuleBypassOptions"]] = None,
anonymous_pull_enabled: Optional[bool] = None,
**kwargs
):
"""
:keyword identity: The identity of the container registry.
:paramtype identity:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.IdentityProperties
:keyword tags: A set of tags. The tags for the container registry.
:paramtype tags: dict[str, str]
:keyword sku: The SKU of the container registry.
:paramtype sku: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Sku
:keyword admin_user_enabled: The value that indicates whether the admin user is enabled.
:paramtype admin_user_enabled: bool
:keyword network_rule_set: The network rule set for a container registry.
:paramtype network_rule_set:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.NetworkRuleSet
:keyword policies: The policies for a container registry.
:paramtype policies: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Policies
:keyword encryption: The encryption settings of container registry.
:paramtype encryption:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.EncryptionProperty
:keyword data_endpoint_enabled: Enable a single data endpoint per region for serving data.
:paramtype data_endpoint_enabled: bool
:keyword public_network_access: Whether or not public network access is allowed for the
container registry. Possible values include: "Enabled", "Disabled".
:paramtype public_network_access: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.PublicNetworkAccess
:keyword network_rule_bypass_options: Whether to allow trusted Azure services to access a
network restricted registry. Possible values include: "AzureServices", "None".
:paramtype network_rule_bypass_options: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.NetworkRuleBypassOptions
:keyword anonymous_pull_enabled: Enables registry-wide pull from unauthenticated clients.
:paramtype anonymous_pull_enabled: bool
"""
super(RegistryUpdateParameters, self).__init__(**kwargs)
self.identity = identity
self.tags = tags
self.sku = sku
self.admin_user_enabled = admin_user_enabled
self.network_rule_set = network_rule_set
self.policies = policies
self.encryption = encryption
self.data_endpoint_enabled = data_endpoint_enabled
self.public_network_access = public_network_access
self.network_rule_bypass_options = network_rule_bypass_options
self.anonymous_pull_enabled = anonymous_pull_enabled
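# Illustrative usage sketch (not part of the generated client surface): a
# hypothetical payload for a registry update that upgrades the SKU and locks
# down access. ``Sku`` is defined later in this module; the string values are
# taken from the documented possible values above.
def _example_registry_update_parameters():
    return RegistryUpdateParameters(
        sku=Sku(name="Premium"),           # Sku.name is the only required field
        admin_user_enabled=False,          # disable the admin account
        public_network_access="Disabled",  # documented values: "Enabled"/"Disabled"
        data_endpoint_enabled=True,        # one data endpoint per region
        tags={"env": "prod"},              # plain str-to-str tag map
    )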
class RegistryUsage(msrest.serialization.Model):
"""The quota usage for a container registry.
:ivar name: The name of the usage.
:vartype name: str
:ivar limit: The limit of the usage.
:vartype limit: long
:ivar current_value: The current value of the usage.
:vartype current_value: long
:ivar unit: The unit of measurement. Possible values include: "Count", "Bytes".
:vartype unit: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.RegistryUsageUnit
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'limit': {'key': 'limit', 'type': 'long'},
'current_value': {'key': 'currentValue', 'type': 'long'},
'unit': {'key': 'unit', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
limit: Optional[int] = None,
current_value: Optional[int] = None,
unit: Optional[Union[str, "RegistryUsageUnit"]] = None,
**kwargs
):
"""
:keyword name: The name of the usage.
:paramtype name: str
:keyword limit: The limit of the usage.
:paramtype limit: long
:keyword current_value: The current value of the usage.
:paramtype current_value: long
:keyword unit: The unit of measurement. Possible values include: "Count", "Bytes".
:paramtype unit: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.RegistryUsageUnit
"""
super(RegistryUsage, self).__init__(**kwargs)
self.name = name
self.limit = limit
self.current_value = current_value
self.unit = unit
class RegistryUsageListResult(msrest.serialization.Model):
"""The result of a request to get container registry quota usages.
:ivar value: The list of container registry quota usages.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.RegistryUsage]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[RegistryUsage]'},
}
def __init__(
self,
*,
value: Optional[List["RegistryUsage"]] = None,
**kwargs
):
"""
:keyword value: The list of container registry quota usages.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.RegistryUsage]
"""
super(RegistryUsageListResult, self).__init__(**kwargs)
self.value = value
class Replication(Resource):
"""An object that represents a replication for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: Required. The location of the resource. This cannot be changed after the
resource is created.
:vartype location: str
:ivar tags: A set of tags. The tags of the resource.
:vartype tags: dict[str, str]
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar provisioning_state: The provisioning state of the replication at the time the operation
was called. Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed",
"Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
:ivar status: The status of the replication at the time the operation was called.
:vartype status: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Status
:ivar region_endpoint_enabled: Specifies whether the replication's regional endpoint is
enabled. Requests will not be routed to a replication whose regional endpoint is disabled;
however, its data will continue to be synced with other replications.
:vartype region_endpoint_enabled: bool
:ivar zone_redundancy: Whether or not zone redundancy is enabled for this container registry
replication. Possible values include: "Enabled", "Disabled".
:vartype zone_redundancy: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ZoneRedundancy
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'Status'},
'region_endpoint_enabled': {'key': 'properties.regionEndpointEnabled', 'type': 'bool'},
'zone_redundancy': {'key': 'properties.zoneRedundancy', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
region_endpoint_enabled: Optional[bool] = True,
zone_redundancy: Optional[Union[str, "ZoneRedundancy"]] = None,
**kwargs
):
"""
:keyword location: Required. The location of the resource. This cannot be changed after the
resource is created.
:paramtype location: str
:keyword tags: A set of tags. The tags of the resource.
:paramtype tags: dict[str, str]
:keyword region_endpoint_enabled: Specifies whether the replication's regional endpoint is
enabled. Requests will not be routed to a replication whose regional endpoint is disabled;
however, its data will continue to be synced with other replications.
:paramtype region_endpoint_enabled: bool
:keyword zone_redundancy: Whether or not zone redundancy is enabled for this container registry
replication. Possible values include: "Enabled", "Disabled".
:paramtype zone_redundancy: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ZoneRedundancy
"""
super(Replication, self).__init__(location=location, tags=tags, **kwargs)
self.provisioning_state = None
self.status = None
self.region_endpoint_enabled = region_endpoint_enabled
self.zone_redundancy = zone_redundancy
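# Illustrative only: constructing a zone-redundant replica in a secondary
# region. ``location`` is the sole required argument; the remaining values
# mirror the documented defaults and possible values above.
def _example_replication():
    return Replication(
        location="westus2",            # example region; immutable once created
        zone_redundancy="Enabled",     # documented values: "Enabled"/"Disabled"
        region_endpoint_enabled=True,  # keep routing to this replica's endpoint
    )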
class ReplicationListResult(msrest.serialization.Model):
"""The result of a request to list replications for a container registry.
:ivar value: The list of replications. Since this list may be incomplete, the nextLink field
should be used to request the next list of replications.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Replication]
:ivar next_link: The URI that can be used to request the next list of replications.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Replication]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Replication"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of replications. Since this list may be incomplete, the nextLink field
should be used to request the next list of replications.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Replication]
:keyword next_link: The URI that can be used to request the next list of replications.
:paramtype next_link: str
"""
super(ReplicationListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class ReplicationUpdateParameters(msrest.serialization.Model):
"""The parameters for updating a replication.
:ivar tags: A set of tags. The tags for the replication.
:vartype tags: dict[str, str]
:ivar region_endpoint_enabled: Specifies whether the replication's regional endpoint is
enabled. Requests will not be routed to a replication whose regional endpoint is disabled;
however, its data will continue to be synced with other replications.
:vartype region_endpoint_enabled: bool
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'region_endpoint_enabled': {'key': 'properties.regionEndpointEnabled', 'type': 'bool'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
region_endpoint_enabled: Optional[bool] = None,
**kwargs
):
"""
:keyword tags: A set of tags. The tags for the replication.
:paramtype tags: dict[str, str]
:keyword region_endpoint_enabled: Specifies whether the replication's regional endpoint is
enabled. Requests will not be routed to a replication whose regional endpoint is disabled;
however, its data will continue to be synced with other replications.
:paramtype region_endpoint_enabled: bool
"""
super(ReplicationUpdateParameters, self).__init__(**kwargs)
self.tags = tags
self.region_endpoint_enabled = region_endpoint_enabled
class Request(msrest.serialization.Model):
"""The request that generated the event.
:ivar id: The ID of the request that initiated the event.
:vartype id: str
:ivar addr: The IP or hostname and possibly port of the client connection that initiated the
event. This is the RemoteAddr from the standard HTTP request.
:vartype addr: str
:ivar host: The externally accessible hostname of the registry instance, as specified by the
HTTP host header on incoming requests.
:vartype host: str
:ivar method: The request method that generated the event.
:vartype method: str
:ivar useragent: The user agent header of the request.
:vartype useragent: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'addr': {'key': 'addr', 'type': 'str'},
'host': {'key': 'host', 'type': 'str'},
'method': {'key': 'method', 'type': 'str'},
'useragent': {'key': 'useragent', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
addr: Optional[str] = None,
host: Optional[str] = None,
method: Optional[str] = None,
useragent: Optional[str] = None,
**kwargs
):
"""
:keyword id: The ID of the request that initiated the event.
:paramtype id: str
:keyword addr: The IP or hostname and possibly port of the client connection that initiated the
event. This is the RemoteAddr from the standard HTTP request.
:paramtype addr: str
:keyword host: The externally accessible hostname of the registry instance, as specified by the
HTTP host header on incoming requests.
:paramtype host: str
:keyword method: The request method that generated the event.
:paramtype method: str
:keyword useragent: The user agent header of the request.
:paramtype useragent: str
"""
super(Request, self).__init__(**kwargs)
self.id = id
self.addr = addr
self.host = host
self.method = method
self.useragent = useragent
class RetentionPolicy(msrest.serialization.Model):
"""The retention policy for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar days: The number of days to retain an untagged manifest before it is purged.
:vartype days: int
:ivar last_updated_time: The timestamp when the policy was last updated.
:vartype last_updated_time: ~datetime.datetime
:ivar status: The value that indicates whether the policy is enabled or not. Possible values
include: "enabled", "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PolicyStatus
"""
_validation = {
'last_updated_time': {'readonly': True},
}
_attribute_map = {
'days': {'key': 'days', 'type': 'int'},
'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'},
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
*,
days: Optional[int] = 7,
status: Optional[Union[str, "PolicyStatus"]] = None,
**kwargs
):
"""
:keyword days: The number of days to retain an untagged manifest before it is purged.
:paramtype days: int
:keyword status: The value that indicates whether the policy is enabled or not. Possible values
include: "enabled", "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PolicyStatus
"""
super(RetentionPolicy, self).__init__(**kwargs)
self.days = days
self.last_updated_time = None
self.status = status
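# Illustrative only: a retention policy that purges untagged manifests after
# 30 days. ``last_updated_time`` is read-only and populated by the server.
def _example_retention_policy():
    return RetentionPolicy(days=30, status="enabled")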
class ScopeMap(ProxyResource):
"""An object that represents a scope map for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar description: The user-friendly description of the scope map.
:vartype description: str
:ivar type_properties_type: The type of the scope map, e.g. a BuiltIn scope map.
:vartype type_properties_type: str
:ivar creation_date: The creation date of the scope map.
:vartype creation_date: ~datetime.datetime
:ivar provisioning_state: Provisioning state of the resource. Possible values include:
"Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
:ivar actions: The list of scoped permissions for registry artifacts.
E.g. repositories/repository-name/content/read,
repositories/repository-name/metadata/write.
:vartype actions: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'type_properties_type': {'readonly': True},
'creation_date': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'description': {'key': 'properties.description', 'type': 'str'},
'type_properties_type': {'key': 'properties.type', 'type': 'str'},
'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'actions': {'key': 'properties.actions', 'type': '[str]'},
}
def __init__(
self,
*,
description: Optional[str] = None,
actions: Optional[List[str]] = None,
**kwargs
):
"""
:keyword description: The user-friendly description of the scope map.
:paramtype description: str
:keyword actions: The list of scoped permissions for registry artifacts.
E.g. repositories/repository-name/content/read,
repositories/repository-name/metadata/write.
:paramtype actions: list[str]
"""
super(ScopeMap, self).__init__(**kwargs)
self.description = description
self.type_properties_type = None
self.creation_date = None
self.provisioning_state = None
self.actions = actions
class ScopeMapListResult(msrest.serialization.Model):
"""The result of a request to list scope maps for a container registry.
:ivar value: The list of scope maps. Since this list may be incomplete, the nextLink field
should be used to request the next list of scope maps.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.ScopeMap]
:ivar next_link: The URI that can be used to request the next list of scope maps.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ScopeMap]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ScopeMap"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of scope maps. Since this list may be incomplete, the nextLink field
should be used to request the next list of scope maps.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.ScopeMap]
:keyword next_link: The URI that can be used to request the next list of scope maps.
:paramtype next_link: str
"""
super(ScopeMapListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class ScopeMapUpdateParameters(msrest.serialization.Model):
"""The properties for updating the scope map.
:ivar description: The user-friendly description of the scope map.
:vartype description: str
:ivar actions: The list of scope permissions for registry artifacts.
E.g. repositories/repository-name/pull,
repositories/repository-name/delete.
:vartype actions: list[str]
"""
_attribute_map = {
'description': {'key': 'properties.description', 'type': 'str'},
'actions': {'key': 'properties.actions', 'type': '[str]'},
}
def __init__(
self,
*,
description: Optional[str] = None,
actions: Optional[List[str]] = None,
**kwargs
):
"""
:keyword description: The user-friendly description of the scope map.
:paramtype description: str
:keyword actions: The list of scope permissions for registry artifacts.
E.g. repositories/repository-name/pull,
repositories/repository-name/delete.
:paramtype actions: list[str]
"""
super(ScopeMapUpdateParameters, self).__init__(**kwargs)
self.description = description
self.actions = actions
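# Illustrative only: narrowing a scope map to read-only content access on a
# single repository. "my-repo" is a placeholder name; the action string
# follows the repositories/<repository-name>/content/read format shown above.
def _example_scope_map_update_parameters():
    return ScopeMapUpdateParameters(
        description="Read-only access to my-repo",
        actions=["repositories/my-repo/content/read"],
    )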
class Sku(msrest.serialization.Model):
"""The SKU of a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar name: Required. The SKU name of the container registry. Required for registry creation.
Possible values include: "Classic", "Basic", "Standard", "Premium".
:vartype name: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SkuName
:ivar tier: The SKU tier based on the SKU name. Possible values include: "Classic", "Basic",
"Standard", "Premium".
:vartype tier: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SkuTier
"""
_validation = {
'name': {'required': True},
'tier': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
}
def __init__(
self,
*,
name: Union[str, "SkuName"],
**kwargs
):
"""
:keyword name: Required. The SKU name of the container registry. Required for registry
creation. Possible values include: "Classic", "Basic", "Standard", "Premium".
:paramtype name: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SkuName
"""
super(Sku, self).__init__(**kwargs)
self.name = name
self.tier = None
class Source(msrest.serialization.Model):
"""The registry node that generated the event. Put differently, while the actor initiates the event, the source generates it.
:ivar addr: The IP or hostname and the port of the registry node that generated the event.
Generally, this will be resolved by os.Hostname() along with the running port.
:vartype addr: str
:ivar instance_id: The running instance of an application. Changes after each restart.
:vartype instance_id: str
"""
_attribute_map = {
'addr': {'key': 'addr', 'type': 'str'},
'instance_id': {'key': 'instanceID', 'type': 'str'},
}
def __init__(
self,
*,
addr: Optional[str] = None,
instance_id: Optional[str] = None,
**kwargs
):
"""
:keyword addr: The IP or hostname and the port of the registry node that generated the event.
Generally, this will be resolved by os.Hostname() along with the running port.
:paramtype addr: str
:keyword instance_id: The running instance of an application. Changes after each restart.
:paramtype instance_id: str
"""
super(Source, self).__init__(**kwargs)
self.addr = addr
self.instance_id = instance_id
class Status(msrest.serialization.Model):
"""The status of an Azure resource at the time the operation was called.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar display_status: The short label for the status.
:vartype display_status: str
:ivar message: The detailed message for the status, including alerts and error messages.
:vartype message: str
:ivar timestamp: The timestamp when the status was changed to the current value.
:vartype timestamp: ~datetime.datetime
"""
_validation = {
'display_status': {'readonly': True},
'message': {'readonly': True},
'timestamp': {'readonly': True},
}
_attribute_map = {
'display_status': {'key': 'displayStatus', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
"""
super(Status, self).__init__(**kwargs)
self.display_status = None
self.message = None
self.timestamp = None
class StatusDetailProperties(msrest.serialization.Model):
"""The status detail properties of the connected registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The component of the connected registry corresponding to the status.
:vartype type: str
:ivar code: The code of the status.
:vartype code: str
:ivar description: The description of the status.
:vartype description: str
:ivar timestamp: The timestamp of the status.
:vartype timestamp: ~datetime.datetime
:ivar correlation_id: The correlation ID of the status.
:vartype correlation_id: str
"""
_validation = {
'type': {'readonly': True},
'code': {'readonly': True},
'description': {'readonly': True},
'timestamp': {'readonly': True},
'correlation_id': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'code': {'key': 'code', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
"""
super(StatusDetailProperties, self).__init__(**kwargs)
self.type = None
self.code = None
self.description = None
self.timestamp = None
self.correlation_id = None
class SyncProperties(msrest.serialization.Model):
"""The sync properties of the connected registry with its parent.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar token_id: Required. The resource ID of the ACR token used to authenticate the connected
registry to its parent during sync.
:vartype token_id: str
:ivar schedule: The cron expression indicating the schedule that the connected registry will
sync with its parent.
:vartype schedule: str
:ivar sync_window: The time window during which sync is enabled for each schedule occurrence.
Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:vartype sync_window: ~datetime.timedelta
:ivar message_ttl: Required. The period of time for which a message is available to sync before
it expires. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:vartype message_ttl: ~datetime.timedelta
:ivar last_sync_time: The last time a sync occurred between the connected registry and its
parent.
:vartype last_sync_time: ~datetime.datetime
:ivar gateway_endpoint: The gateway endpoint used by the connected registry to communicate with
its parent.
:vartype gateway_endpoint: str
"""
_validation = {
'token_id': {'required': True},
'message_ttl': {'required': True},
'last_sync_time': {'readonly': True},
'gateway_endpoint': {'readonly': True},
}
_attribute_map = {
'token_id': {'key': 'tokenId', 'type': 'str'},
'schedule': {'key': 'schedule', 'type': 'str'},
'sync_window': {'key': 'syncWindow', 'type': 'duration'},
'message_ttl': {'key': 'messageTtl', 'type': 'duration'},
'last_sync_time': {'key': 'lastSyncTime', 'type': 'iso-8601'},
'gateway_endpoint': {'key': 'gatewayEndpoint', 'type': 'str'},
}
def __init__(
self,
*,
token_id: str,
message_ttl: datetime.timedelta,
schedule: Optional[str] = None,
sync_window: Optional[datetime.timedelta] = None,
**kwargs
):
"""
:keyword token_id: Required. The resource ID of the ACR token used to authenticate the
connected registry to its parent during sync.
:paramtype token_id: str
:keyword schedule: The cron expression indicating the schedule that the connected registry will
sync with its parent.
:paramtype schedule: str
:keyword sync_window: The time window during which sync is enabled for each schedule
occurrence. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:paramtype sync_window: ~datetime.timedelta
:keyword message_ttl: Required. The period of time for which a message is available to sync
before it expires. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per
ISO8601.
:paramtype message_ttl: ~datetime.timedelta
"""
super(SyncProperties, self).__init__(**kwargs)
self.token_id = token_id
self.schedule = schedule
self.sync_window = sync_window
self.message_ttl = message_ttl
self.last_sync_time = None
self.gateway_endpoint = None
class SyncUpdateProperties(msrest.serialization.Model):
"""The parameters for updating the sync properties of the connected registry with its parent.
:ivar schedule: The cron expression indicating the schedule that the connected registry will
sync with its parent.
:vartype schedule: str
:ivar sync_window: The time window during which sync is enabled for each schedule occurrence.
Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:vartype sync_window: ~datetime.timedelta
:ivar message_ttl: The period of time for which a message is available to sync before it
expires. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:vartype message_ttl: ~datetime.timedelta
"""
_attribute_map = {
'schedule': {'key': 'schedule', 'type': 'str'},
'sync_window': {'key': 'syncWindow', 'type': 'duration'},
'message_ttl': {'key': 'messageTtl', 'type': 'duration'},
}
def __init__(
self,
*,
schedule: Optional[str] = None,
sync_window: Optional[datetime.timedelta] = None,
message_ttl: Optional[datetime.timedelta] = None,
**kwargs
):
"""
:keyword schedule: The cron expression indicating the schedule that the connected registry will
sync with its parent.
:paramtype schedule: str
:keyword sync_window: The time window during which sync is enabled for each schedule
occurrence. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:paramtype sync_window: ~datetime.timedelta
:keyword message_ttl: The period of time for which a message is available to sync before it
expires. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:paramtype message_ttl: ~datetime.timedelta
"""
super(SyncUpdateProperties, self).__init__(**kwargs)
self.schedule = schedule
self.sync_window = sync_window
self.message_ttl = message_ttl
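# Illustrative only: sync settings for a connected registry that syncs daily
# at midnight within an eight-hour window. The token ID below is a placeholder
# in the usual ARM resource-ID form; the ``datetime.timedelta`` values are
# serialized to ISO 8601 durations (e.g. "P2D", "PT8H") as documented above.
def _example_sync_properties():
    return SyncProperties(
        token_id=(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}"
            "/providers/Microsoft.ContainerRegistry/registries/{registryName}"
            "/tokens/{tokenName}"
        ),
        message_ttl=datetime.timedelta(days=2),   # required; how long a message stays syncable
        schedule="0 0 * * *",                     # cron expression: daily at 00:00
        sync_window=datetime.timedelta(hours=8),  # optional window per occurrence
    )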
class SystemData(msrest.serialization.Model):
"""Metadata pertaining to creation and last modification of the resource.
:ivar created_by: The identity that created the resource.
:vartype created_by: str
:ivar created_by_type: The type of identity that created the resource. Possible values include:
"User", "Application", "ManagedIdentity", "Key".
:vartype created_by_type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.CreatedByType
:ivar created_at: The timestamp of resource creation (UTC).
:vartype created_at: ~datetime.datetime
:ivar last_modified_by: The identity that last modified the resource.
:vartype last_modified_by: str
:ivar last_modified_by_type: The type of identity that last modified the resource. Possible
values include: "User", "Application", "ManagedIdentity", "Key".
:vartype last_modified_by_type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.LastModifiedByType
:ivar last_modified_at: The timestamp of resource modification (UTC).
:vartype last_modified_at: ~datetime.datetime
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_by_type': {'key': 'createdByType', 'type': 'str'},
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
}
def __init__(
self,
*,
created_by: Optional[str] = None,
created_by_type: Optional[Union[str, "CreatedByType"]] = None,
created_at: Optional[datetime.datetime] = None,
last_modified_by: Optional[str] = None,
last_modified_by_type: Optional[Union[str, "LastModifiedByType"]] = None,
last_modified_at: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword created_by: The identity that created the resource.
:paramtype created_by: str
:keyword created_by_type: The type of identity that created the resource. Possible values
include: "User", "Application", "ManagedIdentity", "Key".
:paramtype created_by_type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.CreatedByType
:keyword created_at: The timestamp of resource creation (UTC).
:paramtype created_at: ~datetime.datetime
:keyword last_modified_by: The identity that last modified the resource.
:paramtype last_modified_by: str
:keyword last_modified_by_type: The type of identity that last modified the resource. Possible
values include: "User", "Application", "ManagedIdentity", "Key".
:paramtype last_modified_by_type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.LastModifiedByType
:keyword last_modified_at: The timestamp of resource modification (UTC).
:paramtype last_modified_at: ~datetime.datetime
"""
super(SystemData, self).__init__(**kwargs)
self.created_by = created_by
self.created_by_type = created_by_type
self.created_at = created_at
self.last_modified_by = last_modified_by
self.last_modified_by_type = last_modified_by_type
self.last_modified_at = last_modified_at
class Target(msrest.serialization.Model):
"""The target of the event.
:ivar media_type: The MIME type of the referenced object.
:vartype media_type: str
:ivar size: The number of bytes of the content. Same as Length field.
:vartype size: long
:ivar digest: The digest of the content, as defined by the Registry V2 HTTP API Specification.
:vartype digest: str
:ivar length: The number of bytes of the content. Same as Size field.
:vartype length: long
:ivar repository: The repository name.
:vartype repository: str
:ivar url: The direct URL to the content.
:vartype url: str
:ivar tag: The tag name.
:vartype tag: str
:ivar name: The name of the artifact.
:vartype name: str
:ivar version: The version of the artifact.
:vartype version: str
"""
_attribute_map = {
'media_type': {'key': 'mediaType', 'type': 'str'},
'size': {'key': 'size', 'type': 'long'},
'digest': {'key': 'digest', 'type': 'str'},
'length': {'key': 'length', 'type': 'long'},
'repository': {'key': 'repository', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'tag': {'key': 'tag', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
media_type: Optional[str] = None,
size: Optional[int] = None,
digest: Optional[str] = None,
length: Optional[int] = None,
repository: Optional[str] = None,
url: Optional[str] = None,
tag: Optional[str] = None,
name: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword media_type: The MIME type of the referenced object.
:paramtype media_type: str
:keyword size: The number of bytes of the content. Same as Length field.
:paramtype size: long
:keyword digest: The digest of the content, as defined by the Registry V2 HTTP API
Specification.
:paramtype digest: str
:keyword length: The number of bytes of the content. Same as Size field.
:paramtype length: long
:keyword repository: The repository name.
:paramtype repository: str
:keyword url: The direct URL to the content.
:paramtype url: str
:keyword tag: The tag name.
:paramtype tag: str
:keyword name: The name of the artifact.
:paramtype name: str
:keyword version: The version of the artifact.
:paramtype version: str
"""
super(Target, self).__init__(**kwargs)
self.media_type = media_type
self.size = size
self.digest = digest
self.length = length
self.repository = repository
self.url = url
self.tag = tag
self.name = name
self.version = version
class TlsCertificateProperties(msrest.serialization.Model):
"""The TLS certificate properties of the connected registry login server.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The type of certificate location. Possible values include: "LocalDirectory".
:vartype type: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.CertificateType
:ivar location: Indicates the location of the certificates.
:vartype location: str
"""
_validation = {
'type': {'readonly': True},
'location': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
"""
super(TlsCertificateProperties, self).__init__(**kwargs)
self.type = None
self.location = None
class TlsProperties(msrest.serialization.Model):
"""The TLS properties of the connected registry login server.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar status: Indicates whether HTTPS is enabled for the login server. Possible values include:
"Enabled", "Disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TlsStatus
:ivar certificate: The certificate used to configure HTTPS for the login server.
:vartype certificate:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TlsCertificateProperties
"""
_validation = {
'status': {'readonly': True},
'certificate': {'readonly': True},
}
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'certificate': {'key': 'certificate', 'type': 'TlsCertificateProperties'},
}
def __init__(
self,
**kwargs
):
"""
"""
super(TlsProperties, self).__init__(**kwargs)
self.status = None
self.certificate = None
class Token(ProxyResource):
"""An object that represents a token for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar creation_date: The creation date of the token.
:vartype creation_date: ~datetime.datetime
:ivar provisioning_state: Provisioning state of the resource. Possible values include:
"Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
:ivar scope_map_id: The resource ID of the scope map with which the token will be
associated.
:vartype scope_map_id: str
:ivar credentials: The credentials that can be used for authenticating the token.
:vartype credentials:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenCredentialsProperties
:ivar status: The status of the token, for example enabled or disabled. Possible values
include: "enabled", "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenStatus
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'creation_date': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'scope_map_id': {'key': 'properties.scopeMapId', 'type': 'str'},
'credentials': {'key': 'properties.credentials', 'type': 'TokenCredentialsProperties'},
'status': {'key': 'properties.status', 'type': 'str'},
}
def __init__(
self,
*,
scope_map_id: Optional[str] = None,
credentials: Optional["TokenCredentialsProperties"] = None,
status: Optional[Union[str, "TokenStatus"]] = None,
**kwargs
):
"""
:keyword scope_map_id: The resource ID of the scope map with which the token will be
associated.
:paramtype scope_map_id: str
:keyword credentials: The credentials that can be used for authenticating the token.
:paramtype credentials:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenCredentialsProperties
:keyword status: The status of the token, for example enabled or disabled. Possible values
include: "enabled", "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenStatus
"""
super(Token, self).__init__(**kwargs)
self.creation_date = None
self.provisioning_state = None
self.scope_map_id = scope_map_id
self.credentials = credentials
self.status = status
class TokenCertificate(msrest.serialization.Model):
"""The properties of a certificate used for authenticating a token.
:ivar name: The certificate name. Possible values include: "certificate1", "certificate2".
:vartype name: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenCertificateName
:ivar expiry: The expiry datetime of the certificate.
:vartype expiry: ~datetime.datetime
:ivar thumbprint: The thumbprint of the certificate.
:vartype thumbprint: str
:ivar encoded_pem_certificate: Base64-encoded string of the public certificate in PEM format
that will be used for authenticating the token.
:vartype encoded_pem_certificate: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'expiry': {'key': 'expiry', 'type': 'iso-8601'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
'encoded_pem_certificate': {'key': 'encodedPemCertificate', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[Union[str, "TokenCertificateName"]] = None,
expiry: Optional[datetime.datetime] = None,
thumbprint: Optional[str] = None,
encoded_pem_certificate: Optional[str] = None,
**kwargs
):
"""
:keyword name: The certificate name. Possible values include: "certificate1", "certificate2".
:paramtype name: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenCertificateName
:keyword expiry: The expiry datetime of the certificate.
:paramtype expiry: ~datetime.datetime
:keyword thumbprint: The thumbprint of the certificate.
:paramtype thumbprint: str
:keyword encoded_pem_certificate: Base64-encoded string of the public certificate in PEM
format that will be used for authenticating the token.
:paramtype encoded_pem_certificate: str
"""
super(TokenCertificate, self).__init__(**kwargs)
self.name = name
self.expiry = expiry
self.thumbprint = thumbprint
self.encoded_pem_certificate = encoded_pem_certificate
class TokenCredentialsProperties(msrest.serialization.Model):
"""The properties of the credentials that can be used for authenticating the token.
:ivar certificates: The certificates that can be used for authenticating the token.
:vartype certificates:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenCertificate]
:ivar passwords: The passwords that can be used for authenticating the token.
:vartype passwords:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenPassword]
"""
_attribute_map = {
'certificates': {'key': 'certificates', 'type': '[TokenCertificate]'},
'passwords': {'key': 'passwords', 'type': '[TokenPassword]'},
}
def __init__(
self,
*,
certificates: Optional[List["TokenCertificate"]] = None,
passwords: Optional[List["TokenPassword"]] = None,
**kwargs
):
"""
:keyword certificates: The certificates that can be used for authenticating the token.
:paramtype certificates:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenCertificate]
:keyword passwords: The passwords that can be used for authenticating the token.
:paramtype passwords:
list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenPassword]
"""
super(TokenCredentialsProperties, self).__init__(**kwargs)
self.certificates = certificates
self.passwords = passwords
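# Illustrative only: a token backed by certificate credentials. The PEM string
# and the scope map resource ID are placeholders; ``TokenCertificate`` and
# ``Token`` are the models defined above.
def _example_token_with_certificate_credentials():
    cert = TokenCertificate(
        name="certificate1",                   # documented values: "certificate1"/"certificate2"
        expiry=datetime.datetime(2030, 1, 1),  # example expiry datetime
        encoded_pem_certificate="<base64-encoded PEM placeholder>",
    )
    return Token(
        scope_map_id=(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}"
            "/providers/Microsoft.ContainerRegistry/registries/{registryName}"
            "/scopeMaps/{scopeMapName}"
        ),
        credentials=TokenCredentialsProperties(certificates=[cert]),
        status="enabled",
    )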
class TokenListResult(msrest.serialization.Model):
"""The result of a request to list tokens for a container registry.
:ivar value: The list of tokens. Since this list may be incomplete, the nextLink field should
be used to request the next list of tokens.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Token]
:ivar next_link: The URI that can be used to request the next list of tokens.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Token]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Token"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of tokens. Since this list may be incomplete, the nextLink field
should be used to request the next list of tokens.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Token]
:keyword next_link: The URI that can be used to request the next list of tokens.
:paramtype next_link: str
"""
super(TokenListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class TokenPassword(msrest.serialization.Model):
"""The password that will be used for authenticating the token of a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar creation_time: The creation datetime of the password.
:vartype creation_time: ~datetime.datetime
:ivar expiry: The expiry datetime of the password.
:vartype expiry: ~datetime.datetime
:ivar name: The password name. Possible values include: "password1", "password2".
:vartype name: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenPasswordName
:ivar value: The password value.
:vartype value: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'expiry': {'key': 'expiry', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
creation_time: Optional[datetime.datetime] = None,
expiry: Optional[datetime.datetime] = None,
name: Optional[Union[str, "TokenPasswordName"]] = None,
**kwargs
):
"""
:keyword creation_time: The creation datetime of the password.
:paramtype creation_time: ~datetime.datetime
:keyword expiry: The expiry datetime of the password.
:paramtype expiry: ~datetime.datetime
:keyword name: The password name. Possible values include: "password1", "password2".
:paramtype name: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenPasswordName
"""
super(TokenPassword, self).__init__(**kwargs)
self.creation_time = creation_time
self.expiry = expiry
self.name = name
self.value = None
class TokenUpdateParameters(msrest.serialization.Model):
"""The parameters for updating a token.
:ivar scope_map_id: The resource ID of the scope map with which the token will be
associated.
:vartype scope_map_id: str
:ivar status: The status of the token, for example enabled or disabled. Possible values
include: "enabled", "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenStatus
:ivar credentials: The credentials that can be used for authenticating the token.
:vartype credentials:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenCredentialsProperties
"""
_attribute_map = {
'scope_map_id': {'key': 'properties.scopeMapId', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'str'},
'credentials': {'key': 'properties.credentials', 'type': 'TokenCredentialsProperties'},
}
def __init__(
self,
*,
scope_map_id: Optional[str] = None,
status: Optional[Union[str, "TokenStatus"]] = None,
credentials: Optional["TokenCredentialsProperties"] = None,
**kwargs
):
"""
:keyword scope_map_id: The resource ID of the scope map with which the token will be
associated.
:paramtype scope_map_id: str
:keyword status: The status of the token, for example enabled or disabled. Possible values
include: "enabled", "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenStatus
:keyword credentials: The credentials that can be used for authenticating the token.
:paramtype credentials:
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TokenCredentialsProperties
"""
super(TokenUpdateParameters, self).__init__(**kwargs)
self.scope_map_id = scope_map_id
self.status = status
self.credentials = credentials
class TrustPolicy(msrest.serialization.Model):
"""The content trust policy for a container registry.
:ivar type: The type of trust policy. Possible values include: "Notary". Default value:
"Notary".
:vartype type: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.TrustPolicyType
:ivar status: The value that indicates whether the policy is enabled or not. Possible values
include: "enabled", "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PolicyStatus
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "TrustPolicyType"]] = "Notary",
status: Optional[Union[str, "PolicyStatus"]] = None,
**kwargs
):
"""
:keyword type: The type of trust policy. Possible values include: "Notary". Default value:
"Notary".
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.TrustPolicyType
:keyword status: The value that indicates whether the policy is enabled or not. Possible values
include: "enabled", "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.PolicyStatus
"""
super(TrustPolicy, self).__init__(**kwargs)
self.type = type
self.status = status
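# Illustrative only: enabling content trust. ``type`` already defaults to
# "Notary", so only the status needs to be supplied.
def _example_trust_policy():
    return TrustPolicy(status="enabled")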
class UserIdentityProperties(msrest.serialization.Model):
"""UserIdentityProperties.
:ivar principal_id: The principal id of user assigned identity.
:vartype principal_id: str
:ivar client_id: The client id of user assigned identity.
:vartype client_id: str
"""
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
*,
principal_id: Optional[str] = None,
client_id: Optional[str] = None,
**kwargs
):
"""
:keyword principal_id: The principal ID of the user assigned identity.
:paramtype principal_id: str
:keyword client_id: The client ID of the user assigned identity.
:paramtype client_id: str
"""
super(UserIdentityProperties, self).__init__(**kwargs)
self.principal_id = principal_id
self.client_id = client_id
class VirtualNetworkRule(msrest.serialization.Model):
"""Virtual network rule.
All required parameters must be populated in order to send to Azure.
:ivar action: The action of virtual network rule. Possible values include: "Allow".
:vartype action: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Action
:ivar virtual_network_resource_id: Required. Resource ID of a subnet, for example:
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}.
:vartype virtual_network_resource_id: str
"""
_validation = {
'virtual_network_resource_id': {'required': True},
}
_attribute_map = {
'action': {'key': 'action', 'type': 'str'},
'virtual_network_resource_id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
virtual_network_resource_id: str,
action: Optional[Union[str, "Action"]] = None,
**kwargs
):
"""
:keyword action: The action of virtual network rule. Possible values include: "Allow".
:paramtype action: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.Action
:keyword virtual_network_resource_id: Required. Resource ID of a subnet, for example:
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}.
:paramtype virtual_network_resource_id: str
"""
super(VirtualNetworkRule, self).__init__(**kwargs)
self.action = action
self.virtual_network_resource_id = virtual_network_resource_id
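# Illustrative only: allowing traffic from a single subnet. The resource ID is
# a placeholder in exactly the form documented above.
def _example_virtual_network_rule():
    return VirtualNetworkRule(
        action="Allow",  # the only documented action
        virtual_network_resource_id=(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}"
            "/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}"
        ),
    )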
class Webhook(Resource):
"""An object that represents a webhook for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: Required. The location of the resource. This cannot be changed after the
resource is created.
:vartype location: str
:ivar tags: A set of tags. The tags of the resource.
:vartype tags: dict[str, str]
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_08_01_preview.models.SystemData
:ivar status: The status of the webhook at the time the operation was called. Possible values
include: "enabled", "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookStatus
:ivar scope: The scope of repositories where the event can be triggered. For example, 'foo:*'
means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar' only.
'foo' is equivalent to 'foo:latest'. Empty means all events.
:vartype scope: str
:ivar actions: The list of actions that trigger the webhook to post notifications.
:vartype actions: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookAction]
:ivar provisioning_state: The provisioning state of the webhook at the time the operation was
called. Possible values include: "Creating", "Updating", "Deleting", "Succeeded", "Failed",
"Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'status': {'key': 'properties.status', 'type': 'str'},
'scope': {'key': 'properties.scope', 'type': 'str'},
'actions': {'key': 'properties.actions', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
status: Optional[Union[str, "WebhookStatus"]] = None,
scope: Optional[str] = None,
actions: Optional[List[Union[str, "WebhookAction"]]] = None,
**kwargs
):
"""
:keyword location: Required. The location of the resource. This cannot be changed after the
resource is created.
:paramtype location: str
:keyword tags: A set of tags. The tags of the resource.
:paramtype tags: dict[str, str]
:keyword status: The status of the webhook at the time the operation was called. Possible
values include: "enabled", "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookStatus
:keyword scope: The scope of repositories where the event can be triggered. For example,
'foo:*' means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar'
only. 'foo' is equivalent to 'foo:latest'. Empty means all events.
:paramtype scope: str
:keyword actions: The list of actions that trigger the webhook to post notifications.
:paramtype actions: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookAction]
"""
super(Webhook, self).__init__(location=location, tags=tags, **kwargs)
self.status = status
self.scope = scope
self.actions = actions
self.provisioning_state = None
class WebhookCreateParameters(msrest.serialization.Model):
"""The parameters for creating a webhook.
All required parameters must be populated in order to send to Azure.
:ivar tags: A set of tags. The tags for the webhook.
:vartype tags: dict[str, str]
:ivar location: Required. The location of the webhook. This cannot be changed after the
resource is created.
:vartype location: str
:ivar service_uri: The service URI for the webhook to post notifications.
:vartype service_uri: str
:ivar custom_headers: Custom headers that will be added to the webhook notifications.
:vartype custom_headers: dict[str, str]
:ivar status: The status of the webhook at the time the operation was called. Possible values
include: "enabled", "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookStatus
:ivar scope: The scope of repositories where the event can be triggered. For example, 'foo:*'
means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar' only.
'foo' is equivalent to 'foo:latest'. Empty means all events.
:vartype scope: str
:ivar actions: The list of actions that trigger the webhook to post notifications.
:vartype actions: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookAction]
"""
_validation = {
'location': {'required': True},
}
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
'service_uri': {'key': 'properties.serviceUri', 'type': 'str'},
'custom_headers': {'key': 'properties.customHeaders', 'type': '{str}'},
'status': {'key': 'properties.status', 'type': 'str'},
'scope': {'key': 'properties.scope', 'type': 'str'},
'actions': {'key': 'properties.actions', 'type': '[str]'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
service_uri: Optional[str] = None,
custom_headers: Optional[Dict[str, str]] = None,
status: Optional[Union[str, "WebhookStatus"]] = None,
scope: Optional[str] = None,
actions: Optional[List[Union[str, "WebhookAction"]]] = None,
**kwargs
):
"""
:keyword tags: A set of tags. The tags for the webhook.
:paramtype tags: dict[str, str]
:keyword location: Required. The location of the webhook. This cannot be changed after the
resource is created.
:paramtype location: str
:keyword service_uri: The service URI for the webhook to post notifications.
:paramtype service_uri: str
:keyword custom_headers: Custom headers that will be added to the webhook notifications.
:paramtype custom_headers: dict[str, str]
:keyword status: The status of the webhook at the time the operation was called. Possible
values include: "enabled", "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookStatus
:keyword scope: The scope of repositories where the event can be triggered. For example,
'foo:*' means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar'
only. 'foo' is equivalent to 'foo:latest'. Empty means all events.
:paramtype scope: str
:keyword actions: The list of actions that trigger the webhook to post notifications.
:paramtype actions: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookAction]
"""
super(WebhookCreateParameters, self).__init__(**kwargs)
self.tags = tags
self.location = location
self.service_uri = service_uri
self.custom_headers = custom_headers
self.status = status
self.scope = scope
self.actions = actions
class WebhookListResult(msrest.serialization.Model):
"""The result of a request to list webhooks for a container registry.
:ivar value: The list of webhooks. Since this list may be incomplete, the nextLink field should
be used to request the next list of webhooks.
:vartype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Webhook]
:ivar next_link: The URI that can be used to request the next list of webhooks.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Webhook]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Webhook"]] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: The list of webhooks. Since this list may be incomplete, the nextLink field
should be used to request the next list of webhooks.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_08_01_preview.models.Webhook]
:keyword next_link: The URI that can be used to request the next list of webhooks.
:paramtype next_link: str
"""
super(WebhookListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class WebhookUpdateParameters(msrest.serialization.Model):
"""The parameters for updating a webhook.
:ivar tags: A set of tags. The tags for the webhook.
:vartype tags: dict[str, str]
:ivar service_uri: The service URI for the webhook to post notifications.
:vartype service_uri: str
:ivar custom_headers: Custom headers that will be added to the webhook notifications.
:vartype custom_headers: dict[str, str]
:ivar status: The status of the webhook at the time the operation was called. Possible values
include: "enabled", "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookStatus
:ivar scope: The scope of repositories where the event can be triggered. For example, 'foo:*'
means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar' only.
'foo' is equivalent to 'foo:latest'. Empty means all events.
:vartype scope: str
:ivar actions: The list of actions that trigger the webhook to post notifications.
:vartype actions: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookAction]
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'service_uri': {'key': 'properties.serviceUri', 'type': 'str'},
'custom_headers': {'key': 'properties.customHeaders', 'type': '{str}'},
'status': {'key': 'properties.status', 'type': 'str'},
'scope': {'key': 'properties.scope', 'type': 'str'},
'actions': {'key': 'properties.actions', 'type': '[str]'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
service_uri: Optional[str] = None,
custom_headers: Optional[Dict[str, str]] = None,
status: Optional[Union[str, "WebhookStatus"]] = None,
scope: Optional[str] = None,
actions: Optional[List[Union[str, "WebhookAction"]]] = None,
**kwargs
):
"""
:keyword tags: A set of tags. The tags for the webhook.
:paramtype tags: dict[str, str]
:keyword service_uri: The service URI for the webhook to post notifications.
:paramtype service_uri: str
:keyword custom_headers: Custom headers that will be added to the webhook notifications.
:paramtype custom_headers: dict[str, str]
:keyword status: The status of the webhook at the time the operation was called. Possible
values include: "enabled", "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookStatus
:keyword scope: The scope of repositories where the event can be triggered. For example,
'foo:*' means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar'
only. 'foo' is equivalent to 'foo:latest'. Empty means all events.
:paramtype scope: str
:keyword actions: The list of actions that trigger the webhook to post notifications.
:paramtype actions: list[str or
~azure.mgmt.containerregistry.v2021_08_01_preview.models.WebhookAction]
"""
super(WebhookUpdateParameters, self).__init__(**kwargs)
self.tags = tags
self.service_uri = service_uri
self.custom_headers = custom_headers
self.status = status
self.scope = scope
self.actions = actions
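# Hedged usage sketch (editorial addition, not part of the generated SDK):
# the request models above are built with plain keyword arguments, and the
# operations client serializes them for the create/update calls. Every
# concrete value here (location, endpoint, token, scope) is an assumption
# for illustration only.
_example_webhook_create = WebhookCreateParameters(
    location="eastus",
    service_uri="https://example.invalid/acr-hook",  # hypothetical receiver
    custom_headers={"Authorization": "Bearer <token>"},
    status="enabled",
    scope="foo:*",  # events for every tag under repository 'foo'
    actions=["push", "delete"],
)
_example_webhook_update = WebhookUpdateParameters(status="disabled")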
|
{
"content_hash": "4872c75d37ed2f1ac4f1aad53b16302e",
"timestamp": "",
"source": "github",
"line_count": 5102,
"max_line_length": 152,
"avg_line_length": 41.23206585652685,
"alnum_prop": 0.6474953176844167,
"repo_name": "Azure/azure-sdk-for-python",
"id": "5e107e6cec76ce67a230d6da0ce83ed1e02ef8d1",
"size": "210834",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2021_08_01_preview/models/_models_py3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
r"""Functions for calculating relativistic quantities (:math:`v \to c`)."""
__all__ = ["Lorentz_factor", "relativistic_energy", "RelativisticBody"]
import astropy.units as u
import numpy as np
from astropy.constants import c
from numbers import Integral, Real
from numpy.typing import DTypeLike
from typing import Dict, Optional, Union
from plasmapy import utils
from plasmapy.particles._factory import _physical_particle_factory
from plasmapy.particles.particle_class import CustomParticle, Particle, ParticleLike
from plasmapy.particles.particle_collections import ParticleList
from plasmapy.utils.decorators import validate_quantities
@validate_quantities(V={"can_be_negative": True})
def Lorentz_factor(V: u.m / u.s):
r"""
Return the Lorentz factor.
Parameters
----------
V : `~astropy.units.Quantity`
The velocity in units convertible to meters per second.
Returns
-------
    gamma : `float` or `~numpy.ndarray`
        The Lorentz factor associated with the input velocities.
Raises
------
    `TypeError`
        If ``V`` is not a `~astropy.units.Quantity` and cannot be
        converted into a `~astropy.units.Quantity`.
    `~astropy.units.UnitConversionError`
        If ``V`` is not in appropriate units.
    :exc:`~plasmapy.utils.exceptions.RelativityError`
        If the magnitude of ``V`` is faster than the speed of light.
Warns
-----
`~astropy.units.UnitsWarning`
If units are not provided, SI units are assumed.
Notes
-----
The Lorentz factor is a dimensionless number given by
.. math::
γ = \frac{1}{\sqrt{1-\frac{V^2}{c^2}}}
The Lorentz factor is approximately one for sub-relativistic
velocities, and :math:`γ → ∞` as the velocity approaches the
speed of light.
Examples
--------
>>> from astropy import units as u
>>> velocity = 1.4e8 * u.m / u.s
>>> Lorentz_factor(velocity)
1.130885603948959
>>> Lorentz_factor(299792458 * u.m / u.s)
inf
"""
if not np.all((np.abs(V) <= c) | (np.isnan(V))):
raise utils.RelativityError(
"The Lorentz factor cannot be calculated for "
"speeds faster than the speed of light."
)
if V.size > 1:
γ = np.zeros_like(V.value)
equals_c = np.abs(V) == c
is_slow = ~equals_c
γ[is_slow] = ((1 - (V[is_slow] / c) ** 2) ** -0.5).value
γ[equals_c] = np.inf
else:
γ = np.inf if np.abs(V) == c else ((1 - (V / c) ** 2) ** -0.5).value
return γ
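# Hedged illustration (editorial addition): the branch above vectorizes over
# array inputs, mapping any |V| == c entry to infinity. The values follow
# from γ = 1/sqrt(1 - V²/c²) and agree with the scalar doctests:
#
#     Lorentz_factor([0.0, 1.4e8, 299792458.0] * (u.m / u.s))
#     # -> approximately [1.0, 1.1308856, inf]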
@validate_quantities(
m={"can_be_negative": False}, validations_on_return={"can_be_negative": False}
)
def relativistic_energy(m: u.kg, v: u.m / u.s) -> u.Joule:
"""
Calculate the relativistic energy (in joules) of an object of mass
``m`` and velocity ``v``.
.. math::
E = γ m c^2
where :math:`γ` is the `Lorentz_factor`.
Parameters
----------
m : `~astropy.units.Quantity`
The mass in units convertible to kilograms.
v : `~astropy.units.Quantity`
The velocity in units convertible to meters per second.
Returns
-------
`~astropy.units.Quantity`
The relativistic energy (in joules) of an object of mass ``m``
moving at velocity ``v``.
Raises
------
    `TypeError`
        If input arguments are not instances of `~astropy.units.Quantity`
        or convertible to a `~astropy.units.Quantity`.
    `~astropy.units.UnitConversionError`
        If ``v`` is not in appropriate units.
`ValueError`
If the magnitude of ``m`` is negative or arguments are complex.
:exc:`~plasmapy.utils.exceptions.RelativityError`
If the velocity ``v`` is greater than the speed of light.
Warns
-----
    `~astropy.units.UnitsWarning`
If units are not provided, SI units are assumed.
Examples
--------
>>> from astropy import units as u
>>> velocity = 1.4e8 * u.m / u.s
>>> mass = 1 * u.kg
>>> relativistic_energy(mass, velocity)
<Quantity 1.01638929e+17 J>
>>> relativistic_energy(mass, 299792458*u.m / u.s)
<Quantity inf J>
>>> relativistic_energy(1 * u.mg, 1.4e8 * u.m / u.s)
<Quantity 1.01638929e+11 J>
>>> relativistic_energy(-mass, velocity)
Traceback (most recent call last):
...
ValueError: The argument 'm' to function relativistic_energy() can not contain negative numbers.
"""
γ = Lorentz_factor(v)
return γ * m * c**2
class RelativisticBody:
r"""
A physical body that is moving at a velocity relative to the speed
of light.
Parameters
----------
particle : |ParticleLike|, |CustomParticle|, |ParticleList|, or |Quantity|
A representation of a particle from which to get the mass
of the relativistic body. If it is a |Quantity|, then it must
have units of mass and describe the body's rest mass.
V : |Quantity|, optional
The velocity of the relativistic body in units convertible to
m/s. The absolute magnitude of ``V`` cannot be greater than
:math:`c`\ .
momentum : |Quantity|, optional
The momentum of the relativistic body in units convertible to
kg·m/s.
total_energy : |Quantity|, optional, |keyword-only|
The sum of the mass energy and the kinetic energy in units
convertible to joules. Must be non-negative.
kinetic_energy : |Quantity|, optional, |keyword-only|
The kinetic energy of the relativistic body in units convertible
to joules. Must be non-negative.
v_over_c : real number or |Quantity|, optional, |keyword-only|
The ratio of the velocity to the speed of light. Must have an
absolute magnitude :math:`≤ 1`\ .
lorentz_factor : real number or |Quantity|, optional, |keyword-only|
The Lorentz factor of the relativistic body. Must be
:math:`≥ 1`\ .
Z : integer, optional, |keyword-only|
The charge number associated with ``particle``.
mass_numb : integer, optional, |keyword-only|
The mass number associated with ``particle``.
dtype : |DTypeLike|, optional, |keyword-only|, default: `numpy.longdouble`
The `numpy` data type to use to store the inputs.
Notes
-----
    At most one of ``V``, ``momentum``, ``total_energy``,
    ``kinetic_energy``, ``v_over_c``, and ``lorentz_factor`` may be
    provided.
.. caution::
For ultra-high-energy cosmic rays (UHECRs), the velocity may be
within roundoff error of :math:`c` and :math:`\frac{V}{c}` may be
within roundoff error of 1.
Examples
--------
>>> import astropy.units as u
    >>> relativistic_proton = RelativisticBody("p+", total_energy=1 * u.GeV)
>>> relativistic_proton.particle
Particle("p+")
>>> relativistic_proton.velocity
<Quantity 1.03697...e+08 m / s>
>>> relativistic_proton.v_over_c
0.3458980898746...
>>> relativistic_proton.lorentz_factor
1.0657889247888...
>>> relativistic_proton.mass_energy.to("GeV")
<Quantity 0.93827... GeV>
>>> relativistic_proton.total_energy.to("GeV")
<Quantity 1. GeV>
>>> relativistic_proton.mass
<Quantity 1.67262...e-27 kg>
|RelativisticBody| also works with multiple particles and/or
velocities.
>>> particles = ["p+", "e-"]
>>> velocities = [2e5, 2e8] * u.m / u.s
>>> relativistic_particles = RelativisticBody(particles, velocities)
>>> relativistic_particles.momentum
<Quantity [3.345244...e-22, 2.445659...e-22] kg m / s>
"""
@staticmethod
def _get_speed_like_input(
velocity_like_arguments: Dict[str, Union[u.Quantity, Real]]
):
not_none_arguments = {
key: value
for key, value in velocity_like_arguments.items()
if value is not None
}
        if len(not_none_arguments) > 1:
raise ValueError(
"RelativisticBody can accept no more than one of the following "
"arguments: V, v_over_c, momentum, total_energy, kinetic_energy, "
"and lorentz_factor."
)
return not_none_arguments or {"velocity": np.nan * u.m / u.s}
def _store_velocity_like_argument(
self, speed_like_input: Dict[str, Union[u.Quantity, Real]]
):
"""
Take the velocity-like argument and store it via the setter for
the corresponding attribute.
"""
name = list(speed_like_input.keys())[0]
value = speed_like_input[name]
if self._dtype:
value = u.Quantity(value, dtype=self._dtype)
setattr(self, name, value)
@validate_quantities(
V={"can_be_inf": False, "none_shall_pass": True},
momentum={"can_be_inf": False, "none_shall_pass": True},
total_energy={"can_be_negative": False, "none_shall_pass": True},
kinetic_energy={"can_be_negative": False, "none_shall_pass": True},
)
def __init__(
self,
particle: Union[ParticleLike, u.Quantity],
V: u.m / u.s = None,
momentum: u.kg * u.m / u.s = None,
*,
total_energy: u.J = None,
kinetic_energy: u.J = None,
v_over_c: Optional[Real] = None,
lorentz_factor: Optional[Real] = None,
Z: Optional[Integral] = None,
mass_numb: Optional[Integral] = None,
dtype: Optional[DTypeLike] = np.longdouble,
):
self._particle = _physical_particle_factory(particle, Z=Z, mass_numb=mass_numb)
self._dtype = dtype
velocity_like_inputs = {
"velocity": V,
"momentum": momentum,
"total_energy": total_energy,
"kinetic_energy": kinetic_energy,
"v_over_c": v_over_c,
"lorentz_factor": lorentz_factor,
}
speed_like_input = self._get_speed_like_input(velocity_like_inputs)
self._store_velocity_like_argument(speed_like_input)
def __repr__(self):
return f"RelativisticBody({self.particle}, {self.velocity})"
@property
def particle(self) -> Union[CustomParticle, Particle, ParticleList]:
"""
Representation of the particle(s).
Returns
-------
|Particle|, |CustomParticle|, or |ParticleList|
"""
return self._particle
@property
@validate_quantities
def mass(self) -> u.kg:
r"""
The rest mass of the body, :math:`m_0`\ .
Returns
-------
~astropy.units.Quantity
"""
return u.Quantity(self.particle.mass, dtype=self._dtype)
@property
@validate_quantities
def mass_energy(self) -> u.J:
r"""
The rest mass energy of the body, :math:`m_0 c^2`\ .
Returns
-------
~astropy.units.Quantity
"""
return self.mass * c**2
@property
@validate_quantities
def total_energy(self) -> u.J:
r"""
The sum of the rest mass energy and the kinetic energy of the
body,
.. math::
E_\mathrm{tot} ≡ γ m_0 c^2.
Returns
-------
~astropy.units.Quantity
"""
return np.sqrt(self.momentum**2 * c**2 + self.mass_energy**2)
@property
@validate_quantities
def kinetic_energy(self) -> u.J:
"""
The kinetic energy of the body,
.. math::
E_K ≡ m_0 c^2 (γ-1).
Returns
-------
~astropy.units.Quantity
"""
return self.total_energy - self.mass_energy
@property
@validate_quantities
def v_over_c(self) -> Real:
r"""
The velocity of the body divided by the velocity of light:
:math:`\frac{V}{c}`\ .
Returns
-------
float
"""
return (self.velocity / c).to(u.dimensionless_unscaled).value
@property
@validate_quantities
def velocity(self) -> u.m / u.s:
r"""
The velocity of the body, :math:`V`\ .
Returns
-------
~astropy.units.Quantity
"""
velocity = self.momentum / np.sqrt(self.mass**2 + self.momentum**2 / c**2)
return velocity.to(u.m / u.s)
@property
@validate_quantities
def lorentz_factor(self) -> Real:
r"""
The Lorentz factor of the body,
.. math::
γ ≡ \frac{1}{\sqrt{1 - \frac{V^2}{c^2}}}.
Returns
-------
float
"""
return Lorentz_factor(self.velocity)
@property
@validate_quantities
def momentum(self) -> u.kg * u.m / u.s:
r"""
The magnitude of the momentum of the body,
.. math::
p ≡ γ m_0 V.
Returns
-------
~astropy.units.Quantity
"""
return getattr(self, "_momentum")
@kinetic_energy.setter
@validate_quantities(E_K={"can_be_negative": False})
def kinetic_energy(self, E_K: u.J):
self.total_energy = E_K + self.mass_energy
@total_energy.setter
@validate_quantities(E_tot={"can_be_negative": False})
def total_energy(self, E_tot: u.J):
self._momentum = np.sqrt(E_tot**2 - self.mass_energy**2) / c
@v_over_c.setter
def v_over_c(self, v_over_c_: Real):
self.velocity = v_over_c_ * c
@velocity.setter
@validate_quantities
def velocity(self, V: u.m / u.s):
self._momentum = (Lorentz_factor(V) * self.mass * V).to(u.kg * u.m / u.s)
@lorentz_factor.setter
def lorentz_factor(self, γ: Union[Real, u.Quantity]):
if not isinstance(γ, (Real, u.Quantity)):
raise TypeError("Invalid type for Lorentz factor")
if isinstance(γ, u.Quantity):
try:
γ = γ.to(u.dimensionless_unscaled).value
except u.UnitConversionError as exc:
raise u.UnitConversionError(
"The Lorentz factor must be dimensionless."
) from exc
if γ < 1:
raise ValueError("The Lorentz factor must be ≥ 1")
self.velocity = c * np.sqrt(1 - γ**-2)
@momentum.setter
@validate_quantities
def momentum(self, p: u.kg * u.m / u.s):
self._momentum = p.to(u.kg * u.m / u.s)
def __eq__(self, other) -> bool:
_attributes_to_compare = (
"particle",
"kinetic_energy",
"mass_energy",
"total_energy",
"v_over_c",
"momentum",
)
for attr in _attributes_to_compare:
if not hasattr(other, attr):
return False
self_value = getattr(self, attr)
other_value = getattr(other, attr)
if self_value != other_value:
return False
return True
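# Hedged usage sketch (editorial addition): any single velocity-like input
# fixes all the others. For a Lorentz factor of 2, V/c = sqrt(1 - γ**-2)
# = sqrt(3)/2:
#
#     body = RelativisticBody("e-", lorentz_factor=2)
#     body.v_over_c        # -> approximately 0.8660254
#     body.kinetic_energy  # -> (γ - 1) m_e c², roughly 511 keV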
|
{
"content_hash": "b360545ca1a7d2c44e2ceddd12138a41",
"timestamp": "",
"source": "github",
"line_count": 506,
"max_line_length": 100,
"avg_line_length": 29.09881422924901,
"alnum_prop": 0.5785112741102961,
"repo_name": "StanczakDominik/PlasmaPy",
"id": "b0a488c45e05921f69a6261782a2d314e9e89b99",
"size": "14765",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "plasmapy/formulary/relativity.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1285"
},
{
"name": "Python",
"bytes": "2148684"
}
],
"symlink_target": ""
}
|
"""
Generic test utilities.
Based on scipy._lib._testutils
"""
from __future__ import division, print_function, absolute_import
import os
import sys
__all__ = ["PytestTester"]
class PytestTester(object):
"""
Pytest test runner entry point.
"""
def __init__(self, module_name):
self.module_name = module_name
def __call__(
self,
label="fast",
verbose=1,
extra_argv=None,
doctests=False,
coverage=False,
tests=None,
parallel=None,
):
import pytest
module = sys.modules[self.module_name]
module_path = os.path.abspath(module.__path__[0])
pytest_args = ["-l"]
if doctests:
raise ValueError("Doctests not supported")
if extra_argv:
pytest_args += list(extra_argv)
if verbose and int(verbose) > 1:
pytest_args += ["-" + "v" * (int(verbose) - 1)]
if coverage:
pytest_args += ["--cov=" + module_path]
if label == "fast":
pytest_args += ["-m", "not slow"]
elif label != "full":
pytest_args += ["-m", label]
if tests is None:
tests = [self.module_name]
if parallel is not None and parallel > 1:
if _pytest_has_xdist():
pytest_args += ["-n", str(parallel)]
else:
import warnings
warnings.warn(
"Could not run tests in parallel because "
"pytest-xdist plugin is not available."
)
pytest_args += ["--pyargs"] + list(tests)
try:
code = pytest.main(pytest_args)
except SystemExit as exc:
code = exc.code
return code == 0
def _pytest_has_xdist():
"""
    Check if the pytest-xdist plugin is installed, which provides
    parallel test execution.
    """
    # Check that xdist exists without importing it; otherwise pytest emits warnings
from importlib.util import find_spec
return find_spec("xdist") is not None
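# Hedged usage sketch (editorial addition): packages typically expose this
# runner as a module-level ``test`` callable, mirroring numpy and scipy.
# The wiring below is an assumption about intended use, not code from this
# repository:
#
#     # in the package's __init__.py
#     from ._pytesttester import PytestTester
#     test = PytestTester(__name__)
#     del PytestTester
#
# after which ``package.test(label="fast", parallel=4)`` runs the fast suite
# on four pytest-xdist workers and returns True on success.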
|
{
"content_hash": "c528c5f37ed115f8025220aa351e3e62",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 76,
"avg_line_length": 23.247191011235955,
"alnum_prop": 0.5311744804253262,
"repo_name": "kwgoodman/bottleneck",
"id": "b83103df58ee0aec08f01767b805c74d0f7a7853",
"size": "2069",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bottleneck/_pytesttester.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2510"
},
{
"name": "C",
"bytes": "207203"
},
{
"name": "Dockerfile",
"bytes": "683"
},
{
"name": "Makefile",
"bytes": "2126"
},
{
"name": "Python",
"bytes": "203727"
},
{
"name": "Shell",
"bytes": "3736"
}
],
"symlink_target": ""
}
|
import numpy as np
import itertools as it
import pylab, sys
intervs=15
comps1=[(1.0*b/intervs, 1.0*c/intervs, 1.0*(intervs-a-b-c)/intervs, 1.0*a/intervs) for a in np.arange(0,intervs+1)[::-1] for b in np.arange(0,intervs+1-a) for c in np.arange(0, intervs+1-a-b)][::-1]
comps1=np.array(comps1)
ratio_target=2.
ratio_tol=0.005
n=len(comps1[0])
inds=range(n)
inds_comb=[list(s) for i in range(1, n) for s in it.combinations(inds, i)]
numer_calc=lambda c, inds: c[inds].sum()
denom_calc=lambda c, inds: c.sum()-c[inds].sum()
within_tol_calc=lambda c, inds:np.abs(numer_calc(c, inds)/denom_calc(c, inds)-ratio_target)<=ratio_tol
comps=np.array([c for c in comps1 if True in [within_tol_calc(c, inds) for inds in inds_comb]])
print len(comps), len(comps1)
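# Hedged note (editorial addition): a composition c survives the filter
# above when, for some index subset, sum(c[subset])/sum(c[complement]) lies
# within ratio_tol of ratio_target=2 -- the 2:1 cation ratio of a spinel
# AB2O4. For example, with intervs=15 the point c=(10/15, 5/15, 0, 0)
# passes via the subset {0}, since (10/15)/(5/15) == 2 exactly.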
sys.path.append(r'D:\Google Drive\Documents\PythonCode\JCAP\PythonCompositionPlots')
from myquaternaryutility import QuaternaryPlot
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from quaternary_faces_shells import ternaryfaces_shells
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
try:
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar
except ImportError:
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
class plotwidget(FigureCanvas):
def __init__(self, parent, width=12, height=6, dpi=72, projection3d=False):
        #plotdata can be a 2d array for an image plot, a list of 2 1d arrays for an x-y plot, or a list of lists of 2 1d arrays
self.projection3d=projection3d
self.fig=Figure(figsize=(width, height), dpi=dpi)
if projection3d:
self.axes=self.fig.add_subplot(111, navigate=True, projection='3d')
else:
self.axes=self.fig.add_subplot(111, navigate=True)
self.axes.hold(True)
FigureCanvas.__init__(self, self.fig)
self.setParent(parent)
#self.parent=parent
FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding, QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
#NavigationToolbar(self, parent)
NavigationToolbar(self, self)
self.mpl_connect('button_press_event', self.myclick)
self.clicklist=[]
self.cbax=None
def myclick(self, event):
if not (event.xdata is None or event.ydata is None):
arrayxy=[event.xdata, event.ydata]
            print 'clicked on image: array indices ', arrayxy, ' using button', event.button
self.clicklist+=[arrayxy]
self.emit(SIGNAL("genericclickonplot"), [event.xdata, event.ydata, event.button, event.inaxes])
class dialog(QDialog):
def __init__(self, parent=None, title='', folderpath=None):
super(dialog, self).__init__(parent)
plotw=plotwidget(self)
ax=plotw.axes
stpquat=QuaternaryPlot(ax)
ax.cla()
cols=stpquat.rgb_comp(comps)
#stpquat.scatter(comps, c=cols, s=100, edgecolors='none')
#stpquat.label()
self.tf=ternaryfaces_shells(ax, nintervals=intervs)
self.tf.label()
self.tf.scatter(comps, cols, skipinds=[0, 1, 2, 3], s='patch')
#only select comps
plotw2=plotwidget(self, projection3d=True)
ax=plotw2.axes
#unary
stpquat=QuaternaryPlot(ax)
stpquat.scatter(comps, c=cols, s=100, edgecolors='none')
stpquat.label()
QObject.connect(plotw, SIGNAL("genericclickonplot"), self.plotclick)
QObject.connect(plotw2, SIGNAL("genericclickonplot"), self.plotclick)
mainlayout=QGridLayout()
mainlayout.addWidget(plotw, 0, 0)
mainlayout.addWidget(plotw2, 1, 0)
self.setLayout(mainlayout)
def plotclick(self, coords_button_ax):
xc, yc, button, ax=coords_button_ax
print self.tf.toComp(xc, yc)
class MainMenu(QMainWindow):
def __init__(self):
super(MainMenu, self).__init__(None)
x=dialog()
x.exec_()
mainapp=QApplication(sys.argv)
form=MainMenu()
form.show()
form.setFocus()
mainapp.exec_()
|
{
"content_hash": "7cd51d70beebb6886cdb80779f7fb619",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 198,
"avg_line_length": 32.738461538461536,
"alnum_prop": 0.6567199248120301,
"repo_name": "johnmgregoire/JCAPGeneratePrintCode",
"id": "52988e009f86a5a0e3aa0aa0be47a81d84054eaf",
"size": "4256",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "identify_spinel_comps.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "229136"
}
],
"symlink_target": ""
}
|
import logging
import os
import shutil
import time
from bzt.modules.gatling import GatlingExecutor, DataLogReader
from bzt.six import u
from bzt.utils import EXE_SUFFIX, get_full_path
from tests import BZTestCase, __dir__
from tests.mocks import EngineEmul
from bzt.modules.provisioning import Local
from bzt import ToolError, TaurusConfigError
class TestGatlingExecutor(BZTestCase):
def getGatling(self):
path = os.path.abspath(__dir__() + "/../gatling/gatling" + EXE_SUFFIX)
obj = GatlingExecutor()
obj.engine = EngineEmul()
obj.settings.merge({"path": path})
return obj
def test_external_jar_wrong_launcher(self):
obj = self.getGatling()
modified_launcher = obj.engine.create_artifact('wrong-gatling', EXE_SUFFIX)
origin_launcher = get_full_path(obj.settings['path'])
with open(origin_launcher) as orig_file:
with open(modified_launcher, 'w') as mod_file:
for line in orig_file.readlines():
if 'COMPILATION_CLASSPATH' not in line:
mod_file.writelines([line])
os.chmod(modified_launcher, 0o755)
obj.settings.merge({"path": modified_launcher})
obj.execution.merge({
'files': [
'tests/grinder/fake_grinder.jar',
'tests/selenium/junit/jar'],
'scenario': 'tests/gatling/bs'})
self.assertRaises(ToolError, obj.prepare)
def test_external_jar_right_launcher(self):
obj = self.getGatling()
obj.execution.merge({
'files': [
'tests/grinder/fake_grinder.jar',
'tests/selenium/junit/jar'],
'scenario': {
"script": __dir__() + "/../gatling/BasicSimulation.scala",
"simulation": "mytest.BasicSimulation"}})
obj.prepare()
obj.startup()
obj.shutdown()
jar_files = obj.jar_list
modified_launcher = obj.launcher
with open(modified_launcher) as modified:
modified_lines = modified.readlines()
self.assertIn('fake_grinder.jar', jar_files)
self.assertIn('another_dummy.jar', jar_files)
for line in modified_lines:
self.assertFalse(line.startswith('set COMPILATION_CLASSPATH=""'))
self.assertTrue(not line.startswith('COMPILATION_CLASSPATH=') or
line.endswith('":${COMPILATION_CLASSPATH}"\n'))
with open(obj.stdout_file.name) as stdout:
out_lines = stdout.readlines()
out_lines = [out_line.rstrip() for out_line in out_lines]
self.assertEqual(out_lines[-4], get_full_path(obj.settings['path'], step_up=2)) # $GATLING_HOME
self.assertIn('fake_grinder.jar', out_lines[-3]) # $COMPILATION_CLASSPATH
self.assertIn('another_dummy.jar', out_lines[-3]) # $COMPILATION_CLASSPATH
self.assertEqual(out_lines[-2], 'TRUE') # $NO_PAUSE
def test_install_Gatling(self):
path = os.path.abspath(__dir__() + "/../../build/tmp/gatling-taurus/bin/gatling" + EXE_SUFFIX)
shutil.rmtree(os.path.dirname(os.path.dirname(path)), ignore_errors=True)
download_link = "file:///" + __dir__() + "/../data/gatling-dist-{version}_{version}.zip"
gatling_version = '2.1.4'
self.assertFalse(os.path.exists(path))
obj = self.getGatling()
obj.settings.merge({
"path": path,
"download-link": download_link,
"version": gatling_version,
})
obj.execution.merge({"scenario": {"script": __dir__() + "/../gatling/BasicSimulation.scala",
"simulation": "mytest.BasicSimulation"}})
obj.prepare()
self.assertTrue(os.path.exists(path))
def test_gatling_widget(self):
obj = self.getGatling()
obj.execution.merge({"scenario": {"script": __dir__() + "/../gatling/BasicSimulation.scala"}})
obj.prepare()
obj.get_widget()
self.assertEqual(obj.widget.widgets[0].text, "Gatling: BasicSimulation.scala")
def test_resource_files_collection_remote2(self): # script = <dir>
obj = self.getGatling()
script_path = __dir__() + "/../gatling/bs"
obj.execution.merge({"scenario": {"script": script_path}})
res_files = obj.resource_files()
self.assertEqual(res_files, [script_path])
def test_resource_files_collection_local(self):
obj = self.getGatling()
script = "LocalBasicSimulation.scala"
obj.execution.merge({"scenario": {"script": __dir__() + "/../gatling/" + script}})
obj.prepare()
artifacts = os.listdir(obj.engine.artifacts_dir)
self.assertNotIn(script, artifacts)
def test_env_type(self):
obj = self.getGatling()
script = "LocalBasicSimulation.scala"
obj.execution.merge({
"concurrency": 2,
"hold-for": 1000,
"throughput": 100,
"scenario": {"script": __dir__() + "/../gatling/" + script}})
obj.prepare()
obj.engine.artifacts_dir = u(obj.engine.artifacts_dir)
obj.startup()
obj.shutdown()
with open(obj.stdout_file.name) as fds:
lines = fds.readlines()
self.assertIn('throughput', lines[-1])
def test_warning_for_throughput_without_duration(self):
obj = self.getGatling()
script = "LocalBasicSimulation.scala"
obj.execution.merge({
"concurrency": 2,
"throughput": 100,
"scenario": {"script": __dir__() + "/../gatling/" + script}})
obj.prepare()
obj.engine.artifacts_dir = u(obj.engine.artifacts_dir)
obj.startup()
obj.shutdown()
with open(obj.stdout_file.name) as fds:
lines = fds.readlines()
self.assertNotIn('throughput', lines[-1])
def test_arequests_1(self):
obj = self.getGatling()
obj.execution.merge({
"concurrency": 10,
"iterations": 5,
"scenario": {
"think-time": 1,
"follow-redirects": False,
"default-address": "blazedemo.com",
"headers": {"H1": "V1"},
"requests": [{"url": "/reserve.php",
"headers": {"H2": "V2"},
"method": "POST",
"body": "Body Content",
"assert": [{
"contains": ["bootstrap.min"],
"not": True
}]},
{"url": "/",
"think-time": 2,
"follow-redirects": True}]
}
})
obj.prepare()
scala_file = obj.engine.artifacts_dir + '/' + obj.get_scenario().get('simulation') + '.scala'
self.assertEqualFiles(__dir__() + "/../gatling/generated1.scala", scala_file)
def test_requests_def_addr_is_none(self):
obj = self.getGatling()
obj.execution.merge({
"concurrency": 10,
"hold-for": 110,
"throughput": 33,
"ramp-up": 30,
"scenario": {
'default-address': None,
'keepalive': False,
'timeout': '100ms',
'requests': ['http://blazedemo.com', 'google.com']
}
})
obj.prepare()
def test_requests_def_addr_is_empty(self):
obj = self.getGatling()
obj.execution.merge({
"concurrency": 10,
"hold-for": 110,
"throughput": 33,
"ramp-up": 30,
"scenario": {
'default-address': '',
'keepalive': False,
'timeout': '100ms',
'requests': ['http://blazedemo.com', 'google.com']
}
})
obj.prepare()
def test_requests_3(self):
obj = self.getGatling()
obj.execution.merge({
"iterations": 55,
"scenario": {
"requests": [{'url': 'http://site.com/reserve.php',
'assert': [{
'contains': [200],
'subject': 'http-code',
'not': False
}]}]
}
})
obj.prepare()
scala_file = obj.engine.artifacts_dir + '/' + obj.get_scenario().get('simulation') + '.scala'
self.assertEqualFiles(__dir__() + "/../gatling/generated3.scala", scala_file)
def test_requests_4(self):
obj = self.getGatling()
obj.execution.merge({
"iterations": 55,
"scenario": {
"default-address": "",
"requests": [{'url': 'site.com/reserve.php',
'assert': [{
'subject': 'body',
'contains': 'boot(.*)strap.min',
'regexp': True,
'not': False
}]}]
}
})
obj.prepare()
scala_file = obj.engine.artifacts_dir + '/' + obj.get_scenario().get('simulation') + '.scala'
self.assertEqualFiles(__dir__() + "/../gatling/generated4.scala", scala_file)
def test_requests_5(self):
obj = self.getGatling()
obj.execution.merge({
"iterations": 55,
"scenario": {
"default-address": "blazedemo.com",
"requests": [{'url': '/reserve.php',
'assert': [{
'subject': 'body',
'regexp': True,
'not': False
}]}]
}
})
self.assertRaises(TaurusConfigError, obj.prepare)
def assertEqualFiles(self, name1, name2):
def without_id(lines):
id_mark = 'TaurusSimulation'
id_pos = lines.find(id_mark)
space_pos = lines.find(' ', id_pos)
return lines[:id_pos + len(id_mark)] + lines[space_pos:]
with open(name1, 'rt') as file1:
with open(name2, 'rt') as file2:
lines1 = without_id(file1.read())
lines2 = without_id(file2.read())
self.assertEqual(lines1, lines2)
def test_fail_on_zero_results(self):
obj = self.getGatling()
obj.execution.merge({"scenario": {"script": __dir__() + "/../gatling/bs/BasicSimulation.scala"}})
obj.prepare()
obj.engine.prepared = [obj]
obj.engine.started = [obj]
prov = Local()
prov.engine = obj.engine
prov.executors = [obj]
obj.engine.provisioning = prov
obj.reader.buffer = ['some info']
obj.engine.provisioning.post_process()
def test_no_simulation(self):
obj = self.getGatling()
obj.execution.merge({"scenario": {"script": __dir__() + "/../gatling/bs/BasicSimulation.scala"}})
obj.prepare()
try:
obj.startup()
while not obj.check():
time.sleep(obj.engine.check_interval)
finally:
obj.shutdown()
def test_full_Gatling(self):
obj = self.getGatling()
obj.execution.merge({
"scenario": {
"script": __dir__() + "/../gatling/bs/BasicSimulation.scala",
"simulation": "fake"
}
})
obj.prepare()
obj.settings.merge({"path": __dir__() + "/../gatling/gatling" + EXE_SUFFIX})
try:
obj.startup()
while not obj.check():
time.sleep(obj.engine.check_interval)
finally:
obj.shutdown()
def test_interactive_request(self):
obj = self.getGatling()
obj.engine.existing_artifact(__dir__() + "/../gatling/SimpleSimulation.scala")
obj.execution.merge({
"scenario": {
"script": obj.engine.artifacts_dir + "/SimpleSimulation.scala",
"simulation": "SimpleSimulation"}})
obj.prepare()
obj.settings.merge({"path": __dir__() + "/../gatling/gatling" + EXE_SUFFIX})
counter1 = 0
obj.startup()
while not obj.check():
time.sleep(obj.engine.check_interval)
counter1 += 1
obj.shutdown()
obj.post_process()
obj = self.getGatling()
obj.engine.existing_artifact(__dir__() + "/../gatling/SimpleSimulation.scala")
obj.engine.existing_artifact(__dir__() + "/../gatling/generated1.scala")
obj.execution.merge({
"scenario": {
"script": obj.engine.artifacts_dir + "/SimpleSimulation.scala",
"simulation": "fake"}})
obj.prepare()
obj.settings.merge({"path": __dir__() + "/../gatling/gatling" + EXE_SUFFIX})
counter2 = 0
try:
obj.startup()
while not obj.check():
time.sleep(obj.engine.check_interval)
counter2 += 1
if counter2 > counter1 * 5:
self.fail('It seems gatling made interactive request')
obj.shutdown()
obj.post_process()
except TaurusConfigError:
return
        self.fail('TaurusConfigError was not raised')
def test_script_jar(self):
obj = self.getGatling()
obj.execution.merge({"scenario": {"script": __dir__() + "/../gatling/simulations.jar",
"simulation": "tests.gatling.BasicSimulation"}})
obj.prepare()
try:
obj.startup()
while not obj.check():
time.sleep(obj.engine.check_interval)
finally:
obj.shutdown()
self.assertIn('simulations.jar', obj.jar_list)
def test_files_find_file(self):
curdir = get_full_path(os.curdir)
try:
os.chdir(__dir__() + "/../")
obj = self.getGatling()
obj.engine.file_search_paths.append(__dir__() + "/../gatling/")
obj.engine.config.merge({
"execution":{
"scenario": {
"script": "simulations.jar",
"simulation": "tests.gatling.BasicSimulation"
},
"files": ["deps.jar"]
}
})
obj.execution.merge(obj.engine.config["execution"])
obj.prepare()
try:
obj.startup()
while not obj.check():
time.sleep(obj.engine.check_interval)
finally:
obj.shutdown()
self.assertIn('simulations.jar', obj.jar_list)
self.assertIn('deps.jar', obj.jar_list)
finally:
os.chdir(curdir)
class TestDataLogReader(BZTestCase):
def test_read(self):
log_path = os.path.join(os.path.dirname(__file__), '..', 'gatling')
obj = DataLogReader(log_path, logging.getLogger(''), 'gatling-0')
list_of_values = list(obj.datapoints(True))
self.assertEqual(len(list_of_values), 23)
self.assertEqual(obj.guessed_gatling_version, "2.1")
def test_read_220_format(self):
log_path = os.path.join(os.path.dirname(__file__), '..', 'gatling')
obj = DataLogReader(log_path, logging.getLogger(''), 'gatling-220')
list_of_values = list(obj.datapoints(True))
self.assertEqual(len(list_of_values), 4)
self.assertEqual(obj.guessed_gatling_version, "2.2")
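# Hedged note (editorial addition): DataLogReader tails Gatling's
# simulation.log files under the given directory prefix, and
# datapoints(True) drains every buffered KPI point, so the assertions above
# pin both the point count and the detected log-format version ("2.1" vs
# "2.2").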
|
{
"content_hash": "b8dbb58b004f49c24699deed0ebb222e",
"timestamp": "",
"source": "github",
"line_count": 416,
"max_line_length": 105,
"avg_line_length": 37.87019230769231,
"alnum_prop": 0.5103465786466929,
"repo_name": "itaymendel/taurus",
"id": "b164cfc17db0f11b14d0cda4349b0daf03caab68",
"size": "15754",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/modules/test_Gatling.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3890"
},
{
"name": "HTML",
"bytes": "2128"
},
{
"name": "Java",
"bytes": "32197"
},
{
"name": "PHP",
"bytes": "2009"
},
{
"name": "Python",
"bytes": "607553"
},
{
"name": "Scala",
"bytes": "3501"
},
{
"name": "Shell",
"bytes": "1637"
},
{
"name": "Smarty",
"bytes": "7385"
}
],
"symlink_target": ""
}
|
from testinfra.backend import base
class PodmanBackend(base.BaseBackend):
NAME = "podman"
def __init__(self, name, *args, **kwargs):
self.name, self.user = self.parse_containerspec(name)
super().__init__(self.name, *args, **kwargs)
def run(self, command, *args, **kwargs):
cmd = self.get_command(command, *args)
if self.user is not None:
out = self.run_local(
"podman exec -u %s %s /bin/sh -c %s",
self.user, self.name, cmd)
else:
out = self.run_local(
"podman exec %s /bin/sh -c %s", self.name, cmd)
out.command = self.encode(cmd)
return out
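# Hedged illustration (editorial addition): ``get_command`` from BaseBackend
# shell-quotes ``args`` into ``command``, and ``run_local`` quotes the
# result again into the ``podman exec`` invocation. Assuming the
# "user@container" spec format used by the sibling docker backend:
#
#     backend = PodmanBackend("app@web")  # exec as user 'app' in 'web'
#     backend.run("cat %s", "/etc/os-release")
#     # roughly: podman exec -u app web /bin/sh -c 'cat /etc/os-release'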
|
{
"content_hash": "714212e046b8eb917cf183231b550422",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 63,
"avg_line_length": 32.714285714285715,
"alnum_prop": 0.5458515283842795,
"repo_name": "philpep/testinfra",
"id": "ed3723aea5ae79d544b209587499f16516f5d4f0",
"size": "1232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testinfra/backend/podman.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "8145"
},
{
"name": "Makefile",
"bytes": "54"
},
{
"name": "Python",
"bytes": "206693"
}
],
"symlink_target": ""
}
|
from setuptools import setup
setup(
name='home',
packages=['home'],
include_package_data=True,
install_requires=[
        'flask', 'flask-bootstrap', 'flask-nav',
        'RPi.GPIO', 'py-irsend', 'gpiozero'
],
setup_requires=[
'pytest-runner',
],
tests_require=[
'pytest',
],
)
|
{
"content_hash": "caefbe7a532d1abe597c2708d5739055",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 46,
"avg_line_length": 19.352941176470587,
"alnum_prop": 0.547112462006079,
"repo_name": "Ameeth/raspberry-pi-pocs",
"id": "755cd4556d7125c0f3815dc7cd3cccd88dfdd3d8",
"size": "329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "home/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "104"
},
{
"name": "HTML",
"bytes": "12111"
},
{
"name": "Python",
"bytes": "11758"
},
{
"name": "Shell",
"bytes": "193"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ion_channel', '0003_auto_20150605_0908'),
]
operations = [
migrations.AlterField(
model_name='graph',
name='experiment',
field=models.ForeignKey(blank=True, to='ion_channel.Experiment', null=True),
),
migrations.AlterField(
model_name='graph',
name='patch_clamp',
field=models.ForeignKey(blank=True, to='ion_channel.PatchClamp', null=True),
),
]
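# Hedged note (editorial addition): blank=True relaxes form and admin
# validation while null=True relaxes the database NOT NULL constraint;
# together the AlterField pair makes the graph->experiment and
# graph->patch_clamp links fully optional.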
|
{
"content_hash": "a81cd0d1297a993825b4508bdb16c754",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 88,
"avg_line_length": 26.869565217391305,
"alnum_prop": 0.5922330097087378,
"repo_name": "joebowen/ChannelWorm",
"id": "55191cb7238664c7ac1db29d637aa4ad7018c772",
"size": "642",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "channelworm/ion_channel/migrations/0004_auto_20150611_1110.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11459"
},
{
"name": "HTML",
"bytes": "114574"
},
{
"name": "JavaScript",
"bytes": "588962"
},
{
"name": "PLpgSQL",
"bytes": "2505"
},
{
"name": "Python",
"bytes": "66343"
},
{
"name": "Shell",
"bytes": "2515"
}
],
"symlink_target": ""
}
|
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path[0:0] = [os.path.abspath('..')]
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'moth'
copyright = u'2014, Charles Thomas'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = open(os.path.join(sys.path[0], 'VERSION')).read().strip()
# The full version, including alpha/beta/rc tags.
release = version
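# Editorial note (an assumption about intent): sys.path[0] is the project
# root inserted by the sys.path[0:0] line above, so the version string is
# read from a VERSION file at the repository root.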
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'mothdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'moth.tex', u'moth Documentation',
u'Charles Thomas', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'moth', u'moth Documentation',
[u'Charles Thomas'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'moth', u'moth Documentation',
u'Charles Thomas', 'moth', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
{
"content_hash": "9b62b4ccbd9bd42d922ea6a0e34f5278",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 80,
"avg_line_length": 32.170212765957444,
"alnum_prop": 0.7013227513227513,
"repo_name": "charlesthomas/moth",
"id": "809dfdfb7b388294381190e415ce35a48b8fb2e2",
"size": "7975",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32745"
}
],
"symlink_target": ""
}
|
from werkzeug.contrib.fixers import ProxyFix
import manage
__author__ = 'mkr'
myhoard = manage.create_app()
myhoard.wsgi_app = ProxyFix(myhoard.wsgi_app)
if __name__ == '__main__':
myhoard.run()
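# Hedged note (editorial addition): ProxyFix rewrites REMOTE_ADDR and the
# URL scheme from the X-Forwarded-* headers set by a fronting proxy, so
# redirects and client logging stay correct behind e.g. nginx. A typical
# (assumed) production invocation of this module:
#
#     gunicorn wsgi:myhoard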
|
{
"content_hash": "97fa4fdf059e81cd69d7d8d996b11b14",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 45,
"avg_line_length": 18.363636363636363,
"alnum_prop": 0.6831683168316832,
"repo_name": "blstream/myHoard_Python",
"id": "ec2886271fa384e7a389580e62f49e80a84db7c6",
"size": "226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "46776"
}
],
"symlink_target": ""
}
|
"""Mrs Worker
The worker process executes the user's map function and reduce function.
That's it. It just does what the main slave process tells it to. The worker
process is terminated when the main process quits.
"""
import os
import traceback
from . import datasets
from . import tasks
from . import util
from logging import getLogger
logger = getLogger('mrs')
class WorkerSetupRequest(object):
"""Request the worker to run the setup function."""
def __init__(self, opts, args, default_dir):
self.opts = opts
self.args = args
self.default_dir = default_dir
def id(self):
return self.__class__.__name__
class WorkerRemoveRequest(object):
def __init__(self, *args):
(self.directory,) = args
def id(self):
return self.__class__.__name__
class WorkerTaskRequest(object):
"""Request the to worker to run a task."""
def __init__(self, *args):
_, _, self.dataset_id, self.task_index, _, _, _, _, _ = args
self.args = args
def id(self):
return '%s_%s_%s' % (self.__class__.__name__, self.dataset_id,
self.task_index)
class WorkerQuitRequest(object):
"""Request the worker to quit."""
class WorkerFailure(object):
"""Failure response from worker."""
def __init__(self, dataset_id, task_index, exception, traceback,
request_id):
self.dataset_id = dataset_id
self.task_index = task_index
self.exception = exception
self.traceback = traceback
self.request_id = request_id
class WorkerSetupSuccess(object):
"""Successful worker setup."""
class WorkerSuccess(object):
"""Successful response from worker."""
def __init__(self, dataset_id, task_index, outdir, outurls, request_id):
self.dataset_id = dataset_id
self.task_index = task_index
self.outdir = outdir
self.outurls = outurls
self.request_id = request_id
class Worker(object):
"""Execute map tasks and reduce tasks.
The worker waits for other threads to make assignments by calling
start_map and start_reduce.
This needs to run in a daemon thread rather than in the main thread so
that it can be killed by other threads.
"""
def __init__(self, program_class, request_pipe):
self.program_class = program_class
self.request_pipe = request_pipe
self.default_dir = None
self.program = None
self.opts = None
self.args = None
def run(self):
while self.run_once():
pass
def run_once(self):
"""Runs one iteration of the event loop.
Returns True if it should keep running.
"""
request = None
response = None
try:
request = self.request_pipe.recv()
if isinstance(request, WorkerSetupRequest):
assert self.program is None
self.opts = request.opts
self.args = request.args
logger.debug('Starting to run the user setup function.')
util.log_ram_usage()
self.program = self.program_class(self.opts, self.args)
self.default_dir = request.default_dir
response = WorkerSetupSuccess()
elif isinstance(request, WorkerQuitRequest):
return False
elif isinstance(request, WorkerRemoveRequest):
util.remove_recursive(request.directory)
else:
assert self.program is not None
logger.info('Running task: %s, %s' %
(request.dataset_id, request.task_index))
util.log_ram_usage()
max_sort_size = getattr(self.opts, 'mrs__max_sort_size', None)
t = tasks.Task.from_args(*request.args, program=self.program)
t.run(self.program, self.default_dir,
max_sort_size=max_sort_size)
response = WorkerSuccess(request.dataset_id,
request.task_index, t.outdir, t.outurls(),
request.id())
logger.info('Completed task: %s, %s' %
(request.dataset_id, request.task_index))
util.log_ram_usage()
except KeyboardInterrupt:
return
        except Exception as e:
            # The failed request may be a setup request (or None), so the
            # task identifiers are looked up defensively.
            dataset_id = getattr(request, 'dataset_id', None)
            task_index = getattr(request, 'task_index', None)
            logger.info('Failed task: %s, %s' % (dataset_id, task_index))
            request_id = request.id() if request else None
            tb = traceback.format_exc()
            response = WorkerFailure(dataset_id, task_index, e, tb,
                    request_id)
if response:
self.request_pipe.send(response)
return True
def profiled_run(self):
#TODO: detect the node number for other systems (e.g., pbs)
nodenum = os.getenv('PSSH_NODENUM')
if nodenum:
filename = 'mrs-worker-%s.prof' % nodenum
else:
filename = 'mrs-worker.prof'
util.profile_loop(self.run_once, (), {}, filename)
class WorkerManager(object):
"""Mixin class that provides methods for dealing with Workers.
Assumes that a worker_pipe attribute is defined and that read_worker_pipe
is called when data is available. Also assumes that a current_task
attribute is available.
"""
def worker_setup(self, opts, args, default_dir):
request = WorkerSetupRequest(opts, args, default_dir)
self.worker_pipe.send(request)
response = self.worker_pipe.recv()
if isinstance(response, WorkerSetupSuccess):
return True
if isinstance(response, WorkerFailure):
msg = 'Exception in Worker Setup: %s' % response.exception
logger.critical(msg)
msg = 'Traceback: %s' % response.traceback
logger.error(msg)
return False
else:
raise RuntimeError('Invalid message type.')
def read_worker_pipe(self):
"""Reads a single response from the worker pipe."""
r = self.worker_pipe.recv()
if not (isinstance(r, WorkerSuccess) or isinstance(r, WorkerFailure)):
assert False, 'Unexpected response type'
assert self.current_task == (r.dataset_id, r.task_index)
self.current_task = None
if isinstance(r, WorkerSuccess):
self.worker_success(r)
elif isinstance(r, WorkerFailure):
msg = 'Exception in Worker: %s' % r.exception
logger.critical(msg)
msg = 'Traceback: %s' % r.traceback
logger.error(msg)
self.worker_failure(r)
def submit_request(self, request):
"""Submit the given request to the worker.
If one_at_a_time is specified, then no other one_at_time requests can
be accepted until the current task finishes. Returns a boolean
indicating whether the request was accepted.
Called from the RPC thread.
"""
if isinstance(request, WorkerTaskRequest):
if self.current_task is not None:
return False
self.current_task = (request.dataset_id, request.task_index)
self.worker_pipe.send(request)
return True
def worker_success(self, response):
"""Called when a worker sends a WorkerSuccess for the given task."""
raise NotImplementedError
def worker_failure(self, response):
"""Called when a worker sends a WorkerFailure for the given task."""
raise NotImplementedError
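# Hedged wiring sketch (editorial addition, not part of the module's API):
# the Worker is designed to run at the far end of a multiprocessing pipe in
# a daemon process. Names such as MyProgram, opts, args and default_dir are
# assumed placeholders:
#
#     import multiprocessing
#     parent_conn, child_conn = multiprocessing.Pipe()
#     worker = Worker(MyProgram, child_conn)
#     proc = multiprocessing.Process(target=worker.run)
#     proc.daemon = True
#     proc.start()
#     parent_conn.send(WorkerSetupRequest(opts, args, default_dir))
#     assert isinstance(parent_conn.recv(), WorkerSetupSuccess)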
# vim: et sw=4 sts=4
|
{
"content_hash": "c6bf4dcd23e488ea596e1bafecc6fa71",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 78,
"avg_line_length": 32.47457627118644,
"alnum_prop": 0.5947286012526096,
"repo_name": "byu-aml-lab/mrs-mapreduce",
"id": "6826aebb8e6f497fa7f132e89bfabaf049bac782",
"size": "8264",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "mrs/worker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "289172"
}
],
"symlink_target": ""
}
|
import datetime
import json
from libcloud.container.base import (Container, ContainerDriver,
ContainerImage, ContainerCluster)
from libcloud.common.kubernetes import KubernetesException
from libcloud.common.kubernetes import KubernetesBasicAuthConnection
from libcloud.common.kubernetes import KubernetesDriverMixin
from libcloud.container.providers import Provider
from libcloud.container.types import ContainerState
__all__ = [
'KubernetesContainerDriver'
]
ROOT_URL = '/api/'
class KubernetesPod(object):
def __init__(self, name, containers, namespace):
"""
A Kubernetes pod
"""
self.name = name
self.containers = containers
self.namespace = namespace
class KubernetesContainerDriver(KubernetesDriverMixin, ContainerDriver):
type = Provider.KUBERNETES
name = 'Kubernetes'
website = 'http://kubernetes.io'
connectionCls = KubernetesBasicAuthConnection
supports_clusters = True
def list_containers(self, image=None, all=True):
"""
        List the deployed containers
        :param image: Filter to containers with a certain image
        :type  image: :class:`libcloud.container.base.ContainerImage`
        :param all: Show all containers (including stopped ones)
:type all: ``bool``
:rtype: ``list`` of :class:`libcloud.container.base.Container`
"""
try:
result = self.connection.request(
ROOT_URL + "v1/pods").object
except Exception as exc:
errno = getattr(exc, 'errno', None)
if errno == 111:
raise KubernetesException(
errno,
                    'Make sure kube host is accessible '
                    'and the API port is correct')
raise
pods = [self._to_pod(value) for value in result['items']]
containers = []
for pod in pods:
containers.extend(pod.containers)
return containers
def get_container(self, id):
"""
Get a container by ID
:param id: The ID of the container to get
:type id: ``str``
:rtype: :class:`libcloud.container.base.Container`
"""
containers = self.list_containers()
match = [container for container in containers if container.id == id]
return match[0]
def list_clusters(self):
"""
Get a list of namespaces that pods can be deployed into
:rtype: ``list`` of :class:`libcloud.container.base.ContainerCluster`
"""
try:
result = self.connection.request(
ROOT_URL + "v1/namespaces/").object
except Exception as exc:
errno = getattr(exc, 'errno', None)
if errno == 111:
raise KubernetesException(
errno,
                    'Make sure kube host is accessible '
                    'and the API port is correct')
raise
clusters = [self._to_cluster(value) for value in result['items']]
return clusters
def get_cluster(self, id):
"""
Get a cluster by ID
:param id: The ID of the cluster to get
:type id: ``str``
:rtype: :class:`libcloud.container.base.ContainerCluster`
"""
result = self.connection.request(ROOT_URL + "v1/namespaces/%s" %
id).object
return self._to_cluster(result)
def destroy_cluster(self, cluster):
"""
Delete a cluster (namespace)
:return: ``True`` if the destroy was successful, otherwise ``False``.
:rtype: ``bool``
"""
self.connection.request(ROOT_URL + "v1/namespaces/%s" %
cluster.id, method='DELETE').object
return True
def create_cluster(self, name, location=None):
"""
Create a container cluster (a namespace)
:param name: The name of the cluster
:type name: ``str``
:param location: The location to create the cluster in
:type location: :class:`.ClusterLocation`
:rtype: :class:`.ContainerCluster`
"""
request = {
'metadata': {
'name': name
}
}
result = self.connection.request(ROOT_URL + "v1/namespaces",
method='POST',
data=json.dumps(request)).object
return self._to_cluster(result)
def deploy_container(self, name, image, cluster=None,
parameters=None, start=True):
"""
Deploy an installed container image.
In kubernetes this deploys a single container Pod.
https://cloud.google.com/container-engine/docs/pods/single-container
:param name: The name of the new container
:type name: ``str``
:param image: The container image to deploy
:type image: :class:`.ContainerImage`
:param cluster: The cluster to deploy to, None is default
:type cluster: :class:`.ContainerCluster`
:param parameters: Container Image parameters
:type parameters: ``str``
:param start: Start the container on deployment
:type start: ``bool``
:rtype: :class:`.Container`
"""
if cluster is None:
namespace = 'default'
else:
namespace = cluster.id
request = {
"metadata": {
"name": name
},
"spec": {
"containers": [
{
"name": name,
"image": image.name
}
]
}
}
result = self.connection.request(ROOT_URL + "v1/namespaces/%s/pods"
% namespace,
method='POST',
data=json.dumps(request)).object
return self._to_cluster(result)
def destroy_container(self, container):
"""
Destroy a deployed container. Because the containers are single
container pods, this will delete the pod.
:param container: The container to destroy
:type container: :class:`.Container`
:rtype: ``bool``
"""
return self.ex_destroy_pod(container.extra['namespace'],
container.extra['pod'])
def ex_list_pods(self):
"""
List available Pods
:rtype: ``list`` of :class:`.KubernetesPod`
"""
result = self.connection.request(ROOT_URL + "v1/pods").object
return [self._to_pod(value) for value in result['items']]
def ex_destroy_pod(self, namespace, pod_name):
"""
Delete a pod and the containers within it.
"""
self.connection.request(
ROOT_URL + "v1/namespaces/%s/pods/%s" % (
namespace, pod_name),
method='DELETE').object
return True
def _to_pod(self, data):
"""
Convert an API response to a Pod object
"""
container_statuses = data['status']['containerStatuses']
containers = []
# response contains the status of the containers in a separate field
for container in data['spec']['containers']:
spec = list(filter(lambda i: i['name'] == container['name'],
container_statuses))[0]
containers.append(
self._to_container(container, spec, data)
)
return KubernetesPod(
name=data['metadata']['name'],
namespace=data['metadata']['namespace'],
containers=containers)
def _to_container(self, data, container_status, pod_data):
"""
Convert container in Container instances
"""
return Container(
id=container_status['containerID'],
name=data['name'],
image=ContainerImage(
id=container_status['imageID'],
name=data['image'],
path=None,
version=None,
driver=self.connection.driver),
ip_addresses=None,
state=ContainerState.RUNNING,
driver=self.connection.driver,
extra={
'pod': pod_data['metadata']['name'],
'namespace': pod_data['metadata']['namespace']
})
def _to_cluster(self, data):
"""
Convert namespace to a cluster
"""
metadata = data['metadata']
status = data['status']
return ContainerCluster(
id=metadata['name'],
name=metadata['name'],
driver=self.connection.driver,
extra={'phase': status['phase']})
def ts_to_str(timestamp):
"""
    Return a timestamp as a nicely formatted datetime string.
"""
date = datetime.datetime.fromtimestamp(timestamp)
date_string = date.strftime("%d/%m/%Y %H:%M %Z")
return date_string
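# A brief usage sketch (illustrative only; the host, port and credentials are
# hypothetical placeholders, and a reachable Kubernetes API is assumed):
#
#     driver = KubernetesContainerDriver(key='user', secret='pass',
#                                        host='127.0.0.1', port=6443)
#     for container in driver.list_containers():
#         print(container.name, container.extra['namespace'])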
|
{
"content_hash": "6bae3d53643cfc0e45abcc6471add13d",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 77,
"avg_line_length": 32.0448275862069,
"alnum_prop": 0.5439578177122565,
"repo_name": "Kami/libcloud",
"id": "b0b770592c6be329c4707a40b0c638888ea2eff2",
"size": "10075",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "libcloud/container/drivers/kubernetes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1819"
},
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "PowerShell",
"bytes": "410"
},
{
"name": "Python",
"bytes": "9122888"
},
{
"name": "Shell",
"bytes": "12994"
}
],
"symlink_target": ""
}
|
"""SCons.Tool.nasm
Tool-specific initialization for nasm, the famous Netwide Assembler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/nasm.py 3897 2009/01/13 06:45:54 scons"
import SCons.Defaults
import SCons.Tool
import SCons.Util
ASSuffixes = ['.s', '.asm', '.ASM']
ASPPSuffixes = ['.spp', '.SPP', '.sx']
if SCons.Util.case_sensitive_suffixes('.s', '.S'):
ASPPSuffixes.extend(['.S'])
else:
ASSuffixes.extend(['.S'])
def generate(env):
"""Add Builders and construction variables for nasm to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in ASSuffixes:
static_obj.add_action(suffix, SCons.Defaults.ASAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
for suffix in ASPPSuffixes:
static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
env['AS'] = 'nasm'
env['ASFLAGS'] = SCons.Util.CLVar('')
env['ASPPFLAGS'] = '$ASFLAGS'
env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES'
env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
def exists(env):
return env.Detect('nasm')
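# A short usage sketch for an SConstruct file (illustrative; the file names
# are placeholders and the nasm binary is assumed to be on PATH):
#
#     env = Environment(tools=['default', 'nasm'])
#     env.Object(target='startup.o', source='startup.asm')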
|
{
"content_hash": "e1972027f27764f63cecb691df6a839e",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 100,
"avg_line_length": 38.696969696969695,
"alnum_prop": 0.7235708692247454,
"repo_name": "rwatson/chromium-capsicum",
"id": "07df8f21be25cc4b38c2c04ac63f139407573edf",
"size": "2554",
"binary": false,
"copies": "3",
"ref": "refs/heads/chromium-capsicum",
"path": "third_party/scons/scons-local/SCons/Tool/nasm.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""Interface to the vim archive to use with the other python file
Give an interface to the vim archivecture to hide all vim functionnality
from the python file.
"""
import vim
import string
def evaluate(a_instruction):
"""Evaluate the Vim instruction `a_instruction' and retuen a string
containing the result"""
return vim.eval(a_instruction)
def execute(a_command):
"""Execute the Vim command `a_command'"""
vim.command(a_command)
def is_option_exists(a_option_name):
"""Is the Vim option named `a_option_name' is define."""
return int(evaluate("exists(\"&" + a_option_name + "\")"))
def get_option(a_option_name):
"""Return the value of the Vim option named `a_option_name' ."""
return evaluate("&" + a_option_name)
def set_option(a_option_name, a_value):
"""Assign the value `a_value' to the vim option `a_option_name'."""
if a_value is None or str(a_value) == "":
execute("let &" + a_option_name + " = \"\"")
else:
execute("let &" + a_option_name + " = \"" + str(a_value) + "\"")
def is_prefixed_variable_exists(a_prefix, a_variable_name):
"""Is the Vim variable named `a_variable_name' prefixed with `a_prefix' has
been define."""
return int(evaluate(
"exists(\"" + a_prefix + ":" + a_variable_name + "\")"
))
def get_prefixed_variable(a_prefix, a_variable_name):
"""Return the Vim variable named `a_variable_name' and prefixed with
`a_prefix'."""
return evaluate(a_prefix + ":" + a_variable_name)
def set_prefixed_variable(a_prefix, a_variable_name, a_value):
"""Assign the value `a_value' to the vim variable
`a_variable_name' prefixed with `a_prefix'."""
if a_value is None:
if is_prefixed_variable_exists(a_prefix, a_variable_name):
execute("unlet " + a_prefix + ":" + a_variable_name)
else:
execute(
"let " + a_prefix + ":" + a_variable_name + " = \"" +
str(a_value) + "\""
)
def is_global_variable_exists(a_variable_name):
"""Is the Vim global variable named `a_variable_name' has been define."""
return is_prefixed_variable_exists("g", a_variable_name)
def get_global_variable(a_variable_name):
"""Return the Vim global variable named `a_variable_name'."""
return get_prefixed_variable("g", a_variable_name)
def set_global_variable(a_variable_name, a_value):
"""Assign the value `a_value' to the vim global variable
`a_variable_name'."""
set_prefixed_variable("g", a_variable_name, a_value)
def is_argument_variable_exists(a_variable_name):
"""Is the Vim argument variable named `a_variable_name' has been define."""
return is_prefixed_variable_exists("a", a_variable_name)
def get_argument_variable(a_variable_name):
"""Return the Vim local argument variable named `a_variable_name'."""
return get_prefixed_variable("g", a_variable_name)
def set_argument_variable(a_variable_name, a_value):
"""Assign the value `a_value' to the vim local argument variable
`a_variable_name'."""
set_prefixed_variable("a", a_variable_name, a_value)
def is_script_variable_exists(a_variable_name):
"""Is the Vim script variable named `a_variable_name' has been define."""
return is_prefixed_variable_exists("s", a_variable_name)
def get_script_variable(a_variable_name):
"""Return the Vim script variable named `a_variable_name'."""
return get_prefixed_variable("s", a_variable_name)
def set_script_variable(a_variable_name, a_value):
"""Assign the value `a_value' to the vim script variable
`a_variable_name'."""
set_prefixed_variable("s", a_variable_name, a_value)
def is_buffer_variable_exists(a_variable_name):
"""Is the Vim buffer variable named `a_variable_name' has been define."""
return is_prefixed_variable_exists("b", a_variable_name)
def get_buffer_variable(a_variable_name):
"""Return the Vim buffer variable named `a_variable_name'."""
return get_prefixed_variable("b", a_variable_name)
def set_buffer_variable(a_variable_name, a_value):
"""Assign the value `a_value' to the vim buffer variable
`a_variable_name'."""
set_prefixed_variable("b", a_variable_name, a_value)
def get_cursor_row():
"""The current row position of the Vim cursor (starting at 0)"""
row, col = vim.current.window.cursor
return row - 1
def get_cursor_column():
"""The current column position of the Vim cursor(starting at 0)"""
row, col = vim.current.window.cursor
return col
def text_list():
"""List of the text of each row"""
return vim.current.buffer
def text_of_cursor_row():
"""The text of the row containing the cursor"""
return vim.current.buffer[get_cursor_row()]
def previous_non_white_character_in_row(row, col):
"""
The position of the previous character (before the position `col)
in the `row' that is not a whitespace character (space and tab)
"""
white_character = (" ", "\t")
index_col = col
while index_col >= 0 and\
vim.current.buffer[row][index_col] in white_character:
index_col = index_col - 1
return index_col
def start_column_of_word(row, col):
"""The starting column position of the word at position (`row',`col')"""
non_splittable_characters = string.ascii_letters + string.digits + "_"
start = col
result = start
if len(vim.current.buffer) > row and\
len(vim.current.buffer[row]) > col and\
vim.current.buffer[row][col] in non_splittable_characters:
try:
while start > -1 and\
vim.current.buffer[row][start] in\
non_splittable_characters:
start = start - 1
result = start + 1
        except IndexError:
result = -1
else:
result = -1
return result
def end_column_of_word(row, col):
"""The ending column position of the word at position (`row',`col')"""
non_splittable_characters = string.ascii_letters + string.digits + "_"
end = col
result = end
if len(vim.current.buffer) > row and\
len(vim.current.buffer[row]) > col:
if vim.current.buffer[row][col] in non_splittable_characters:
try:
while end < len(vim.current.buffer[row]) and\
vim.current.buffer[row][end] in\
non_splittable_characters:
end = end + 1
result = end - 1
            except IndexError:
result = -2
else:
result = -2
return result
def word_under_the_cursor():
""" The complete word under the current Vim cursor."""
row, col = vim.current.window.cursor
row = row - 1
result = ""
if len(vim.current.buffer) > row and\
len(vim.current.buffer[row]) > col:
start = start_column_of_word(row, col)
if start > -1:
end = end_column_of_word(row, col)
if end > start:
result = vim.current.buffer[row][start:end + 1]
return result
def buffer_to_text(a_number=None):
"""Get the text contained in a buffer.
    a_number: The number index of the buffer (if None, use the current
                buffer).
    Return: The text of the buffer (each line separated by a \n).
"""
if a_number:
l_buffer = vim.buffers[int(a_number)]
else:
l_buffer = vim.current.buffer
return "\n".join(l_buffer)
def show_error(message):
"""Print a Vim error `message'."""
execute("echoerr \"" + message + "\"")
def indent_fold():
"""Put the fold indentation."""
execute("setlocal foldmethod=expr")
execute("setlocal foldexpr=eiffelide#indent_fold(v:lnum)")
def manual_fold():
"""Put the manual fold."""
execute("setlocal foldmethod=manual")
def eiffel_fold():
"""Put the eiffel specific fold."""
execute("setlocal foldmethod=expr")
execute("setlocal foldexpr=GetEiffelFoldValue(v:lnum)")
class window:
"""A Vim window."""
_item = None
"""Internal Vim window represented by `Current'"""
must_scroll = False
"""`Current' must scroll when the full"""
def __init__(self, a_number=None, scroll=False):
"""Constructor of the vim window
Create a python interface of a Vim window. If `a_number' is provided,
`Current' represent the vim window with that number value. If
`a_number' is not provided, `Current' represent the currently used
Vim window.
"""
if a_number:
self._item = vim.buffers[int(a_number)]
else:
self._item = vim.current.buffer
self.must_scroll = scroll
def append(self, a_text):
"""Append `a_text' to `Current'"""
self._item.append(a_text.split("\n"))
if self.must_scroll:
vim.command("normal! G")
vim.command("redraw")
def clear(self):
"""Remove all element in `Current'"""
del self._item[:]
vim.command("redraw")
def set_text(self, a_text):
"""Replace the text of `Current' with `a_text'"""
del self._item[:]
self.append(a_text)
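# A brief usage sketch (illustrative; this module only works inside Vim's
# embedded Python interpreter, e.g. via :python, since the `vim` module is
# provided by Vim itself):
#
#     set_global_variable('my_flag', 1)
#     if is_global_variable_exists('my_flag'):
#         execute('echo "flag is set"')
#     print(word_under_the_cursor())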
|
{
"content_hash": "f3f7bde60e8c1953a285ecdc53011712",
"timestamp": "",
"source": "github",
"line_count": 297,
"max_line_length": 79,
"avg_line_length": 30.872053872053872,
"alnum_prop": 0.6162067837277784,
"repo_name": "tioui/Vim_Eiffel_IDE",
"id": "fcb495ce69dbfdb0e09cd1cdf627dbbc10dffeaf",
"size": "10290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyplugin/environment_vim.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Eiffel",
"bytes": "299"
},
{
"name": "Python",
"bytes": "116593"
},
{
"name": "Vim script",
"bytes": "43631"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from gamechat import settings
from django.conf import settings as dcs
from django.conf.urls.static import static
from django.views.generic import RedirectView
from chat.chat_container import QueueContainer
QUEUES = QueueContainer()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'gamechat.views.home_page', name='home'),
url(r'^chat/', include('chat.urls'), name='chat'),
url(r'^profile/', include('profiles.urls'), name='profile'),
url(r'^admin/', include(admin.site.urls)),
url(r'^calendar/', include('game_calendar.urls')),
url(r'^accounts/', include('registration.backends.default.urls')),
url(r'^accounts/profile', RedirectView.as_view(url='/profile/')),
url(r'^404$', 'gamechat.views.four_o_four', name='four'),
)
if settings.DEBUG:
urlpatterns += static(dcs.MEDIA_URL, document_root=dcs.MEDIA_ROOT)
|
{
"content_hash": "0a641f96a11f2e652337d7712d151b77",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 70,
"avg_line_length": 35,
"alnum_prop": 0.7111111111111111,
"repo_name": "nbeck90/game-chat",
"id": "9f20acf28e2b4b1dceb8e417117484fcae7f188d",
"size": "945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gamechat/gamechat/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "28229"
},
{
"name": "HTML",
"bytes": "17027"
},
{
"name": "JavaScript",
"bytes": "4994"
},
{
"name": "Python",
"bytes": "108273"
}
],
"symlink_target": ""
}
|
'''
High-level functions and classes for solving a wide variety of economic models.
The "core" of HARK is a framework for "microeconomic" and "macroeconomic"
models. A micro model concerns the dynamic optimization problem for some type
of agents, where agents take the inputs to their problem as exogenous. A macro
model adds an additional layer, endogenizing some of the inputs to the micro
problem by finding a general equilibrium dynamic rule.
'''
from HARKutilities import getArgNames, NullFunc
from copy import deepcopy
import numpy as np
def distanceMetric(thing_A,thing_B):
'''
A "universal distance" metric that can be used as a default in many settings.
Parameters
----------
thing_A : object
A generic object.
thing_B : object
Another generic object.
    Returns
    -------
distance : float
The "distance" between thing_A and thing_B.
'''
# Get the types of the two inputs
typeA = type(thing_A)
typeB = type(thing_B)
if typeA is list and typeB is list:
lenA = len(thing_A) # If both inputs are lists, then the distance between
lenB = len(thing_B) # them is the maximum distance between corresponding
if lenA == lenB: # elements in the lists. If they differ in length,
distance_temp = [] # the distance is the difference in lengths.
for n in range(lenA):
distance_temp.append(distanceMetric(thing_A[n],thing_B[n]))
distance = max(distance_temp)
else:
distance = float(abs(lenA - lenB))
# If both inputs are numbers, return their difference
    elif (typeA is int or typeA is float) and (typeB is int or typeB is float):
distance = float(abs(thing_A - thing_B))
# If both inputs are array-like, return the maximum absolute difference b/w
# corresponding elements (if same shape); return largest difference in dimensions
# if shapes do not align.
elif hasattr(thing_A,'shape') and hasattr(thing_B,'shape'):
if thing_A.shape == thing_B.shape:
distance = np.max(abs(thing_A - thing_B))
else:
distance = np.max(abs(thing_A.shape - thing_B.shape))
# If none of the above cases, but the objects are of the same class, call
# the distance method of one on the other
    elif thing_A.__class__.__name__ == thing_B.__class__.__name__:
distance = thing_A.distance(thing_B)
else: # Failsafe: the inputs are very far apart
distance = 1000.0
return distance
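# Illustrative behavior of distanceMetric (values follow directly from the
# branches above):
#     distanceMetric(1.0, 2.5)               -> 1.5  (absolute difference)
#     distanceMetric([1.0, 2.0], [1.0])      -> 1.0  (difference in lengths)
#     distanceMetric([0.0, 0.0], [0.0, 3.0]) -> 3.0  (max elementwise distance)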
class HARKobject():
'''
A superclass for object classes in HARK. Comes with two useful methods:
a generic/universal distance method and an attribute assignment method.
'''
def distance(self,other):
'''
A generic distance method, which requires the existence of an attribute
called distance_criteria, giving a list of strings naming the attributes
to be considered by the distance metric.
Parameters
----------
other : object
Another object to compare this instance to.
Returns
-------
(unnamed) : float
The distance between this object and another, using the "universal
distance" metric.
'''
distance_list = [0.0]
for attr_name in self.distance_criteria:
try:
obj_A = getattr(self,attr_name)
obj_B = getattr(other,attr_name)
distance_list.append(distanceMetric(obj_A,obj_B))
except:
distance_list.append(1000.0) # if either object lacks attribute, they are not the same
return max(distance_list)
def assignParameters(self,**kwds):
'''
Assign an arbitrary number of attributes to this agent.
Parameters
----------
**kwds : keyword arguments
Any number of keyword arguments of the form key=value. Each value
will be assigned to the attribute named in self.
Returns
-------
none
'''
for key in kwds:
setattr(self,key,kwds[key])
def __call__(self,**kwds):
'''
Assign an arbitrary number of attributes to this agent, as a convenience.
See assignParameters.
'''
self.assignParameters(**kwds)
class Solution(HARKobject):
'''
A superclass for representing the "solution" to a single period problem in a
dynamic microeconomic model.
NOTE: This can be deprecated now that HARKobject exists, but this requires
replacing each instance of Solution with HARKobject in the other modules.
'''
class AgentType(HARKobject):
'''
A superclass for economic agents in the HARK framework. Each model should
specify its own subclass of AgentType, inheriting its methods and overwriting
as necessary. Critically, every subclass of AgentType should define class-
specific static values of the attributes time_vary and time_inv as lists of
strings. Each element of time_vary is the name of a field in AgentSubType
that varies over time in the model. Each element of time_inv is the name of
a field in AgentSubType that is constant over time in the model. The string
'solveOnePeriod' should appear in exactly one of these lists, depending on
whether the same solution method is used in all periods of the model.
'''
def __init__(self,solution_terminal=None,cycles=1,time_flow=False,pseudo_terminal=True,
tolerance=0.000001,seed=0,**kwds):
'''
Initialize an instance of AgentType by setting attributes.
Parameters
----------
solution_terminal : Solution
A representation of the solution to the terminal period problem of
this AgentType instance, or an initial guess of the solution if this
is an infinite horizon problem.
cycles : int
The number of times the sequence of periods is experienced by this
AgentType in their "lifetime". cycles=1 corresponds to a lifecycle
model, with a certain sequence of one period problems experienced
once before terminating. cycles=0 corresponds to an infinite horizon
model, with a sequence of one period problems repeating indefinitely.
time_flow : boolean
Whether time is currently "flowing" forward or backward for this
instance. Used to flip between solving (using backward iteration)
and simulating (etc).
pseudo_terminal : boolean
Indicates whether solution_terminal isn't actually part of the
solution to the problem (as a known solution to the terminal period
problem), but instead represents a "scrap value"-style termination.
When True, solution_terminal is not included in the solution; when
false, solution_terminal is the last element of the solution.
tolerance : float
Maximum acceptable "distance" between successive solutions to the
one period problem in an infinite horizon (cycles=0) model in order
for the solution to be considered as having "converged". Inoperative
when cycles>0.
seed : int
A seed for this instance's random number generator.
Returns
-------
None
'''
if solution_terminal is None:
solution_terminal = NullFunc()
self.solution_terminal = solution_terminal
self.cycles = cycles
self.time_flow = time_flow
self.pseudo_terminal = pseudo_terminal
self.solveOnePeriod = NullFunc()
self.tolerance = tolerance
self.seed = seed
self.assignParameters(**kwds)
self.resetRNG()
def timeReport(self):
'''
Report to the user the direction that time is currently "flowing" for
this instance. Only exists as a reminder of how time_flow works.
Parameters
----------
none
Returns
-------
none
'''
if self.time_flow:
print('Time varying objects are listed in ordinary chronological order.')
else:
print('Time varying objects are listed in reverse chronological order.')
def timeFlip(self):
'''
Reverse the flow of time for this instance.
Parameters
----------
none
Returns
-------
none
'''
for name in self.time_vary:
            getattr(self, name).reverse()
self.time_flow = not self.time_flow
def timeFwd(self):
'''
Make time flow forward for this instance.
Parameters
----------
none
Returns
-------
none
'''
if not self.time_flow:
self.timeFlip()
def timeRev(self):
'''
Make time flow backward for this instance.
Parameters
----------
none
Returns
-------
none
'''
if self.time_flow:
self.timeFlip()
def addToTimeVary(self,*params):
'''
Adds any number of parameters to time_vary for this instance.
Parameters
----------
params : string
Any number of strings naming attributes to be added to time_vary
Returns
-------
None
'''
for param in params:
if param not in self.time_vary:
self.time_vary.append(param)
def addToTimeInv(self,*params):
'''
Adds any number of parameters to time_inv for this instance.
Parameters
----------
params : string
Any number of strings naming attributes to be added to time_inv
Returns
-------
None
'''
for param in params:
if param not in self.time_inv:
self.time_inv.append(param)
def delFromTimeVary(self,*params):
'''
Removes any number of parameters from time_vary for this instance.
Parameters
----------
params : string
Any number of strings naming attributes to be removed from time_vary
Returns
-------
None
'''
for param in params:
if param in self.time_vary:
self.time_vary.remove(param)
def delFromTimeInv(self,*params):
'''
Removes any number of parameters from time_inv for this instance.
Parameters
----------
params : string
Any number of strings naming attributes to be removed from time_inv
Returns
-------
None
'''
for param in params:
if param in self.time_inv:
self.time_inv.remove(param)
def solve(self):
'''
Solve the model for this instance of an agent type by backward induction.
Loops through the sequence of one period problems, passing the solution
to period t+1 to the problem for period t.
Parameters
----------
none
Returns
-------
none
'''
self.preSolve() # Do pre-solution stuff
self.solution = solveAgent(self) # Solve the model by backward induction
if self.time_flow: # Put the solution in chronological order if this instance's time flow runs that way
self.solution.reverse()
self.addToTimeVary('solution') # Add solution to the list of time-varying attributes
self.postSolve() # Do post-solution stuff
def resetRNG(self):
'''
Reset the random number generator for this type.
Parameters
----------
none
Returns
-------
none
'''
self.RNG = np.random.RandomState(self.seed)
def isSameThing(self,solutionA,solutionB):
'''
Compare two solutions to see if they are the "same." The model-specific
solution class must have a method called distance, which takes another
solution object as an input and returns the "distance" between the solutions.
This method is used to test for convergence in infinite horizon problems.
Parameters
----------
solutionA : Solution
The solution to a one period problem in the model.
solutionB : Solution
Another solution to (the same) one period problem in the model.
Returns
-------
(unnamed) : boolean
True if the solutions are within a tolerable distance of each other.
'''
solution_distance = solutionA.distance(solutionB)
return(solution_distance <= self.tolerance)
def preSolve(self):
'''
A method that is run immediately before the model is solved, to prepare
the terminal solution, perhaps. Does nothing here.
Parameters
----------
none
Returns
-------
none
'''
return None
def postSolve(self):
'''
A method that is run immediately after the model is solved, to finalize
the solution in some way. Does nothing here.
Parameters
----------
none
Returns
-------
none
'''
return None
def solveAgent(agent):
'''
Solve the dynamic model for one agent type. This function iterates on "cycles"
of an agent's model either a given number of times or until solution convergence
if an infinite horizon model is used (with agent.cycles = 0).
Parameters
----------
agent : AgentType
The microeconomic AgentType whose dynamic problem is to be solved.
Returns
-------
solution : [Solution]
A list of solutions to the one period problems that the agent will
encounter in his "lifetime". Returns in reverse chronological order.
'''
# Record the flow of time when the Agent began the process, and make sure time is flowing backwards
original_time_flow = agent.time_flow
agent.timeRev()
# Check to see whether this is an (in)finite horizon problem
cycles_left = agent.cycles
infinite_horizon = cycles_left == 0
# Initialize the solution, which includes the terminal solution if it's not a pseudo-terminal period
solution = []
if not agent.pseudo_terminal:
solution.append(deepcopy(agent.solution_terminal))
# Initialize the process, then loop over cycles
solution_last = agent.solution_terminal
go = True
completed_cycles = 0
max_cycles = 5000 # escape clause
while go:
# Solve a cycle of the model, recording it if horizon is finite
solution_cycle = solveOneCycle(agent,solution_last)
if not infinite_horizon:
solution += solution_cycle
# Check for termination: identical solutions across cycle iterations or run out of cycles
solution_now = solution_cycle[-1]
if infinite_horizon:
if completed_cycles > 0:
go = (not agent.isSameThing(solution_now,solution_last)) and \
(completed_cycles < max_cycles)
else: # Assume solution does not converge after only one cycle
go = True
else:
cycles_left += -1
go = cycles_left > 0
# Update the "last period solution"
solution_last = solution_now
completed_cycles += 1
# Record the last cycle if horizon is infinite (solution is still empty!)
if infinite_horizon:
solution = solution_cycle # PseudoTerminal=False impossible for infinite horizon
# Restore the direction of time to its original orientation, then return the solution
if original_time_flow:
agent.timeFwd()
return solution
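# A minimal sketch of the contract solveAgent relies on (illustrative; the
# names ExampleType and solve_period are hypothetical, not part of HARK):
#
#     def solve_period(solution_next, discount):
#         ...                           # returns a Solution for one period
#
#     class ExampleType(AgentType):
#         time_vary = []                # no time-varying inputs
#         time_inv = ['discount']       # passed to solve_period each period
#
# An instance with solveOnePeriod = solve_period, cycles=0, and a terminal
# solution would then be solved by repeated calls to solveOneCycle below.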
def solveOneCycle(agent,solution_last):
'''
Solve one "cycle" of the dynamic model for one agent type. This function
iterates over the periods within an agent's cycle, updating the time-varying
parameters and passing them to the single period solver(s).
Parameters
----------
agent : AgentType
The microeconomic AgentType whose dynamic problem is to be solved.
solution_last : Solution
A representation of the solution of the period that comes after the
end of the sequence of one period problems. This might be the term-
inal period solution, a "pseudo terminal" solution, or simply the
solution to the earliest period from the succeeding cycle.
Returns
-------
solution_cycle : [Solution]
A list of one period solutions for one "cycle" of the AgentType's
microeconomic model. Returns in reverse chronological order.
'''
# Calculate number of periods per cycle, defaults to 1 if all variables are time invariant
if len(agent.time_vary) > 0:
name = agent.time_vary[0]
        T = len(getattr(agent, name))
else:
T = 1
# Check whether the same solution method is used in all periods
always_same_solver = 'solveOnePeriod' not in agent.time_vary
if always_same_solver:
solveOnePeriod = agent.solveOnePeriod
these_args = getArgNames(solveOnePeriod)
    # Construct a dictionary to be passed to the solver: time-invariant inputs
    # take their values from the agent; time-varying inputs are filled in per period
    solve_dict = {name: getattr(agent, name) for name in agent.time_inv}
    solve_dict.update({name: None for name in agent.time_vary})
# Initialize the solution for this cycle, then iterate on periods
solution_cycle = []
solution_next = solution_last
for t in range(T):
# Update which single period solver to use (if it depends on time)
if not always_same_solver:
solveOnePeriod = agent.solveOnePeriod[t]
these_args = getArgNames(solveOnePeriod)
# Update time-varying single period inputs
for name in agent.time_vary:
if name in these_args:
                solve_dict[name] = getattr(agent, name)[t]
solve_dict['solution_next'] = solution_next
# Make a temporary dictionary for this period
temp_dict = {name: solve_dict[name] for name in these_args}
# Solve one period, add it to the solution, and move to the next period
solution_t = solveOnePeriod(**temp_dict)
solution_cycle.append(solution_t)
solution_next = solution_t
# Return the list of per-period solutions
return solution_cycle
#========================================================================
#========================================================================
class Market(HARKobject):
'''
A superclass to represent a central clearinghouse of information. Used for
dynamic general equilibrium models to solve the "macroeconomic" model as a
layer on top of the "microeconomic" models of one or more AgentTypes.
'''
def __init__(self,agents=[],sow_vars=[],reap_vars=[],const_vars=[],track_vars=[],dyn_vars=[],
millRule=None,calcDynamics=None,act_T=1000,tolerance=0.000001):
'''
Make a new instance of the Market class.
Parameters
----------
agents : [AgentType]
A list of all the AgentTypes in this market.
sow_vars : [string]
Names of variables generated by the "aggregate market process" that should
be "sown" to the agents in the market. Aggregate state, etc.
reap_vars : [string]
Names of variables to be collected ("reaped") from agents in the market
to be used in the "aggregate market process".
const_vars : [string]
Names of attributes of the Market instance that are used in the "aggregate
market process" but do not come from agents-- they are constant or simply
parameters inherent to the process.
track_vars : [string]
Names of variables generated by the "aggregate market process" that should
be tracked as a "history" so that a new dynamic rule can be calculated.
This is often a subset of sow_vars.
dyn_vars : [string]
Names of variables that constitute a "dynamic rule".
millRule : function
A function that takes inputs named in reap_vars and returns an object
with attributes named in sow_vars. The "aggregate market process" that
transforms individual agent actions/states/data into aggregate data to
be sent back to agents.
calcDynamics : function
A function that takes inputs named in track_vars and returns an object
with attributes named in dyn_vars. Looks at histories of aggregate
variables and generates a new "dynamic rule" for agents to believe and
act on.
act_T : int
The number of times that the "aggregate market process" should be run
in order to generate a history of aggregate variables.
tolerance: float
Minimum acceptable distance between "dynamic rules" to consider the
Market solution process converged. Distance is a user-defined metric.
Returns
-------
None
'''
self.agents = agents
self.reap_vars = reap_vars
self.sow_vars = sow_vars
self.const_vars = const_vars
self.track_vars = track_vars
self.dyn_vars = dyn_vars
if millRule is not None: # To prevent overwriting of method-based millRules
self.millRule = millRule
if calcDynamics is not None: # Ditto for calcDynamics
self.calcDynamics = calcDynamics
self.act_T = act_T
self.tolerance = tolerance
def solve(self):
'''
"Solves" the market by finding a "dynamic rule" that governs the aggregate
market state such that when agents believe in these dynamics, their actions
collectively generate the same dynamic rule.
Parameters
----------
none
Returns
-------
none
'''
go = True
max_loops = 1000 # Failsafe against infinite solution loop
completed_loops = 0
old_dynamics = None
while go: # Loop until the dynamic process converges or we hit the loop cap
for this_type in self.agents:
this_type.solve() # Solve each AgentType's micro problem
self.makeHistory() # "Run" the model while tracking aggregate variables
new_dynamics = self.updateDynamics() # Find a new aggregate dynamic rule
# Check to see if the dynamic rule has converged (if this is not the first loop)
if completed_loops > 0:
distance = new_dynamics.distance(old_dynamics)
else:
distance = 1000000.0
# Move to the next loop if the terminal conditions are not met
old_dynamics = new_dynamics
completed_loops += 1
go = distance >= self.tolerance and completed_loops < max_loops
self.dynamics = new_dynamics # Store the final dynamic rule in self
def reap(self):
'''
Collects attributes named in reap_vars from each AgentType in the market,
storing them in respectively named attributes of self.
Parameters
----------
none
Returns
-------
none
'''
for var_name in self.reap_vars:
harvest = []
for this_type in self.agents:
harvest.append(getattr(this_type,var_name))
setattr(self,var_name,harvest)
def sow(self):
'''
        Distributes attributes named in sow_vars from self to each AgentType
in the market, storing them in respectively named attributes.
Parameters
----------
none
Returns
-------
none
'''
for var_name in self.sow_vars:
this_seed = getattr(self,var_name)
for this_type in self.agents:
setattr(this_type,var_name,this_seed)
def mill(self):
'''
Processes the variables collected from agents using the function millRule,
        storing the results in attributes named in sow_vars.
Parameters
----------
none
Returns
-------
none
'''
        # Make a dictionary of inputs for the millRule
        mill_dict = {name: getattr(self, name)
                     for name in self.reap_vars + self.const_vars}
# Run the millRule and store its output in self
product = self.millRule(**mill_dict)
for j in range(len(self.sow_vars)):
this_var = self.sow_vars[j]
this_product = getattr(product,this_var)
setattr(self,this_var,this_product)
def cultivate(self):
'''
Has each AgentType in agents perform their marketAction method, using
variables sown from the market (and maybe also "private" variables).
The marketAction method should store new results in attributes named in
reap_vars to be reaped later.
Parameters
----------
none
Returns
-------
none
'''
for this_type in self.agents:
this_type.marketAction()
def reset(self):
'''
Reset the state of the market (attributes in sow_vars, etc) to some
user-defined initial state, and erase the histories of tracked variables.
Parameters
----------
none
Returns
-------
none
'''
for var_name in self.track_vars: # Reset the history of tracked variables
setattr(self,var_name + '_hist',[])
for var_name in self.sow_vars: # Set the sow variables to their initial levels
initial_val = getattr(self,var_name + '_init')
setattr(self,var_name,initial_val)
for this_type in self.agents: # Reset each AgentType in the market
this_type.reset()
def store(self):
'''
Record the current value of each variable X named in track_vars in an
attribute named X_hist.
Parameters
----------
none
Returns
-------
none
'''
for var_name in self.track_vars:
value_now = getattr(self,var_name)
getattr(self,var_name + '_hist').append(value_now)
def makeHistory(self):
'''
Runs a loop of sow-->cultivate-->reap-->mill act_T times, tracking the
evolution of variables X named in track_vars in attributes named X_hist.
Parameters
----------
none
Returns
-------
none
'''
self.reset() # Initialize the state of the market
for t in range(self.act_T):
self.sow() # Distribute aggregated information/state to agents
self.cultivate() # Agents take action
self.reap() # Collect individual data from agents
self.mill() # Process individual data into aggregate data
self.store() # Record variables of interest
def updateDynamics(self):
'''
Calculates a new "aggregate dynamic rule" using the history of variables
named in track_vars, and distributes this rule to AgentTypes in agents.
Parameters
----------
none
Returns
-------
dynamics : instance
The new "aggregate dynamic rule" that agents believe in and act on.
Should have attributes named in dyn_vars.
'''
        # Make a dictionary of inputs for the dynamics calculator
        update_dict = {name: getattr(self, name + '_hist')
                       for name in self.track_vars}
# Calculate a new dynamic rule and distribute it to the agents in agent_list
dynamics = self.calcDynamics(**update_dict) # User-defined dynamics calculator
for var_name in self.dyn_vars:
this_obj = getattr(dynamics,var_name)
for this_type in self.agents:
setattr(this_type,var_name,this_obj)
return dynamics
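# A sketch of the shapes millRule and calcDynamics are expected to have
# (illustrative; the names and the one-line economics are hypothetical):
#
#     def millRule(capital):              # inputs named in reap_vars
#         rule = HARKobject()
#         rule.interest_rate = 0.04       # attributes named in sow_vars
#         return rule
#
#     def calcDynamics(interest_rate):    # inputs are histories of track_vars
#         dynamics = HARKobject()
#         dynamics.rate_forecast = sum(interest_rate) / len(interest_rate)
#         return dynamics                 # attributes named in dyn_vars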
if __name__ == '__main__':
print("Sorry, HARKcore doesn't actually do anything on its own.")
print("To see some examples of its frameworks in action, try running a model module.")
print("Several interesting model modules can be found in /ConsumptionSavingModel.")
print('For an extraordinarily simple model that demonstrates the "microeconomic" and')
print('"macroeconomic" frameworks, see /FashionVictim/FashionVictimModel.')
|
{
"content_hash": "543c69ae14def198e225c883c3e2978d",
"timestamp": "",
"source": "github",
"line_count": 834,
"max_line_length": 111,
"avg_line_length": 36.460431654676256,
"alnum_prop": 0.5784004209418574,
"repo_name": "ganong123/HARK",
"id": "7d7c56d1d8ee91f8978d64717b6356c7722b4a8a",
"size": "30408",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "HARKcore_original.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7728"
},
{
"name": "CSS",
"bytes": "17842"
},
{
"name": "HTML",
"bytes": "2942480"
},
{
"name": "JavaScript",
"bytes": "92350"
},
{
"name": "Makefile",
"bytes": "8056"
},
{
"name": "OpenEdge ABL",
"bytes": "11415"
},
{
"name": "Python",
"bytes": "1559013"
}
],
"symlink_target": ""
}
|
from OpenGLCffi.GL import params
@params(api='gl', prms=['mode'])
def glProvokingVertex(mode):
pass
|
{
"content_hash": "8a00b1b767b607aeebe043b75dc3d030",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 32,
"avg_line_length": 17.166666666666668,
"alnum_prop": 0.7281553398058253,
"repo_name": "cydenix/OpenGLCffi",
"id": "78418d0b7aa90c01fc37c4042e2de47305a1191c",
"size": "103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OpenGLCffi/GL/EXT/ARB/provoking_vertex.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1646"
},
{
"name": "C++",
"bytes": "188"
},
{
"name": "Python",
"bytes": "1853617"
}
],
"symlink_target": ""
}
|
from functools import wraps
import logging
logger = logging.getLogger('peewee')
class _QueryLogHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.queries = []
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
# Counts all entries logged to the "peewee" logger by execute_sql().
if record.name == 'peewee':
self.queries.append(record)
class count_queries(object):
def __init__(self, only_select=False):
self.only_select = only_select
self.count = 0
def get_queries(self):
return self._handler.queries
def __enter__(self):
self._handler = _QueryLogHandler()
logger.setLevel(logging.DEBUG)
logger.addHandler(self._handler)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
logger.removeHandler(self._handler)
if self.only_select:
self.count = len([q for q in self._handler.queries
if q.msg[0].startswith('SELECT ')])
else:
self.count = len(self._handler.queries)
class assert_query_count(count_queries):
def __init__(self, expected, only_select=False):
super(assert_query_count, self).__init__(only_select=only_select)
self.expected = expected
def __call__(self, f):
@wraps(f)
def decorated(*args, **kwds):
with self:
ret = f(*args, **kwds)
self._assert_count()
return ret
return decorated
def _assert_count(self):
error_msg = '%s != %s' % (self.count, self.expected)
assert self.count == self.expected, error_msg
def __exit__(self, exc_type, exc_val, exc_tb):
super(assert_query_count, self).__exit__(exc_type, exc_val, exc_tb)
self._assert_count()
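# A short usage sketch (illustrative; assumes a peewee model named User):
#
#     @assert_query_count(1)
#     def fetch_user(pk):
#         return User.get(User.id == pk)
#
#     with count_queries(only_select=True) as counter:
#         list(User.select())
#     print(counter.count)  # number of SELECT queries issued in the block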
|
{
"content_hash": "5eba50d2b28a0785c996bc65a3077138",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 76,
"avg_line_length": 28.96875,
"alnum_prop": 0.5792880258899676,
"repo_name": "coleifer/peewee",
"id": "83c1de7da44eac77cdeadba3986d614bad646fdd",
"size": "1854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "playhouse/test_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9175"
},
{
"name": "Cython",
"bytes": "54358"
},
{
"name": "Python",
"bytes": "1595137"
},
{
"name": "Shell",
"bytes": "1033"
}
],
"symlink_target": ""
}
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 7, transform = "Fisher", sigma = 0.0, exog_count = 100, ar_order = 0);
|
{
"content_hash": "6aa8ba03dba0e1eb7a33b1c0760712f7",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 165,
"avg_line_length": 37.857142857142854,
"alnum_prop": 0.7056603773584905,
"repo_name": "antoinecarme/pyaf",
"id": "f9bd976251b325bb26ba043295ebe9a4cfa7f2be",
"size": "265",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/artificial/transf_Fisher/trend_MovingMedian/cycle_7/ar_/test_artificial_128_Fisher_MovingMedian_7__100.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
"""Routines to help recognizing sound files.
Function whathdr() recognizes various types of sound file headers.
It understands almost all headers that SOX can decode.
The return tuple contains the following items, in this order:
- file type (as SOX understands it)
- sampling rate (0 if unknown or hard to decode)
- number of channels (0 if unknown or hard to decode)
- number of frames in the file (-1 if unknown or hard to decode)
- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW
If the file doesn't have a recognizable type, it returns None.
If the file can't be opened, OSError is raised.
To compute the total time, divide the number of frames by the
sampling rate (a frame contains a sample for each channel).
Function what() calls whathdr(). (It used to also use some
heuristics for raw data, but this doesn't work very well.)
Finally, the function test() is a simple main program that calls
what() for all files mentioned on the argument list. For directory
arguments it calls what() for all files in that directory. Default
argument is "." (testing all files in the current directory). The
option -r tells it to recurse down directories found inside
explicitly given directories.
"""
# The file structure is top-down except that the test program and its
# subroutine come last.
__all__ = ['what', 'whathdr']
from collections import namedtuple
SndHeaders = namedtuple('SndHeaders',
'filetype framerate nchannels nframes sampwidth')
SndHeaders.filetype.__doc__ = ("""The value for type indicates the data type
and will be one of the strings 'aifc', 'aiff', 'au','hcom',
'sndr', 'sndt', 'voc', 'wav', '8svx', 'sb', 'ub', or 'ul'.""")
SndHeaders.framerate.__doc__ = ("""The sampling_rate will be either the actual
value or 0 if unknown or difficult to decode.""")
SndHeaders.nchannels.__doc__ = ("""The number of channels or 0 if it cannot be
determined or if the value is difficult to decode.""")
SndHeaders.nframes.__doc__ = ("""The value for frames will be either the number
of frames or -1.""")
SndHeaders.sampwidth.__doc__ = ("""Either the sample size in bits or
'A' for A-LAW or 'U' for u-LAW.""")
def what(filename):
"""Guess the type of a sound file."""
res = whathdr(filename)
return res
def whathdr(filename):
"""Recognize sound headers."""
with open(filename, 'rb') as f:
h = f.read(512)
for tf in tests:
res = tf(h, f)
if res:
return SndHeaders(*res)
return None
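# For example (illustrative; the actual values depend on the file):
#     whathdr('chime.wav') might return
#     SndHeaders(filetype='wav', framerate=22050, nchannels=1,
#                nframes=11025, sampwidth=16)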
#-----------------------------------#
# Subroutines per sound header type #
#-----------------------------------#
tests = []
def test_aifc(h, f):
import aifc
if not h.startswith(b'FORM'):
return None
if h[8:12] == b'AIFC':
fmt = 'aifc'
elif h[8:12] == b'AIFF':
fmt = 'aiff'
else:
return None
f.seek(0)
try:
a = aifc.open(f, 'r')
except (EOFError, aifc.Error):
return None
return (fmt, a.getframerate(), a.getnchannels(),
a.getnframes(), 8 * a.getsampwidth())
tests.append(test_aifc)
def test_au(h, f):
if h.startswith(b'.snd'):
func = get_long_be
elif h[:4] in (b'\0ds.', b'dns.'):
func = get_long_le
else:
return None
filetype = 'au'
hdr_size = func(h[4:8])
data_size = func(h[8:12])
encoding = func(h[12:16])
rate = func(h[16:20])
nchannels = func(h[20:24])
sample_size = 1 # default
if encoding == 1:
sample_bits = 'U'
elif encoding == 2:
sample_bits = 8
elif encoding == 3:
sample_bits = 16
sample_size = 2
else:
sample_bits = '?'
frame_size = sample_size * nchannels
if frame_size:
nframe = data_size / frame_size
else:
nframe = -1
return filetype, rate, nchannels, nframe, sample_bits
tests.append(test_au)
def test_hcom(h, f):
if h[65:69] != b'FSSD' or h[128:132] != b'HCOM':
return None
divisor = get_long_be(h[144:148])
if divisor:
rate = 22050 / divisor
else:
rate = 0
return 'hcom', rate, 1, -1, 8
tests.append(test_hcom)
def test_voc(h, f):
if not h.startswith(b'Creative Voice File\032'):
return None
sbseek = get_short_le(h[20:22])
rate = 0
if 0 <= sbseek < 500 and h[sbseek] == 1:
ratecode = 256 - h[sbseek+4]
if ratecode:
rate = int(1000000.0 / ratecode)
return 'voc', rate, 1, -1, 8
tests.append(test_voc)
def test_wav(h, f):
import wave
# 'RIFF' <len> 'WAVE' 'fmt ' <len>
if not h.startswith(b'RIFF') or h[8:12] != b'WAVE' or h[12:16] != b'fmt ':
return None
f.seek(0)
try:
w = wave.openfp(f, 'r')
except (EOFError, wave.Error):
return None
return ('wav', w.getframerate(), w.getnchannels(),
w.getnframes(), 8*w.getsampwidth())
tests.append(test_wav)
def test_8svx(h, f):
if not h.startswith(b'FORM') or h[8:12] != b'8SVX':
return None
# Should decode it to get #channels -- assume always 1
return '8svx', 0, 1, 0, 8
tests.append(test_8svx)
def test_sndt(h, f):
if h.startswith(b'SOUND'):
nsamples = get_long_le(h[8:12])
rate = get_short_le(h[20:22])
return 'sndt', rate, 1, nsamples, 8
tests.append(test_sndt)
def test_sndr(h, f):
if h.startswith(b'\0\0'):
rate = get_short_le(h[2:4])
if 4000 <= rate <= 25000:
return 'sndr', rate, 1, -1, 8
tests.append(test_sndr)
#-------------------------------------------#
# Subroutines to extract numbers from bytes #
#-------------------------------------------#
def get_long_be(b):
return (b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3]
def get_long_le(b):
return (b[3] << 24) | (b[2] << 16) | (b[1] << 8) | b[0]
def get_short_be(b):
return (b[0] << 8) | b[1]
def get_short_le(b):
return (b[1] << 8) | b[0]
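# Quick sanity checks for the helpers above (illustrative):
#     get_long_be(b'\x00\x00\x01\x00') == 256
#     get_long_le(b'\x00\x01\x00\x00') == 256
#     get_short_be(b'\x12\x34') == 0x1234
#     get_short_le(b'\x34\x12') == 0x1234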
#--------------------#
# Small test program #
#--------------------#
def test():
import sys
recursive = 0
if sys.argv[1:] and sys.argv[1] == '-r':
del sys.argv[1:2]
recursive = 1
try:
if sys.argv[1:]:
testall(sys.argv[1:], recursive, 1)
else:
testall(['.'], recursive, 1)
except KeyboardInterrupt:
sys.stderr.write('\n[Interrupted]\n')
sys.exit(1)
def testall(list, recursive, toplevel):
import sys
import os
for filename in list:
if os.path.isdir(filename):
print(filename + '/:', end=' ')
if recursive or toplevel:
print('recursing down:')
import glob
names = glob.glob(os.path.join(filename, '*'))
testall(names, recursive, 0)
else:
print('*** directory (use -r) ***')
else:
print(filename + ':', end=' ')
sys.stdout.flush()
try:
print(what(filename))
except OSError:
print('*** not found ***')
if __name__ == '__main__':
test()
|
{
"content_hash": "29fb2a426dfe4e28be2f7fbf4dd16367",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 79,
"avg_line_length": 27.579766536964982,
"alnum_prop": 0.5660270880361173,
"repo_name": "yotchang4s/cafebabepy",
"id": "7ecafb40e821cdf082a2977ab2383ba2a9ac6c62",
"size": "7088",
"binary": false,
"copies": "9",
"ref": "refs/heads/develop",
"path": "src/main/python/sndhdr.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ANTLR",
"bytes": "10148"
},
{
"name": "Java",
"bytes": "253479"
}
],
"symlink_target": ""
}
|
import json
import sys
from utils.benchmark import get_value, parse_node_coords, get_matrix
# Generate a json-formatted problem from a CVRPLIB file.
CVRP_FIELDS = [
"NAME",
"TYPE",
"COMMENT",
"DIMENSION",
"EDGE_WEIGHT_TYPE",
"CAPACITY",
"VEHICLES",
]
def parse_cvrp(input_file):
with open(input_file, "r") as f:
lines = f.readlines()
# Remember main fields describing the problem type.
meta = {}
for s in CVRP_FIELDS:
data = get_value(s, lines)
if data:
meta[s] = data
# Only support EUC_2D for now.
if ("EDGE_WEIGHT_TYPE" not in meta) or (meta["EDGE_WEIGHT_TYPE"] != "EUC_2D"):
message = "Unsupported EDGE_WEIGHT_TYPE"
if "EDGE_WEIGHT_TYPE" in meta:
message += ": " + meta["EDGE_WEIGHT_TYPE"]
message += "."
print(message)
exit(0)
meta["DIMENSION"] = int(meta["DIMENSION"])
meta["CAPACITY"] = int(meta["CAPACITY"])
# Find start of nodes descriptions.
node_start = next(
(i for i, s in enumerate(lines) if s.startswith("NODE_COORD_SECTION"))
)
# Defining all jobs.
jobs = []
coords = []
for i in range(node_start + 1, node_start + 1 + meta["DIMENSION"]):
coord_line = parse_node_coords(lines[i])
if len(coord_line) < 3:
# Reaching another section (like DEMAND_SECTION), happens when
# only jobs are listed in NODE_COORD_SECTION but DIMENSION count
# include jobs + depot.
break
coords.append([float(coord_line[1]), float(coord_line[2])])
jobs.append(
{
"id": int(coord_line[0]),
"location": [float(coord_line[1]), float(coord_line[2])],
"location_index": i - node_start - 1,
}
)
# Add all job demands.
total_demand = 0
demand_start = next(
(i for i, s in enumerate(lines) if s.startswith("DEMAND_SECTION"))
)
for i in range(demand_start + 1, demand_start + 1 + meta["DIMENSION"]):
demand_line = parse_node_coords(lines[i])
if len(demand_line) < 2:
# Same as above in job parsing.
break
job_id = int(demand_line[0])
current_demand = int(demand_line[1])
for j in jobs:
# Add demand to relevant job.
if j["id"] == job_id:
j["delivery"] = [current_demand]
total_demand += current_demand
break
# Find depot description.
depot_start = next(
(i for i, s in enumerate(lines) if s.startswith("DEPOT_SECTION"))
)
depot_def = lines[depot_start + 1].strip().split(" ")
if len(depot_def) == 2:
# Depot coordinates are provided, we add them at the end of coords
# list and remember their index.
depot_loc = [float(depot_def[0]), float(depot_def[1])]
depot_index = len(coords)
coords.append(depot_loc)
else:
# Depot is one of the existing jobs, we retrieve loc and index in
# coords, then remove the job.
depot_id = int(depot_def[0])
job_index = next((i for i, j in enumerate(jobs) if j["id"] == depot_id))
depot_loc = jobs[job_index]["location"]
depot_index = jobs[job_index]["location_index"]
jobs.pop(job_index)
matrix = get_matrix(coords)
if "VEHICLES" in meta:
meta["VEHICLES"] = int(meta["VEHICLES"])
nb_vehicles = meta["VEHICLES"]
else:
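        # No vehicle count provided: derive one from total demand and capacity.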
nb_vehicles = int(1 + (total_demand / meta["CAPACITY"]))
vehicles = []
for i in range(nb_vehicles):
vehicles.append(
{
"id": i,
"start": depot_loc,
"start_index": depot_index,
"end": depot_loc,
"end_index": depot_index,
"capacity": [meta["CAPACITY"]],
}
)
return {
"meta": meta,
"vehicles": vehicles,
"jobs": jobs,
"matrices": {"car": {"durations": matrix}},
}
if __name__ == "__main__":
input_file = sys.argv[1]
output_name = input_file[: input_file.rfind(".vrp")] + ".json"
print("- Writing problem " + input_file + " to " + output_name)
json_input = parse_cvrp(input_file)
with open(output_name, "w") as out:
json.dump(json_input, out)
|
{
"content_hash": "3542a84ce60eb715264aefc16bf15a61",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 82,
"avg_line_length": 29.2,
"alnum_prop": 0.5420091324200913,
"repo_name": "VROOM-Project/vroom-scripts",
"id": "b8c433486fad1864e3949bcf5ff4758fb9047eb5",
"size": "4426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/cvrplib_to_json.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "85116"
},
{
"name": "Shell",
"bytes": "8511"
}
],
"symlink_target": ""
}
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "pyplearnr",
version = "1.0.11.1",
author = "Christopher Shymansky",
author_email = "CMShymansky@gmail.com",
description = ("Pyplearnr is a tool designed to easily and more " \
"elegantly build, validate (nested k-fold cross-validation" \
"), and test scikit-learn pipelines."),
license = "OSI Approved :: Apache Software License",
keywords = "scikit-learn pipeline k-fold cross-validation model selection",
url = "http://packages.python.org/pyplearnr",
packages=['pyplearnr', 'test'],
long_description=read('README.md'),
install_requires=[
'pandas',
'numpy',
'sklearn',
'matplotlib'
],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
)
|
{
"content_hash": "a0c69ddb9f394b6b092573c30cc03d98",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 80,
"avg_line_length": 33.225806451612904,
"alnum_prop": 0.5951456310679611,
"repo_name": "JaggedParadigm/pyplearnr",
"id": "9a1163da04908d78fe547bb493470d421298838e",
"size": "1030",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "4315165"
},
{
"name": "Python",
"bytes": "240030"
}
],
"symlink_target": ""
}
|
"""Tests for molecular_data."""
import os
import unittest
import numpy.random
import scipy.linalg
import numpy as np
from openfermion.config import THIS_DIRECTORY, DATA_DIRECTORY
from openfermion.hamiltonians import jellium_model
from openfermion.chem.chemical_series import make_atom
from openfermion.chem.molecular_data import (
name_molecule, angstroms_to_bohr, bohr_to_angstroms,
load_molecular_hamiltonian, MolecularData, MoleculeNameError,
geometry_from_file, MissingCalculationError, periodic_table)
from openfermion.transforms.repconversions import (get_interaction_operator,
get_molecular_data)
from openfermion.utils import count_qubits, Grid
class MolecularDataTest(unittest.TestCase):
def setUp(self):
self.geometry = [('H', (0., 0., 0.)), ('H', (0., 0., 0.7414))]
self.basis = 'sto-3g'
self.multiplicity = 1
self.filename = os.path.join(DATA_DIRECTORY, 'H2_sto-3g_singlet_0.7414')
self.molecule = MolecularData(self.geometry,
self.basis,
self.multiplicity,
filename=self.filename)
self.molecule.load()
def testUnitConversion(self):
"""Test the unit conversion routines"""
unit_angstrom = 1.0
bohr = angstroms_to_bohr(unit_angstrom)
self.assertAlmostEqual(bohr, 1.889726)
inverse_transform = bohr_to_angstroms(bohr)
self.assertAlmostEqual(inverse_transform, 1.0)
def test_name_molecule(self):
charge = 0
correct_name = str('H2_sto-3g_singlet_0.7414')
computed_name = name_molecule(self.geometry,
self.basis,
self.multiplicity,
charge,
description="0.7414")
self.assertEqual(correct_name, computed_name)
self.assertEqual(correct_name, self.molecule.name)
# Check (+) charge
charge = 1
correct_name = "H2_sto-3g_singlet_1+_0.7414"
computed_name = name_molecule(self.geometry,
self.basis,
self.multiplicity,
charge,
description="0.7414")
self.assertEqual(correct_name, computed_name)
# Check > 1 atom type
charge = 0
correct_name = "H1-F1_sto-3g_singlet_1.0"
test_geometry = [('H', (0, 0, 0)), ('F', (0, 0, 1.0))]
computed_name = name_molecule(test_geometry,
self.basis,
self.multiplicity,
charge,
description="1.0")
self.assertEqual(correct_name, computed_name)
# Check errors in naming
with self.assertRaises(TypeError):
test_molecule = MolecularData(self.geometry,
self.basis,
self.multiplicity,
description=5)
correct_name = str('H2_sto-3g_singlet')
test_molecule = self.molecule = MolecularData(
self.geometry,
self.basis,
self.multiplicity,
data_directory=DATA_DIRECTORY)
self.assertSequenceEqual(correct_name, test_molecule.name)
def test_invalid_multiplicity(self):
geometry = [('H', (0., 0., 0.)), ('H', (0., 0., 0.7414))]
basis = 'sto-3g'
multiplicity = -1
with self.assertRaises(MoleculeNameError):
MolecularData(geometry, basis, multiplicity)
def test_geometry_from_file(self):
water_geometry = [('O', (0., 0., 0.)), ('H', (0.757, 0.586, 0.)),
('H', (-.757, 0.586, 0.))]
filename = os.path.join(DATA_DIRECTORY, 'geometry_example.txt')
test_geometry = geometry_from_file(filename)
for atom in range(3):
self.assertAlmostEqual(water_geometry[atom][0],
test_geometry[atom][0])
for coordinate in range(3):
self.assertAlmostEqual(water_geometry[atom][1][coordinate],
test_geometry[atom][1][coordinate])
def test_save_load(self):
n_atoms = self.molecule.n_atoms
orbitals = self.molecule.canonical_orbitals
self.assertFalse(orbitals is None)
self.molecule.n_atoms += 1
self.assertEqual(self.molecule.n_atoms, n_atoms + 1)
self.molecule.load()
self.assertEqual(self.molecule.n_atoms, n_atoms)
dummy_data = self.molecule.get_from_file("dummy_entry")
self.assertTrue(dummy_data is None)
def test_dummy_save(self):
# Make fake molecule.
filename = os.path.join(DATA_DIRECTORY, 'dummy_molecule')
geometry = [('H', (0., 0., 0.)), ('H', (0., 0., 0.7414))]
basis = '6-31g*'
multiplicity = 7
charge = -1
description = 'openfermion_forever'
molecule = MolecularData(geometry, basis, multiplicity, charge,
description, filename)
# Make some attributes to save.
molecule.n_orbitals = 10
molecule.n_qubits = 10
molecule.nuclear_repulsion = -12.3
molecule.hf_energy = 99.
molecule.canonical_orbitals = [1, 2, 3, 4]
molecule.orbital_energies = [5, 6, 7, 8]
molecule.one_body_integrals = [5, 6, 7, 8]
molecule.two_body_integrals = [5, 6, 7, 8]
molecule.mp2_energy = -12.
molecule.cisd_energy = 32.
molecule.cisd_one_rdm = numpy.arange(10)
molecule.cisd_two_rdm = numpy.arange(10)
molecule.fci_energy = 232.
molecule.fci_one_rdm = numpy.arange(11)
molecule.fci_two_rdm = numpy.arange(11)
molecule.ccsd_energy = 88.
molecule.ccsd_single_amps = [1, 2, 3]
molecule.ccsd_double_amps = [1, 2, 3]
molecule.general_calculations['Fake CI'] = 1.2345
molecule.general_calculations['Fake CI 2'] = 5.2345
# Test missing calculation and information exceptions
molecule.one_body_integrals = None
with self.assertRaises(MissingCalculationError):
molecule.get_integrals()
molecule.hf_energy = 99.
with self.assertRaises(ValueError):
molecule.get_active_space_integrals([], [])
molecule.fci_energy = None
with self.assertRaises(MissingCalculationError):
molecule.get_molecular_rdm(use_fci=True)
molecule.fci_energy = 232.
molecule.cisd_energy = None
with self.assertRaises(MissingCalculationError):
molecule.get_molecular_rdm(use_fci=False)
molecule.cisd_energy = 232.
# Save molecule.
molecule.save()
try:
# Change attributes and load.
molecule.ccsd_energy = -2.232
# Load molecule.
new_molecule = MolecularData(filename=filename)
molecule.general_calculations = {}
molecule.load()
self.assertEqual(molecule.general_calculations['Fake CI'], 1.2345)
# Tests re-load functionality
molecule.save()
# Check CCSD energy.
self.assertAlmostEqual(new_molecule.ccsd_energy,
molecule.ccsd_energy)
self.assertAlmostEqual(molecule.ccsd_energy, 88.)
finally:
os.remove(filename + '.hdf5')
def test_file_loads(self):
"""Test different filename specs"""
data_directory = os.path.join(DATA_DIRECTORY)
molecule = MolecularData(self.geometry,
self.basis,
self.multiplicity,
filename=self.filename)
test_hf_energy = molecule.hf_energy
molecule = MolecularData(self.geometry,
self.basis,
self.multiplicity,
filename=self.filename + ".hdf5",
data_directory=data_directory)
self.assertAlmostEqual(test_hf_energy, molecule.hf_energy)
molecule = MolecularData(filename=self.filename + ".hdf5")
integrals = molecule.one_body_integrals
self.assertTrue(integrals is not None)
with self.assertRaises(ValueError):
MolecularData()
def test_active_space(self):
"""Test simple active space truncation features"""
# Start w/ no truncation
core_const, one_body_integrals, two_body_integrals = (
self.molecule.get_active_space_integrals(active_indices=[0, 1]))
self.assertAlmostEqual(core_const, 0.0)
self.assertAlmostEqual(
scipy.linalg.norm(one_body_integrals -
self.molecule.one_body_integrals), 0.0)
self.assertAlmostEqual(
scipy.linalg.norm(two_body_integrals -
self.molecule.two_body_integrals), 0.0)
def test_energies(self):
self.assertAlmostEqual(self.molecule.hf_energy, -1.1167, places=4)
self.assertAlmostEqual(self.molecule.mp2_energy, -1.1299, places=4)
self.assertAlmostEqual(self.molecule.cisd_energy, -1.1373, places=4)
self.assertAlmostEqual(self.molecule.ccsd_energy, -1.1373, places=4)
def test_rdm_and_rotation(self):
# Compute total energy from RDM.
molecular_hamiltonian = self.molecule.get_molecular_hamiltonian()
molecular_rdm = self.molecule.get_molecular_rdm()
total_energy = molecular_rdm.expectation(molecular_hamiltonian)
self.assertAlmostEqual(total_energy, self.molecule.cisd_energy)
# Build random rotation with correction dimension.
num_spatial_orbitals = self.molecule.n_orbitals
rotation_generator = numpy.random.randn(num_spatial_orbitals,
num_spatial_orbitals)
rotation_matrix = scipy.linalg.expm(rotation_generator -
rotation_generator.T)
# Compute total energy from RDM under some basis set rotation.
molecular_rdm.rotate_basis(rotation_matrix)
molecular_hamiltonian.rotate_basis(rotation_matrix)
total_energy = molecular_rdm.expectation(molecular_hamiltonian)
self.assertAlmostEqual(total_energy, self.molecule.cisd_energy)
def test_get_up_down_electrons(self):
largest_atom = 10
# Test first row
correct_alpha = [0, 1, 1, 2, 2, 3, 4, 5, 5, 5, 5]
correct_beta = [0, 0, 1, 1, 2, 2, 2, 2, 3, 4, 5]
for n_electrons in range(1, largest_atom + 1):
# Make molecule.
basis = 'sto-3g'
atom_name = periodic_table[n_electrons]
molecule = make_atom(atom_name, basis)
# Test.
self.assertAlmostEqual(molecule.get_n_alpha_electrons(),
correct_alpha[n_electrons])
self.assertAlmostEqual(molecule.get_n_beta_electrons(),
correct_beta[n_electrons])
def test_abstract_molecule(self):
"""Test an abstract molecule like jellium for saving and loading"""
jellium_interaction = get_interaction_operator(
jellium_model(Grid(2, 2, 1.0)))
jellium_molecule = get_molecular_data(jellium_interaction,
geometry="Jellium",
basis="PlaneWave22",
multiplicity=1,
n_electrons=4)
jellium_filename = jellium_molecule.filename
jellium_molecule.save()
jellium_molecule.load()
correct_name = "Jellium_PlaneWave22_singlet"
self.assertEqual(jellium_molecule.name, correct_name)
os.remove("{}.hdf5".format(jellium_filename))
def test_load_molecular_hamiltonian(self):
bond_length = 1.45
geometry = [('Li', (0., 0., 0.)), ('H', (0., 0., bond_length))]
lih_hamiltonian = load_molecular_hamiltonian(geometry, 'sto-3g', 1,
format(bond_length), 2, 2)
self.assertEqual(count_qubits(lih_hamiltonian), 4)
lih_hamiltonian = load_molecular_hamiltonian(geometry, 'sto-3g', 1,
format(bond_length), 2, 3)
self.assertEqual(count_qubits(lih_hamiltonian), 6)
lih_hamiltonian = load_molecular_hamiltonian(geometry, 'sto-3g', 1,
format(bond_length), None,
None)
self.assertEqual(count_qubits(lih_hamiltonian), 12)
def test_jk_matr(self):
h2mol = self.molecule
j = h2mol.get_j()
k = h2mol.get_k()
pyscf_j = [[0.67448877, 0.6634681], [0.6634681, 0.69739377]]
pyscf_k = [[0.67448877, 0.18128881], [0.18128881, 0.69739377]]
for p in range(j.shape[0]):
for q in range(j.shape[1]):
self.assertAlmostEqual(j[p][q], pyscf_j[p][q])
self.assertAlmostEqual(k[p][q], pyscf_k[p][q])
ndocc = h2mol.n_electrons // 2
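        # Closed-shell RHF energy check: E = E_nuc + sum_i 2*h_ii
        # + sum_ij (2*J_ij - K_ij) over the doubly occupied orbitals.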
E1bdy = 2 * h2mol.one_body_integrals[:ndocc, :ndocc]
E2bdy = (2 * j[:ndocc, :ndocc]) - k[:ndocc, :ndocc]
E_test = h2mol.nuclear_repulsion + E1bdy + E2bdy
self.assertAlmostEqual(E_test, h2mol.hf_energy)
def test_antisymint(self):
h2mol = self.molecule
antisymm_spin_orb_tei = h2mol.get_antisym()
nocc = h2mol.n_electrons
mol_H = h2mol.get_molecular_hamiltonian()
E1bdy = np.sum(np.diag(mol_H.one_body_tensor[:nocc, :nocc]))
E2bdy = 1/2.*np.einsum('ijij',\
antisymm_spin_orb_tei[:nocc,:nocc,:nocc,:nocc])
E_test = h2mol.nuclear_repulsion + E1bdy + E2bdy
self.assertAlmostEqual(E_test, h2mol.hf_energy)
def test_missing_calcs_for_integrals(self):
geometry = [('H', (0., 0., 0.)), ('H', (0., 0., 0.8764))]
basis = 'sto-3g'
multiplicity = 1
molecule = MolecularData(geometry, basis, multiplicity)
with self.assertRaises(MissingCalculationError):
molecule.get_j()
with self.assertRaises(MissingCalculationError):
molecule.get_k()
with self.assertRaises(MissingCalculationError):
molecule.get_antisym()
|
{
"content_hash": "80389d357f0f19e3960c58d7e8ca9396",
"timestamp": "",
"source": "github",
"line_count": 345,
"max_line_length": 80,
"avg_line_length": 43.09855072463768,
"alnum_prop": 0.5594861792992132,
"repo_name": "quantumlib/OpenFermion",
"id": "c6776a70a6d592df4b928dc88584ff710e0aab1d",
"size": "15431",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/openfermion/chem/molecular_data_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "269"
},
{
"name": "Makefile",
"bytes": "634"
},
{
"name": "Python",
"bytes": "2282654"
},
{
"name": "Shell",
"bytes": "18776"
}
],
"symlink_target": ""
}
|
"""Configuration and hyperparameter sweeps."""
from lra_benchmarks.listops.configs import base_listops_config
def get_config():
"""Get the default hyperparameter configuration."""
config = base_listops_config.get_config()
config.model_type = "sinkhorn"
return config
def get_hyper(hyper):
return hyper.product([])
|
{
"content_hash": "37c37001c37acb7616349baab8b03a5c",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 62,
"avg_line_length": 23.5,
"alnum_prop": 0.7386018237082067,
"repo_name": "google-research/long-range-arena",
"id": "cf5386b30f08631f446757534cd182738ce3a15d",
"size": "902",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "lra_benchmarks/listops/configs/sinkhorn_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "507289"
}
],
"symlink_target": ""
}
|
__author__ = 'nathan'
from base import BaseNotifier
class MailNotifier(BaseNotifier):
"""use mail to send a notification"""
def _notify(self):
pass
|
{
"content_hash": "e37e60e27e30755043739836325ecb78",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 41,
"avg_line_length": 16.8,
"alnum_prop": 0.6547619047619048,
"repo_name": "zhoubangtao/dbsync",
"id": "1092c0f4dd45d192ef60b3a06a5caac2b5c7fdd1",
"size": "191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dbsync/notifiers/mail.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "19862"
},
{
"name": "Python",
"bytes": "52903"
}
],
"symlink_target": ""
}
|
from setuptools import setup
try:
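    # Python 3.4+ ships enum in the stdlib; older interpreters need enum34.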
import enum # noqa
extra_requires = []
except ImportError:
extra_requires = ['enum34']
REQUIRES = ['marshmallow>=2.0.0'] + extra_requires
with open('README.md', 'r') as f:
readme = f.read()
with open('CHANGELOG', 'r') as f:
changelog = f.read()
if __name__ == '__main__':
setup(
name='marshmallow-enum',
version='1.5.1',
author='Alec Nikolas Reiter',
author_email='alecreiter@gmail.com',
description='Enum field for Marshmallow',
long_description=readme + '\n\n' + changelog,
long_description_content_type="text/markdown",
package_data={'': ['LICENSE', 'README.md', 'CHANGELOG']},
include_package_data=True,
license='MIT',
packages=['marshmallow_enum'],
install_requires=REQUIRES,
)
|
{
"content_hash": "b2e6abee8ad5896c4ebcbc65ea3fb91c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 65,
"avg_line_length": 25.727272727272727,
"alnum_prop": 0.5948174322732627,
"repo_name": "justanr/marshmallow_enum",
"id": "0a4fdd13278f41378681abc4167a7e3789c20db6",
"size": "849",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15024"
}
],
"symlink_target": ""
}
|
from setuptools import setup
from pdfminer import __version__
import sys
requires = ['six', 'pycryptodome']
if sys.version_info >= (3, 0):
requires.append('chardet')
setup(
name='pdfminer.six-mgh',
version=__version__,
install_requires=requires,
description='PDF parser and analyzer',
long_description='''PDFMiner.six-mgh is a tool for extracting information from PDF documents.
Fork using six for Python 2+3 compatibility maintained by the Monumenta Germaniae Historica.
Unlike other PDF-related tools, it focuses entirely on getting
and analyzing text data. PDFMiner allows one to obtain
the exact location of text on a page, as well as
other information such as fonts or lines.
It includes a PDF converter that can transform PDF files
into other text formats (such as HTML). It has an extensible
PDF parser that can be used for other purposes instead of text analysis.''',
license='MIT/X',
author='Yusuke Shinyama + Philippe Guglielmetti',
author_email='pdfminer@goulu.net',
maintainer='Clemens Radl',
maintainer_email='clemens.radl@mgh.de',
url='http://www.mgh.de/soft/pdfminer-six-mgh',
packages=[
'pdfminer',
],
package_data={
'pdfminer': ['cmap/*.pickle.gz']
},
scripts=[
'tools/pdf2txt.py',
'tools/dumppdf.py',
'tools/latin2ascii.py',
],
keywords=['pdf parser', 'pdf converter', 'layout analysis', 'text mining'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Text Processing',
],
)
|
{
"content_hash": "35d2c0a3f7377b377742f192e541e209",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 97,
"avg_line_length": 35.527272727272724,
"alnum_prop": 0.6796315250767656,
"repo_name": "rotula/pdfminer",
"id": "85fb5b5bd05a1f25ae3278ca10330fd2d2c90f90",
"size": "1988",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1861"
},
{
"name": "Python",
"bytes": "527345"
}
],
"symlink_target": ""
}
|
'''
Generates a contact list of SciELO journal editors.
'''
import sys
import xlsxwriter
from articlemeta.client import ThriftClient
import models
def journal_date_list():
scielo_fields = [
'ISSN SciELO',
'SciELO collection',
'Publisher country',
'Title',
'Status',
'Creation year',
'Inclusion year at SciELO',
'Stopping year at SciELO',
'Publisher Name',
'URL']
workbook = xlsxwriter.Workbook('output/journal_dates_list.xlsx')
worksheet = workbook.add_worksheet('SciELO')
format_date = workbook.add_format({'num_format': 'dd/mm/yyyy'})
wrap_red = workbook.add_format({'text_wrap': False, 'bg_color': '#DC143C'})
# Header
row = 0
col = 0
for h in scielo_fields:
worksheet.write(0, col, h, wrap_red)
col += 1
row = 1
query = models.Scielo.objects
for doc in query:
col = 0
worksheet.write(row, col, doc.issn_scielo)
col += 1
if 'api' in doc:
worksheet.write(row, col, doc['collection'])
col += 1
worksheet.write(row, col, doc.country)
col += 1
worksheet.write(row, col, doc.title)
col += 1
worksheet.write(row, col, doc.title_current_status)
col += 1
if 'api' in doc:
if 'first_year' in doc['api']:
worksheet.write(row, col, int(doc['api']['first_year']))
col += 1
worksheet.write(row, col, doc.inclusion_year_at_scielo)
col += 1
if 'stopping_year_at_scielo' in doc:
worksheet.write(row, col, doc.stopping_year_at_scielo)
col += 1
worksheet.write(row, col, doc.publisher_name)
col += 1
if 'api' in doc:
if 'url' in doc['api']:
worksheet.write(row, col, doc.api['url'])
col += 1
row += 1
    # Write the Excel spreadsheet
try:
workbook.close()
except IOError as e:
print(e)
sys.exit(1)
def main():
journal_date_list()
if __name__ == "__main__":
main()
|
{
"content_hash": "347132f1d107f1c6d8838afa70ec3dd2",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 79,
"avg_line_length": 21.96875,
"alnum_prop": 0.543859649122807,
"repo_name": "scieloorg/journals-catalog",
"id": "f7c2a6557a45de9d3d075cd933bbf28215a5eb33",
"size": "2125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jcatalog/reports/journal_date_list.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "112941"
}
],
"symlink_target": ""
}
|
import re
from io import BytesIO
import openpyxl
import xlrd
from openpyxl import load_workbook
from openpyxl.styles import Font
from openpyxl.utils import get_column_letter
import frappe
from frappe.utils.html_utils import unescape_html
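# Control characters (other than tab, newline and carriage return) are not
# allowed in XLSX cell values, so they are stripped before writing.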
ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')
# return xlsx file object
def make_xlsx(data, sheet_name, wb=None, column_widths=None):
column_widths = column_widths or []
if wb is None:
wb = openpyxl.Workbook(write_only=True)
ws = wb.create_sheet(sheet_name, 0)
for i, column_width in enumerate(column_widths):
if column_width:
ws.column_dimensions[get_column_letter(i + 1)].width = column_width
row1 = ws.row_dimensions[1]
row1.font = Font(name='Calibri', bold=True)
for row in data:
clean_row = []
for item in row:
if isinstance(item, str) and (sheet_name not in ['Data Import Template', 'Data Export']):
value = handle_html(item)
else:
value = item
if isinstance(item, str) and next(ILLEGAL_CHARACTERS_RE.finditer(value), None):
# Remove illegal characters from the string
value = re.sub(ILLEGAL_CHARACTERS_RE, '', value)
clean_row.append(value)
ws.append(clean_row)
xlsx_file = BytesIO()
wb.save(xlsx_file)
return xlsx_file
def handle_html(data):
from html2text import HTML2Text
# return if no html tags found
data = frappe.as_unicode(data)
if '<' not in data or '>' not in data:
return data
h = unescape_html(data or "")
obj = HTML2Text()
obj.ignore_links = True
obj.body_width = 0
try:
value = obj.handle(h)
except Exception:
# unable to parse html, send it raw
return data
value = ", ".join(value.split(' \n'))
value = " ".join(value.split('\n'))
value = ", ".join(value.split('# '))
return value
def read_xlsx_file_from_attached_file(file_url=None, fcontent=None, filepath=None):
if file_url:
_file = frappe.get_doc("File", {"file_url": file_url})
filename = _file.get_full_path()
elif fcontent:
filename = BytesIO(fcontent)
elif filepath:
filename = filepath
else:
return
rows = []
wb1 = load_workbook(filename=filename, read_only=True, data_only=True)
ws1 = wb1.active
for row in ws1.iter_rows():
tmp_list = []
for cell in row:
tmp_list.append(cell.value)
rows.append(tmp_list)
return rows
def read_xls_file_from_attached_file(content):
book = xlrd.open_workbook(file_contents=content)
sheets = book.sheets()
sheet = sheets[0]
rows = []
for i in range(sheet.nrows):
rows.append(sheet.row_values(i))
return rows
def build_xlsx_response(data, filename):
xlsx_file = make_xlsx(data, filename)
# write out response as a xlsx type
frappe.response['filename'] = filename + '.xlsx'
frappe.response['filecontent'] = xlsx_file.getvalue()
frappe.response['type'] = 'binary'
|
{
"content_hash": "6ae79c8c0c7b4342dfbe52f96c3e8b0d",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 92,
"avg_line_length": 23.794871794871796,
"alnum_prop": 0.694683908045977,
"repo_name": "mhbu50/frappe",
"id": "38a076212ad2d90a829b2240a74c56032b8090b7",
"size": "2881",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "frappe/utils/xlsxutils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "67734"
},
{
"name": "HTML",
"bytes": "247122"
},
{
"name": "JavaScript",
"bytes": "2359670"
},
{
"name": "Less",
"bytes": "25489"
},
{
"name": "Makefile",
"bytes": "99"
},
{
"name": "Python",
"bytes": "3464477"
},
{
"name": "SCSS",
"bytes": "248877"
},
{
"name": "Shell",
"bytes": "3505"
},
{
"name": "Vue",
"bytes": "96912"
}
],
"symlink_target": ""
}
|
"""
Copyright (C) 2010-2013, Ryan Fan
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Library General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
from constants import *
import logging
from pyvirtualdisplay import Display
from splinter import Browser
import time
from datetime import datetime
import glob
import os
import json
import re
import tempfile
class KM:
"""
Internal KM related RPC calls
"""
def __init__(self, session):
self.session = session
self.sso_username = None
self.sso_password = None
self.read_cred_info()
if not self.sso_username or not self.sso_password:
logging.error("mod_km initialize failure: Empty SSO username or pasword")
return False
def get_exposed_methods(self):
methods = {
'get_krb_hpactions': self.get_krb_hpactions,
'get_doc_info': self.get_doc_info,
}
return methods
def __get_random_window_id(self):
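        # Use a throwaway temp file's basename as a cheap unique window id.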
tf = tempfile.NamedTemporaryFile(delete=True)
return os.path.basename(tf.name)
def __get_doc_access_url(self, doc_id):
view_url = "https://mosemp.us.abc.com/epmos/faces/DocumentDisplay?id=%s" % doc_id
try:
redirect_html = self.session.net.fetch_url(view_url)
with open("/tmp/debug_get_doc_1", 'w') as f:
f.write(redirect_html.encode('utf-8'))
        except Exception, e:
            logging.error("Failed to get KM document access URL because of: %s" % e)
return None
expr = re.compile(r'_addParam\(query\,\s\"_afrLoop\"\,\s\"(\d+)\"\)')
founds = expr.findall(redirect_html)
if founds:
_afrLoop = founds[0]
else:
logging.error("Failed to construct redirect URL because cannot parse _afrLoop")
return None
redirect_url = view_url + "&_afrWindowMode=0&_afrLoop=%s&_afrWindowId=%s" % (_afrLoop, self.__get_random_window_id())
logging.debug("Successfully get KM document access URL")
return redirect_url
def get_doc_info(self, doc_id):
doc_url = self.__get_doc_access_url(doc_id)
if not doc_url:
logging.error("Failed to get document access URL")
return None
doc = {}
try:
html_doc = self.session.net.fetch_url(doc_url)
        except Exception, e:
            logging.error("Failed to retrieve KM document: %s because of: %s" % (doc_id, e))
return None
doc['id'] = doc_id
# extract document full title
expr = re.compile("<label>(.*)\s\(.*\)</label>")
founds = expr.findall(html_doc)
if not founds:
logging.error("Failed to parse document title")
return {}
doc['title'] = founds[0]
#extract document author
expr = re.compile('<td id="pt1:r1:0:cf4">(.*?);</td>')
founds = expr.findall(html_doc)
if not founds:
logging.error("Failed to parse document author")
return {}
doc['author'] = founds[0]
return json.dumps(doc)
def read_cred_info(self):
with open("/root/.sso") as f:
for line in f:
kvset = line.split("=")
if len(kvset) < 2:
logging.error("read_cred_info in mod_km: Invalid credentials in /root/.sso")
return False
(k,v) = kvset
if k == 'SSO_USERNAME':
self.sso_username = v.strip()
elif k == 'SSO_PASSWORD':
self.sso_password = v.strip()
else:
logging.error("read_cred_info in mod_km: Invalid credentials setting in /root/.sso")
return False
def wait_for_element_visibility(self, element):
if element.is_visible():
return element
else:
for i in range(10):
if not element.is_visible():
time.sleep(.5)
else:
return element
def get_krb_hpactions(self):
"""
        Get the export file content from the internal KM site via the Action Browser.
        A customized saved query is executed automatically when the Infrastructure group is selected.
"""
logging.debug("Export HP actions for KM_REQ Bugs...")
start_time = time.time()
URL_ACTION_BROWSER = "https://mosemp.us.abc.com/epmos/faces/secure/mykm/hp/ActionBrowser.jspx"
# the location where to export the excel file
DIR_EXPORT = "/tmp"
        # set up a virtual DISPLAY for the browser to use
display = Display(visible=0, size=(1920, 1280))
display.start()
        # firefox profile that lets the xls file download automatically
ffPrefs = {
"browser.download.folderList": 2,
"browser.download.manager.showWhenStarting": False,
"browser.download.dir": DIR_EXPORT,
"browser.helperApps.neverAsk.saveToDisk": "application/vnd.ms-excel",
"app.update.enabled": False,
"app.update.auto": False,
}
browser = Browser('firefox', profile_preferences=ffPrefs)
found = ""
try:
logging.debug("Accessing KM action browser...")
browser.visit(URL_ACTION_BROWSER)
# try to login via sso
logging.debug("Redirected, login via SSO...")
with open("/tmp/debug_login", 'w') as f:
f.write(browser.html.encode('utf-8'))
browser.fill('ssousername', self.sso_username)
browser.fill('password', self.sso_password)
browser.click_link_by_partial_href("javascript:doLogin")
# select "Infrastructure" PLA group
logging.debug("Select 'Infrastructure' group")
with open("/tmp/debug_select", 'w') as f:
f.write(browser.html.encode('utf-8'))
browser.click_link_by_text("Infrastructure")
# Export 1309 and 4455 KM_REQ BUG HP actions as excel file
logging.debug("Click 'Export All' button")
with open("/tmp/debug_export", 'w') as f:
f.write(browser.html.encode('utf-8'))
exportAllBtn = browser.find_by_id('pt1:r1:0:r2:0:pc1:exportAll').first
exportAllBtn.click()
# wait the excel file to be downloaded automatically by firefox
logging.debug("Downloading exported excel file to %s" % DIR_EXPORT)
max_wait = 60
wait = 5
already_wait = 0
while not found and already_wait <= max_wait:
time.sleep(wait)
already_wait += wait
for filename in glob.glob(os.path.join(DIR_EXPORT, "export*.xls")):
if os.path.getmtime(filename) > start_time:
found = filename
break
except Exception, e:
logging.error("Failed to get HPActions because of: %s" % e)
finally:
logging.debug("Stop browser and clear display")
browser.quit()
display.stop()
# return export file contents to caller
ret = ""
if found:
try:
with open(found) as f:
ret = f.read()
os.unlink(found)
except Exception, e:
logging.error("Failed to read and remove excel file because of: %s" % e)
return ret
|
{
"content_hash": "c4b0461ba0c5c033b06fd0e63d90554d",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 126,
"avg_line_length": 34.74786324786325,
"alnum_prop": 0.574468085106383,
"repo_name": "rfancn/myprojects",
"id": "59029f8c7b4af606a92e3ebd47f2490dc11588c9",
"size": "8131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "srportal/spd/rpc_modules/mod_km.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10173"
},
{
"name": "HTML",
"bytes": "78344"
},
{
"name": "JavaScript",
"bytes": "84269"
},
{
"name": "Python",
"bytes": "844644"
},
{
"name": "Shell",
"bytes": "5683"
}
],
"symlink_target": ""
}
|
import logging
import pymel.core as pymel
from maya import OpenMaya
#
# A PyNodeChain is a special pymel-related object that acts exactly like a standard array.
# However it allows us to have more bells and whistles.
#
def is_valid_PyNode(val):
return (val and hasattr(val, 'exists') and val.exists()) if val else None
def distance_between_nodes(x, y):
"""
Return the distance between two pynodes.
"""
ax, ay, az = x.getTranslation(space="world")
    bx, by, bz = y.getTranslation(space="world")
    return ((ax - bx) ** 2 + (ay - by) ** 2 + (az - bz) ** 2) ** 0.5
def distance_between_vectors(a, b):
"""
http://darkvertex.com/wp/2010/06/05/python-distance-between-2-vectors/
"""
return ((a.x - b.x) ** 2 + (a.y - b.y) ** 2 + (a.z - b.z) ** 2) ** 0.5
def is_child_of(node, potential_parent):
while node:
if node == potential_parent:
return True
node = node.getParent()
return False
class PyNodeChain(list):
"""A container for manipulating lists of hosts"""
@property
def start(self):
return next(iter(self), None)
@property
def end(self):
return self[-1] if len(self) > 0 else None
@property
def chain(self):
return self
def duplicate(self):
# Hack - Convert self into list even if self is a list to prevent duplicate self parameter in pymel.duplicate
new_chain = pymel.duplicate(list(self), renameChildren=True, parentOnly=True)
return PyNodeChain(new_chain)
def setParent(self, new_parent, **kwargs):
for node in self:
if node != new_parent and node.getParent() != new_parent:
node.setParent(new_parent, **kwargs)
# todo: convert to property?
def length(self):
length = 0
for i in range(len(self) - 1):
head = self[i]
tail = self[i + 1]
length += distance_between_nodes(head, tail)
return length
# get the first pynode that have the attr
def __getattr__(self, key):
logging.warning("Searching unknow attribute {key} in {self}", key=key, self=self)
first_node = next((node for node in self.__dict__['_list'] if hasattr(node, key)), None)
if first_node is not None:
return getattr(first_node, key)
raise AttributeError
# set all the pynodes that have the attr
def __setattr__(self, key, value):
for node in self:
try:
setattr(node, key, value)
except Exception, e:
logging.error(str(e))
def duplicate_chain(chain):
new_chain = pymel.duplicate(chain, renameChildren=True, parentOnly=True)
return PyNodeChain(new_chain)
def get_num_parents(obj):
num_parents = -1
while obj is not None:
obj = obj.getParent()
num_parents += 1
return num_parents
def get_chains_from_objs(objs):
"""
    Take an arbitrary collection of joints and sort them in hierarchies represented by lists.
"""
chains = []
objs = sorted(objs, key=get_num_parents)
for obj in objs:
parent = obj.getParent()
if parent not in objs:
chains.append([obj])
else:
for chain in chains:
if parent in chain:
chain.append(obj)
return [PyNodeChain(chain) for chain in chains]
def iter_parents(obj):
while obj.getParent() is not None:
obj = obj.getParent()
yield obj
def get_parents(obj):
return list(iter_parents(obj))
'''
parents = []
while obj.getParent() is not None:
parent = obj.getParent()
parents.append(parent)
obj = parent
return parents
'''
def get_common_parents(objs):
"""
Return the first parent that all provided objects share.
:param objs: A list of pymel.PyNode instances.
:return: A pymel.PyNode instance.
"""
parent_sets = set()
for jnt in objs:
parent_set = set(get_parents(jnt))
if not parent_sets:
parent_sets = parent_set
else:
parent_sets &= parent_set
result = next(iter(reversed(sorted(parent_sets, key=get_num_parents))), None)
if result and result in objs:
result = result.getParent()
return result
class Tree(object):
__slots__ = ('val', 'children', 'parent')
def __init__(self, val):
self.val = val
self.children = []
self.parent = None
def append(self, tree):
self.children.append(tree)
tree.parent = self
def __repr__(self):
return '<Tree {0}>'.format(self.val)
def get_tree_from_objs(objs, sort=False):
"""
Sort all provided objects in a tree fashion.
Support missing objects between hierarchy.
Note that tree root value will always be None, representing the root node.
"""
dagpaths = sorted([obj.fullPath() for obj in objs])
root = Tree(None)
def dag_is_child_of(dag_parent, dag_child):
return dag_child.startswith(dag_parent + '|')
last_knot = root
for dagpath in dagpaths:
knot = Tree(dagpath)
# Resolve the new knot parent
p = last_knot
while not (p.val is None or dag_is_child_of(p.val, dagpath)):
p = p.parent
p.append(knot)
# Save the last knot, since we are iterating in alphabetical order,
# we can assume that the next knot parent can be found using this knot.
last_knot = knot
return root
#
# ls() reimplementations
#
def ls(*args, **kwargs):
return PyNodeChain(pymel.ls(*args, **kwargs))
# Wrapper for pymel.ls that returns only objects without parents.
def ls_root(*args, **kwargs):
# TODO: Better finding of the root joint
return PyNodeChain(filter(lambda x: x.getParent() is None or type(x.getParent()) != pymel.nt.Joint,
iter(pymel.ls(*args, **kwargs))))
def ls_root_anms(pattern='anm*', **kwargs):
return ls_root(pattern, type='transform', **kwargs)
def ls_root_geos(pattern='geo*', **kwargs):
return ls_root(pattern, type='transform', **kwargs)
def ls_root_rigs(pattern='rig*', **kwargs):
return ls_root(pattern, type='transform', **kwargs)
def ls_root_jnts(pattern='jnt*', **kwargs):
return ls_root(pattern, type='transform', **kwargs)
#
# isinstance() reimplementation
#
# Class check for transform PyNodes
def isinstance_of_transform(obj, cls=pymel.nodetypes.Transform):
return isinstance(obj, cls)
# Class check for shape PyNodes
def isinstance_of_shape(obj, cls=pymel.nodetypes.Shape):
if isinstance(obj, pymel.nodetypes.Transform):
return any((shape for shape in obj.getShapes() if isinstance(shape, cls)))
elif isinstance(obj, pymel.nodetypes.Shape):
return isinstance(obj, cls)
def create_zero_grp(obj):
zero_grp = pymel.createNode('transform')
new_name = obj.name() + '_' + 'zero_grp'
zero_grp.rename(new_name)
# Note: Removed for performance
zero_grp.setMatrix(obj.getMatrix(worldSpace=True))
parent = obj.getParent()
if parent:
zero_grp.setParent(parent)
obj.setParent(zero_grp)
return zero_grp
def zero_out_objs(objs):
for o in objs:
create_zero_grp(o)
#
# pymel.datatypes extensions.
#
class Segment(object):
"""
In Maya there's no class to represent a segment.
This is the pymel.datatypes.Segment I've always wanted.
"""
def __init__(self, pos_s, pos_e):
self.pos_s = pos_s
self.pos_e = pos_e
# self.pos_s = numpy.array(pos_s.x, pos_s.y, pos_s.z)
# self.pos_e = numpy.array(pos_e.x, pos_e.y, pos_e.z)
def closest_point(self, p):
"""
http://stackoverflow.com/questions/3120357/get-closest-point-to-a-line
"""
a = self.pos_s
b = self.pos_e
a_to_p = p - a
a_to_b = b - a
ab_length = a_to_b.length()
ap_length = a_to_p.length()
a_to_p_norm = a_to_p.normal()
a_to_b_norm = a_to_b.normal()
atp_dot_atb = a_to_p_norm * (a_to_b_norm) # dot product
dist_norm = atp_dot_atb * ap_length / ab_length
return pymel.datatypes.Vector(
a.x + a_to_b.x * dist_norm,
a.y + a_to_b.y * dist_norm,
a.z + a_to_b.z * dist_norm
)
def closest_point_normalized_distance(self, p, epsilon=0.001):
"""
        Same thing as .closest_point but only returns the distance relative
        to the length of a to b. Available for optimisation purposes.
"""
a = self.pos_s
b = self.pos_e
a_to_p = p - a
a_to_b = b - a
ab_length = a_to_b.length()
ap_length = a_to_p.length()
a_to_p_norm = a_to_p.normal()
a_to_b_norm = a_to_b.normal()
atp_dot_atb = a_to_p_norm * a_to_b_norm
return (atp_dot_atb * ap_length / ab_length) if abs(ab_length) > epsilon else 0.0
class SegmentCollection(object):
def __init__(self, segments=None):
if segments is None:
segments = []
self.segments = segments
self.knots = [segment.pos_s for segment in self.segments]
self.knots.append(self.segments[-1].pos_e)
def closest_segment(self, pos):
bound_min = -0.000000000001 # Damn float imprecision
bound_max = 1.0000000000001 # Damn float imprecision
num_segments = len(self.segments)
for i, segment in enumerate(self.segments):
distance_normalized = segment.closest_point_normalized_distance(pos)
if bound_min <= distance_normalized <= bound_max:
return segment, distance_normalized
elif i == 0 and distance_normalized < bound_min: # Handle out-of-bound
return segment, 0.0
elif i == (num_segments - 1) and distance_normalized > bound_max: # Handle out-of-bound
return segment, 1.0
raise Exception("Can't resolve segment for {0}".format(pos))
def closest_segment_index(self, pos):
closest_segment, ratio = self.closest_segment(pos)
index = self.segments.index(closest_segment)
return index, ratio
def get_knot_weights(self, dropoff=1.0, normalize=True):
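        # Weights fall off linearly with index distance to each knot; the two
        # boundary knots are pinned to a weight of 1.0 on themselves.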
num_knots = len(self.knots)
knots_weights = []
for i, knot in enumerate(self.knots):
if i == 0:
weights = [0] * num_knots
weights[0] = 1.0
elif i == (num_knots - 1):
weights = [0] * num_knots
weights[-1] = 1.0
else:
weights = []
total_weight = 0.0
for j in range(num_knots):
distance = abs(j - i)
weight = max(0, 1.0 - (distance / dropoff))
total_weight += weight
weights.append(weight)
weights = [weight / total_weight for weight in weights]
knots_weights.append(weights)
return knots_weights
'''
def get_weights(self, pos, dropoff=1.0, normalize=True):
# Compute the 'SegmentCollection' relative ratio and return the weight for each knots.
closest_segment, relative_ratio = self.closest_segment(pos)
index = self.segments.index(closest_segment)
absolute_ratio = relative_ratio + index
weights = []
total_weights = 0.0
for segment_ratio in range(len(self.knots)):
#segment_ratio += 0.5 # center of the joint
#print segment_ratio, absolute_ratio
distance = abs(segment_ratio - absolute_ratio)
weight = max(0, 1.0-(distance/dropoff))
# Apply cubic interpolation for greater results.
#weight = interp_cubic(weight)
total_weights += weight
weights.append(weight)
if normalize:
weights = [weight / total_weights for weight in weights]
return weights
'''
@classmethod
def from_transforms(cls, objs):
segments = []
num_objs = len(objs)
for i in range(num_objs - 1):
obj_s = objs[i]
obj_e = objs[i + 1]
mfn_transform_s = obj_s.__apimfn__()
mfn_transform_e = obj_e.__apimfn__()
pos_s = OpenMaya.MVector(mfn_transform_s.getTranslation(OpenMaya.MSpace.kWorld))
pos_e = OpenMaya.MVector(mfn_transform_e.getTranslation(OpenMaya.MSpace.kWorld))
segment = Segment(pos_s, pos_e)
segments.append(segment)
return cls(segments)
@classmethod
def from_positions(cls, positions):
segments = []
num_positions = len(positions)
for i in range(num_positions - 1):
pos_s = positions[i]
pos_e = positions[i + 1]
segment = Segment(pos_s, pos_e)
segments.append(segment)
return cls(segments)
def get_rotation_from_matrix(tm):
"""
Bypass pymel bug
see https://github.com/LumaPictures/pymel/issues/355
"""
return pymel.datatypes.TransformationMatrix(tm).rotate
def makeIdentity_safe(obj, translate=False, rotate=False, scale=False, apply=False, **kwargs):
"""
Extended pymel.makeIdentity method that won't crash for idiotic reasons.
"""
from . import libAttr
affected_attrs = []
# Ensure the shape don't have any extra transformation.
if apply:
if translate:
libAttr.unlock_translation(obj)
affected_attrs.extend([
obj.translate, obj.translateX, obj.translateY, obj.translateZ
])
if rotate:
libAttr.unlock_rotation(obj)
affected_attrs.extend([
obj.rotate, obj.rotateX, obj.rotateY, obj.rotateZ
])
if scale:
libAttr.unlock_scale(obj)
affected_attrs.extend([
obj.scale, obj.scaleX, obj.scaleY, obj.scaleZ
])
    # makeIdentity will fail if attributes are connected...
with libAttr.context_disconnected_attrs(affected_attrs, hold_inputs=True, hold_outputs=False):
pymel.makeIdentity(obj, apply=apply, translate=translate, rotate=rotate, scale=scale, **kwargs)
|
{
"content_hash": "69c7f85db7872776d5dee298e82ae689",
"timestamp": "",
"source": "github",
"line_count": 472,
"max_line_length": 117,
"avg_line_length": 30.171610169491526,
"alnum_prop": 0.5911803946352082,
"repo_name": "SqueezeStudioAnimation/omtk",
"id": "2f2ffe66bca521e336fc34cf94ea72ef3362ec4a",
"size": "14241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/omtk/libs/libPymel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mathematica",
"bytes": "1124321"
},
{
"name": "Python",
"bytes": "1054644"
},
{
"name": "Shell",
"bytes": "143"
}
],
"symlink_target": ""
}
|
'''Unit tests for the examples in the examples dir.'''
from bayesian.factor_graph import build_graph
from bayesian.examples.factor_graphs.cancer import fP, fS, fC, fX, fD
'''
Since one of the goals of this package
are to have many working examples its
very important that the examples work
correctly "out of the box".
Please add unit tests for all examples
and give references to their sources.
Note that the identical graph also
appears in test_graph where many more
lower level tests are run. These tests
however import the code directly from
the examples directory.
'''
def pytest_funcarg__cancer_graph(request):
g = build_graph(
fP, fS, fC, fX, fD,
domains={
'P': ['low', 'high']})
return g
class TestCancerGraph():
'''
See table 2.2 of BAI_Chapter2.pdf
For verification of results.
(Note typo in some values)
'''
def test_no_evidence(self, cancer_graph):
'''Column 2 of upper half of table'''
result = cancer_graph.query()
assert round(result[('P', 'high')], 3) == 0.1
assert round(result[('P', 'low')], 3) == 0.9
assert round(result[('S', True)], 3) == 0.3
assert round(result[('S', False)], 3) == 0.7
assert round(result[('C', True)], 3) == 0.012
assert round(result[('C', False)], 3) == 0.988
assert round(result[('X', True)], 3) == 0.208
assert round(result[('X', False)], 3) == 0.792
assert round(result[('D', True)], 3) == 0.304
assert round(result[('D', False)], 3) == 0.696
def test_D_True(self, cancer_graph):
'''Column 3 of upper half of table'''
result = cancer_graph.query(D=True)
assert round(result[('P', 'high')], 3) == 0.102
assert round(result[('P', 'low')], 3) == 0.898
assert round(result[('S', True)], 3) == 0.307
assert round(result[('S', False)], 3) == 0.693
assert round(result[('C', True)], 3) == 0.025
assert round(result[('C', False)], 3) == 0.975
assert round(result[('X', True)], 3) == 0.217
assert round(result[('X', False)], 3) == 0.783
assert round(result[('D', True)], 3) == 1
assert round(result[('D', False)], 3) == 0
def test_S_True(self, cancer_graph):
'''Column 4 of upper half of table'''
result = cancer_graph.query(S=True)
assert round(result[('P', 'high')], 3) == 0.1
assert round(result[('P', 'low')], 3) == 0.9
assert round(result[('S', True)], 3) == 1
assert round(result[('S', False)], 3) == 0
assert round(result[('C', True)], 3) == 0.032
assert round(result[('C', False)], 3) == 0.968
assert round(result[('X', True)], 3) == 0.222
assert round(result[('X', False)], 3) == 0.778
assert round(result[('D', True)], 3) == 0.311
assert round(result[('D', False)], 3) == 0.689
def test_C_True(self, cancer_graph):
'''Column 5 of upper half of table'''
result = cancer_graph.query(C=True)
assert round(result[('P', 'high')], 3) == 0.249
assert round(result[('P', 'low')], 3) == 0.751
assert round(result[('S', True)], 3) == 0.825
assert round(result[('S', False)], 3) == 0.175
assert round(result[('C', True)], 3) == 1
assert round(result[('C', False)], 3) == 0
assert round(result[('X', True)], 3) == 0.9
assert round(result[('X', False)], 3) == 0.1
assert round(result[('D', True)], 3) == 0.650
assert round(result[('D', False)], 3) == 0.350
def test_C_True_S_True(self, cancer_graph):
'''Column 6 of upper half of table'''
result = cancer_graph.query(C=True, S=True)
assert round(result[('P', 'high')], 3) == 0.156
assert round(result[('P', 'low')], 3) == 0.844
assert round(result[('S', True)], 3) == 1
assert round(result[('S', False)], 3) == 0
assert round(result[('C', True)], 3) == 1
assert round(result[('C', False)], 3) == 0
assert round(result[('X', True)], 3) == 0.9
assert round(result[('X', False)], 3) == 0.1
assert round(result[('D', True)], 3) == 0.650
assert round(result[('D', False)], 3) == 0.350
def test_D_True_S_True(self, cancer_graph):
'''Column 7 of upper half of table'''
result = cancer_graph.query(D=True, S=True)
assert round(result[('P', 'high')], 3) == 0.102
assert round(result[('P', 'low')], 3) == 0.898
assert round(result[('S', True)], 3) == 1
assert round(result[('S', False)], 3) == 0
assert round(result[('C', True)], 3) == 0.067
assert round(result[('C', False)], 3) == 0.933
assert round(result[('X', True)], 3) == 0.247
assert round(result[('X', False)], 3) == 0.753
assert round(result[('D', True)], 3) == 1
assert round(result[('D', False)], 3) == 0
|
{
"content_hash": "a54e425b3c05b46a2d3e03e9bc2011ee",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 69,
"avg_line_length": 40.90833333333333,
"alnum_prop": 0.5477694031370951,
"repo_name": "kamijawa/ogc_server",
"id": "c0617adff70eda1403866d9363b661c54ea5ac41",
"size": "4909",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bayesian/test/test_examples.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2980"
},
{
"name": "Python",
"bytes": "2338027"
},
{
"name": "Shell",
"bytes": "705"
}
],
"symlink_target": ""
}
|
import csv
import itertools
import argparse
import importlib
import sys
from csvgroup.common import dump_as_csv
def cast_to_type(type, values):
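    # Skip falsy values (e.g. blank CSV cells) before casting.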
return [type(value) for value in values if value]
def aggregate(function, values, type):
cast_values = cast_to_type(type, values)
return function(cast_values) if cast_values else None
def get_values(column, items):
return [item.get(column) for item in items]
def process(items, group, columns, function, type):
def key(item):
return [item.get(column) for column in group]
def groups(items, key):
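        # itertools.groupby only merges consecutive items, so the input is
        # sorted by the same key first.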
return (
(group_key, list(generator))
for group_key, generator in itertools.groupby(
sorted(items, key=key),
key
)
)
fieldnames = group + columns
yield fieldnames
for group_key, group_items in groups(items, key):
yield (
group_key + [
aggregate(
function,
get_values(column, group_items),
type
)
for column in columns
]
)
def arguments():
parser = argparse.ArgumentParser(
description="Perform group-by aggregation on CSV files"
)
parser.add_argument(
"--group",
"-g",
help="list of columns to group by, separated by comma")
parser.add_argument(
"--import-module",
"-i",
help="module to import for the operation")
parser.add_argument(
"--type",
"-t",
default="float",
help="type of columns to aggregate (default: float)")
parser.add_argument(
"columns",
help="list of columns to perform aggregation on, separated by comma")
parser.add_argument(
"function",
help=("Python function that performs aggregation (e.g., 'sum' "
+ "or 'lambda v: 1. * sum(v) / len(v)')"))
return parser.parse_args()
def main():
args = arguments()
environment = dict()
if args.import_module:
module = importlib.import_module(args.import_module)
environment[module.__name__] = module
group = args.group.split(",") if args.group else []
columns = args.columns.split(",")
function = eval(args.function, environment)
type = eval(args.type, environment)
dump_as_csv(
process(
csv.DictReader(sys.stdin),
group,
columns,
function,
type
)
)
if __name__ == "__main__":
main()
|
{
"content_hash": "23aa848938b0176496556f6afac2cdda",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 77,
"avg_line_length": 23.88888888888889,
"alnum_prop": 0.5608527131782945,
"repo_name": "ceumicrodata/csvgroup",
"id": "0b6faf0056fb4d2eaf4cdf442668af90bf84eb0b",
"size": "2580",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "csvgroup/aggregate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9002"
}
],
"symlink_target": ""
}
|
def recover_message(input_string):
message = ""
lines = []
for line in input_string.split("\n"):
if line.strip() != "":
lines.append(line)
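    # Transpose the message: each element of chars is one column of letters.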
chars = zip(*lines)
for char_list in chars:
counts = {}
for char in char_list:
if char not in counts:
counts[char] = 1
else:
counts[char] += 1
message += sorted(counts.items(), key=lambda a : a[1], reverse=True)[0][0]
return message
if __name__=="__main__":
input_string = open("input.txt", "r").read()
print recover_message(input_string)
|
{
"content_hash": "76a8996d6ad8b7db858ad158ca60072f",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 82,
"avg_line_length": 21.517241379310345,
"alnum_prop": 0.5112179487179487,
"repo_name": "mnestis/advent2016",
"id": "1dce15206aa5ad9afa7764815d3be9917986fe00",
"size": "643",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "06/part1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18631"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
#
# Import modules
#
import HTMLParser
import base64
import getpass
import optparse
import os
import httplib
import urllib
import urllib2
import sys
import time
class CalientController(object):
"""Abstract interface for all Calient Switch Controls"""
def __init__(self):
"""Initialization"""
self._conns = None
def showConnections(self):
"""Print all of the available connections to the screen"""
print('Port \t Name \t XConnect')
print('---- \t --------------- \t --------')
self._conns = self._getConnections()
for c in self._conns:
(port, group, power, circuit, name) = c
print(port, '\t', name, '\t', circuit)
def showCrossConnections(self):
"""Print all of the available connections to the screen"""
print('Cross Connections')
print('-----------------')
self._conns = self._getConnections()
for c in self._conns:
(port, group, power, circuit, name) = c
if circuit:
print(circuit)
def refreshCrossConnections(self):
"""Refresh cross connections"""
if self._conns is None:
self._conns = self._getConnections()
# Find the ports for each circuit
circuitPorts = {}
conns = self._getConnections()
for c in conns:
(port, group, power, circuit, name) = c
if circuit:
if circuit in circuitPorts:
circuitPorts[circuit].append(port)
else:
circuitPorts[circuit] = [port]
# Refresh each identified circuit
for circ, ports in circuitPorts.items():
print('Refreshing XConnect:', circ, circuitPorts[circ])
self._refreshCrossConnect(ports[0], ports[1])
def crossConnect(self, conn1, conn2):
"""Cross connect conn1 and conn2"""
if self._conns is None:
self._conns = self._getConnections()
(port1, _, _, circuit1, name1) = self.getConnection(conn1)
(port2, _, _, circuit2, name2) = self.getConnection(conn2)
if circuit1 or circuit2:
print('ERROR: Connection already exists')
else:
circuit = name1 + '-' + name2
print('Adding circuit', circuit)
self._addCrossConnect(name1, name2, circuit)
def deleteCrossConnect(self, conn):
"""Cross connect conn1 and conn2"""
if self._conns is None:
self._conns = self._getConnections()
# Get the first port
(port1, _, _, circuit1, name1) = self.getConnection(conn)
if not circuit1:
print('ERROR: No XConnect exists for', port1)
else:
# Get the second port
(port1, port2) = self.getPorts(circuit1)
self._removeCrossConnect(port1, port2)
def getConnection(self, conn):
"""
Lookup port by portId, shorthand, portname, or circuit. Shorthand
means that instead of typing 1.1.3, the user can simply supply 113.
"""
if self._conns is None:
self._conns = self._getConnections()
        # First loop through by port id and shorthand
for c in self._conns:
(portId, _, _, _, _) = c
shorthand = portId.translate(None, '.')
if conn == portId or conn == shorthand:
return c
# Search by port name
for c in self._conns:
(_, _, _, _, name) = c
if conn == name:
return c
# Search by circuit name
for c in self._conns:
(_, _, _, circuit, _) = c
if conn == circuit:
return c
return None
def getPorts(self, circuit):
"""@return tuple with both ports for the circuit"""
if self._conns is None:
self._conns = self._getConnections()
# Find the ports for each circuit
circuitPorts = {}
conns = self._getConnections()
        for c in conns:
            (port, group, power, circ, name) = c
            if circ:
                if circ in circuitPorts:
                    circuitPorts[circ].append(port)
                else:
                    circuitPorts[circ] = [port]
# Return the requested circuit ports
        return (circuitPorts[circuit][0], circuitPorts[circuit][1])
def _getConnections(self):
"""@return map of port, connection name pairs"""
print('ERROR: _getConnections not provided')
return None
def _addCrossConnect(self, portName1, portName2, circuitName):
"""Cross connect ports on the calient"""
print('ERROR: _addCrossConnect not provided')
def _refreshCrossConnect(self, port1, port2):
"""Cross connect ports on the calient"""
print('ERROR: _refreshCrossConnect not provided')
def _removeCrossConnect(self, port1, port2):
"""Cross connect ports on the calient"""
print('ERROR: _removeCrossConnect not provided')
class CalientHTTPController(CalientController):
"""Controller for Calient Optical Switch"""
def __init__(self, host, user, password):
CalientController.__init__(self)
self._host = host
        self._auth = base64.b64encode(user + ':' + password)
self._sessionId = None
self._renewSession()
def _renewSession(self):
"""@return the sessionid for the renewed session"""
if self._sessionId is None:
cookie = None
while cookie is None:
try:
# Request a session cookie
url = 'http://' + self._host + '/index.html'
request = urllib2.Request(url)
request.add_header('User-Agent', 'calientctl')
request.add_header('Connection', 'keep-alive')
request.add_header('Authorization', 'Basic ' + self._auth)
request.add_header('Cookie', 'sessionid=invalid')
r = urllib2.urlopen(request)
cookie = r.headers['set-cookie']
except urllib2.HTTPError:
print('ERROR: Check user/password for', self._host)
pass
if cookie is None:
print('Unable to establish session, trying again in 2 secs')
time.sleep(2)
self._sessionId = cookie
return self._sessionId
def _getConnections(self):
"""@return map of connection name/port number pairs"""
# Make sure there is a valid session
self._renewSession()
# Build the HTTP request header and parameters
url = 'http://%s/port/portSummaryTable.html?shelf=1' % self._host
request = urllib2.Request(url)
request.add_header('User-Agent', 'calientctl')
request.add_header('Connection', 'Keep-Alive')
request.add_header('Authorization', 'Basic ' + self._auth)
request.add_header('Cookie', self._sessionId)
r = urllib2.urlopen(request)
data = r.read()
# Parse the HTML to get the port map
parser = CalientPortTableParser()
parser.feed(data)
return parser.getConnections()
def _addCrossConnect(self, portName1, portName2, circuitName):
"""Cross connect ports on the calient"""
# Make sure there is a valid session
self._renewSession()
# Build the HTTP request header and parameters
postData = {'groupName2' : 'SYSTEM',
'connName2' : circuitName,
'direction2' : 'Bi',
'lightBand2' : 'W',
'from2' : portName1,
'to2' : portName2,
'defDirection2' : 'Bi',
'autoFocusOnOff' : 'AFEnabled',
'noLightConnOnOff' : 'NLCDisabled'}
url = 'http://%s/xConnects/xConnectsAddConnForm.html/bogusAction' % self._host
data = urllib.urlencode(postData)
request = urllib2.Request(url, data)
request.add_header('User-Agent', 'calientctl')
request.add_header('Connection', 'Keep-Alive')
request.add_header('Authorization', 'Basic ' + self._auth)
request.add_header('Cookie', self._sessionId)
r = urllib2.urlopen(request)
if 200 != r.getcode():
print('ERROR cross connecting', portName1, portName2)
pass
def _removeCrossConnect(self, port1, port2):
"""Cross connect ports on the calient"""
# Make sure there is a valid session
self._renewSession()
# Build the HTTP request header and parameters
        params = 'xgroup=SYSTEM&xconnect=%s-%s&actionType=Retry' % (port1, port2)
url = 'http://%s/xConnects/processConnection.html' % self._host
url += '?' + params
request = urllib2.Request(url)
request.add_header('User-Agent', 'calientctl')
request.add_header('Connection', 'Keep-Alive')
request.add_header('Authorization', 'Basic ' + self._auth)
request.add_header('Cookie', self._sessionId)
r = urllib2.urlopen(request)
if 200 != r.getcode():
print('ERROR removing cross connect:', port1, port2)
def _refreshCrossConnect(self, port1, port2):
"""Cross connect ports on the calient"""
# Make sure there is a valid session
self._renewSession()
# Build the HTTP request header and parameters
xconnect = port1 + '-' + port2
params = 'xgroup=SYSTEM&xconnect=' + xconnect + '&actionType=Retry'
url = 'http://%s/xConnects/processConnection.html' % self._host
url += '?' + params
request = urllib2.Request(url)
request.add_header('User-Agent', 'calientctl')
request.add_header('Connection', 'Keep-Alive')
request.add_header('Authorization', 'Basic ' + self._auth)
request.add_header('Cookie', self._sessionId)
r = urllib2.urlopen(request)
if 200 != r.getcode():
print('ERROR cross connecting', port1, port2)
class CalientPortTableParser(HTMLParser.HTMLParser):
def __init__(self):
"""Initialize the maps"""
HTMLParser.HTMLParser.__init__(self)
self._connections = []
def getConnections(self):
"""
@return a list of connection tuples
the tuple contains (port, group, power, circuit, name)
"""
return self._connections
def handle_data(self, data):
"""Port descriptions are in the data stream"""
port = ''
group = ''
power = ''
circuit = ''
name = ''
for line in data.splitlines():
if -1 != line.find('PortDesc ='):
fields = line.split('"')
port = fields[1]
elif -1 != line.find('PortGroup ='):
fields = line.split('"')
group = fields[1]
elif -1 != line.find('\tinPwrVal ='):
fields = line.split('"')
power = fields[1]
elif -1 != line.find('CircuitID ='):
fields = line.split('"')
circuit = fields[1]
elif -1 != line.find('PortName ='):
fields = line.split('"')
name = fields[1]
elif -1 != line.find('new portsummary'):
conn = (port, group, power, circuit, name)
self._connections.append(conn)
def createOptionsParser():
"""Create an option parser for calientctl"""
usage='usage: calientctl.py [Options] <host>'
parser = optparse.OptionParser(usage=usage)
parser.add_option('-d', '--delete', dest='delete',
action='store', type='string', metavar='conn',
help='Cross connection to delete (port, name, shorthand, or circuit)')
parser.add_option('-p', '--password', dest='password', default='',
action='store', type='string', metavar='pw',
help='Calient access password')
parser.add_option('-r', '--refresh', dest='refresh', default=False,
action='store_true',
help='Refresh all cross connections')
parser.add_option('-u', '--user', dest='user', default=os.getlogin(),
action='store', type='string', metavar='user',
                      help='Calient access user')
parser.add_option('-x', '--xconnect', dest='xc',
action='store', type='string', metavar='conn,conn',
help='Pair of connections (port, name, or shorthand) to cross connect')
return parser
def control_calient():
"""Refresh, set or show the Calient cross connections"""
parser = createOptionsParser()
(opts, args) = parser.parse_args()
if 1 > len(args):
print('ERROR: calient hostname is required')
parser.print_usage()
return 1
# For each host, perform the commands
for host in args:
ctl = CalientHTTPController(host, opts.user, opts.password)
if opts.refresh:
ctl.refreshCrossConnections()
if opts.xc:
c1,c2 = opts.xc.split(',')
ctl.crossConnect(c1, c2)
if opts.delete:
ctl.deleteCrossConnect(opts.delete)
if not opts.refresh and not opts.xc:
ctl.showConnections()
else:
ctl.showCrossConnections()
def main():
"""Catch outermost exceptions"""
try:
rc = control_calient()
    except KeyboardInterrupt:
        print('Cancelled by user')
        rc = 1
    return rc
if __name__ == '__main__':
rc = main()
sys.exit(rc)
|
{
"content_hash": "1d935b996b728c91ec03eed3a37e63af",
"timestamp": "",
"source": "github",
"line_count": 379,
"max_line_length": 93,
"avg_line_length": 36.699208443271765,
"alnum_prop": 0.5530951182687468,
"repo_name": "eunsungc/gt6-RAMSES_8_5",
"id": "1e0bda227acd9a41f095894ffbe7541dc4f40dcc",
"size": "14062",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "xdd-7.0.0.rc-ramses3/contrib/calientctl.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "8555"
},
{
"name": "C",
"bytes": "21601985"
},
{
"name": "C++",
"bytes": "219725"
},
{
"name": "CSS",
"bytes": "3348"
},
{
"name": "Groff",
"bytes": "2150691"
},
{
"name": "HTML",
"bytes": "55140"
},
{
"name": "KRL",
"bytes": "4950"
},
{
"name": "Lex",
"bytes": "18165"
},
{
"name": "M4",
"bytes": "453378"
},
{
"name": "Makefile",
"bytes": "5063950"
},
{
"name": "Objective-C",
"bytes": "212361"
},
{
"name": "Perl",
"bytes": "795825"
},
{
"name": "Python",
"bytes": "246664"
},
{
"name": "Shell",
"bytes": "20472031"
},
{
"name": "XSLT",
"bytes": "13587"
},
{
"name": "Yacc",
"bytes": "23583"
}
],
"symlink_target": ""
}
|
import logging
from userreport.models import UserReport
from django.http import HttpResponse
from django.views.decorators.cache import cache_page
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.dates import DateFormatter
import matplotlib.artist
LOG = logging.getLogger(__name__)
@cache_page(60 * 120)
def report_user_count(request):
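    """Render a PNG chart of total and new users per day (cached for 2 hours)."""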
reports = UserReport.objects.order_by('upload_date')
users_by_date = {}
for report in reports:
t = report.upload_date.date() # group by day
users_by_date.setdefault(t, set()).add(report.user_id_hash)
seen_users = set()
data_scatter = ([], [], [])
for date, users in sorted(users_by_date.items()):
data_scatter[0].append(date)
data_scatter[1].append(len(users))
data_scatter[2].append(len(users - seen_users))
seen_users |= users
fig = Figure(figsize=(12, 6))
ax = fig.add_subplot(111)
fig.subplots_adjust(left=0.08, right=0.95, top=0.95, bottom=0.2)
ax.plot(data_scatter[0], data_scatter[1], marker='o')
ax.plot(data_scatter[0], data_scatter[2], marker='o')
ax.legend(('Total users', 'New users'), 'upper left', frameon=False)
matplotlib.artist.setp(ax.get_legend().get_texts(), fontsize='small')
ax.set_ylabel('Number of users per day')
for label in ax.get_xticklabels():
label.set_rotation(90)
label.set_fontsize(9)
ax.xaxis.set_major_formatter(DateFormatter('%d-%m-%Y'))
canvas = FigureCanvas(fig)
response = HttpResponse(content_type='image/png')
try:
canvas.print_png(response, dpi=80)
except ValueError:
        LOG.warning('Error displaying usercount data (possibly empty stats)')
return HttpResponse('<h1>Warning: No stats data available</h1>')
return response
|
{
"content_hash": "86ca1b8f22c630acbc8b6487570cb231",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 75,
"avg_line_length": 31.033333333333335,
"alnum_prop": 0.6750805585392051,
"repo_name": "leyyin/stk-stats",
"id": "7ab8fa0f87945b7066cf71096d13d9f00869883e",
"size": "1862",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "userreport/views/usercount.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "26181"
},
{
"name": "Python",
"bytes": "65543"
}
],
"symlink_target": ""
}
|
"""
Tests for the pandas.io.common functionalities
"""
import mmap
import pytest
import os
from os.path import isabs
import pandas as pd
import pandas.util.testing as tm
import pandas.util._test_decorators as td
from pandas.io import common
from pandas.compat import is_platform_windows, StringIO, FileNotFoundError
from pandas import read_csv, concat
class CustomFSPath(object):
"""For testing fspath on unknown objects"""
def __init__(self, path):
self.path = path
def __fspath__(self):
return self.path
# Functions that consume a string path and return a string or path-like object
path_types = [str, CustomFSPath]
try:
from pathlib import Path
path_types.append(Path)
except ImportError:
pass
try:
from py.path import local as LocalPath
path_types.append(LocalPath)
except ImportError:
pass
HERE = os.path.abspath(os.path.dirname(__file__))
class TestCommonIOCapabilities(object):
data1 = """index,A,B,C,D
foo,2,3,4,5
bar,7,8,9,10
baz,12,13,14,15
qux,12,13,14,15
foo2,12,13,14,15
bar2,12,13,14,15
"""
def test_expand_user(self):
filename = '~/sometest'
expanded_name = common._expand_user(filename)
assert expanded_name != filename
assert isabs(expanded_name)
assert os.path.expanduser(filename) == expanded_name
def test_expand_user_normal_path(self):
filename = '/somefolder/sometest'
expanded_name = common._expand_user(filename)
assert expanded_name == filename
assert os.path.expanduser(filename) == expanded_name
@td.skip_if_no('pathlib')
def test_stringify_path_pathlib(self):
rel_path = common._stringify_path(Path('.'))
assert rel_path == '.'
redundant_path = common._stringify_path(Path('foo//bar'))
assert redundant_path == os.path.join('foo', 'bar')
@td.skip_if_no('py.path')
def test_stringify_path_localpath(self):
path = os.path.join('foo', 'bar')
abs_path = os.path.abspath(path)
lpath = LocalPath(path)
assert common._stringify_path(lpath) == abs_path
def test_stringify_path_fspath(self):
p = CustomFSPath('foo/bar.csv')
result = common._stringify_path(p)
assert result == 'foo/bar.csv'
@pytest.mark.parametrize('extension,expected', [
('', None),
('.gz', 'gzip'),
('.bz2', 'bz2'),
('.zip', 'zip'),
('.xz', 'xz'),
])
@pytest.mark.parametrize('path_type', path_types)
def test_infer_compression_from_path(self, extension, expected, path_type):
path = path_type('foo/bar.csv' + extension)
compression = common._infer_compression(path, compression='infer')
assert compression == expected
def test_get_filepath_or_buffer_with_path(self):
filename = '~/sometest'
filepath_or_buffer, _, _ = common.get_filepath_or_buffer(filename)
assert filepath_or_buffer != filename
assert isabs(filepath_or_buffer)
assert os.path.expanduser(filename) == filepath_or_buffer
def test_get_filepath_or_buffer_with_buffer(self):
input_buffer = StringIO()
filepath_or_buffer, _, _ = common.get_filepath_or_buffer(input_buffer)
assert filepath_or_buffer == input_buffer
def test_iterator(self):
reader = read_csv(StringIO(self.data1), chunksize=1)
result = concat(reader, ignore_index=True)
expected = read_csv(StringIO(self.data1))
tm.assert_frame_equal(result, expected)
# GH12153
it = read_csv(StringIO(self.data1), chunksize=1)
first = next(it)
tm.assert_frame_equal(first, expected.iloc[[0]])
tm.assert_frame_equal(concat(it), expected.iloc[1:])
@pytest.mark.parametrize('reader, module, error_class, fn_ext', [
(pd.read_csv, 'os', FileNotFoundError, 'csv'),
(pd.read_table, 'os', FileNotFoundError, 'csv'),
(pd.read_fwf, 'os', FileNotFoundError, 'txt'),
(pd.read_excel, 'xlrd', FileNotFoundError, 'xlsx'),
(pd.read_feather, 'feather', Exception, 'feather'),
(pd.read_hdf, 'tables', FileNotFoundError, 'h5'),
(pd.read_stata, 'os', FileNotFoundError, 'dta'),
(pd.read_sas, 'os', FileNotFoundError, 'sas7bdat'),
(pd.read_json, 'os', ValueError, 'json'),
(pd.read_msgpack, 'os', ValueError, 'mp'),
(pd.read_pickle, 'os', FileNotFoundError, 'pickle'),
])
    def test_read_non_existent(self, reader, module, error_class, fn_ext):
pytest.importorskip(module)
path = os.path.join(HERE, 'data', 'does_not_exist.' + fn_ext)
with pytest.raises(error_class):
reader(path)
@pytest.mark.parametrize('reader, module, path', [
(pd.read_csv, 'os', os.path.join(HERE, 'data', 'iris.csv')),
(pd.read_table, 'os', os.path.join(HERE, 'data', 'iris.csv')),
(pd.read_fwf, 'os', os.path.join(HERE, 'data',
'fixed_width_format.txt')),
(pd.read_excel, 'xlrd', os.path.join(HERE, 'data', 'test1.xlsx')),
(pd.read_feather, 'feather', os.path.join(HERE, 'data',
'feather-0_3_1.feather')),
(pd.read_hdf, 'tables', os.path.join(HERE, 'data', 'legacy_hdf',
'datetimetz_object.h5')),
(pd.read_stata, 'os', os.path.join(HERE, 'data', 'stata10_115.dta')),
(pd.read_sas, 'os', os.path.join(HERE, 'sas', 'data',
'test1.sas7bdat')),
(pd.read_json, 'os', os.path.join(HERE, 'json', 'data',
'tsframe_v012.json')),
(pd.read_msgpack, 'os', os.path.join(HERE, 'msgpack', 'data',
'frame.mp')),
(pd.read_pickle, 'os', os.path.join(HERE, 'data',
'categorical_0_14_1.pickle')),
])
def test_read_fspath_all(self, reader, module, path):
pytest.importorskip(module)
mypath = CustomFSPath(path)
result = reader(mypath)
expected = reader(path)
if path.endswith('.pickle'):
# categorical
tm.assert_categorical_equal(result, expected)
else:
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize('writer_name, writer_kwargs, module', [
('to_csv', {}, 'os'),
('to_excel', {'engine': 'xlwt'}, 'xlwt'),
('to_feather', {}, 'feather'),
('to_html', {}, 'os'),
('to_json', {}, 'os'),
('to_latex', {}, 'os'),
('to_msgpack', {}, 'os'),
('to_pickle', {}, 'os'),
('to_stata', {}, 'os'),
])
def test_write_fspath_all(self, writer_name, writer_kwargs, module):
p1 = tm.ensure_clean('string')
p2 = tm.ensure_clean('fspath')
df = pd.DataFrame({"A": [1, 2]})
with p1 as string, p2 as fspath:
pytest.importorskip(module)
mypath = CustomFSPath(fspath)
writer = getattr(df, writer_name)
writer(string, **writer_kwargs)
with open(string, 'rb') as f:
expected = f.read()
writer(mypath, **writer_kwargs)
with open(fspath, 'rb') as f:
result = f.read()
assert result == expected
def test_write_fspath_hdf5(self):
# Same test as write_fspath_all, except HDF5 files aren't
# necessarily byte-for-byte identical for a given dataframe, so we'll
# have to read and compare equality
pytest.importorskip('tables')
df = pd.DataFrame({"A": [1, 2]})
p1 = tm.ensure_clean('string')
p2 = tm.ensure_clean('fspath')
with p1 as string, p2 as fspath:
mypath = CustomFSPath(fspath)
df.to_hdf(mypath, key='bar')
df.to_hdf(string, key='bar')
result = pd.read_hdf(fspath, key='bar')
expected = pd.read_hdf(string, key='bar')
tm.assert_frame_equal(result, expected)
class TestMMapWrapper(object):
def setup_method(self, method):
self.mmap_file = os.path.join(tm.get_data_path(),
'test_mmap.csv')
def test_constructor_bad_file(self):
non_file = StringIO('I am not a file')
non_file.fileno = lambda: -1
# the error raised is different on Windows
if is_platform_windows():
msg = "The parameter is incorrect"
err = OSError
else:
msg = "[Errno 22]"
err = mmap.error
tm.assert_raises_regex(err, msg, common.MMapWrapper, non_file)
target = open(self.mmap_file, 'r')
target.close()
msg = "I/O operation on closed file"
tm.assert_raises_regex(
ValueError, msg, common.MMapWrapper, target)
def test_get_attr(self):
with open(self.mmap_file, 'r') as target:
wrapper = common.MMapWrapper(target)
attrs = dir(wrapper.mmap)
attrs = [attr for attr in attrs
if not attr.startswith('__')]
attrs.append('__next__')
for attr in attrs:
assert hasattr(wrapper, attr)
assert not hasattr(wrapper, 'foo')
def test_next(self):
with open(self.mmap_file, 'r') as target:
wrapper = common.MMapWrapper(target)
lines = target.readlines()
for line in lines:
next_line = next(wrapper)
assert next_line.strip() == line.strip()
pytest.raises(StopIteration, next, wrapper)
def test_unknown_engine(self):
with tm.ensure_clean() as path:
df = tm.makeDataFrame()
df.to_csv(path)
with tm.assert_raises_regex(ValueError, 'Unknown engine'):
read_csv(path, engine='pyt')
|
{
"content_hash": "90ceedfc3c18ea6b8151f5b52fb51d0e",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 79,
"avg_line_length": 34.395833333333336,
"alnum_prop": 0.5702604482132041,
"repo_name": "zfrenchee/pandas",
"id": "a0070dce6a7f1850d46c9de25386f6d0466ec163",
"size": "9906",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/tests/io/test_common.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3847"
},
{
"name": "C",
"bytes": "470171"
},
{
"name": "C++",
"bytes": "17193"
},
{
"name": "HTML",
"bytes": "551706"
},
{
"name": "Makefile",
"bytes": "989"
},
{
"name": "PowerShell",
"bytes": "2970"
},
{
"name": "Python",
"bytes": "12658422"
},
{
"name": "Shell",
"bytes": "25785"
},
{
"name": "Smarty",
"bytes": "2045"
}
],
"symlink_target": ""
}
|
"""Tests for the main app."""
from __future__ import (
absolute_import, division, print_function, unicode_literals,
)
import itertools
import json
import logging
import os
import subprocess
from unittest import TestCase
import jupyter_core.paths
import nose.tools as nt
from jupyter_contrib_core.notebook_compat import nbextensions
from jupyter_contrib_core.testing_utils import (
get_logger, patch_traitlets_app_logs,
)
from jupyter_contrib_core.testing_utils.jupyter_env import patch_jupyter_dirs
from nose.plugins.skip import SkipTest
from traitlets.config import Config
from traitlets.tests.utils import check_help_all_output, check_help_output
from jupyter_contrib_nbextensions.application import main as main_app
from jupyter_contrib_nbextensions.application import (
BaseContribNbextensionsApp, BaseContribNbextensionsInstallApp,
ContribNbextensionsApp, InstallContribNbextensionsApp,
UninstallContribNbextensionsApp,
)
app_classes = (
BaseContribNbextensionsApp, BaseContribNbextensionsInstallApp,
ContribNbextensionsApp,
InstallContribNbextensionsApp, UninstallContribNbextensionsApp,
)
def _get_files_in_tree(tree_root_dir):
installed_files = []
for root, subdirs, files in os.walk(tree_root_dir, followlinks=True):
installed_files.extend([os.path.join(root, f) for f in files])
return installed_files
class AppTest(TestCase):
"""Tests for the main app."""
@classmethod
def setup_class(cls):
        cls.log = get_logger(cls.__name__)
cls.log.handlers = []
cls.log.propagate = True
def setUp(self):
"""Set up test fixtures for each test."""
(jupyter_patches, self.jupyter_dirs,
remove_jupyter_dirs) = patch_jupyter_dirs()
for ptch in jupyter_patches:
ptch.start()
self.addCleanup(ptch.stop)
self.addCleanup(remove_jupyter_dirs)
for klass in app_classes:
patch_traitlets_app_logs(klass)
klass.log_level.default_value = logging.DEBUG
def _check_install(self, dirs):
expected_write_dirs = dirs.values()
all_dirs = [
path
for dset in self.jupyter_dirs.values() if isinstance(dset, dict)
for name, path in dset.items() if name != 'root']
msgs = []
installed_files = []
for tree_dir in expected_write_dirs:
in_this_tree = _get_files_in_tree(tree_dir)
installed_files.extend(in_this_tree)
if not in_this_tree:
msgs.append(
'Expected files created in {} but found none.'.format(
tree_dir))
for tree_dir in [d for d in all_dirs if d not in expected_write_dirs]:
in_this_tree = _get_files_in_tree(tree_dir)
if in_this_tree:
msgs.append(
'Expected no files created in {} but found:\n\t{}'.format(
tree_dir, '\n\t'.join(in_this_tree)))
installed_files.extend(in_this_tree)
# check that dependency-provided nbexts got installed
if 'data' in dirs:
expected_require_paths = [
p.replace('/', os.path.sep) + '.js' for p in [
'highlight_selected_word/main',
'latex_envs/latex_envs',
]]
for req_part in expected_require_paths:
if not any([
p for p in installed_files if p.endswith(req_part)]):
msgs.append(
'Expected a file ending in {} but found none'.format(
req_part))
nt.assert_false(bool(msgs), '\n'.join(msgs))
return installed_files
def _check_uninstall(self, dirs, installed_files):
# check that nothing remains in the data directory
data_installed = [
path for path in installed_files
if path.startswith(dirs['data']) and os.path.exists(path)]
nt.assert_false(
data_installed,
'Uninstall should remove all data files from {}'.format(
dirs['data']))
# check the config directory
conf_installed = [
path for path in installed_files
if path.startswith(dirs['conf']) and os.path.exists(path)]
for path in conf_installed:
with open(path, 'r') as f:
conf = Config(json.load(f))
confstrip = {}
confstrip.update(conf)
# strip out config values we are ok to have remain
confstrip.pop('NotebookApp', None)
confstrip.pop('version', None)
conf_exts = confstrip.get('load_extensions', {})
conf_exts.pop('nbextensions_configurator/config_menu/main', None)
conf_exts.pop('nbextensions_configurator/tree_tab/main', None)
if not conf_exts:
confstrip.pop('load_extensions', None)
nt.assert_false(confstrip, 'disable should leave config empty.')
def _get_default_check_kwargs(self, argv=None, dirs=None):
if argv is None:
argv = []
if dirs is None:
dirs = {
'conf': jupyter_core.paths.SYSTEM_CONFIG_PATH[0],
'data': jupyter_core.paths.SYSTEM_JUPYTER_PATH[0],
}
return argv, dirs
def _call_main_app(self, argv):
main_app(argv=argv)
# a bit of a hack to allow initializing a new app instance
for klass in app_classes:
klass.clear_instance()
def _check_subproc(self, args):
proc = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
try:
output, unused_err = proc.communicate()
except:
proc.kill()
proc.wait()
raise
print(output.decode()) # this gets it captured by nose
retcode = proc.poll()
nt.assert_equal(retcode, 0, 'command should exit with code 0')
def check_app_install(self, argv=None, dirs=None, dirs_install=None):
"""Check files were installed in the correct place."""
argv, dirs = self._get_default_check_kwargs(argv, dirs)
if dirs_install is None:
dirs_install = dirs
self._call_main_app(argv=['install'] + argv)
installed_files = self._check_install(dirs_install)
self._call_main_app(argv=['uninstall'] + argv)
self._check_uninstall(dirs, installed_files)
def check_cli_install(self, argv=None, dirs=None, dirs_install=None,
app_name='jupyter contrib nbextension'):
argv, dirs = self._get_default_check_kwargs(argv, dirs)
if dirs_install is None:
dirs_install = dirs
args = app_name.split(' ') + ['install'] + argv
self._check_subproc(args)
installed_files = self._check_install(dirs_install)
args = app_name.split(' ') + ['uninstall'] + argv
self._check_subproc(args)
self._check_uninstall(dirs, installed_files)
def test_00_extra_args(self):
"""Check that app complains about extra args."""
for subcom in ('install', 'uninstall'):
# sys.exit should be called if extra args specified
with nt.assert_raises(SystemExit):
main_app([subcom, 'arbitrary_extension_name'])
for klass in app_classes:
klass.clear_instance()
def test_01_help_output(self):
"""Check that app help works."""
app_module = 'jupyter_contrib_nbextensions.application'
for subcommand in (None, ['install'], ['uninstall']):
check_help_output(app_module, subcommand=subcommand)
check_help_all_output(app_module, subcommand=subcommand)
# sys.exit should be called if empty argv specified
with nt.assert_raises(SystemExit):
main_app([])
for klass in app_classes:
klass.clear_instance()
def test_02_argument_conflict(self):
"""Check that install objects to multiple flags."""
conflicting_flags = ('--user', '--system', '--sys-prefix')
conflicting_flagsets = []
for nn in range(2, len(conflicting_flags) + 1):
conflicting_flagsets.extend(
itertools.combinations(conflicting_flags, nn))
for subcommand in ('install', 'uninstall'):
for flagset in conflicting_flagsets:
self.log.info('testing conflicting flagset {}'.format(flagset))
nt.assert_raises(nbextensions.ArgumentConflict,
main_app, [subcommand] + list(flagset))
for klass in app_classes:
klass.clear_instance()
def test_03_app_install_defaults(self):
"""Check that app install works correctly using defaults."""
self.check_app_install()
# don't test cli install with defaults, as we can't patch system
# directories in the subprocess
def test_05_app_install_user(self):
"""Check that app install works correctly using --user flag."""
self.check_app_install(
argv=['--user'], dirs=self.jupyter_dirs['env_vars'])
def test_06_cli_install_user(self):
"""Check that cli install works correctly using --user flag."""
self.check_cli_install(
argv=['--user'], dirs=self.jupyter_dirs['env_vars'])
def test_07_app_install_sys_prefix(self):
"""Check that app install works correctly using --sys-prefix flag."""
self.check_app_install(
dirs=self.jupyter_dirs['sys_prefix'], argv=['--sys-prefix'])
# don't test cli install with --sys-prefix flag, as we can't patch
# directories in the subprocess
def test_08_app_install_system(self):
"""Check that app install works correctly using --system flag."""
self.check_app_install(
dirs=self.jupyter_dirs['system'], argv=['--system'])
# don't test cli install with --system flag, as we can't patch
# directories in the subprocess
def test_09_app_install_symlink(self):
"""Check that app install works correctly using --symlink flag."""
if os.name in ('nt', 'dos'):
raise SkipTest('symlinks are not supported on Windows.')
self.check_app_install(argv=['--symlink'])
def test_10_cli_install_symlink(self):
"""Check that cli install works correctly using --symlink flag."""
if os.name in ('nt', 'dos'):
raise SkipTest('symlinks are not supported on Windows.')
self.check_cli_install(
argv=['--user', '--symlink'], dirs=self.jupyter_dirs['env_vars'])
def test_11_app_install_nbextensions_dir(self):
"""Check that app install works correctly using --nbextensions arg."""
dirs = self._get_default_check_kwargs()[1]
dirs['data'] = self.jupyter_dirs['custom']['data']
nbext_dir = os.path.join(dirs['data'], 'nbextensions')
self.check_app_install(dirs=dirs, argv=['--nbextensions=' + nbext_dir])
# We can't test cli install using nbextensions_dir, since it edits system
# config, and we can't patch directories in the subprocess
def test_12_app_plural_alias(self):
"""Check that app works correctly when using 'nbextensions' plural."""
self.check_cli_install(
argv=['--user'], dirs=self.jupyter_dirs['env_vars'],
app_name='jupyter contrib nbextensions')
def test_13_app_install_prefix(self):
"""Check that app install works correctly using --prefix arg."""
dirs = self._get_default_check_kwargs()[1]
dirs['data'] = self.jupyter_dirs['custom']['data']
self.check_app_install(dirs=dirs, argv=['--prefix=' + dirs['data']])
def test_14_app_install_only_files(self):
"""Check that install works correctly using --only-files flag."""
argv, dirs = self._get_default_check_kwargs()
self.check_app_install(
argv=argv + ['--only-files'], dirs=dirs,
dirs_install={'data': dirs['data']})
def test_15_app_install_only_config(self):
"""Check that install works correctly using --only-config flag."""
argv, dirs = self._get_default_check_kwargs()
self.check_app_install(
argv=argv + ['--only-config'], dirs=dirs,
dirs_install={'conf': dirs['conf']})
|
{
"content_hash": "02a34333d549254a22c49692d40475dc",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 79,
"avg_line_length": 41.20198675496689,
"alnum_prop": 0.6047576950896086,
"repo_name": "RTHMaK/RPGOne",
"id": "155e11a93fdee204fd3cfe21cf824d881552b015",
"size": "12467",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "jupyter_contrib_nbextensions-master/tests/test_application.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "1C Enterprise",
"bytes": "36"
},
{
"name": "Batchfile",
"bytes": "15029"
},
{
"name": "CSS",
"bytes": "41709"
},
{
"name": "Erlang",
"bytes": "39438"
},
{
"name": "Go",
"bytes": "287"
},
{
"name": "HTML",
"bytes": "633076"
},
{
"name": "JavaScript",
"bytes": "1128791"
},
{
"name": "Jupyter Notebook",
"bytes": "927247"
},
{
"name": "Makefile",
"bytes": "31756"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Matlab",
"bytes": "9454"
},
{
"name": "PHP",
"bytes": "708541"
},
{
"name": "PowerShell",
"bytes": "68503"
},
{
"name": "Python",
"bytes": "2278740"
},
{
"name": "Ruby",
"bytes": "1136"
},
{
"name": "Shell",
"bytes": "62555"
},
{
"name": "Smarty",
"bytes": "5752"
},
{
"name": "TeX",
"bytes": "34544"
}
],
"symlink_target": ""
}
|
from io import BytesIO
import tornado.web
from tornado import gen
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError
from tornado.httputil import HTTPHeaders
from tornado.testing import AsyncHTTPTestCase, gen_test
from smart_sentinel.tornado_client import TornadoStrictRedis
from tornado_stale_client import StaleHTTPClient
class FakeClient(object):
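    """Stub HTTP client whose fetch() pops queued HTTPResponse objects in
    FIFO order; queue responses with add_response()."""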
def __init__(self, responses=None):
self.responses = responses or []
@gen.coroutine
def fetch(self, request, **kwargs):
raise gen.Return(self.responses.pop(0))
def add_response(self, headers=None, code=200, body=b'fake'):
request = HTTPRequest('/fake')
headers = HTTPHeaders(headers or {})
buffer = BytesIO(body)
response = HTTPResponse(
request=request,
headers=headers,
code=code,
buffer=buffer,
)
self.responses.append(response)
return response
class StaleHTTPClientTestCase(AsyncHTTPTestCase):
@gen.coroutine
def setUp(self):
super(StaleHTTPClientTestCase, self).setUp()
self.fake_client = FakeClient()
self.cache = TornadoStrictRedis()
yield self.cache.flushall()
def get_app(self):
return tornado.web.Application([])
@gen_test
def test_returns_response(self):
fake_response = self.fake_client.add_response(
code=200, body=b'fake response', headers={'fake': 'header'})
client = StaleHTTPClient(client=self.fake_client)
response = yield client.fetch('/url')
self.assertResponseEqual(response, fake_response)
@gen_test
def test_accepts_request_object(self):
fake_response = self.fake_client.add_response()
client = StaleHTTPClient(client=self.fake_client)
request = HTTPRequest('/url')
response = yield client.fetch(request)
self.assertIs(response, fake_response)
@gen_test
def test_returns_real_response(self):
expected_response = self.fake_client.add_response()
client = StaleHTTPClient(client=self.fake_client)
response = yield client.fetch('/url')
self.assertIs(response, expected_response)
@gen_test
def test_returns_response_from_primary_cache(self):
response = self.fake_client.add_response()
client = StaleHTTPClient(client=self.fake_client)
response = yield client.fetch('/url')
cached_response = yield client.fetch('/url')
self.assertIsNot(cached_response, response)
self.assertResponseEqual(cached_response, response)
@gen_test
def test_returns_response_from_primary_with_specific_ttl(self):
response = self.fake_client.add_response()
client = StaleHTTPClient(client=self.fake_client, ttl=0.001)
response = yield client.fetch('/url', ttl=0.05)
yield tornado.gen.sleep(0.002)
cached_response = yield client.fetch('/url')
self.assertIsNot(cached_response, response)
self.assertResponseEqual(cached_response, response)
@gen_test
def test_specific_ttl_is_isolated(self):
first_expected = self.fake_client.add_response()
second_expected = self.fake_client.add_response()
another_response = self.fake_client.add_response(body=b'another')
client = StaleHTTPClient(client=self.fake_client, ttl=1)
yield [client.fetch('/first', ttl=0.001), client.fetch('/second')]
yield tornado.gen.sleep(0.005)
first_response, second_response = yield [
client.fetch('/first'), client.fetch('/second')]
self.assertIsNot(first_response, first_expected)
self.assertResponseEqual(first_response, another_response)
self.assertResponseEqual(second_response, second_expected)
@gen_test
def test_returns_stale_response_after_error(self):
expected_response = self.fake_client.add_response(body=b'stale')
error_response = self.fake_client.add_response(body=b'error', code=500)
client = StaleHTTPClient(client=self.fake_client, ttl=0.001)
yield client.fetch('/url')
yield tornado.gen.sleep(0.002)
stale_response = yield client.fetch('/url')
self.assertIsNot(stale_response, error_response)
self.assertResponseEqual(stale_response, expected_response)
@gen_test
    def test_returns_stale_response_after_error_with_specific_stale_ttl(self):
expected_response = self.fake_client.add_response(body=b'stale')
error_response = self.fake_client.add_response(body=b'error', code=500)
client = StaleHTTPClient(client=self.fake_client, ttl=0.001, stale_ttl=0.001)
yield client.fetch('/url', stale_ttl=0.05)
yield tornado.gen.sleep(0.002)
stale_response = yield client.fetch('/url')
self.assertIsNot(stale_response, error_response)
self.assertResponseEqual(stale_response, expected_response)
@gen_test
def test_returns_error_without_stale_response_after_error(self):
stale_response = self.fake_client.add_response(body=b'stale')
error_response = self.fake_client.add_response(body=b'error', code=500)
client = StaleHTTPClient(client=self.fake_client, ttl=0.001, stale_ttl=0.002)
yield client.fetch('/url')
current_response = yield client.fetch('/url')
self.assertIsNot(current_response, error_response)
self.assertResponseEqual(current_response, stale_response)
yield tornado.gen.sleep(0.003)
with self.assertRaises(HTTPError):
yield client.fetch('/url')
@gen_test
def test_raises_error_after_error_with_empty_cache(self):
self.fake_client.add_response(body=b'error', code=500)
client = StaleHTTPClient(client=self.fake_client, ttl=None)
with self.assertRaises(HTTPError):
yield client.fetch('/url')
@gen_test
def test_returns_error_when_empty_cache_and_raise_error_flag_is_off(self):
expected_response = self.fake_client.add_response(
body=b'error', code=500)
client = StaleHTTPClient(client=self.fake_client, ttl=None)
response = yield client.fetch('/url', raise_error=False)
self.assertIs(response, expected_response)
@gen_test
def test_caches_multiple_urls(self):
first_expected = self.fake_client.add_response()
second_expected = self.fake_client.add_response()
client = StaleHTTPClient(client=self.fake_client, ttl=1)
# Populate cache
yield [client.fetch('/first'), client.fetch('/second')]
# Read from cache
first_response, second_response = yield [
client.fetch('/first'), client.fetch('/second')]
self.assertIsNot(first_response, first_expected)
self.assertIsNot(second_response, second_expected)
self.assertResponseEqual(first_response, first_expected)
self.assertResponseEqual(second_response, second_expected)
@gen_test
def test_varies_cache_by_headers(self):
json_response = self.fake_client.add_response(body=b'{}')
xml_response = self.fake_client.add_response(body=b'<xml />')
client = StaleHTTPClient(client=self.fake_client, ttl=1)
# Populate and read from cache
for i in range(2):
first_response, second_response = yield [
client.fetch('/url', headers={'Accept': 'application/json'}, vary=['Accept']),
client.fetch('/url', headers={'Accept': 'text/xml'}, vary=['Accept'])
]
self.assertIsNot(first_response, json_response)
self.assertIsNot(second_response, xml_response)
self.assertResponseEqual(first_response, json_response)
self.assertResponseEqual(second_response, xml_response)
def assertResponseEqual(self, response, expected_response):
self.assertEqual(response.body, expected_response.body)
self.assertEqual(response.code, expected_response.code)
self.assertEqual(response.headers, expected_response.headers)
self.assertIsInstance(response.headers, HTTPHeaders)
self.assertIsInstance(response.request, HTTPRequest)
self.assertIsInstance(response.request.headers, HTTPHeaders)
|
{
"content_hash": "2f6092b369e1798b87911149d6cee0e2",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 94,
"avg_line_length": 35.668103448275865,
"alnum_prop": 0.6683987915407855,
"repo_name": "globocom/tornado-stale-client",
"id": "d034fc538897a775b09d72acc3325f9e02319186",
"size": "8540",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "454"
},
{
"name": "Python",
"bytes": "15794"
}
],
"symlink_target": ""
}
|
class Solution(object):
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
n = len(height)
l, r, water, min_height = 0, n - 1, 0, 0
while l < r:
while l < r and height[l] <= min_height:
water += min_height - height[l]
l += 1
while r > l and height[r] <= min_height:
water += min_height - height[r]
r -= 1
min_height = min(height[l], height[r])
return water
# Note: The basic idea is that we set two pointers l and r to the left and right end of height. Then we get the
# minimum height (min_height) of these pointers (similar to Container with Most Water due to the Leaking Bucket
# Effect) since the level of the water cannot be higher than it. Then we move the two pointers towards the center. If
# the coming level is less than min_height, then it will hold some water. Fill the water until we meet some "barrier"
# (with height larger than min_height) and update l and r to repeat this process in a new interval.
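# Worked example (standard LeetCode case, added for illustration):
# Solution().trap([0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1]) returns 6.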
|
{
"content_hash": "d1d280b96276e1c6dd27d2dd1e48e4d1",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 118,
"avg_line_length": 48,
"alnum_prop": 0.6005434782608695,
"repo_name": "jigarkb/Programming",
"id": "936cbb4e818e0fdc6f4f4719708dc181e9410dfb",
"size": "1323",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "LeetCode/042-H-TrappingRainWater.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3840"
},
{
"name": "Python",
"bytes": "361231"
}
],
"symlink_target": ""
}
|
'''This example shows a sample checkout form created using bokeh widgets.
.. bokeh-example-metadata::
:apis: bokeh.models.widgets.button, bokeh.models.widgets.checkbox, bokeh.models.widgets.groups, bokeh.models.widgets.inputs
:refs: :ref:`ug_interaction_widgets`
:keywords: form, widgets, form, inputs
'''
from bokeh.io import show
from bokeh.layouts import column, row
from bokeh.models.widgets import Button, Checkbox, RadioGroup, Select, TextInput
# Billing address
first_name = TextInput(title="First name")
last_name = TextInput(title="Last name")
username = TextInput(title="Username", placeholder="Username", prefix="@")
email = TextInput(title="E-mail", placeholder="you@example.com")
address = TextInput(title="Address", placeholder="1234 Main St.")
address2 = TextInput(title="Address 2 (Optional)", placeholder="Apartment or suite")
country = Select(title="Country", options=["United States"]) #, placeholder="Choose...")
state = Select(title="State", options=["California"]) #, placeholder="Choose...")
zip = TextInput(title="Zip")
shipping = Checkbox(label="Shipping address is the same as my billing address")
account = Checkbox(label="Save this information for next time")
# Payment
payment_type = RadioGroup(labels=["Credit card", "Debit card", "PayPal"])
name_on_card = TextInput(title="Name on card", placeholder="Full name as displayed on card")
card_number = TextInput(title="Credit card number")
expiration = TextInput(title="Expiration")
cvv = TextInput(title="CVV")
continue_to_checkout = Button(button_type="primary", label="Continue to checkout", sizing_mode="stretch_width")
form = column([
# billing_address
row([first_name, last_name]),
username,
email,
address,
address2,
row([country, state, zip]),
# hr
shipping,
account,
# hr
# payment
payment_type,
row([name_on_card, card_number]),
row([expiration, cvv]),
# hr
continue_to_checkout,
])
show(form)
|
{
"content_hash": "24599e38cbafb3bc3cd558784f205aba",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 127,
"avg_line_length": 33.440677966101696,
"alnum_prop": 0.7100861632032438,
"repo_name": "bokeh/bokeh",
"id": "a8a0e4e213b79ba367aeea8fc8d4d6349acdb647",
"size": "1973",
"binary": false,
"copies": "1",
"ref": "refs/heads/branch-3.1",
"path": "examples/plotting/checkout_form.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1884"
},
{
"name": "Dockerfile",
"bytes": "1924"
},
{
"name": "GLSL",
"bytes": "44696"
},
{
"name": "HTML",
"bytes": "53475"
},
{
"name": "JavaScript",
"bytes": "20301"
},
{
"name": "Less",
"bytes": "46376"
},
{
"name": "Python",
"bytes": "4475226"
},
{
"name": "Shell",
"bytes": "7673"
},
{
"name": "TypeScript",
"bytes": "3652153"
}
],
"symlink_target": ""
}
|
import wx
import armid
import ARM
from GoalRefinementDialog import GoalRefinementDialog
class GoalAssociationListCtrl(wx.ListCtrl):
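  """List control for goal refinement associations: right-click to add or
  delete an association, double-click a row to edit it in a dialog."""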
def __init__(self,parent,winId,dp,goalList=False,boxSize=wx.DefaultSize):
wx.ListCtrl.__init__(self,parent,winId,size=boxSize,style=wx.LC_REPORT)
self.dbProxy = dp
self.goalList = goalList
self.theCurrentEnvironment = ''
if (self.goalList == True):
self.InsertColumn(0,'Goal')
else:
self.InsertColumn(0,'Sub-Goal')
self.SetColumnWidth(0,200)
self.InsertColumn(1,'Type')
self.SetColumnWidth(1,100)
self.InsertColumn(2,'Refinement')
self.SetColumnWidth(2,100)
self.InsertColumn(3,'Alternative')
self.SetColumnWidth(3,50)
self.InsertColumn(4,'Rationale')
self.SetColumnWidth(4,200)
self.theSelectedIdx = -1
self.theDimMenu = wx.Menu()
self.theDimMenu.Append(armid.SGA_MENUADD_ID,'Add')
self.theDimMenu.Append(armid.SGA_MENUDELETE_ID,'Delete')
self.Bind(wx.EVT_RIGHT_DOWN,self.OnRightDown)
wx.EVT_MENU(self.theDimMenu,armid.SGA_MENUADD_ID,self.onAddAssociation)
wx.EVT_MENU(self.theDimMenu,armid.SGA_MENUDELETE_ID,self.onDeleteAssociation)
self.Bind(wx.EVT_LIST_ITEM_SELECTED,self.OnItemSelected)
self.Bind(wx.EVT_LIST_ITEM_DESELECTED,self.OnItemDeselected)
self.Bind(wx.EVT_LIST_ITEM_ACTIVATED,self.onGoalActivated)
def setEnvironment(self,environmentName):
self.theCurrentEnvironment = environmentName
def OnRightDown(self,evt):
self.PopupMenu(self.theDimMenu)
def onAddAssociation(self,evt):
dlg = GoalRefinementDialog(self,self.dbProxy,self.theCurrentEnvironment,isGoal=self.goalList)
if (dlg.ShowModal() == armid.GOALREFINEMENT_BUTTONCOMMIT_ID):
self.theSelectedIdx = self.GetItemCount()
self.InsertStringItem(self.theSelectedIdx,dlg.goal())
self.SetStringItem(self.theSelectedIdx,1,dlg.goalDimension())
self.SetStringItem(self.theSelectedIdx,2,dlg.refinement())
self.SetStringItem(self.theSelectedIdx,3,dlg.alternate())
self.SetStringItem(self.theSelectedIdx,4,dlg.rationale())
def onDeleteAssociation(self,evt):
if (self.theSelectedIdx == -1):
errorText = 'No association selected'
errorLabel = 'Delete goal association'
dlg = wx.MessageDialog(self,errorText,errorLabel,wx.OK)
dlg.ShowModal()
dlg.Destroy()
else:
selectedValue = self.GetItemText(self.theSelectedIdx)
self.DeleteItem(self.theSelectedIdx)
def OnItemSelected(self,evt):
self.theSelectedIdx = evt.GetIndex()
def OnItemDeselected(self,evt):
self.theSelectedIdx = -1
def onGoalActivated(self,evt):
self.theSelectedIdx = evt.GetIndex()
goal = self.GetItemText(self.theSelectedIdx)
goalDim = self.GetItem(self.theSelectedIdx,1)
refinement = self.GetItem(self.theSelectedIdx,2)
alternate = self.GetItem(self.theSelectedIdx,3)
rationale = self.GetItem(self.theSelectedIdx,4)
dlg = GoalRefinementDialog(self,self.dbProxy,self.theCurrentEnvironment,goal,goalDim.GetText(),refinement.GetText(),alternate.GetText())
if (dlg.ShowModal() == armid.GOALREFINEMENT_BUTTONCOMMIT_ID):
self.SetStringItem(self.theSelectedIdx,0,dlg.goal())
self.SetStringItem(self.theSelectedIdx,1,dlg.goalDimension())
self.SetStringItem(self.theSelectedIdx,2,dlg.refinement())
self.SetStringItem(self.theSelectedIdx,3,dlg.alternate())
self.SetStringItem(self.theSelectedIdx,4,dlg.rationale())
def load(self,goals):
for goal,goalDim,refinement,alternate,rationale in goals:
idx = self.GetItemCount()
self.InsertStringItem(idx,goal)
self.SetStringItem(idx,1,goalDim)
self.SetStringItem(idx,2,refinement)
self.SetStringItem(idx,3,alternate)
self.SetStringItem(idx,4,rationale)
def dimensions(self):
goals = []
for x in range(self.GetItemCount()):
goal = self.GetItemText(x)
goalDim = self.GetItem(x,1)
refinement = self.GetItem(x,2)
alternate = self.GetItem(x,3)
rationale = self.GetItem(x,4)
goals.append((goal,goalDim.GetText(),refinement.GetText(),alternate.GetText(),rationale.GetText()))
return goals
|
{
"content_hash": "0c3bfd993233a2d6fcc41e0211abba61",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 140,
"avg_line_length": 39.424528301886795,
"alnum_prop": 0.7257717157214645,
"repo_name": "RobinQuetin/CAIRIS-web",
"id": "8a4a2ffe425eb9e7239330f2b5a50eb2d9f4c9f3",
"size": "4978",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "cairis/cairis/GoalAssociationListCtrl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11265"
},
{
"name": "Mako",
"bytes": "13226"
},
{
"name": "Python",
"bytes": "3313365"
},
{
"name": "Shell",
"bytes": "19461"
},
{
"name": "XSLT",
"bytes": "35522"
}
],
"symlink_target": ""
}
|
print """
What do I know so far?
A lot of things... we're almost halfway. Python is fun!
"""
|
{
"content_hash": "289c225d0b1a8cbdf971cffeb284ade9",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 55,
"avg_line_length": 15.833333333333334,
"alnum_prop": 0.6421052631578947,
"repo_name": "darthbinamira/learn_python",
"id": "2e8b95e4bb91bf354db63c9dcc64b1605a8b132d",
"size": "95",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lpthw/22/ex22.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "29338"
}
],
"symlink_target": ""
}
|
import sys
import json
import random
import time
import os
import CacheStats
import LRU
CACHE_ATIME = 0
CACHE_SIZE = 1
CACHE_FRESHER_ID = 2
CACHE_OLDER_ID = 3
class MRUCache(LRU.LRUCache):
"""
Most Recently Used (MRU): http://en.wikipedia.org/wiki/Cache_algorithms
Discards, in contrast to LRU, the most recently used items first.
In findings presented at the 11th VLDB conference, Chou and Dewitt noted that
"When a file is being repeatedly scanned in a [Looping Sequential] reference pattern,
MRU is the best replacement algorithm."[3] Subsequently other researchers presenting
at the 22nd VLDB conference noted that for random access patterns and repeated scans
over large datasets (sometimes known as cyclic access patterns) MRU cache algorithms
have more hits than LRU due to their tendency to retain older data.
[4] MRU algorithms are most useful in situations where the older an item is, the more likely it is to be accessed.
"""
def _evict_bytes(self, bytes, xtime):
"""
        evicts the most recently used objects and frees at least @bytes size.
"""
if self.stats.first_eviction_ts == 0:
self.stats.first_eviction_ts = xtime
size_before = self._max_size - self._used_size
if bytes > self._max_size:
raise Exception("Cache too small.")
# if verbose:
# print ("_evict_bytes %d" % (bytes))
evicted_bytes = 0
evicted_objects_cnt = 0
while evicted_bytes < bytes:
freed_bytes = self._remove_cached(self._freshest_obj_id)
if freed_bytes == None:
print("remove for evicted object failed! %r" % self._freshest_obj_id)
sys.exit(1)
evicted_bytes += freed_bytes
# update stats
self.stats.cached_objects_current -= 1
self.stats.evicted_objects += 1
self.daily_stats.evicted_objects += 1
size_after = self._max_size - self._used_size
assert (size_after > size_before)
def test_a():
c = MRUCache(10000)
ts = 1
c.cache_object('a', 1000, ts)
c.get_cached('a', ts)
c.remove_cached('a')
ts += 1
c.cache_object('b', 1000, ts)
c.remove_cached('b')
def test_b():
cache_size = 10000
cache = MRUCache(cache_size)
cache.cache_object('a', 1000, 10)
cache.cache_object('b', 2000, 11)
cache.cache_object('c', 6000, 12)
cache.cache_object('d', 4000, 13)
cache.check_sanity()
assert('d' == cache._freshest_obj_id)
assert(cache.is_cached('a'))
assert(cache.is_cached('b'))
assert(cache.is_cached('d'))
assert(cache.is_cached('c') == False)
def main(argv=None):
test_a()
test_b()
if __name__ == "__main__":
sys.exit(main())
|
{
"content_hash": "65560e42cdcd8f5b93d839dba1899c94",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 118,
"avg_line_length": 27.568627450980394,
"alnum_prop": 0.6194879089615932,
"repo_name": "zdvresearch/fast15-paper-extras",
"id": "bc33f02c49a070b9bf1d0737555014df5b58d9b4",
"size": "2835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cache-simulator/cache_model_evaluation/MRU.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "19024"
},
{
"name": "Makefile",
"bytes": "9378"
},
{
"name": "Python",
"bytes": "781265"
}
],
"symlink_target": ""
}
|
from ...online_crowdsourcing import *
from part import *
import json
import math
import os
import pickle
from PIL import Image
PART_COLORS = [ '#FF0000', '#00FF00', '#008000', '#FFBF4A', '#000080', '#FFFF00', '#626200', '#00FFFF', '#006262', '#FF00FF', '#620062', '#FFFFFF', '#000000', '#44200F' ]
PART_OUTLINE_COLORS = [ '#000000', '#FFFFFF', '#000000', '#FFFFFF', '#000000', '#000000', '#FFFFFF', '#000000', '#FFFFFF', '#000000', '#FFFFFF', '#000000', '#FFFFFF', '#FFFFFF' ]
PART_OUTLINE_GT_COLORS = [ '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF', '#0000FF' ]
PART_OUTLINE_UNFINISHED_COLORS = [ '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000', '#FF0000' ]
PART_OUTLINE_FINISHED_COLORS = [ '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00', '#00FF00' ]
NUM_COLS = 4
# Crowdsourcing a collection of P parts. Many of the computations are based on CrowdDatasetPart, which handles a single part
# independently from all other parts; however, the computer vision module models correlation between parts
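# (Hypothetical construction, for orientation: CrowdDatasetParts(
# part_names=['beak', 'tail'], name='birds') builds one CrowdDatasetPart per
# name and delegates priors, parameter estimation and risk to them part by part.)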
class CrowdDatasetParts(CrowdDataset):
def __init__(self, part_names=None, **kwds):
super(CrowdDatasetParts, self).__init__(**kwds)
self._CrowdImageClass_ = CrowdImageParts
self._CrowdWorkerClass_ = CrowdWorkerParts
self._CrowdLabelClass_ = CrowdLabelParts
self.opts = kwds
self.part_names = part_names
        if part_names is not None:
self.parts = [CrowdDatasetPart(p, color=PART_COLORS[p%len(PART_COLORS)], outline_color=PART_OUTLINE_GT_COLORS[p%len(PART_OUTLINE_COLORS)], name=part_names[p], **kwds) for p in range(len(self.part_names))]
self.encode_exclude['parts'] = True
self.encode_exclude['opts'] = True
self.skill_names = ['Location Sigma', 'Prob Mistake', 'Prob Vis Correct', 'Prob Vis Correct Given Vis', 'Prob Vis Correct Given Not Vis']
name = self.name if self.name and len(self.name) > 0 else "objects"
        self.hit_params = {'object_name': name}
        dollars_per_hour, sec_per_click, sec_per_hour = 8, 2, 3600
        self.images_per_hit = 1
        self.reward = math.ceil(100*float(self.images_per_hit)*dollars_per_hour/sec_per_hour*sec_per_click*(len(part_names) if part_names else 10))/100.0
self.description = self.title = "Click on parts of " + name + " in images"
self.keywords = "click,parts,images," + name
self.html_template_dir = 'html/parts'
def NewCrowdLabel(self, i, w):
return CrowdLabelParts(self.images[i], self.workers[w])
def estimate_priors(self, avoid_if_finished=False):
for p in range(len(self.parts)):
self.parts[p].estimate_priors(avoid_if_finished=avoid_if_finished)
def initialize_parameters(self, avoid_if_finished=False):
for p in range(len(self.parts)):
self.parts[p].initialize_parameters(avoid_if_finished=avoid_if_finished)
def copy_parameters_from(self, dataset, full=True):
super(CrowdDatasetParts, self).copy_parameters_from(dataset, full=full)
self.part_names = dataset.part_names
self.parts = [CrowdDatasetPart(p, color=PART_COLORS[p%len(PART_COLORS)], outline_color=PART_OUTLINE_GT_COLORS[p%len(PART_OUTLINE_COLORS)], name=self.part_names[p], **self.opts) for p in range(len(self.part_names))]
self.reward = dataset.reward
for p in range(len(self.parts)):
self.parts[p].copy_parameters_from(dataset.parts[p], full=full)
def num_unfinished(self, max_annos=float('Inf'), full_dataset=None):
num = 0
for p in range(len(self.parts)):
num += self.parts[p].num_unfinished(max_annos=max_annos, full_dataset=full_dataset)
return num
def num_annotations(self):
num = 0
for p in range(len(self.parts)):
num += self.parts[p].num_annotations()
return num/float(len(self.parts))
def risk(self, images=None):
r = 0
for p in range(len(self.parts)):
r += self.parts[p].risk()
return r
def parse(self, data):
super(CrowdDatasetParts, self).parse(data)
self.parts = [CrowdDatasetPart(p=p, name=self.part_names[p], color=PART_COLORS[p%len(PART_COLORS)], outline_color=PART_OUTLINE_GT_COLORS[p%len(PART_OUTLINE_COLORS)]) for p in range(len(self.part_names))]
if 'parts' in data:
for p in range(len(self.parts)):
self.parts[p].parse(data['parts'][p])
def encode(self):
enc = super(CrowdDatasetParts, self).encode()
enc['parts'] = [self.parts[p].encode() for p in range(len(self.parts))]
return enc
class CrowdImageParts(CrowdImage):
def __init__(self, id, params):
super(CrowdImageParts, self).__init__(id, params)
self.bbox, self.worker_bboxes = None, {}
if hasattr(params, 'parts') and params.parts:
self.parts = []
for p in range(len(params.parts)):
self.parts.append(CrowdImagePart(id, params.parts[p], p))
params.parts[p].images[id] = self.parts[p]
self.encode_exclude['parts'] = True
def crowdsource_simple(self, avoid_if_finished=False):
if avoid_if_finished and self.finished:
return
# Take the "median" bounding box
if len(self.worker_bboxes) > 0:
best = float('-inf')
best_j = -1
for wj in self.worker_bboxes:
sumA = 0
if self.worker_bboxes[wj][2] == 1e-7 or self.worker_bboxes[wj][4] <= 1: sumA=-1+self.worker_bboxes[wj][4]*1e-5
for wk in self.worker_bboxes:
if self.worker_bboxes[wk][2] == 1e-7 or self.worker_bboxes[wk][4] <= 1:
continue
ux = max(self.worker_bboxes[wj][0]+self.worker_bboxes[wj][2],self.worker_bboxes[wk][0]+self.worker_bboxes[wk][2])-min(self.worker_bboxes[wj][0],self.worker_bboxes[wk][0])
uy = max(self.worker_bboxes[wj][1]+self.worker_bboxes[wj][3],self.worker_bboxes[wk][1]+self.worker_bboxes[wk][3])-min(self.worker_bboxes[wj][1],self.worker_bboxes[wk][1])
ix = max(0,min(self.worker_bboxes[wj][0]+self.worker_bboxes[wj][2],self.worker_bboxes[wk][0]+self.worker_bboxes[wk][2])-max(self.worker_bboxes[wj][0],self.worker_bboxes[wk][0]))
iy = max(0,min(self.worker_bboxes[wj][1]+self.worker_bboxes[wj][3],self.worker_bboxes[wk][1]+self.worker_bboxes[wk][3])-max(self.worker_bboxes[wj][1],self.worker_bboxes[wk][1]))
sumA += ix*iy/max(ux*uy,1e-7)
if sumA > best:
best = sumA
best_j = wj
self.bbox = self.worker_bboxes[best_j]
self.y = CrowdLabelParts(self, None)
for p in range(len(self.parts)):
if self.bbox: self.parts[p].set_bbox(self.bbox)
self.parts[p].crowdsource_simple(avoid_if_finished=avoid_if_finished)
self.y.parts[p] = self.parts[p].y
def predict_true_labels(self, avoid_if_finished=False):
self.y = CrowdLabelParts(self, None)
for p in range(len(self.parts)):
self.parts[p].predict_true_labels(avoid_if_finished=avoid_if_finished)
self.y.parts[p] = self.parts[p].y
def compute_log_likelihood(self):
ll = 0
for p in range(len(self.parts)):
ll += self.parts[p].compute_log_likelihood()
return ll
# Estimate difficulty parameters
def estimate_parameters(self, avoid_if_finished=False):
for p in range(len(self.parts)):
self.parts[p].estimate_parameters(avoid_if_finished=avoid_if_finished)
def check_finished(self, set_finished=True):
finished = True
self.risk = 0
for p in range(len(self.parts)):
if not self.parts[p].check_finished(set_finished=set_finished):
finished = False
if hasattr(self.parts[p], "risk"):
self.risk += self.parts[p].risk
if set_finished: self.finished = finished
return finished
def num_annotations(self):
num = 0
for p in range(len(self.parts)):
num += self.parts[p].num_annotations()
return num/float(len(self.parts))
def parse(self, data):
super(CrowdImageParts, self).parse(data)
self.parts = [CrowdImagePart(self.id, self.params.parts[p], p=p) for p in range(len(self.params.parts))]
for p in range(len(self.parts)):
if 'parts' in data:
self.parts[p].parse(data['parts'][p])
            self.params.parts[p].images[self.id] = self.parts[p]
def encode(self):
enc = super(CrowdImageParts, self).encode()
enc['parts'] = [self.parts[p].encode() for p in range(len(self.parts))]
return enc
class CrowdWorkerParts(CrowdWorker):
def __init__(self, id, params):
super(CrowdWorkerParts,self).__init__(id, params)
if hasattr(params, 'parts') and params.parts:
self.parts = []
for p in range(len(params.parts)):
self.parts.append(CrowdWorkerPart(id, params.parts[p], p))
params.parts[p].workers[id] = self.parts[p]
self.encode_exclude['parts'] = True
def compute_log_likelihood(self):
ll = 0
for p in range(len(self.parts)):
ll += self.parts[p].compute_log_likelihood()
return ll
def estimate_parameters(self):
for p in range(len(self.parts)):
self.parts[p].estimate_parameters()
self.skill = np.asarray([p.skill for p in self.parts]).mean(axis=0).tolist()
def parse(self, data):
super(CrowdWorkerParts, self).parse(data)
self.parts = [CrowdWorkerPart(self.id, self.params.parts[p], p=p) for p in range(len(self.params.parts))]
for p in range(len(self.parts)):
if 'parts' in data:
self.parts[p].parse(data['parts'][p])
            self.params.parts[p].workers[self.id] = self.parts[p]
def encode(self):
enc = super(CrowdWorkerParts, self).encode()
enc['parts'] = [self.parts[p].encode() for p in range(len(self.parts))]
return enc
class CrowdLabelParts(CrowdLabel):
def __init__(self, image, worker):
super(CrowdLabelParts, self).__init__(image, worker)
#self.parts = [None for p in range(len(image.parts))]
self.encode_exclude['parts'] = True
self.parts = [CrowdLabelPart(image.parts[p], worker.parts[p] if worker else None, p) for p in range(len(image.parts))]
self.gtype = 'keypoints'
def compute_log_likelihood(self):
ll = 0
for p in range(len(self.parts)):
            if self.parts[p] is not None:
ll += self.parts[p].compute_log_likelihood()
return ll
def loss(self, y):
loss = 0
for p in range(len(self.parts)):
self.parts[p].image.loss = self.parts[p].loss(y.parts[p])
loss += self.parts[p].image.loss
loss /= float(len(self.parts))
self.image.loss = loss
return loss
def estimate_parameters(self, avoid_if_finished=False):
for p in range(len(self.parts)):
            if self.parts[p] is not None:
self.parts[p].estimate_parameters(avoid_if_finished=avoid_if_finished)
def parse(self, data):
super(CrowdLabelParts, self).parse(data)
self.parts = [CrowdLabelPart(self.image.parts[p], self.worker.parts[p] if self.worker else None, p=p) for p in range(len(data['parts']))]
bbox = [float("inf"), float("inf"), float("-inf"), float("-inf"), 0]
num_vis = 0
#print str(data)
for p in range(len(self.parts)):
if not self.parts[p].image.finished:
if self.worker:# and not self.worker.id in self.worker.parts[p].params.workers:
self.worker.parts[p].params.workers[self.worker.id] = self.worker.parts[p]
if self.worker:# and not self.worker.id in self.worker.params.parts[p].workers:
self.worker.params.parts[p].workers[self.worker.id] = self.worker.parts[p]
#if not self.image.id in self.image.parts[p].params.images:
self.image.parts[p].params.images[self.image.id] = self.image.parts[p]
#if not self.image.id in self.image.params.parts[p].images:
self.image.params.parts[p].images[self.image.id] = self.image.parts[p]
if self.worker:
self.parts[p].image.workers.append(self.worker.id)
self.parts[p].worker.images[self.image.id] = self.image.parts[p]
self.parts[p].parse(data['parts'][p])
if self.worker:
self.image.parts[p].z[self.worker.id] = self.parts[p]
elif self == self.image.y_gt:
self.image.parts[p].y_gt = self.parts[p]
elif self == self.image.y:
self.image.parts[p].y = self.parts[p]
if data['parts'][p]['vis']:
bbox[0] = float(min(bbox[0], data['parts'][p]['x']))
bbox[1] = float(min(bbox[1], data['parts'][p]['y']))
bbox[2] = float(max(bbox[2], data['parts'][p]['x']))
bbox[3] = float(max(bbox[3], data['parts'][p]['y']))
num_vis += 1
if num_vis == 0:
bbox = [0, 0, 1e-7, 1e-7, 0]
else:
bbox[0] = max(0, bbox[0])
bbox[1] = max(0, bbox[1])
bbox[2] = max(1e-7, bbox[2]-bbox[0])
bbox[3] = max(1e-7, bbox[3]-bbox[1])
bbox[4] = num_vis
if self.worker:
self.image.worker_bboxes[self.worker.id] = bbox
for p in range(len(self.parts)):
if not self.parts[p].image.finished:
self.parts[p].set_bbox(bbox)
def encode(self):
enc = super(CrowdLabelParts, self).encode()
enc['parts'] = [self.parts[p].encode() for p in range(len(self.parts))]
return enc
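# Illustrative sketch (added for clarity; not part of the original module):
# crowdsource_simple() above picks a "median" box by maximizing summed
# overlap, where overlap is intersection area over the area of the box that
# encloses both inputs. The helper below reproduces that score for two
# [x, y, w, h] boxes; the sample values are made up.
def _overlap_example(b1, b2):
    ux = max(b1[0] + b1[2], b2[0] + b2[2]) - min(b1[0], b2[0])
    uy = max(b1[1] + b1[3], b2[1] + b2[3]) - min(b1[1], b2[1])
    ix = max(0, min(b1[0] + b1[2], b2[0] + b2[2]) - max(b1[0], b2[0]))
    iy = max(0, min(b1[1] + b1[3], b2[1] + b2[3]) - max(b1[1], b2[1]))
    return ix * iy / max(ux * uy, 1e-7)
# Two unit boxes offset by half a width score 0.5/1.5:
# _overlap_example([0, 0, 1, 1], [0.5, 0, 1, 1]) -> 0.333...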
|
{
"content_hash": "9fe76b79e4e686349770a5d465711be8",
"timestamp": "",
"source": "github",
"line_count": 297,
"max_line_length": 218,
"avg_line_length": 45.53198653198653,
"alnum_prop": 0.6247134511572876,
"repo_name": "sbranson/online_crowdsourcing",
"id": "98cf24d3921b9a8c2e91f5da3ccfee37366f8952",
"size": "13523",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "crowdsourcing/annotation_types/part/parts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "15706"
},
{
"name": "Python",
"bytes": "444456"
}
],
"symlink_target": ""
}
|
from omnilib.xmlrpc.client import make_client
from omnilib.frameworks.framework_base import Framework_Base
import os
import traceback
import sys
import time
class Framework(Framework_Base):
def __init__(self, config):
config['cert'] = os.path.expanduser(config['cert'])
if not os.path.exists(config['cert']):
            sys.exit("OpenFlow Framework certfile %s doesn't exist" % config['cert'])
config['key'] = os.path.expanduser(config['key'])
if not os.path.exists(config['key']):
            sys.exit("OpenFlow Framework keyfile %s doesn't exist" % config['key'])
        if 'verbose' not in config:
config['verbose'] = False
self.config = config
self.ch = make_client(config['ch'], config['key'], config['cert'])
        self.cert_string = open(config['cert'], 'r').read()
self.user_cred = None
def get_user_cred(self):
        if self.user_cred is None:
try:
self.user_cred = self.ch.CreateUserCredential(self.cert_string)
except Exception:
raise Exception("Using OpenFlow Failed to do CH.CreateUserCredentials on CH %s from cert file %s: %s" % (self.config['ch'], self.config['cert'], traceback.format_exc()))
return self.user_cred
def get_slice_cred(self, urn):
return self.ch.CreateSlice(urn)
def create_slice(self, urn):
return self.get_slice_cred(urn)
def delete_slice(self, urn):
return self.ch.DeleteSlice(urn)
def list_aggregates(self):
sites = []
try:
sites = self.ch.ListAggregates()
except Exception:
raise Exception("Using OpenFlow Failed to do CH.ListAggregates on CH %s from cert file %s: %s" % (self.config['ch'], self.config['cert'], traceback.format_exc()))
aggs = {}
for (urn, url) in sites:
aggs[urn] = url
return aggs
def slice_name_to_urn(self, name):
"""Convert a slice name to a slice urn."""
# FIXME: Use constants
base = 'urn:publicid:IDN+'
# FIXME: Validate name from credential.publicid_to_urn
# Old OMNI configs did not have authority specified,
# all we can do with those is append the name to the base
        if 'authority' not in self.config:
if name.startswith(base):
return name
else:
return base + name
auth = self.config['authority']
if name.startswith(base):
if not name.startswith(base+auth+"+slice+"):
raise Exception("Incorrect slice name")
return name
if name.startswith(auth):
return base + name
if '+' in name:
raise Exception("Incorrect slice name")
return base + auth + "+slice+" + name
def renew_slice(self, urn, expiration_dt):
"""See framework_base for doc.
"""
expiration = expiration_dt.isoformat()
if self.ch.RenewSlice(urn, expiration):
return expiration_dt
else:
return None
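# Illustrative sketch (added; not part of the original module): how
# slice_name_to_urn() composes a URN from a bare slice name when an
# 'authority' is configured. The authority and slice name are placeholders.
def _urn_example():
    base = 'urn:publicid:IDN+'
    auth = 'plc:princeton'
    name = 'myslice'
    # Mirrors the final branch of slice_name_to_urn():
    return base + auth + "+slice+" + name  # 'urn:publicid:IDN+plc:princeton+slice+myslice'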
|
{
"content_hash": "111604513c9b0b51c5a35d393ac6cad1",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 185,
"avg_line_length": 35.68888888888889,
"alnum_prop": 0.5688044831880449,
"repo_name": "avlach/univbris-ocf",
"id": "95f165865d3f3bbac7d3c08afee4c17553d0c94d",
"size": "4424",
"binary": false,
"copies": "4",
"ref": "refs/heads/ofelia.opticaldevelopment",
"path": "expedient/src/python/omnilib/frameworks/framework_of.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "127542"
},
{
"name": "JavaScript",
"bytes": "289680"
},
{
"name": "Perl",
"bytes": "4421"
},
{
"name": "Python",
"bytes": "3446617"
},
{
"name": "Racket",
"bytes": "32770"
},
{
"name": "Shell",
"bytes": "7609"
}
],
"symlink_target": ""
}
|
kpa = float(input("Input pressure in kilopascals> "))
psi = kpa * 0.145038
mmhg = kpa * 760 / 101.325
atm = kpa / 101.325
print "The pressure in pounds per square inch: %.2f psi" % (psi)
print "The pressure in millimeter of mercury: %.2f mmHg" % (mmhg)
print "Atmosphere pressure: %.2f atm." % (atm)
|
{
"content_hash": "d90adae08d6c4bab768710bc2971995d",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 65,
"avg_line_length": 43,
"alnum_prop": 0.6744186046511628,
"repo_name": "dadavidson/Python_Lab",
"id": "95146ed210abfc7f5888ed406baa1ce058e4b8f6",
"size": "498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python-w3resource/Python_Basic/ex67.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2047495"
},
{
"name": "Python",
"bytes": "106265"
}
],
"symlink_target": ""
}
|
"""
9. Many-to-many relationships via an intermediary table
For many-to-many relationships that need extra fields on the intermediary
table, use an intermediary model.
In this example, an ``Article`` can have multiple ``Reporter``s, and each
``Article``-``Reporter`` combination (a ``Writer``) has a ``position`` field,
which specifies the ``Reporter``'s position for the given article (e.g. "Staff
writer").
"""
from django.db import models
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
def __unicode__(self):
return u"%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
def __unicode__(self):
return self.headline
class Writer(models.Model):
reporter = models.ForeignKey(Reporter)
article = models.ForeignKey(Article)
position = models.CharField(max_length=100)
def __unicode__(self):
return u'%s (%s)' % (self.reporter, self.position)
__test__ = {'API_TESTS':"""
# Create a few Reporters.
>>> r1 = Reporter(first_name='John', last_name='Smith')
>>> r1.save()
>>> r2 = Reporter(first_name='Jane', last_name='Doe')
>>> r2.save()
# Create an Article.
>>> from datetime import datetime
>>> a = Article(headline='This is a test', pub_date=datetime(2005, 7, 27))
>>> a.save()
# Create a few Writers.
>>> w1 = Writer(reporter=r1, article=a, position='Main writer')
>>> w1.save()
>>> w2 = Writer(reporter=r2, article=a, position='Contributor')
>>> w2.save()
# Play around with the API.
>>> a.writer_set.select_related().order_by('-position')
[<Writer: John Smith (Main writer)>, <Writer: Jane Doe (Contributor)>]
>>> w1.reporter
<Reporter: John Smith>
>>> w2.reporter
<Reporter: Jane Doe>
>>> w1.article
<Article: This is a test>
>>> w2.article
<Article: This is a test>
>>> r1.writer_set.all()
[<Writer: John Smith (Main writer)>]
"""}
|
{
"content_hash": "bdca613eb70f723baad48ea67f84bf77",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 78,
"avg_line_length": 29.058823529411764,
"alnum_prop": 0.6700404858299596,
"repo_name": "rawwell/django",
"id": "0f93d5a154879cc19910a440ebd8ec34627b1832",
"size": "1976",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "tests/modeltests/m2m_intermediary/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "66105"
},
{
"name": "Python",
"bytes": "2924201"
},
{
"name": "Shell",
"bytes": "804"
}
],
"symlink_target": ""
}
|
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from pprint import pformat
import json
import pytest
import lasio
import lasio.las_items
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
stegfn = lambda vers, fn: os.path.join(os.path.dirname(__file__), "examples", vers, fn)
def test_repr():
h = lasio.las_items.HeaderItem("MN", unit="m", value=20, descr="test testing")
assert h.__repr__() == pformat(h)
def test_useful_mnemonic_setter_not_allowed():
h = lasio.las_items.HeaderItem("MN", unit="m", value=20, descr="test testing")
# Writing to useful_mnemonic is prevented by exception.
with pytest.raises(ValueError):
h.useful_mnemonic = "NEW_NAME"
def test_mnemonic_names_behavior():
h = lasio.las_items.HeaderItem("MN", unit="m", value=20, descr="test testing")
# mnemonic is not changed
h["mnemonic"] = "ZZZ"
assert h["mnemonic"] == "MN"
assert h.mnemonic == "MN"
assert h.useful_mnemonic == "MN"
# mnemonic is changed
h.mnemonic = "ZZZ"
assert h.useful_mnemonic == "ZZZ"
def test_getitem():
h = lasio.las_items.HeaderItem("MN", unit="m", value=20, descr="test testing")
assert h["mnemonic"] == "MN"
assert h["original_mnemonic"] == "MN"
assert h["useful_mnemonic"] == "MN"
assert h["unit"] == "m"
with pytest.raises(KeyError):
h["notakey"]
def test_header_json():
h = lasio.las_items.HeaderItem("MN", unit="m", value=20, descr="test testing")
# HeaderItem transformed to json string that includes
# object type and property key/values.
myjson = h.json
# Transform json string into a python dictionary
result = json.loads(myjson)
for key in result.keys():
if key == "_type":
# type of the object this json came from
assert result[key] == "HeaderItem"
else:
# data key/values: mnemonic, name, value, descr
assert result[key] == h[key]
# Verify write-to-HeaderItem.json is discouraged.
with pytest.raises(Exception):
h.json = '{ "_type: "HeaderItem" }'
|
{
"content_hash": "961e72006d1d66104cc18ff0c6ea8f45",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 87,
"avg_line_length": 27.2125,
"alnum_prop": 0.6311437758383096,
"repo_name": "kwinkunks/lasio",
"id": "50cd2d333e13ca6d517935fa06da28e88199c863",
"size": "2177",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/test_header_items.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "271856"
},
{
"name": "Python",
"bytes": "94521"
}
],
"symlink_target": ""
}
|
import re
import sys
from tox.config import parseconfig
from tox.package import get_package
from tox.session import Session
def test_install_via_installpkg(mock_venv, initproj, cmd):
base = initproj(
"pkg-0.1",
filedefs={
"tox.ini": """
[tox]
install_cmd = python -m -c 'print("ok")' -- {opts} {packages}'
""",
},
)
fake_package = base.ensure(".tox", "dist", "pkg123-0.1.zip")
result = cmd("-e", "py", "--notest", "--installpkg", str(fake_package.relto(base)))
result.assert_success()
def test_installpkg(tmpdir, newconfig):
p = tmpdir.ensure("pkg123-1.0.zip")
config = newconfig(["--installpkg={}".format(p)], "")
session = Session(config)
_, sdist_path = get_package(session)
assert sdist_path == p
def test_sdist_latest(tmpdir, newconfig):
distshare = tmpdir.join("distshare")
config = newconfig(
[],
"""
[tox]
distshare={}
sdistsrc={{distshare}}/pkg123-*
""".format(
distshare,
),
)
p = distshare.ensure("pkg123-1.4.5.zip")
distshare.ensure("pkg123-1.4.5a1.zip")
session = Session(config)
_, dist = get_package(session)
assert dist == p
def test_separate_sdist_no_sdistfile(cmd, initproj, tmpdir):
distshare = tmpdir.join("distshare")
initproj(
("pkg123-foo", "0.7"),
filedefs={
"tox.ini": """
[tox]
distshare={}
""".format(
distshare,
),
},
)
result = cmd("--sdistonly", "-e", "py")
assert not result.ret
distshare_files = distshare.listdir()
assert len(distshare_files) == 1
sdistfile = distshare_files[0]
assert "pkg123-foo-0.7.zip" in str(sdistfile)
def test_sdistonly(initproj, cmd):
initproj(
"example123",
filedefs={
"tox.ini": """
""",
},
)
result = cmd("-v", "--sdistonly", "-e", "py")
assert not result.ret
assert re.match(r".*sdist-make.*setup.py.*", result.out, re.DOTALL)
assert "-mvirtualenv" not in result.out
def test_make_sdist(initproj):
initproj(
"example123-0.5",
filedefs={
"tests": {"test_hello.py": "def test_hello(): pass"},
"tox.ini": """
""",
},
)
config = parseconfig([])
session = Session(config)
_, sdist = get_package(session)
assert sdist.check()
assert sdist.ext == ".zip"
assert sdist == config.distdir.join(sdist.basename)
_, sdist2 = get_package(session)
assert sdist2 == sdist
sdist.write("hello")
assert sdist.stat().size < 10
_, sdist_new = get_package(Session(config))
assert sdist_new == sdist
assert sdist_new.stat().size > 10
def test_build_backend_without_submodule(initproj, cmd):
# The important part of this test is that the build backend
# "inline_backend" is just a base package without a submodule.
# (Regression test for #1344)
initproj(
"magic-0.1",
filedefs={
"tox.ini": """\
[tox]
isolated_build = true
[testenv:.package]
basepython = {}
[testenv]
setenv = PYTHONPATH = {{toxinidir}}
""".format(
sys.executable,
),
"pyproject.toml": """\
[build-system]
requires = []
build-backend = "inline_backend"
""",
# To trigger original bug, must be package with __init__.py
"inline_backend": {
"__init__.py": """\
import sys
def get_requires_for_build_sdist(*args, **kwargs):
return ["pathlib2;python_version<'3.4'"]
def build_sdist(sdist_directory, config_settings=None):
if sys.version_info[:2] >= (3, 4):
import pathlib
else:
import pathlib2 as pathlib
(pathlib.Path(sdist_directory) / "magic-0.1.0.tar.gz").touch()
return "magic-0.1.0.tar.gz"
""",
},
".gitignore": ".tox",
},
add_missing_setup_py=False,
)
result = cmd("--sdistonly", "-e", "py", "-v", "-v")
result.assert_success(is_run_test_env=False)
def test_package_inject(initproj, cmd, monkeypatch, tmp_path):
monkeypatch.delenv(str("PYTHONPATH"), raising=False)
initproj(
"example123-0.5",
filedefs={
"tox.ini": """
[testenv:py]
passenv = PYTHONPATH
commands = python -c 'import os; assert os.path.exists(os.environ["TOX_PACKAGE"])'
""",
},
)
result = cmd("-q")
assert result.session.getvenv("py").envconfig.setenv.get("TOX_PACKAGE")
|
{
"content_hash": "28fd137a0f0ddc4789fea75cce59287c",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 94,
"avg_line_length": 29.423529411764704,
"alnum_prop": 0.5133946421431428,
"repo_name": "tox-dev/tox",
"id": "5a196d56c154746bce674d1ce776b318e1fdb8a5",
"size": "5002",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/unit/package/test_package.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "554999"
}
],
"symlink_target": ""
}
|
from collections import namedtuple
Size = namedtuple('Size', ['width', 'height'])
Location = namedtuple('Location', ['x', 'y'])
class Style(object):
"""
Allows getting and setting the CSS style.
"""
_elem = None
    def __init__(self, elem):
        # Write through __dict__ directly so our own __setattr__ (which sets
        # CSS via JavaScript) is not triggered when storing the element.
        self.__dict__.update({'_elem': elem})
def __getattr__(self, name):
return self._elem.value_of_css_property(name)
def __setattr__(self, name, value):
self._elem.javascript('style.%s = "%s"' % (name, value))
# http://thatmattbone.com/2010/04/delaying-computation-lazy-dictionaries-in-python/
# http://stackoverflow.com/questions/2048720/get-all-attributes-from-a-html-element-with-javascript-jquery
class Attributes(object):
"""
Allows getting, setting and deleting attributes.
"""
def __init__(self, elem):
self._elem = elem
def _get_attributes(self):
script = """
var elem = arguments[0];
var ret = {}
for (var i=0, attrs=elem.attributes, l=attrs.length; i<l; i++){
ret[attrs.item(i).nodeName] = attrs.item(i).nodeValue
}
return ret"""
return self._elem._parent.execute_script(script, self._elem)
def __getitem__(self, name):
return self._elem.javascript("getAttribute('%s')" % name)
def __setitem__(self, name, value):
return self._elem.javascript("setAttribute('%s', %s)" %
(name, repr(value)))
def __delitem__(self, name):
return self._elem.javascript("removeAttribute('%s')" % name)
def __getattr__(self, name):
data = self._get_attributes()
return getattr(data, name)
def __repr__(self):
return repr(self._get_attributes())
def __eq__(self, other):
return self._get_attributes() == other
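def _wrapper_demo(elem):
    # Added usage sketch (not from the original module); `elem` is assumed to
    # be a webdriverplus WebElement attached to a live driver. Reads go
    # through Selenium's CSS property lookup, writes through injected
    # JavaScript, and attributes behave like a dict.
    style = Style(elem)
    color = style.color             # value_of_css_property('color')
    style.display = 'none'          # elem.javascript('style.display = "none"')
    Attributes(elem)['data-x'] = 1  # setAttribute('data-x', 1)
    return color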
|
{
"content_hash": "6d972de1e25976bbe4fd327f3e323acb",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 106,
"avg_line_length": 28.793650793650794,
"alnum_prop": 0.5848952590959207,
"repo_name": "tomchristie/webdriverplus",
"id": "eb721affbfa75a620fec5e4dff5b0703701abadd",
"size": "1814",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webdriverplus/wrappers.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "80381"
},
{
"name": "Shell",
"bytes": "69"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import unittest
import numpy as np
import math
from op_test import OpTest
def quantize_max_abs(x, max_range):
scale = np.max(np.abs(x).flatten())
y = np.round(x / scale * max_range)
return y, scale
def dequantize_max_abs(x, scale, max_range):
y = (scale / max_range) * x
return y
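def _roundtrip_example():
    # Added worked example (not part of the original test): with num_bits=8,
    # max_range = 2**7 - 1 = 127. For x = [0.5, -1.0] the scale is 1.0 and
    # yq = [64, -127], so dequantizing gives approximately [0.504, -1.0];
    # the per-element error is bounded by scale / max_range.
    x = np.array([0.5, -1.0])
    yq, scale = quantize_max_abs(x, 127.0)
    return dequantize_max_abs(yq, scale, 127.0)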
class TestFakeDequantizeMaxAbsOp(OpTest):
def set_args(self):
self.num_bits = 8
self.max_range = math.pow(2, self.num_bits - 1) - 1
self.data_type = "float32"
def setUp(self):
self.set_args()
self.op_type = "fake_dequantize_max_abs"
x = np.random.randn(31, 65).astype(self.data_type)
yq, scale = quantize_max_abs(x, self.max_range)
ydq = dequantize_max_abs(yq, scale, self.max_range)
self.inputs = {'X': yq, 'Scale': np.array(scale).astype(self.data_type)}
self.attrs = {'max_range': self.max_range}
self.outputs = {'Out': ydq}
def test_check_output(self):
self.check_output()
class TestFakeDequantizeMaxAbsOpDouble(TestFakeDequantizeMaxAbsOp):
def set_args(self):
self.num_bits = 8
self.max_range = math.pow(2, self.num_bits - 1) - 1
self.data_type = "float64"
class TestFakeDequantizeMaxAbsOp5Bits(TestFakeDequantizeMaxAbsOp):
def set_args(self):
self.num_bits = 5
self.max_range = math.pow(2, self.num_bits - 1) - 1
self.data_type = "float32"
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "4a7934d08c32d1b9a0e0f57bd955bc71",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 80,
"avg_line_length": 26.982142857142858,
"alnum_prop": 0.6207809397749835,
"repo_name": "reyoung/Paddle",
"id": "1bb4662e8d83ac0c34b209e4e7a605869fdb59d5",
"size": "2124",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/test_fake_dequantize_op.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "274815"
},
{
"name": "C++",
"bytes": "9634861"
},
{
"name": "CMake",
"bytes": "321482"
},
{
"name": "Cuda",
"bytes": "1290076"
},
{
"name": "Dockerfile",
"bytes": "8631"
},
{
"name": "Go",
"bytes": "109508"
},
{
"name": "Perl",
"bytes": "11456"
},
{
"name": "Python",
"bytes": "4853892"
},
{
"name": "Shell",
"bytes": "170766"
}
],
"symlink_target": ""
}
|
from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._digital_twin_models_operations import build_add_request, build_delete_request, build_get_by_id_request, build_list_request, build_update_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DigitalTwinModelsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.digitaltwins.core.aio.AzureDigitalTwinsAPI`'s
:attr:`digital_twin_models` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
args = list(args)
self._client = args.pop(0) if args else kwargs.pop("client")
self._config = args.pop(0) if args else kwargs.pop("config")
self._serialize = args.pop(0) if args else kwargs.pop("serializer")
self._deserialize = args.pop(0) if args else kwargs.pop("deserializer")
@distributed_trace_async
async def add(
self,
models: Optional[List[Any]] = None,
digital_twin_models_add_options: Optional["_models.DigitalTwinModelsAddOptions"] = None,
**kwargs: Any
) -> List["_models.DigitalTwinsModelData"]:
"""Uploads one or more models. When any error occurs, no models are uploaded.
Status codes:
* 201 Created
* 400 Bad Request
* DTDLParserError - The models provided are not valid DTDL.
* InvalidArgument - The model id is invalid.
* LimitExceeded - The maximum number of model ids allowed in 'dependenciesFor' has been
reached.
* ModelVersionNotSupported - The version of DTDL used is not supported.
* 409 Conflict
* ModelAlreadyExists - The model provided already exists.
:param models: An array of models to add. Default value is None.
:type models: list[any]
:param digital_twin_models_add_options: Parameter group. Default value is None.
:type digital_twin_models_add_options:
~azure.digitaltwins.core.models.DigitalTwinModelsAddOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of DigitalTwinsModelData, or the result of cls(response)
:rtype: list[~azure.digitaltwins.core.models.DigitalTwinsModelData]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.DigitalTwinsModelData"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2022-05-31") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_traceparent = None
_tracestate = None
if digital_twin_models_add_options is not None:
_traceparent = digital_twin_models_add_options.traceparent
_tracestate = digital_twin_models_add_options.tracestate
if models is not None:
_json = self._serialize.body(models, '[object]')
else:
_json = None
request = build_add_request(
api_version=api_version,
content_type=content_type,
json=_json,
traceparent=_traceparent,
tracestate=_tracestate,
template_url=self.add.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[DigitalTwinsModelData]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
add.metadata = {'url': "/models"} # type: ignore
@distributed_trace
def list(
self,
dependencies_for: Optional[List[str]] = None,
include_model_definition: Optional[bool] = False,
digital_twin_models_list_options: Optional["_models.DigitalTwinModelsListOptions"] = None,
**kwargs: Any
) -> AsyncIterable["_models.PagedDigitalTwinsModelDataCollection"]:
"""Retrieves model metadata and, optionally, model definitions.
Status codes:
* 200 OK
* 400 Bad Request
* InvalidArgument - The model id is invalid.
* LimitExceeded - The maximum number of model ids allowed in 'dependenciesFor' has been
reached.
* 404 Not Found
* ModelNotFound - The model was not found.
:param dependencies_for: The set of the models which will have their dependencies retrieved. If
omitted, all models are retrieved. Default value is None.
:type dependencies_for: list[str]
:param include_model_definition: When true the model definition will be returned as part of the
result. Default value is False.
:type include_model_definition: bool
:param digital_twin_models_list_options: Parameter group. Default value is None.
:type digital_twin_models_list_options:
~azure.digitaltwins.core.models.DigitalTwinModelsListOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PagedDigitalTwinsModelDataCollection or the result
of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.digitaltwins.core.models.PagedDigitalTwinsModelDataCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2022-05-31") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.PagedDigitalTwinsModelDataCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
_traceparent = None
_tracestate = None
_max_items_per_page = None
if digital_twin_models_list_options is not None:
_traceparent = digital_twin_models_list_options.traceparent
_tracestate = digital_twin_models_list_options.tracestate
_max_items_per_page = digital_twin_models_list_options.max_items_per_page
request = build_list_request(
api_version=api_version,
traceparent=_traceparent,
tracestate=_tracestate,
dependencies_for=dependencies_for,
include_model_definition=include_model_definition,
max_items_per_page=_max_items_per_page,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
_traceparent = None
_tracestate = None
_max_items_per_page = None
if digital_twin_models_list_options is not None:
_traceparent = digital_twin_models_list_options.traceparent
_tracestate = digital_twin_models_list_options.tracestate
_max_items_per_page = digital_twin_models_list_options.max_items_per_page
request = build_list_request(
api_version=api_version,
traceparent=_traceparent,
tracestate=_tracestate,
dependencies_for=dependencies_for,
include_model_definition=include_model_definition,
max_items_per_page=_max_items_per_page,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PagedDigitalTwinsModelDataCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/models"} # type: ignore
@distributed_trace_async
async def get_by_id(
self,
id: str,
include_model_definition: Optional[bool] = False,
digital_twin_models_get_by_id_options: Optional["_models.DigitalTwinModelsGetByIdOptions"] = None,
**kwargs: Any
) -> "_models.DigitalTwinsModelData":
"""Retrieves model metadata and optionally the model definition.
Status codes:
* 200 OK
* 400 Bad Request
* InvalidArgument - The model id is invalid.
* MissingArgument - The model id was not provided.
* 404 Not Found
* ModelNotFound - The model was not found.
:param id: The id for the model. The id is globally unique and case sensitive.
:type id: str
:param include_model_definition: When true the model definition will be returned as part of the
result. Default value is False.
:type include_model_definition: bool
:param digital_twin_models_get_by_id_options: Parameter group. Default value is None.
:type digital_twin_models_get_by_id_options:
~azure.digitaltwins.core.models.DigitalTwinModelsGetByIdOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DigitalTwinsModelData, or the result of cls(response)
:rtype: ~azure.digitaltwins.core.models.DigitalTwinsModelData
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DigitalTwinsModelData"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2022-05-31") # type: str
_traceparent = None
_tracestate = None
if digital_twin_models_get_by_id_options is not None:
_traceparent = digital_twin_models_get_by_id_options.traceparent
_tracestate = digital_twin_models_get_by_id_options.tracestate
request = build_get_by_id_request(
id=id,
api_version=api_version,
traceparent=_traceparent,
tracestate=_tracestate,
include_model_definition=include_model_definition,
template_url=self.get_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('DigitalTwinsModelData', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_id.metadata = {'url': "/models/{id}"} # type: ignore
@distributed_trace_async
async def update( # pylint: disable=inconsistent-return-statements
self,
id: str,
update_model: List[Any],
digital_twin_models_update_options: Optional["_models.DigitalTwinModelsUpdateOptions"] = None,
**kwargs: Any
) -> None:
"""Updates the metadata for a model.
Status codes:
* 204 No Content
* 400 Bad Request
* InvalidArgument - The model id is invalid.
* JsonPatchInvalid - The JSON Patch provided is invalid.
* MissingArgument - The model id was not provided.
* 404 Not Found
* ModelNotFound - The model was not found.
* 409 Conflict
* ModelReferencesNotDecommissioned - The model refers to models that are not decommissioned.
:param id: The id for the model. The id is globally unique and case sensitive.
:type id: str
:param update_model: An update specification described by JSON Patch. Only the decommissioned
property can be replaced.
:type update_model: list[any]
:param digital_twin_models_update_options: Parameter group. Default value is None.
:type digital_twin_models_update_options:
~azure.digitaltwins.core.models.DigitalTwinModelsUpdateOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2022-05-31") # type: str
content_type = kwargs.pop('content_type', "application/json-patch+json") # type: Optional[str]
_traceparent = None
_tracestate = None
if digital_twin_models_update_options is not None:
_traceparent = digital_twin_models_update_options.traceparent
_tracestate = digital_twin_models_update_options.tracestate
_json = self._serialize.body(update_model, '[object]')
request = build_update_request(
id=id,
api_version=api_version,
content_type=content_type,
json=_json,
traceparent=_traceparent,
tracestate=_tracestate,
template_url=self.update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
update.metadata = {'url': "/models/{id}"} # type: ignore
@distributed_trace_async
async def delete( # pylint: disable=inconsistent-return-statements
self,
id: str,
digital_twin_models_delete_options: Optional["_models.DigitalTwinModelsDeleteOptions"] = None,
**kwargs: Any
) -> None:
"""Deletes a model. A model can only be deleted if no other models reference it.
Status codes:
* 204 No Content
* 400 Bad Request
* InvalidArgument - The model id is invalid.
* MissingArgument - The model id was not provided.
* 404 Not Found
* ModelNotFound - The model was not found.
* 409 Conflict
* ModelReferencesNotDeleted - The model refers to models that are not deleted.
:param id: The id for the model. The id is globally unique and case sensitive.
:type id: str
:param digital_twin_models_delete_options: Parameter group. Default value is None.
:type digital_twin_models_delete_options:
~azure.digitaltwins.core.models.DigitalTwinModelsDeleteOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2022-05-31") # type: str
_traceparent = None
_tracestate = None
if digital_twin_models_delete_options is not None:
_traceparent = digital_twin_models_delete_options.traceparent
_tracestate = digital_twin_models_delete_options.tracestate
request = build_delete_request(
id=id,
api_version=api_version,
traceparent=_traceparent,
tracestate=_tracestate,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': "/models/{id}"} # type: ignore
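# Illustrative usage sketch (added; not part of the generated file). Per the
# class docstring, these operations are reached through a client's
# `digital_twin_models` attribute; the model id below is a placeholder.
async def _example_get_model(client):
    model = await client.digital_twin_models.get_by_id(
        "dtmi:example:Room;1", include_model_definition=True
    )
    return model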
|
{
"content_hash": "a3fef8c950b977143c65a4d34a47022e",
"timestamp": "",
"source": "github",
"line_count": 487,
"max_line_length": 164,
"avg_line_length": 41.449691991786445,
"alnum_prop": 0.630635093629248,
"repo_name": "Azure/azure-sdk-for-python",
"id": "33e4f3cab191b28d71ed4069cb5abe2a913d123d",
"size": "20686",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/digitaltwins/azure-digitaltwins-core/azure/digitaltwins/core/_generated/aio/operations/_digital_twin_models_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
from oslo_config import cfg
from neutron._i18n import _
DEFAULT_DEVICE_MAPPINGS = []
DEFAULT_EXCLUDE_DEVICES = []
agent_opts = [
cfg.IntOpt('polling_interval', default=2,
help=_("The number of seconds the agent will wait between "
"polling for local device changes.")),
]
sriov_nic_opts = [
cfg.ListOpt('physical_device_mappings',
default=DEFAULT_DEVICE_MAPPINGS,
help=_("Comma-separated list of "
"<physical_network>:<network_device> tuples mapping "
"physical network names to the agent's node-specific "
"physical network device interfaces of SR-IOV physical "
"function to be used for VLAN networks. All physical "
"networks listed in network_vlan_ranges on the server "
"should have mappings to appropriate interfaces on "
"each agent.")),
cfg.ListOpt('exclude_devices',
default=DEFAULT_EXCLUDE_DEVICES,
help=_("Comma-separated list of "
"<network_device>:<vfs_to_exclude> tuples, mapping "
"network_device to the agent's node-specific list of "
"virtual functions that should not be used for virtual "
"networking. vfs_to_exclude is a semicolon-separated "
"list of virtual functions to exclude from "
"network_device. The network_device in the mapping "
"should appear in the physical_device_mappings "
"list.")),
]
def register_agent_sriov_nic_opts(cfg=cfg.CONF):
cfg.register_opts(agent_opts, 'AGENT')
cfg.register_opts(sriov_nic_opts, 'SRIOV_NIC')
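def _example_read_opts():
    # Added usage sketch (not part of the original module): after
    # registration the options are readable from the global CONF object
    # through their group names; the values noted are the defaults.
    register_agent_sriov_nic_opts()
    return (cfg.CONF.AGENT.polling_interval,              # 2
            cfg.CONF.SRIOV_NIC.physical_device_mappings)  # []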
|
{
"content_hash": "efb35952678debd5abb511a0b2f4a428",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 79,
"avg_line_length": 44.63414634146341,
"alnum_prop": 0.5622950819672131,
"repo_name": "eayunstack/neutron",
"id": "679ac222f8eab391cbf2992eab3b12c3221e9357",
"size": "2423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/conf/plugins/ml2/drivers/mech_sriov/agent_common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "10593193"
},
{
"name": "Shell",
"bytes": "8804"
}
],
"symlink_target": ""
}
|
from collections import OrderedDict
from functools import partial
from django.db import models
from django.conf import settings
from django.template import Context
from django.template.loader import get_template
from django.templatetags.static import static
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from filer.fields.image import FilerImageField
from aldryn_social_addthis import defaults
SOCIAL_NETWORKS = (
('facebook', _('Facebook')),
('instagram', _('Instagram')),
('google', _('Google+')),
('twitter', _('Twitter')),
('youtube', _('YouTube')),
('xing', _('XING')),
('linkedin', _('LinkedIn')),
('rss', _('RSS')),
)
SOCIAL_NETWORKS_BY_NAME = OrderedDict(SOCIAL_NETWORKS)
AVAILABLE_NETWORKS = getattr(settings, 'ALDRYN_SOCIAL_ADDTHIS_NETWORKS', SOCIAL_NETWORKS_BY_NAME.keys())
CMSPluginField = partial(
models.OneToOneField,
to=CMSPlugin,
related_name='%(app_label)s_%(class)s',
parent_link=True,
)
class Like(CMSPlugin):
facebook = models.BooleanField(_('facebook'), default=False)
google = models.BooleanField(_('google'), default=False)
twitter = models.BooleanField(_('twitter'), default=False)
pinterest = models.BooleanField(_('pinterest'), default=False)
email = models.BooleanField(_('email'), default=False)
buttons = [
('facebook', 'aldryn_social_addthis/likes/facebook.html'),
('google', 'aldryn_social_addthis/likes/google.html'),
('twitter', 'aldryn_social_addthis/likes/twitter.html'),
('pinterest', 'aldryn_social_addthis/likes/pinterest.html'),
('email', 'aldryn_social_addthis/likes/email.html')
]
title = models.CharField(
verbose_name=_('title'),
max_length=255,
default=defaults.LIKE['title'],
blank=True,
null=True,
help_text=_('Uses the title of the browser window if empty.')
)
description = models.CharField(
verbose_name=_('description'),
max_length=255,
default=defaults.LIKE['description'],
blank=True,
null=True
)
image = FilerImageField(
verbose_name=_('image'),
blank=True,
null=True,
help_text=_('This setting can only be set once per page. If set twice, it will be overridden.')
)
cmsplugin_ptr = CMSPluginField()
def get_buttons(self):
context = Context({'title': self.title,
'description': self.description})
for button, template_path in self.buttons:
if getattr(self, button):
template = get_template(template_path)
yield template.render(context)
class Mail(CMSPlugin):
subject = models.CharField(_('subject'), max_length=100)
body = models.TextField(_('body'), default='', blank=True)
append_url = models.BooleanField(
verbose_name=_('append url'),
default=True,
help_text=_('Append the current web address at the end of the mail.')
)
cmsplugin_ptr = CMSPluginField()
class Links(CMSPlugin):
facebook = models.URLField(_('Facebook'), null=True, blank=True)
instagram = models.URLField(_('Instagram'), null=True, blank=True)
google = models.URLField(_('Google+'), null=True, blank=True)
twitter = models.URLField(_('Twitter'), null=True, blank=True)
youtube = models.URLField(_('YouTube'), null=True, blank=True)
xing = models.URLField(_('XING'), null=True, blank=True)
linkedin = models.URLField(_('LinkedIn'), null=True, blank=True)
rss = models.URLField(_('RSS'), null=True, blank=True)
cmsplugin_ptr = CMSPluginField()
def get_link(self, network):
icon_path = self.get_plugin_class().ICON_URL % {'network': network}
return {'name': network, 'url': getattr(self, network), 'icon_url': static(icon_path)}
def get_links(self):
return [self.get_link(network) for network in AVAILABLE_NETWORKS if getattr(self, network)]
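def _networks_example():
    # Added sketch (not part of the original module): AVAILABLE_NETWORKS
    # defaults to every key of SOCIAL_NETWORKS_BY_NAME unless the
    # ALDRYN_SOCIAL_ADDTHIS_NETWORKS setting narrows it, e.g.
    # ALDRYN_SOCIAL_ADDTHIS_NETWORKS = ['facebook', 'twitter'].
    return list(SOCIAL_NETWORKS_BY_NAME.keys())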
|
{
"content_hash": "41cef6f7daeab162b4e784e0978ae0a7",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 104,
"avg_line_length": 33.96610169491525,
"alnum_prop": 0.6469560878243513,
"repo_name": "aldryn/aldryn-social-addthis",
"id": "1df5896115ece2ec9868bda9cc669fd5db04e17b",
"size": "4032",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aldryn_social_addthis/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2054"
},
{
"name": "Python",
"bytes": "13858"
}
],
"symlink_target": ""
}
|
import sys
import django
from django.conf import settings
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.humanize',
'django.contrib.sites',
'django_nyt',
]
from django import VERSION
if VERSION <= (1, 6):
    INSTALLED_APPS.append('south')
    SOUTH_MIGRATION_MODULES = {
        'django_nyt': 'django_nyt.south_migrations',
    }
else:
    SOUTH_MIGRATION_MODULES = None
    TEST_RUNNER = 'django.test.runner.DiscoverRunner'
settings.configure(
DEBUG=True,
# AUTH_USER_MODEL='testdata.CustomUser',
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
SITE_ID=1,
ROOT_URLCONF='testproject.urls',
SOUTH_MIGRATION_MODULES=SOUTH_MIGRATION_MODULES,
INSTALLED_APPS=INSTALLED_APPS,
TEMPLATE_CONTEXT_PROCESSORS=(
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.contrib.messages.context_processors.messages",
),
USE_TZ=True,
SOUTH_TESTS_MIGRATE=True,
)
# On Django <= 1.6 South handles migrations, so monkeypatch syncdb to get
# migrations to run during test database setup.
if VERSION <= (1, 6):
    from south.management.commands import patch_for_test_db_setup
    patch_for_test_db_setup()
from django.core.management import execute_from_command_line
argv = [sys.argv[0], "test"]
if len(sys.argv) == 1:
# Nothing following 'runtests.py':
if django.VERSION < (1,6):
argv.append("django_nyt")
else:
argv.append("django_nyt.tests")
else:
# Allow tests to be specified:
argv.extend(sys.argv[1:])
execute_from_command_line(argv)
|
{
"content_hash": "745bb85aaef5f45aad4faf2e8a803ce2",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 64,
"avg_line_length": 27.123456790123456,
"alnum_prop": 0.6672735548475194,
"repo_name": "spookylukey/django-nyt",
"id": "9e0e2fc7bc0584c5ea642d70c8b514cc77e0aa78",
"size": "2219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runtests.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "5580"
},
{
"name": "Python",
"bytes": "77940"
},
{
"name": "Shell",
"bytes": "5104"
}
],
"symlink_target": ""
}
|
from OpenGLCffi.GLES2 import params
@params(api='gles2', prms=['target', 'level', 'internalformat', 'width', 'height', 'depth', 'border', 'format', 'type', 'pixels'])
def glTexImage3DOES(target, level, internalformat, width, height, depth, border, format, type, pixels):
pass
@params(api='gles2', prms=['target', 'level', 'xoffset', 'yoffset', 'zoffset', 'width', 'height', 'depth', 'format', 'type', 'pixels'])
def glTexSubImage3DOES(target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, pixels):
pass
@params(api='gles2', prms=['target', 'level', 'xoffset', 'yoffset', 'zoffset', 'x', 'y', 'width', 'height'])
def glCopyTexSubImage3DOES(target, level, xoffset, yoffset, zoffset, x, y, width, height):
pass
@params(api='gles2', prms=['target', 'level', 'internalformat', 'width', 'height', 'depth', 'border', 'imageSize', 'data'])
def glCompressedTexImage3DOES(target, level, internalformat, width, height, depth, border, imageSize, data):
pass
@params(api='gles2', prms=['target', 'level', 'xoffset', 'yoffset', 'zoffset', 'width', 'height', 'depth', 'format', 'imageSize', 'data'])
def glCompressedTexSubImage3DOES(target, level, xoffset, yoffset, zoffset, width, height, depth, format, imageSize, data):
pass
@params(api='gles2', prms=['target', 'attachment', 'textarget', 'texture', 'level', 'zoffset'])
def glFramebufferTexture3DOES(target, attachment, textarget, texture, level, zoffset):
pass
|
{
"content_hash": "71ff851da185cc104cdf6b6f2f2ff33f",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 138,
"avg_line_length": 46.54838709677419,
"alnum_prop": 0.693000693000693,
"repo_name": "cydenix/OpenGLCffi",
"id": "a0ca237e4e2f686de7fe2db140c5fdc9218c2af7",
"size": "1443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OpenGLCffi/GLES2/EXT/OES/texture_3D.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1646"
},
{
"name": "C++",
"bytes": "188"
},
{
"name": "Python",
"bytes": "1853617"
}
],
"symlink_target": ""
}
|
"""
Streaming utility for system and simulation data.
author: Jeff Kinnison (jkinniso@nd.edu)
"""
import json
import pika
class PikaAsyncConsumer(object):
"""
The primary entry point for routing incoming messages to the proper handler.
"""
def __init__(self, rabbitmq_url, exchange_name, queue_name, message_handler,
exchange_type="direct", routing_key="#"):
"""
Create a new instance of Streamer.
Arguments:
rabbitmq_url -- URL to RabbitMQ server
exchange_name -- name of RabbitMQ exchange to join
queue_name -- name of RabbitMQ queue to join
Keyword Arguments:
exchange_type -- one of 'direct', 'topic', 'fanout', 'headers'
(default 'direct')
routing_keys -- the routing key that this consumer listens for
(default '#', receives all messages)
"""
self._connection = None
self._channel = None
self._shut_down = False
self._consumer_tag = None
self._url = rabbitmq_url
self._message_handler = message_handler
# The following are necessary to guarantee that both the RabbitMQ
# server and Streamer know where to look for messages. These names will
# be decided before dispatch and should be recorded in a config file or
# else on a per-job basis.
self._exchange = exchange_name
self._exchange_type = exchange_type
self._queue = queue_name
self._routing_key = routing_key
def connect(self):
"""
Create an asynchronous connection to the RabbitMQ server at URL.
"""
return pika.SelectConnection(pika.URLParameters(self._url),
on_open_callback=self.on_connection_open,
on_close_callback=self.on_connection_close,
stop_ioloop_on_close=False)
def on_connection_open(self, unused_connection):
"""
Actions to perform when the connection opens. This may not happen
immediately, so defer action to this callback.
Arguments:
unused_connection -- the created connection (by this point already
available as self._connection)
"""
self._connection.channel(on_open_callback=self.on_channel_open)
def on_connection_close(self, connection, code, text):
"""
Actions to perform when the connection is unexpectedly closed by the
RabbitMQ server.
Arguments:
connection -- the connection that was closed (same as self._connection)
code -- response code from the RabbitMQ server
text -- response body from the RabbitMQ server
"""
self._channel = None
if self._shut_down:
self._connection.ioloop.stop()
else:
self._connection.add_timeout(5, self.reconnect)
def reconnect(self):
"""
Attempt to reestablish a connection with the RabbitMQ server.
"""
self._connection.ioloop.stop() # Stop the ioloop to completely close
if not self._shut_down: # Connect and restart the ioloop
self._connection = self.connect()
self._connection.ioloop.start()
def on_channel_open(self, channel):
"""
Store the opened channel for future use and set up the exchange and
queue to be used.
Arguments:
channel -- the Channel instance opened by the Channel.Open RPC
"""
self._channel = channel
self._channel.add_on_close_callback(self.on_channel_close)
self.declare_exchange()
    def on_channel_close(self, channel, code, text):
        """
        Actions to perform when the channel is unexpectedly closed by the
        RabbitMQ server.
        Arguments:
        channel -- the channel that was closed (same as self._channel)
        code -- response code from the RabbitMQ server
        text -- response body from the RabbitMQ server
        """
self._connection.close()
def declare_exchange(self):
"""
Set up the exchange that will route messages to this consumer. Each
RabbitMQ exchange is uniquely identified by its name, so it does not
matter if the exchange has already been declared.
"""
self._channel.exchange_declare(self.declare_exchange_success,
self._exchange,
self._exchange_type)
def declare_exchange_success(self, unused_connection):
"""
Actions to perform on successful exchange declaration.
"""
self.declare_queue()
def declare_queue(self):
"""
Set up the queue that will route messages to this consumer. Each
RabbitMQ queue can be defined with routing keys to use only one
queue for multiple jobs.
"""
self._channel.queue_declare(self.declare_queue_success,
self._queue)
def declare_queue_success(self, method_frame):
"""
Actions to perform on successful queue declaration.
"""
self._channel.queue_bind(self.munch,
self._queue,
self._exchange,
self._routing_key
)
def munch(self, unused):
"""
Begin consuming messages from the Airavata API server.
"""
self._channel.add_on_cancel_callback(self.cancel_channel)
self._consumer_tag = self._channel.basic_consume(self._process_message)
    def cancel_channel(self, method_frame):
        """
        Close the channel when the broker cancels the consumer.
        """
        if self._channel is not None:
            self._channel.close()
def _process_message(self, ch, method, properties, body):
"""
Receive and verify a message, then pass it to the router.
Arguments:
ch -- the channel that routed the message
method -- delivery information
properties -- message properties
body -- the message
"""
print("Received Message: %s" % body)
self._message_handler(body)
#self._channel.basic_ack(delivery_tag=method.delivery_tag)
def stop_consuming(self):
"""
Stop the consumer if active.
"""
if self._channel:
self._channel.basic_cancel(self.close_channel, self._consumer_tag)
def close_channel(self):
"""
Close the channel to shut down the consumer and connection.
"""
self._channel.close()
def start(self):
"""
Start a connection with the RabbitMQ server.
"""
self._connection = self.connect()
self._connection.ioloop.start()
def stop(self):
"""
Stop an active connection with the RabbitMQ server.
"""
        self._shut_down = True  # read by reconnect() and on_connection_close()
self.stop_consuming()
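# Minimal usage sketch (hedged: assumes the class defined above is named
# PikaAsyncConsumer and that its constructor -- defined earlier in this
# file -- takes the connection parameters, exchange/queue names and the
# message-handler callable suggested by the attribute names used above):
#
#     consumer = PikaAsyncConsumer(...)  # hypothetical constructor arguments
#     try:
#         consumer.start()               # blocks inside the pika ioloop
#     except KeyboardInterrupt:
#         consumer.stop()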
|
{
"content_hash": "bd153e933722f147d17392e705b9722c",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 80,
"avg_line_length": 34.59605911330049,
"alnum_prop": 0.5850776021643173,
"repo_name": "machristie/airavata",
"id": "1c58687316c6a2cf1f7c3fe15b7968302d5ae66f",
"size": "7023",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "sandbox/simstream/simstream/pikaasyncconsumer.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5598"
},
{
"name": "C",
"bytes": "55273"
},
{
"name": "C++",
"bytes": "7107801"
},
{
"name": "CSS",
"bytes": "25498"
},
{
"name": "HTML",
"bytes": "75743"
},
{
"name": "Java",
"bytes": "31936020"
},
{
"name": "PHP",
"bytes": "292649"
},
{
"name": "Python",
"bytes": "277768"
},
{
"name": "Shell",
"bytes": "53742"
},
{
"name": "Thrift",
"bytes": "417035"
},
{
"name": "XSLT",
"bytes": "34643"
}
],
"symlink_target": ""
}
|
import os
import sys
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvesterconfig import harvester_config
if not hasattr(harvester_config.watcher, 'passphraseEnv'):
print ('ERROR: passphraseEnv is not defined in the watcher section of etc/panda/panda_harvester.cfg')
sys.exit(1)
envName = harvester_config.watcher.passphraseEnv
if envName not in os.environ:
print ("ERROR: env variable {0} is undefined in etc/sysconfig/panda_harvester".format(envName))
sys.exit(1)
key = os.environ[envName]
if len(sys.argv) < 2:
    print ("ERROR: no secret given. Usage: python encryptForWatcher.py <secret>")
    sys.exit(1)
secret = sys.argv[1]
cipher_text = core_utils.encrypt_string(key, secret)
print ("original: {0}".format(secret))
print ("encrypted: {0}".format(cipher_text))
plain_text = core_utils.decrypt_string(key, cipher_text)
print ("decrypted: {0}".format(plain_text))
if secret != plain_text:
print ("ERROR: the encrypted string cannot be correctly decrypted")
|
{
"content_hash": "b11c5792627761199aa02a9f592d0814",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 105,
"avg_line_length": 32.107142857142854,
"alnum_prop": 0.7519466073414905,
"repo_name": "dougbenjamin/panda-harvester",
"id": "51bd1df2823f3079db0522760fcbeead9f6c19d2",
"size": "899",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pandaharvester/harvestertest/encryptForWatcher.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1540221"
},
{
"name": "Shell",
"bytes": "21117"
}
],
"symlink_target": ""
}
|
import sys
import unittest
from mock import MagicMock
from libcloud.dns.base import Record, Zone
from libcloud.dns.types import RecordType
from libcloud.dns.types import ZoneDoesNotExistError, ZoneAlreadyExistsError
from libcloud.dns.types import RecordDoesNotExistError
from libcloud.test import LibcloudTestCase, MockHttpTestCase
from libcloud.test.file_fixtures import DNSFileFixtures
from libcloud.test.secrets import DNS_PARAMS_DURABLEDNS
from libcloud.utils.py3 import httplib
from libcloud.dns.drivers.durabledns import DurableDNSDriver
from libcloud.dns.drivers.durabledns import ZONE_EXTRA_PARAMS_DEFAULT_VALUES
from libcloud.dns.drivers.durabledns import DEFAULT_TTL
from libcloud.dns.drivers.durabledns import RECORD_EXTRA_PARAMS_DEFAULT_VALUES
class DurableDNSTests(LibcloudTestCase):
def setUp(self):
DurableDNSDriver.connectionCls.conn_class = DurableDNSMockHttp
DurableDNSMockHttp.type = None
self.driver = DurableDNSDriver(*DNS_PARAMS_DURABLEDNS)
def assertHasKeys(self, dictionary, keys):
for key in keys:
            self.assertTrue(key in dictionary,
                            'key "%s" not in dictionary' % key)
def test_list_record_types(self):
record_types = self.driver.list_record_types()
self.assertEqual(len(record_types), 10)
self.assertTrue(RecordType.A in record_types)
self.assertTrue(RecordType.AAAA in record_types)
self.assertTrue(RecordType.CNAME in record_types)
self.assertTrue(RecordType.HINFO in record_types)
self.assertTrue(RecordType.MX in record_types)
self.assertTrue(RecordType.NS in record_types)
self.assertTrue(RecordType.PTR in record_types)
self.assertTrue(RecordType.RP in record_types)
self.assertTrue(RecordType.SRV in record_types)
self.assertTrue(RecordType.TXT in record_types)
def test_list_zones(self):
extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'serial': '1437473456', 'refresh': '13000', 'retry': 7200,
'expire': 1300, 'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=extra)
self.driver.get_zone = MagicMock(return_value=zone)
zones = self.driver.list_zones()
self.assertEqual(len(zones), 2)
zone = zones[0]
self.assertEqual(zone.id, 'myzone.com.')
self.assertEqual(zone.domain, 'myzone.com.')
self.assertEqual(zone.ttl, 1300)
self.assertEqual(zone.extra['ns'], 'ns1.durabledns.com.')
self.assertEqual(zone.extra['mbox'], 'mail.myzone.com')
self.assertEqual(zone.extra['serial'], '1437473456')
self.assertEqual(zone.extra['refresh'], '13000')
self.assertEqual(zone.extra['retry'], 7200)
self.assertEqual(zone.extra['expire'], 1300)
self.assertEqual(zone.extra['minimum'], 13)
self.assertEqual(zone.extra['xfer'], '127.0.0.1')
self.assertEqual(zone.extra['update_acl'], '127.0.0.1')
self.assertEqual(len(zone.extra.keys()), 9)
def test_list_records(self):
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=z_extra)
extra = {'aux': 1, 'ttl': 3600}
record = Record(id='353286987', type='A', zone=zone,
name='record1', data='192.168.0.1',
driver=self, extra=extra)
self.driver.get_record = MagicMock(return_value=record)
records = self.driver.list_records(zone=zone)
self.assertEqual(len(records), 2)
self.assertEqual(record.id, '353286987')
self.assertEqual(record.name, 'record1')
self.assertEqual(record.type, 'A')
self.assertEqual(record.data, '192.168.0.1')
self.assertEqual(record.zone, zone)
self.assertEqual(record.extra['aux'], 1)
self.assertEqual(record.extra['ttl'], 3600)
def test_list_records_zone_does_not_exist(self):
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=z_extra)
DurableDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.list_records(zone=zone)
except ZoneDoesNotExistError:
e = sys.exc_info()[1]
self.assertEqual(e.zone_id, zone.id)
else:
self.fail('Exception was not thrown')
def test_get_zone(self):
zone = self.driver.get_zone(zone_id='myzone.com.')
self.assertEqual(zone.id, 'myzone.com.')
self.assertEqual(zone.domain, 'myzone.com.')
self.assertEqual(zone.ttl, 1300)
self.assertEqual(zone.extra['ns'], 'ns1.durabledns.com.')
self.assertEqual(zone.extra['mbox'], 'mail.myzone.com')
self.assertEqual(zone.extra['serial'], '1437473456')
self.assertEqual(zone.extra['refresh'], '13000')
self.assertEqual(zone.extra['retry'], 7200)
self.assertEqual(zone.extra['expire'], 1300)
self.assertEqual(zone.extra['minimum'], 13)
self.assertEqual(zone.extra['xfer'], '127.0.0.1/32')
self.assertEqual(zone.extra['update_acl'],
'127.0.0.1/32,127.0.0.100/32')
self.assertEqual(len(zone.extra.keys()), 9)
def test_get_zone_does_not_exist(self):
DurableDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.get_zone(zone_id='nonexistentzone.com.')
except ZoneDoesNotExistError:
e = sys.exc_info()[1]
self.assertEqual(e.zone_id, 'nonexistentzone.com.')
else:
self.fail('Exception was not thrown')
def test_get_record(self):
record = self.driver.get_record(zone_id='myzone.com.',
record_id='record1')
self.assertEqual(record.id, '353286987')
self.assertEqual(record.name, 'record1')
self.assertEqual(record.type, 'A')
self.assertEqual(record.data, '192.168.0.1')
self.assertEqual(record.extra['aux'], 1)
self.assertEqual(record.extra['ttl'], 3600)
def test_get_record_zone_does_not_exist(self):
DurableDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.get_record(zone_id='nonexistentzone.com.',
record_id='record1')
except ZoneDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_get_record_record_does_not_exist(self):
DurableDNSMockHttp.type = 'RECORD_DOES_NOT_EXIST'
try:
self.driver.get_record(zone_id='', record_id='')
except RecordDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_create_zone_with_extra_param(self):
DurableDNSMockHttp.type = 'WITH_EXTRA_PARAMS'
zone = self.driver.create_zone(domain='myzone.com.', ttl=4000,
extra={'mbox': 'mail.myzone.com',
'minimum': 50000})
extra = ZONE_EXTRA_PARAMS_DEFAULT_VALUES
self.assertEqual(zone.id, 'myzone.com.')
self.assertEqual(zone.domain, 'myzone.com.')
self.assertEqual(zone.ttl, 4000)
self.assertEqual(zone.extra['ns'], extra['ns'])
self.assertEqual(zone.extra['mbox'], 'mail.myzone.com')
self.assertEqual(zone.extra['serial'], '1437473456')
self.assertEqual(zone.extra['refresh'], extra['refresh'])
self.assertEqual(zone.extra['retry'], extra['retry'])
self.assertEqual(zone.extra['expire'], extra['expire'])
self.assertEqual(zone.extra['minimum'], 50000)
self.assertEqual(zone.extra['xfer'], extra['xfer'])
self.assertEqual(zone.extra['update_acl'], extra['update_acl'])
self.assertEqual(len(zone.extra.keys()), 9)
def test_create_zone_no_extra_param(self):
DurableDNSMockHttp.type = 'NO_EXTRA_PARAMS'
zone = self.driver.create_zone(domain='myzone.com.')
extra = ZONE_EXTRA_PARAMS_DEFAULT_VALUES
self.assertEqual(zone.id, 'myzone.com.')
self.assertEqual(zone.domain, 'myzone.com.')
self.assertEqual(zone.ttl, DEFAULT_TTL)
self.assertEqual(zone.extra['ns'], extra['ns'])
self.assertEqual(zone.extra['mbox'], extra['mbox'])
self.assertEqual(zone.extra['serial'], '1437473456')
self.assertEqual(zone.extra['refresh'], extra['refresh'])
self.assertEqual(zone.extra['retry'], extra['retry'])
self.assertEqual(zone.extra['expire'], extra['expire'])
self.assertEqual(zone.extra['minimum'], extra['minimum'])
self.assertEqual(zone.extra['xfer'], extra['xfer'])
self.assertEqual(zone.extra['update_acl'], extra['update_acl'])
self.assertEqual(len(zone.extra.keys()), 9)
def test_create_zone_zone_already_exist(self):
DurableDNSMockHttp.type = 'ZONE_ALREADY_EXIST'
try:
self.driver.create_zone(domain='myzone.com.')
except ZoneAlreadyExistsError:
pass
else:
self.fail('Exception was not thrown')
def test_create_record_no_extra_param(self):
zone = self.driver.list_zones()[0]
DurableDNSMockHttp.type = 'NO_EXTRA_PARAMS'
record = self.driver.create_record(name='record1', zone=zone,
type=RecordType.A, data='1.2.3.4')
self.assertEqual(record.id, '353367855')
self.assertEqual(record.name, 'record1')
self.assertEqual(record.zone, zone)
self.assertEqual(record.type, RecordType.A)
self.assertEqual(record.data, '1.2.3.4')
self.assertEqual(record.extra.get('aux'),
RECORD_EXTRA_PARAMS_DEFAULT_VALUES.get('aux'))
self.assertEqual(record.extra.get('ttl'),
RECORD_EXTRA_PARAMS_DEFAULT_VALUES.get('ttl'))
def test_create_record_with_extra_param(self):
zone = self.driver.list_zones()[0]
DurableDNSMockHttp.type = 'WITH_EXTRA_PARAMS'
record = self.driver.create_record(name='record1', zone=zone,
type=RecordType.A, data='1.2.3.4',
extra={'ttl': 4000})
self.assertEqual(record.id, '353367855')
self.assertEqual(record.name, 'record1')
self.assertEqual(record.zone, zone)
self.assertEqual(record.type, RecordType.A)
self.assertEqual(record.data, '1.2.3.4')
self.assertEqual(record.extra.get('aux'),
RECORD_EXTRA_PARAMS_DEFAULT_VALUES.get('aux'))
self.assertEqual(record.extra.get('ttl'), 4000)
def test_create_record_zone_does_not_exist(self):
DurableDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='deletedzone.com.', domain='deletedzone.com.',
type='master', ttl=1300, driver=self.driver, extra=z_extra)
try:
self.driver.create_record(name='record1', zone=zone,
type=RecordType.A, data='1.2.3.4')
except ZoneDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_update_zone(self):
        # We'll assume that this zone has been created before, so it will have
        # a serial number in its extra attributes. Later we are going to
        # check that after the update the serial number changes to a new one.
DurableDNSMockHttp.type = 'UPDATE_ZONE'
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1', 'serial': '1437473456',
'update_acl': '127.0.0.1'}
zone = Zone(id='deletedzone.com.', domain='deletedzone.com.',
type='master', ttl=1300, driver=self.driver, extra=z_extra)
new_extra = {'minimum': 5000, 'expire': 8000}
updated_zone = self.driver.update_zone(zone, zone.domain,
type=zone.type, ttl=4000,
extra=new_extra)
self.assertEqual(updated_zone.id, 'myzone.com.')
self.assertEqual(updated_zone.domain, 'myzone.com.')
self.assertEqual(updated_zone.ttl, 4000)
self.assertEqual(updated_zone.extra['ns'], z_extra['ns'])
self.assertEqual(updated_zone.extra['mbox'], z_extra['mbox'])
self.assertEqual(updated_zone.extra['serial'], '1437475078')
self.assertEqual(updated_zone.extra['refresh'], z_extra['refresh'])
self.assertEqual(updated_zone.extra['retry'], z_extra['retry'])
self.assertEqual(updated_zone.extra['expire'], 8000)
self.assertEqual(updated_zone.extra['minimum'], 5000)
self.assertEqual(updated_zone.extra['xfer'], z_extra['xfer'])
self.assertEqual(updated_zone.extra['update_acl'],
z_extra['update_acl'])
self.assertEqual(len(updated_zone.extra.keys()), 9)
def test_update_zone_zone_does_not_exist(self):
DurableDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1', 'serial': '1437473456',
'update_acl': '127.0.0.1'}
zone = Zone(id='deletedzone.com.', domain='deletedzone.com.',
type='master', ttl=1300, driver=self.driver, extra=z_extra)
try:
self.driver.update_zone(zone, zone.domain)
except ZoneDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_update_record(self):
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=z_extra)
extra = {'aux': 1, 'ttl': 3600}
record = Record(id='353286987', type='A', zone=zone,
name='record1', data='192.168.0.1',
driver=self, extra=extra)
new_extra = {'aux': 0, 'ttl': 4500}
updated_record = self.driver.update_record(record, record.name,
record.type, record.data,
extra=new_extra)
self.assertEqual(updated_record.data, '192.168.0.1')
self.assertEqual(updated_record.id, '353286987')
self.assertEqual(updated_record.name, 'record1')
self.assertEqual(updated_record.zone, record.zone)
self.assertEqual(updated_record.type, RecordType.A)
self.assertEqual(updated_record.extra.get('aux'), 0)
self.assertEqual(updated_record.extra.get('ttl'), 4500)
def test_update_record_zone_does_not_exist(self):
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=z_extra)
extra = {'aux': 1, 'ttl': 3600}
record = Record(id='353286987', type='A', zone=zone,
name='record1', data='192.168.0.1',
driver=self, extra=extra)
DurableDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.update_record(record, record.name, record.type,
record.data)
except ZoneDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_delete_zone(self):
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=z_extra)
status = self.driver.delete_zone(zone=zone)
self.assertTrue(status)
def test_delete_zone_zone_does_not_exist(self):
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=z_extra)
DurableDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.delete_zone(zone=zone)
except ZoneDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_delete_record(self):
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=z_extra)
extra = {'aux': 1, 'ttl': 3600}
record = Record(id='353286987', type='A', zone=zone,
name='record1', data='192.168.0.1',
driver=self, extra=extra)
status = self.driver.delete_record(record=record)
self.assertTrue(status)
def test_delete_record_record_does_not_exist(self):
z_extra = {'ns': 'ns1.durabledns.com.', 'mbox': 'mail.myzone.com',
'refresh': '13000', 'retry': 7200, 'expire': 1300,
'minimum': 13, 'xfer': '127.0.0.1',
'update_acl': '127.0.0.1'}
zone = Zone(id='myzone.com.', domain='myzone.com.', type='master',
ttl=1300, driver=self.driver, extra=z_extra)
extra = {'aux': 1, 'ttl': 3600}
record = Record(id='353286987', type='A', zone=zone,
name='record1', data='192.168.0.1',
driver=self, extra=extra)
DurableDNSMockHttp.type = 'RECORD_DOES_NOT_EXIST'
try:
self.driver.delete_record(record=record)
except RecordDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_delete_record_zone_does_not_exist(self):
zone = self.driver.list_zones()[0]
record = self.driver.list_records(zone=zone)[0]
DurableDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.delete_record(record=record)
except ZoneDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
class DurableDNSMockHttp(MockHttpTestCase):
fixtures = DNSFileFixtures('durabledns')
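    # MockHttpTestCase dispatches each request to the method whose name is the
    # URL path with non-alphanumeric characters replaced by underscores, with
    # the current value of DurableDNSMockHttp.type appended as a suffix when
    # it is set; e.g. /services/dns/listZones.php with type=None is handled by
    # _services_dns_listZones_php below.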
def _services_dns_listZones_php(self, method, url, body, headers):
body = self.fixtures.load('list_zones.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_listRecords_php(self, method, url, body, headers):
body = self.fixtures.load('list_records.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_listRecords_php_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('list_records_ZONE_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_getZone_php(self, method, url, body, headers):
body = self.fixtures.load('get_zone.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_getZone_php_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('get_zone_ZONE_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_getRecord_php(self, method, url, body, headers):
body = self.fixtures.load('get_record.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_getRecord_php_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('get_record_ZONE_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_getRecord_php_RECORD_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('get_record_RECORD_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_createZone_php_WITH_EXTRA_PARAMS(self, method, url, body,
headers):
body = self.fixtures.load('create_zone.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_getZone_php_WITH_EXTRA_PARAMS(self, method, url, body,
headers):
body = self.fixtures.load('get_zone_WITH_EXTRA_PARAMS.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_createZone_php_NO_EXTRA_PARAMS(self, method, url, body,
headers):
body = self.fixtures.load('create_zone.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_getZone_php_NO_EXTRA_PARAMS(self, method, url, body,
headers):
body = self.fixtures.load('get_zone_NO_EXTRA_PARAMS.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_createZone_php_ZONE_ALREADY_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('create_zone_ZONE_ALREADY_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_createRecord_php_NO_EXTRA_PARAMS(self, method, url, body,
headers):
body = self.fixtures.load('create_record_NO_EXTRA_PARAMS.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_createRecord_php_WITH_EXTRA_PARAMS(self, method, url,
body, headers):
body = self.fixtures.load('create_record_WITH_EXTRA_PARAMS.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_createRecord_php_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('create_record_ZONE_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_updateZone_php_UPDATE_ZONE(self, method, url,
body, headers):
body = self.fixtures.load('update_zone_UPDATE_ZONE.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_getZone_php_UPDATE_ZONE(self, method, url,
body, headers):
body = self.fixtures.load('get_zone_UPDATE_ZONE.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_updateZone_php_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('update_zone_ZONE_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_updateRecord_php(self, method, url, body, headers):
body = self.fixtures.load('update_record.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_updateRecord_php_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('update_record_ZONE_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_deleteZone_php(self, method, url, body, headers):
body = self.fixtures.load('delete_zone.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_deleteZone_php_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('delete_zone_ZONE_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_deleteRecord_php(self, method, url, body, headers):
body = self.fixtures.load('delete_record.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_deleteRecord_php_RECORD_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('delete_record_RECORD_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _services_dns_deleteRecord_php_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('delete_record_ZONE_DOES_NOT_EXIST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
sys.exit(unittest.main())
|
{
"content_hash": "e96e5b8174e434e72fd00ce4b345095b",
"timestamp": "",
"source": "github",
"line_count": 548,
"max_line_length": 79,
"avg_line_length": 49.54744525547445,
"alnum_prop": 0.5765689451974072,
"repo_name": "mgogoulos/libcloud",
"id": "5c0b041db074a37c45e2163d8475940473e007ad",
"size": "27901",
"binary": false,
"copies": "4",
"ref": "refs/heads/trunk",
"path": "libcloud/test/dns/test_durabledns.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "PowerShell",
"bytes": "410"
},
{
"name": "Python",
"bytes": "6051123"
},
{
"name": "Shell",
"bytes": "4631"
}
],
"symlink_target": ""
}
|
from collections import deque
class InboundPort:
    def __init__(self):
        # Use an instance attribute: a class-level deque would be shared
        # across every InboundPort instance.
        self.inboundQueue = deque()
def receive(self, message):
self.inboundQueue.append(message)
def printAll(self):
sb = ''
for message in self.inboundQueue:
            sb += message
return sb
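# Minimal usage sketch:
#     port = InboundPort()
#     port.receive('hello ')
#     port.receive('world')
#     assert port.printAll() == 'hello world'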
|
{
"content_hash": "9f8400ad8b85e772864663bf6b67e3d5",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 35,
"avg_line_length": 15.8,
"alnum_prop": 0.7215189873417721,
"repo_name": "terianil/epic",
"id": "3f37bd51793f46182500aace243823b8afd55b18",
"size": "237",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "epic/InboundPort.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "21100"
},
{
"name": "Shell",
"bytes": "74"
}
],
"symlink_target": ""
}
|
import pandas as pd
import numpy as np
# In[5]:
# load the train data
data = pd.read_csv("TrainSet.txt", sep='\t')
# average the duplicates
data = data.groupby(['subject #', 'Compound Identifier', 'Intensity', 'Dilution']).mean()  # need this to average the replicates
data.reset_index(level=[1,2,3,0], inplace=True)
# In[6]:
# we don't need the intensity here
data.drop(['Intensity'], axis=1, inplace=True)
# In[7]:
data.columns = [u'individual', u'#oID','Dilution', u'INTENSITY/STRENGTH', u'VALENCE/PLEASANTNESS', u'BAKERY', u'SWEET', u'FRUIT', u'FISH', u'GARLIC', u'SPICES', u'COLD', u'SOUR', u'BURNT', u'ACID', u'WARM', u'MUSKY', u'SWEATY', u'AMMONIA/URINOUS', u'DECAYED', u'WOOD', u'GRASS', u'FLOWER', u'CHEMICAL']
# In[8]:
# replace descriptors with Nan where intensity is zero
data.loc[data['INTENSITY/STRENGTH'] ==0,[u'VALENCE/PLEASANTNESS', u'BAKERY', u'SWEET', u'FRUIT', u'FISH', u'GARLIC', u'SPICES', u'COLD', u'SOUR', u'BURNT', u'ACID', u'WARM', u'MUSKY', u'SWEATY', u'AMMONIA/URINOUS', u'DECAYED', u'WOOD', u'GRASS', u'FLOWER', u'CHEMICAL']] = np.nan
# In[9]:
# average the descriptor data across subjects for each compound
data_avg = data.groupby('#oID').mean()
data_avg.drop('individual', axis=1, inplace=True)
# In[10]:
# take intensity from the 1/1,000 dilution only, averaged per compound
data_int = data[data.Dilution == '1/1,000 '].groupby('#oID').mean()
data_int.drop('individual', axis=1, inplace=True)
# In[12]:
data_avg['INTENSITY/STRENGTH'] = data_int['INTENSITY/STRENGTH']
# In[12]:
data_avg.to_csv('targets_for_feature_selection.csv')
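# Optional sanity check (a sketch): the result should be one row per compound
# with 21 numeric columns (intensity, valence and 19 other descriptors).
# print(data_avg.shape)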
# In[ ]:
|
{
"content_hash": "38f68205480a9bbbc6d970bd2d2c1880",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 302,
"avg_line_length": 24.721311475409838,
"alnum_prop": 0.6631299734748011,
"repo_name": "dream-olfaction/olfaction-prediction",
"id": "a68b09e5e5a5e8aeae835bbc14a102a8b54bf675",
"size": "1812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opc_python/hulab/feature_selection/creation_of_targets_for_feature_selection_train_data_only.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "952179"
},
{
"name": "Jupyter Notebook",
"bytes": "11495462"
},
{
"name": "MATLAB",
"bytes": "55979"
},
{
"name": "Python",
"bytes": "118002"
},
{
"name": "R",
"bytes": "14592"
},
{
"name": "Shell",
"bytes": "1387"
}
],
"symlink_target": ""
}
|
from __future__ import with_statement
import cPickle as pickle
import logging
import os
import sys
import unittest
import urlparse
import signal
from contextlib import contextmanager, nested, closing
from gzip import GzipFile
from shutil import rmtree
import time
from urllib import quote
from hashlib import md5
from tempfile import mkdtemp
import mock
from eventlet import sleep, spawn, wsgi, listen
import simplejson
from test.unit import connect_tcp, readuntil2crlfs, FakeLogger, \
fake_http_connect, FakeRing, FakeMemcache
from swift.proxy import server as proxy_server
from swift.account import server as account_server
from swift.container import server as container_server
from swift.obj import server as object_server
from swift.common import ring
from swift.common.exceptions import ChunkReadTimeout, SegmentError
from swift.common.constraints import MAX_META_NAME_LENGTH, \
MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \
MAX_FILE_SIZE, MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH
from swift.common import utils
from swift.common.utils import mkdirs, normalize_timestamp, NullLogger
from swift.common.wsgi import monkey_patch_mimetools
from swift.proxy.controllers.obj import SegmentedIterable
from swift.proxy.controllers.base import get_container_memcache_key, \
get_account_memcache_key, cors_validation
import swift.proxy.controllers
from swift.common.swob import Request, Response, HTTPNotFound, \
HTTPUnauthorized
# mocks
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
STATIC_TIME = time.time()
_request_instances = 0
_test_coros = _test_servers = _test_sockets = _orig_container_listing_limit = \
_testdir = _orig_SysLogHandler = None
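# request_init/request_del below wrap swob.Request construction and teardown
# so the suite can keep a live count of Request instances and detect objects
# leaking between tests.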
def request_init(self, *args, **kwargs):
global _request_instances
self._orig_init(*args, **kwargs)
_request_instances += 1
def request_del(self):
global _request_instances
if self._orig_del:
self._orig_del()
_request_instances -= 1
def setup():
utils.HASH_PATH_SUFFIX = 'endcap'
global _testdir, _test_servers, _test_sockets, \
_orig_container_listing_limit, _test_coros, _orig_SysLogHandler
_orig_SysLogHandler = utils.SysLogHandler
utils.SysLogHandler = mock.MagicMock()
Request._orig_init = Request.__init__
Request.__init__ = request_init
Request._orig_del = getattr(Request, '__del__', None)
Request.__del__ = request_del
monkey_patch_mimetools()
# Since we're starting up a lot here, we're going to test more than
# just chunked puts; we're also going to test parts of
# proxy_server.Application we couldn't get to easily otherwise.
_testdir = \
os.path.join(mkdtemp(), 'tmp_test_proxy_server_chunked')
mkdirs(_testdir)
rmtree(_testdir)
mkdirs(os.path.join(_testdir, 'sda1'))
mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
mkdirs(os.path.join(_testdir, 'sdb1'))
mkdirs(os.path.join(_testdir, 'sdb1', 'tmp'))
_orig_container_listing_limit = \
swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT
conf = {'devices': _testdir, 'swift_dir': _testdir,
'mount_check': 'false', 'allowed_headers':
'content-encoding, x-object-manifest, content-disposition, foo',
'allow_versions': 'True'}
prolis = listen(('localhost', 0))
acc1lis = listen(('localhost', 0))
acc2lis = listen(('localhost', 0))
con1lis = listen(('localhost', 0))
con2lis = listen(('localhost', 0))
obj1lis = listen(('localhost', 0))
obj2lis = listen(('localhost', 0))
_test_sockets = \
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis)
with closing(GzipFile(os.path.join(_testdir, 'account.ring.gz'), 'wb')) \
as f:
pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
[{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
'port': acc1lis.getsockname()[1]},
{'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
'port': acc2lis.getsockname()[1]}], 30),
f)
with closing(GzipFile(os.path.join(_testdir, 'container.ring.gz'), 'wb')) \
as f:
pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
[{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
'port': con1lis.getsockname()[1]},
{'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
'port': con2lis.getsockname()[1]}], 30),
f)
with closing(GzipFile(os.path.join(_testdir, 'object.ring.gz'), 'wb')) \
as f:
pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
[{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
'port': obj1lis.getsockname()[1]},
{'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
'port': obj2lis.getsockname()[1]}], 30),
f)
prosrv = proxy_server.Application(conf, FakeMemcacheReturnsNone())
acc1srv = account_server.AccountController(conf)
acc2srv = account_server.AccountController(conf)
con1srv = container_server.ContainerController(conf)
con2srv = container_server.ContainerController(conf)
obj1srv = object_server.ObjectController(conf)
obj2srv = object_server.ObjectController(conf)
_test_servers = \
(prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv)
nl = NullLogger()
prospa = spawn(wsgi.server, prolis, prosrv, nl)
acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl)
acc2spa = spawn(wsgi.server, acc2lis, acc2srv, nl)
con1spa = spawn(wsgi.server, con1lis, con1srv, nl)
con2spa = spawn(wsgi.server, con2lis, con2srv, nl)
obj1spa = spawn(wsgi.server, obj1lis, obj1srv, nl)
obj2spa = spawn(wsgi.server, obj2lis, obj2srv, nl)
_test_coros = \
(prospa, acc1spa, acc2spa, con1spa, con2spa, obj1spa, obj2spa)
# Create account
ts = normalize_timestamp(time.time())
partition, nodes = prosrv.account_ring.get_nodes('a')
for node in nodes:
conn = swift.proxy.controllers.obj.http_connect(node['ip'],
node['port'],
node['device'],
partition, 'PUT', '/a',
{'X-Timestamp': ts,
'x-trans-id': 'test'})
resp = conn.getresponse()
assert(resp.status == 201)
# Create container
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
    assert headers[:len(exp)] == exp, \
        "Expected '%s', encountered '%s'" % (exp, headers[:len(exp)])
def teardown():
for server in _test_coros:
server.kill()
swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = \
_orig_container_listing_limit
rmtree(os.path.dirname(_testdir))
Request.__init__ = Request._orig_init
if Request._orig_del:
Request.__del__ = Request._orig_del
utils.SysLogHandler = _orig_SysLogHandler
def sortHeaderNames(headerNames):
"""
Return the given string of header names sorted.
headerName: a comma-delimited list of header names
"""
headers = [a.strip() for a in headerNames.split(',') if a.strip()]
headers.sort()
return ', '.join(headers)
class FakeMemcacheReturnsNone(FakeMemcache):
def get(self, key):
# Returns None as the timestamp of the container; assumes we're only
# using the FakeMemcache for container existence checks.
return None
@contextmanager
def save_globals():
orig_http_connect = getattr(swift.proxy.controllers.base, 'http_connect',
None)
orig_account_info = getattr(swift.proxy.controllers.Controller,
'account_info', None)
try:
yield True
finally:
swift.proxy.controllers.Controller.account_info = orig_account_info
swift.proxy.controllers.base.http_connect = orig_http_connect
swift.proxy.controllers.obj.http_connect = orig_http_connect
swift.proxy.controllers.account.http_connect = orig_http_connect
swift.proxy.controllers.container.http_connect = orig_http_connect
def set_http_connect(*args, **kwargs):
new_connect = fake_http_connect(*args, **kwargs)
swift.proxy.controllers.base.http_connect = new_connect
swift.proxy.controllers.obj.http_connect = new_connect
swift.proxy.controllers.account.http_connect = new_connect
swift.proxy.controllers.container.http_connect = new_connect
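# set_http_connect() is intended to be called inside a save_globals() block so
# that the patched http_connect functions are restored when the block exits.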
# tests
class TestController(unittest.TestCase):
def setUp(self):
self.account_ring = FakeRing()
self.container_ring = FakeRing()
self.memcache = FakeMemcache()
app = proxy_server.Application(None, self.memcache,
account_ring=self.account_ring,
container_ring=self.container_ring,
object_ring=FakeRing())
self.controller = swift.proxy.controllers.Controller(app)
class FakeReq(object):
def __init__(self):
self.url = "/foo/bar"
self.method = "METHOD"
def as_referer(self):
return self.method + ' ' + self.url
self.account = 'some_account'
self.container = 'some_container'
self.request = FakeReq()
self.read_acl = 'read_acl'
self.write_acl = 'write_acl'
def test_transfer_headers(self):
src_headers = {'x-remove-base-meta-owner': 'x',
'x-base-meta-size': '151M',
'new-owner': 'Kun'}
dst_headers = {'x-base-meta-owner': 'Gareth',
'x-base-meta-size': '150M'}
self.controller.transfer_headers(src_headers, dst_headers)
expected_headers = {'x-base-meta-owner': '',
'x-base-meta-size': '151M'}
self.assertEquals(dst_headers, expected_headers)
def check_account_info_return(self, partition, nodes, is_none=False):
if is_none:
p, n = None, None
else:
p, n = self.account_ring.get_nodes(self.account)
self.assertEqual(p, partition)
self.assertEqual(n, nodes)
def test_account_info_container_count(self):
with save_globals():
set_http_connect(200, count=123)
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEquals(count, 123)
with save_globals():
set_http_connect(200, count='123')
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEquals(count, 123)
with save_globals():
cache_key = get_account_memcache_key(self.account)
account_info = {'status': 200, 'container_count': 1234}
self.memcache.set(cache_key, account_info)
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEquals(count, 1234)
with save_globals():
cache_key = get_account_memcache_key(self.account)
account_info = {'status': 200, 'container_count': '1234'}
self.memcache.set(cache_key, account_info)
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEquals(count, 1234)
def test_make_requests(self):
with save_globals():
set_http_connect(200)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
set_http_connect(201, raise_timeout_exc=True)
self.controller._make_request(
nodes, partition, 'POST', '/', '', '',
self.controller.app.logger.thread_locals)
# tests if 200 is cached and used
def test_account_info_200(self):
with save_globals():
set_http_connect(200)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes)
self.assertEquals(count, 12345)
# Test the internal representation in memcache
# 'container_count' changed from int to str
cache_key = get_account_memcache_key(self.account)
container_info = {'status': 200,
'container_count': '12345',
'total_object_count': None,
'bytes': None,
'meta': {}}
self.assertEquals(container_info,
self.memcache.get(cache_key))
set_http_connect()
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes)
self.assertEquals(count, 12345)
# tests if 404 is cached and used
def test_account_info_404(self):
with save_globals():
set_http_connect(404, 404, 404)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes, True)
self.assertEquals(count, None)
# Test the internal representation in memcache
# 'container_count' changed from 0 to None
cache_key = get_account_memcache_key(self.account)
account_info = {'status': 404,
'container_count': None, # internally keep None
'total_object_count': None,
'bytes': None,
'meta': {}}
self.assertEquals(account_info,
self.memcache.get(cache_key))
set_http_connect()
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes, True)
self.assertEquals(count, None)
# tests if some http status codes are not cached
def test_account_info_no_cache(self):
def test(*status_list):
set_http_connect(*status_list)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.assertEqual(len(self.memcache.keys()), 0)
self.check_account_info_return(partition, nodes, True)
self.assertEquals(count, None)
with save_globals():
# We cache if we have two 404 responses - fail if only one
test(503, 503, 404)
test(504, 404, 503)
test(404, 507, 503)
test(503, 503, 503)
def test_account_info_no_account(self):
with save_globals():
self.memcache.store = {}
set_http_connect(404, 404, 404)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes, is_none=True)
self.assertEquals(count, None)
def check_container_info_return(self, ret, is_none=False):
if is_none:
partition, nodes, read_acl, write_acl = None, None, None, None
else:
partition, nodes = self.container_ring.get_nodes(self.account,
self.container)
read_acl, write_acl = self.read_acl, self.write_acl
self.assertEqual(partition, ret['partition'])
self.assertEqual(nodes, ret['nodes'])
self.assertEqual(read_acl, ret['read_acl'])
self.assertEqual(write_acl, ret['write_acl'])
def test_container_info_invalid_account(self):
def account_info(self, account, request, autocreate=False):
return None, None
with save_globals():
swift.proxy.controllers.Controller.account_info = account_info
ret = self.controller.container_info(self.account,
self.container,
self.request)
self.check_container_info_return(ret, True)
# tests if 200 is cached and used
def test_container_info_200(self):
with save_globals():
headers = {'x-container-read': self.read_acl,
'x-container-write': self.write_acl}
set_http_connect(200, # account_info is found
200, headers=headers) # container_info is found
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret)
cache_key = get_container_memcache_key(self.account,
self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
self.assertEquals(200, cache_value.get('status'))
set_http_connect()
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret)
# tests if 404 is cached and used
def test_container_info_404(self):
def account_info(self, account, request):
return True, True, 0
with save_globals():
set_http_connect(503, 204, # account_info found
504, 404, 404) # container_info 'NotFound'
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
cache_key = get_container_memcache_key(self.account,
self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
self.assertEquals(404, cache_value.get('status'))
set_http_connect()
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
            set_http_connect(503, 404, 404)  # account_info 'NotFound'
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
cache_key = get_container_memcache_key(self.account,
self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
self.assertEquals(404, cache_value.get('status'))
set_http_connect()
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
# tests if some http status codes are not cached
def test_container_info_no_cache(self):
def test(*status_list):
set_http_connect(*status_list)
ret = self.controller.container_info(
self.account, self.container, self.request)
self.assertEqual(len(self.memcache.keys()), 0)
self.check_container_info_return(ret, True)
with save_globals():
# We cache if we have two 404 responses - fail if only one
test(503, 503, 404)
test(504, 404, 503)
test(404, 507, 503)
test(503, 503, 503)
class TestProxyServer(unittest.TestCase):
def test_unhandled_exception(self):
class MyApp(proxy_server.Application):
def get_controller(self, path):
                raise Exception("this shouldn't be caught")
app = MyApp(None, FakeMemcache(), account_ring=FakeRing(),
container_ring=FakeRing(), object_ring=FakeRing())
req = Request.blank('/account', environ={'REQUEST_METHOD': 'HEAD'})
app.update_request(req)
resp = app.handle_request(req)
self.assertEquals(resp.status_int, 500)
def test_internal_method_request(self):
baseapp = proxy_server.Application({},
FakeMemcache(),
container_ring=FakeRing(),
object_ring=FakeRing(),
account_ring=FakeRing())
resp = baseapp.handle_request(
Request.blank('/v1/a', environ={'REQUEST_METHOD': '__init__'}))
self.assertEquals(resp.status, '405 Method Not Allowed')
def test_inexistent_method_request(self):
baseapp = proxy_server.Application({},
FakeMemcache(),
container_ring=FakeRing(),
account_ring=FakeRing(),
object_ring=FakeRing())
resp = baseapp.handle_request(
Request.blank('/v1/a', environ={'REQUEST_METHOD': '!invalid'}))
self.assertEquals(resp.status, '405 Method Not Allowed')
def test_calls_authorize_allow(self):
called = [False]
def authorize(req):
called[0] = True
with save_globals():
set_http_connect(200)
app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing(),
object_ring=FakeRing())
req = Request.blank('/v1/a')
req.environ['swift.authorize'] = authorize
app.update_request(req)
app.handle_request(req)
self.assert_(called[0])
def test_calls_authorize_deny(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing(),
object_ring=FakeRing())
req = Request.blank('/v1/a')
req.environ['swift.authorize'] = authorize
app.update_request(req)
app.handle_request(req)
self.assert_(called[0])
def test_negative_content_length(self):
swift_dir = mkdtemp()
try:
baseapp = proxy_server.Application({'swift_dir': swift_dir},
FakeMemcache(), FakeLogger(),
FakeRing(), FakeRing(),
FakeRing())
resp = baseapp.handle_request(
Request.blank('/', environ={'CONTENT_LENGTH': '-1'}))
self.assertEquals(resp.status, '400 Bad Request')
self.assertEquals(resp.body, 'Invalid Content-Length')
resp = baseapp.handle_request(
Request.blank('/', environ={'CONTENT_LENGTH': '-123'}))
self.assertEquals(resp.status, '400 Bad Request')
self.assertEquals(resp.body, 'Invalid Content-Length')
finally:
rmtree(swift_dir, ignore_errors=True)
def test_denied_host_header(self):
swift_dir = mkdtemp()
try:
baseapp = proxy_server.Application({'swift_dir': swift_dir,
'deny_host_headers':
'invalid_host.com'},
FakeMemcache(), FakeLogger(),
FakeRing(), FakeRing(),
FakeRing())
resp = baseapp.handle_request(
Request.blank('/v1/a/c/o',
environ={'HTTP_HOST': 'invalid_host.com'}))
self.assertEquals(resp.status, '403 Forbidden')
finally:
rmtree(swift_dir, ignore_errors=True)
def test_node_timing(self):
baseapp = proxy_server.Application({'sorting_method': 'timing'},
FakeMemcache(),
container_ring=FakeRing(),
object_ring=FakeRing(),
account_ring=FakeRing())
self.assertEquals(baseapp.node_timings, {})
req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'})
baseapp.update_request(req)
resp = baseapp.handle_request(req)
self.assertEquals(resp.status_int, 503) # couldn't connect to anything
exp_timings = {}
self.assertEquals(baseapp.node_timings, exp_timings)
times = [time.time()]
exp_timings = {'127.0.0.1': (0.1, times[0] + baseapp.timing_expiry)}
with mock.patch('swift.proxy.server.time', lambda: times.pop(0)):
baseapp.set_node_timing({'ip': '127.0.0.1'}, 0.1)
self.assertEquals(baseapp.node_timings, exp_timings)
nodes = [{'ip': '127.0.0.1'}, {'ip': '127.0.0.2'}, {'ip': '127.0.0.3'}]
with mock.patch('swift.proxy.server.shuffle', lambda l: l):
res = baseapp.sort_nodes(nodes)
exp_sorting = [{'ip': '127.0.0.2'}, {'ip': '127.0.0.3'},
{'ip': '127.0.0.1'}]
self.assertEquals(res, exp_sorting)
def test_node_affinity(self):
baseapp = proxy_server.Application({'sorting_method': 'affinity',
'read_affinity': 'r1=1'},
FakeMemcache(),
container_ring=FakeRing(),
object_ring=FakeRing(),
account_ring=FakeRing())
nodes = [{'region': 2, 'zone': 1, 'ip': '127.0.0.1'},
{'region': 1, 'zone': 2, 'ip': '127.0.0.2'}]
with mock.patch('swift.proxy.server.shuffle', lambda x: x):
app_sorted = baseapp.sort_nodes(nodes)
exp_sorted = [{'region': 1, 'zone': 2, 'ip': '127.0.0.2'},
{'region': 2, 'zone': 1, 'ip': '127.0.0.1'}]
self.assertEquals(exp_sorted, app_sorted)
class TestObjectController(unittest.TestCase):
def setUp(self):
self.app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing(),
object_ring=FakeRing())
monkey_patch_mimetools()
def tearDown(self):
self.app.account_ring.set_replicas(3)
self.app.container_ring.set_replicas(3)
self.app.object_ring.set_replicas(3)
def assert_status_map(self, method, statuses, expected, raise_exc=False):
with save_globals():
kwargs = {}
if raise_exc:
kwargs['raise_exc'] = raise_exc
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a/c/o',
headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.app.update_request(req)
res = method(req)
self.assertEquals(res.status_int, expected)
# repeat test
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a/c/o',
headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.app.update_request(req)
res = method(req)
self.assertEquals(res.status_int, expected)
def test_GET_newest_large_file(self):
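        # The SIGPIPE handler below only counts delivered signals; the test
        # asserts that an X-Newest GET of a 1 MiB object completes without any
        # SIGPIPE being raised while the proxy drains the extra backend
        # connections.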
calls = [0]
def handler(_junk1, _junk2):
calls[0] += 1
old_handler = signal.signal(signal.SIGPIPE, handler)
try:
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
obj = 'a' * (1024 * 1024)
path = '/v1/a/c/o.large'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (path, str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
req = Request.blank(path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type':
'application/octet-stream',
'X-Newest': 'true'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.body, obj)
self.assertEqual(calls[0], 0)
finally:
signal.signal(signal.SIGPIPE, old_handler)
def test_PUT_expect_header_zero_content_length(self):
test_errors = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c/o.jpg':
if 'expect' in headers or 'Expect' in headers:
test_errors.append('Expect was in headers for object '
'server!')
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
# The (201, -4) tuples in there have the effect of letting the
# initial connect succeed, after which getexpect() gets called and
# then the -4 makes the response of that actually be 201 instead of
# 100. Perfectly straightforward.
set_http_connect(200, 200, (201, -4), (201, -4), (201, -4),
give_connect=test_connect)
req = Request.blank('/a/c/o.jpg', {})
req.content_length = 0
self.app.update_request(req)
self.app.memcache.store = {}
res = controller.PUT(req)
self.assertEqual(test_errors, [])
self.assertTrue(res.status.startswith('201 '), res.status)
def test_PUT_expect_header_nonzero_content_length(self):
test_errors = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c/o.jpg':
if 'Expect' not in headers:
test_errors.append('Expect was not in headers for '
'non-zero byte PUT!')
with save_globals():
controller = \
proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg')
set_http_connect(200, 200, 201, 201, 201,
give_connect=test_connect)
req = Request.blank('/a/c/o.jpg', {})
req.content_length = 1
req.body = 'a'
self.app.update_request(req)
self.app.memcache.store = {}
res = controller.PUT(req)
self.assertTrue(res.status.startswith('201 '))
def test_PUT_respects_write_affinity(self):
written_to = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c/o.jpg':
written_to.append((ipaddr, port, device))
with save_globals():
def is_r0(node):
return node['region'] == 0
self.app.object_ring.max_more_nodes = 100
self.app.write_affinity_is_local_fn = is_r0
self.app.write_affinity_node_count = lambda r: 3
controller = \
proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg')
set_http_connect(200, 200, 201, 201, 201,
give_connect=test_connect)
req = Request.blank('/a/c/o.jpg', {})
req.content_length = 1
req.body = 'a'
self.app.memcache.store = {}
res = controller.PUT(req)
self.assertTrue(res.status.startswith('201 '))
self.assertEqual(3, len(written_to))
for ip, port, device in written_to:
# this is kind of a hokey test, but in FakeRing, the port is even
# when the region is 0, and odd when the region is 1, so this test
# asserts that we only wrote to nodes in region 0.
self.assertEqual(0, port % 2)
def test_PUT_respects_write_affinity_with_507s(self):
written_to = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c/o.jpg':
written_to.append((ipaddr, port, device))
with save_globals():
def is_r0(node):
return node['region'] == 0
self.app.object_ring.max_more_nodes = 100
self.app.write_affinity_is_local_fn = is_r0
self.app.write_affinity_node_count = lambda r: 3
controller = \
proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg')
controller.error_limit(
self.app.object_ring.get_part_nodes(1)[0], 'test')
set_http_connect(200, 200, # account, container
201, 201, 201, # 3 working backends
give_connect=test_connect)
req = Request.blank('/a/c/o.jpg', {})
req.content_length = 1
req.body = 'a'
self.app.memcache.store = {}
res = controller.PUT(req)
self.assertTrue(res.status.startswith('201 '))
self.assertEqual(3, len(written_to))
# this is kind of a hokey test, but in FakeRing, the port is even when
# the region is 0, and odd when the region is 1, so this test asserts
# that we wrote to 2 nodes in region 0, then went to 1 non-r0 node.
self.assertEqual(0, written_to[0][1] % 2) # it's (ip, port, device)
self.assertEqual(0, written_to[1][1] % 2)
self.assertNotEqual(0, written_to[2][1] % 2)
def test_PUT_message_length_using_content_length(self):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
obj = 'j' * 20
fd.write('PUT /v1/a/c/o.content-length HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
def test_PUT_message_length_using_transfer_encoding(self):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\n'
'oh\r\n'
'4\r\n'
' say\r\n'
'4\r\n'
' can\r\n'
'4\r\n'
' you\r\n'
'4\r\n'
' see\r\n'
'3\r\n'
' by\r\n'
'4\r\n'
' the\r\n'
'8\r\n'
' dawns\'\n\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
def test_PUT_message_length_using_both(self):
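        # Both Content-Length and Transfer-Encoding: chunked are sent here;
        # the chunked encoding takes precedence, so the PUT is still accepted.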
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Content-Length: 33\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\n'
'oh\r\n'
'4\r\n'
' say\r\n'
'4\r\n'
' can\r\n'
'4\r\n'
' you\r\n'
'4\r\n'
' see\r\n'
'3\r\n'
' by\r\n'
'4\r\n'
' the\r\n'
'8\r\n'
' dawns\'\n\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
def test_PUT_bad_message_length(self):
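        # A bare Transfer-Encoding: gzip (without chunked) gives no valid way
        # to determine the message length, so the proxy answers 400.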
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Content-Length: 33\r\n'
'Transfer-Encoding: gzip\r\n\r\n'
'2\r\n'
'oh\r\n'
'4\r\n'
' say\r\n'
'4\r\n'
' can\r\n'
'4\r\n'
' you\r\n'
'4\r\n'
' see\r\n'
'3\r\n'
' by\r\n'
'4\r\n'
' the\r\n'
'8\r\n'
' dawns\'\n\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 400'
self.assertEqual(headers[:len(exp)], exp)
def test_PUT_message_length_unsup_xfr_encoding(self):
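        # A chained Transfer-Encoding of gzip,chunked is syntactically valid
        # but unsupported, so the proxy answers 501 Not Implemented.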
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Content-Length: 33\r\n'
'Transfer-Encoding: gzip,chunked\r\n\r\n'
'2\r\n'
'oh\r\n'
'4\r\n'
' say\r\n'
'4\r\n'
' can\r\n'
'4\r\n'
' you\r\n'
'4\r\n'
' see\r\n'
'3\r\n'
' by\r\n'
'4\r\n'
' the\r\n'
'8\r\n'
' dawns\'\n\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 501'
self.assertEqual(headers[:len(exp)], exp)
def test_PUT_message_length_too_large(self):
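        # Shrink the module-level MAX_FILE_SIZE so even a tiny body is "too
        # large"; the original value is restored in the finally block below.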
swift.proxy.controllers.obj.MAX_FILE_SIZE = 10
try:
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Content-Length: 33\r\n\r\n'
'oh say can you see by the dawns\'\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 413'
self.assertEqual(headers[:len(exp)], exp)
finally:
swift.proxy.controllers.obj.MAX_FILE_SIZE = MAX_FILE_SIZE
def test_expirer_DELETE_on_versioned_object(self):
test_errors = []
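        # The container has an x-versions-location, so this DELETE restores
        # the previous version rather than simply removing the object; the
        # expirer's X-If-Delete-At header must not leak into the internal
        # requests that do that work.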
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if method == 'DELETE':
if 'x-if-delete-at' in headers or 'X-If-Delete-At' in headers:
test_errors.append('X-If-Delete-At in headers')
body = simplejson.dumps(
[{"name": "001o/1",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"}])
body_iter = ('', '', body, '', '', '', '', '', '', '', '', '', '', '')
with save_globals():
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
# HEAD HEAD GET GET HEAD GET GET GET PUT PUT
# PUT DEL DEL DEL
set_http_connect(200, 200, 200, 200, 200, 200, 200, 200, 201, 201,
201, 200, 200, 200,
give_connect=test_connect,
body_iter=body_iter,
headers={'x-versions-location': 'foo'})
self.app.memcache.store = {}
req = Request.blank('/a/c/o',
headers={'X-If-Delete-At': 1},
environ={'REQUEST_METHOD': 'DELETE'})
self.app.update_request(req)
controller.DELETE(req)
self.assertEquals(test_errors, [])
def test_GET_manifest_no_segments(self):
for hdict in [{"X-Object-Manifest": "segments/seg"},
{"X-Static-Large-Object": "True"}]:
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
'', # GET manifest
simplejson.dumps([])) # GET empty listing
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # GET manifest
200, # GET empty listing
headers=hdict,
body_iter=response_bodies)
req = Request.blank('/a/c/manifest')
resp = controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, '')
def test_GET_manifest_limited_listing(self):
listing1 = [{"hash": "454dfc73af632012ce3e6217dc464241",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "seg01",
"content_type": "application/octet-stream"},
{"hash": "474bab96c67528d42d5c0c52b35228eb",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "seg02",
"content_type": "application/octet-stream"}]
listing2 = [{"hash": "116baa5508693d1d1ca36abdd9f9478b",
"last_modified": "2012-11-08T04:05:37.849510",
"bytes": 2,
"name": "seg03",
"content_type": "application/octet-stream"},
{"hash": "7bd6aaa1ef6013353f0420459574ac9d",
"last_modified": "2012-11-08T04:05:37.855180",
"bytes": 2,
"name": "seg04",
"content_type": "application/octet-stream"
}]
listing3 = [{"hash": "6605f80e3cefaa24e9823544df4edbd6",
"last_modified": "2012-11-08T04:05:37.853710",
"bytes": 2,
"name": u'\N{SNOWMAN}seg05',
"content_type": "application/octet-stream"}]
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
'', # GET manifest
simplejson.dumps(listing1), # GET listing1
'Aa', # GET seg01
'Bb', # GET seg02
simplejson.dumps(listing2), # GET listing2
'Cc', # GET seg03
'Dd', # GET seg04
simplejson.dumps(listing3), # GET listing3
'Ee', # GET seg05
simplejson.dumps([])) # GET final empty listing
with save_globals():
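            # Force pagination: with CONTAINER_LISTING_LIMIT dropped to 2,
            # the proxy has to fetch the segment listing in pages, passing
            # the last segment name as the marker on each follow-up GET.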
try:
swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = 2
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
requested = []
def capture_requested_paths(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
qs_dict = dict(urlparse.parse_qsl(query_string or ''))
requested.append([method, path, qs_dict])
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # GET manifest
200, # GET listing1
200, # GET seg01
200, # GET seg02
200, # GET listing2
200, # GET seg03
200, # GET seg04
200, # GET listing3
200, # GET seg05
200, # GET final empty listing
headers={"X-Object-Manifest": "segments/seg"},
body_iter=response_bodies,
give_connect=capture_requested_paths)
req = Request.blank('/a/c/manifest')
resp = controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, 'AaBbCcDdEe')
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['GET', '/a/c/manifest', {}],
['GET', '/a/segments',
{'format': 'json', 'prefix': 'seg'}],
['GET', '/a/segments/seg01', {}],
['GET', '/a/segments/seg02', {}],
['GET', '/a/segments',
{'format': 'json', 'prefix': 'seg', 'marker': 'seg02'}],
['GET', '/a/segments/seg03', {}],
['GET', '/a/segments/seg04', {}],
['GET', '/a/segments',
{'format': 'json', 'prefix': 'seg', 'marker': 'seg04'}],
['GET', '/a/segments/\xe2\x98\x83seg05', {}],
['GET', '/a/segments',
{'format': 'json', 'prefix': 'seg',
'marker': '\xe2\x98\x83seg05'}]])
finally:
                # Other tests in this file get very unhappy if this isn't
                # set back, which leads to time-wasting debugging.
swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = \
_orig_container_listing_limit
def test_GET_manifest_slo(self):
listing = [{"hash": "98568d540134639be4655198a36614a4",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg01",
"content_type": "application/octet-stream"},
{"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "/d2/seg02",
"content_type": "application/octet-stream"}]
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
simplejson.dumps(listing), # GET manifest
'Aa', # GET seg01
'Bb') # GET seg02
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
requested = []
def capture_requested_paths(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
qs_dict = dict(urlparse.parse_qsl(query_string or ''))
requested.append([method, path, qs_dict])
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # GET listing1
200, # GET seg01
200, # GET seg02
headers=[{}, {}, {"X-Static-Large-Object": "True",
'content-type': 'text/html; swift_bytes=4'}, {}, {}],
body_iter=response_bodies,
give_connect=capture_requested_paths)
req = Request.blank('/a/c/manifest')
resp = controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, 'AaBb')
self.assertEqual(resp.content_length, 4)
self.assertEqual(resp.content_type, 'text/html')
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['GET', '/a/c/manifest', {}],
['GET', '/a/d1/seg01', {}],
['GET', '/a/d2/seg02', {}]])
def test_GET_slo_multipart_manifest(self):
listing = [{"hash": "98568d540134639be4655198a36614a4",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg01",
"content_type": "application/octet-stream"},
{"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "/d2/seg02",
"content_type": "application/octet-stream"}]
json_listing = simplejson.dumps(listing)
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
json_listing) # GET manifest
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
requested = []
def capture_requested_paths(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
qs_dict = dict(urlparse.parse_qsl(query_string or ''))
requested.append([method, path, qs_dict])
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # GET listing1
headers={"X-Static-Large-Object": "True",
'content-type': 'text/html; swift_bytes=4'},
body_iter=response_bodies,
give_connect=capture_requested_paths)
req = Request.blank('/a/c/manifest?multipart-manifest=get')
resp = controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, json_listing)
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(resp.charset.lower(), 'utf-8')
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['GET', '/a/c/manifest', {'multipart-manifest': 'get'}]])
def test_GET_slo_multipart_manifest_from_copy(self):
listing = [{"hash": "98568d540134639be4655198a36614a4",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg01",
"content_type": "application/octet-stream"},
{"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "/d2/seg02",
"content_type": "application/octet-stream"}]
json_listing = simplejson.dumps(listing)
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
json_listing) # GET manifest
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
requested = []
def capture_requested_paths(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
qs_dict = dict(urlparse.parse_qsl(query_string or ''))
requested.append([method, path, qs_dict])
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # GET listing1
headers={"X-Static-Large-Object": "True",
'content-type': 'text/html; swift_bytes=4'},
body_iter=response_bodies,
give_connect=capture_requested_paths)
req = Request.blank('/a/c/manifest?multipart-manifest=get',
headers={'x-copy-from': '/a/c/manifest'})
resp = controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, json_listing)
self.assertEqual(resp.content_type, 'text/html')
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['GET', '/a/c/manifest', {'multipart-manifest': 'get'}]])
def test_GET_bad_etag_manifest_slo(self):
listing = [{"hash": "98568d540134639be4655198a36614a4",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg01",
"content_type": "application/octet-stream"},
{"hash": "invalidhash",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "/d2/seg02",
"content_type": "application/octet-stream"}]
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
simplejson.dumps(listing), # GET manifest
'Aa', # GET seg01
'Bb') # GET seg02
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
requested = []
def capture_requested_paths(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
qs_dict = dict(urlparse.parse_qsl(query_string or ''))
requested.append([method, path, qs_dict])
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # GET listing1
200, # GET seg01
200, # GET seg02
headers=[{}, {}, {"X-Static-Large-Object": "True",
'content-type': 'text/html; swift_bytes=4'}, {}, {}],
body_iter=response_bodies,
give_connect=capture_requested_paths)
req = Request.blank('/a/c/manifest')
resp = controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 4) # content incomplete
self.assertEqual(resp.content_type, 'text/html')
self.assertRaises(SegmentError, lambda: resp.body)
# dropped connection, exception is caught by eventlet as it is
# iterating over response
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['GET', '/a/c/manifest', {}],
['GET', '/a/d1/seg01', {}],
['GET', '/a/d2/seg02', {}]])
def test_GET_nested_slo(self):
listing = [{"hash": "98568d540134639be4655198a36614a4",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg01",
"content_type": "application/octet-stream"},
{"hash": "8681fb3ada2715c8754706ee5f23d4f8",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 4,
"name": "/d2/sub_manifest",
"content_type": "application/octet-stream"},
{"hash": "419af6d362a14b7a789ba1c7e772bbae",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg04",
"content_type": "application/octet-stream"}]
sub_listing = [{"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg02",
"content_type": "application/octet-stream"},
{"hash": "e4c8f1de1c0855c7c2be33196d3c3537",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "/d2/seg03",
"content_type": "application/octet-stream"}]
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
simplejson.dumps(listing), # GET manifest
'Aa', # GET seg01
simplejson.dumps(sub_listing), # GET sub_manifest
'Bb', # GET seg02
'Cc', # GET seg03
'Dd') # GET seg04
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
requested = []
def capture_requested_paths(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
qs_dict = dict(urlparse.parse_qsl(query_string or ''))
requested.append([method, path, qs_dict])
slob_headers = {"X-Static-Large-Object": "True",
'content-type': 'text/html; swift_bytes=4'}
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # GET listing1
200, # GET seg01
200, # GET sub listing1
200, # GET seg02
200, # GET seg03
200, # GET seg04
headers=[{}, {}, slob_headers, {}, slob_headers, {}, {}, {}],
body_iter=response_bodies,
give_connect=capture_requested_paths)
req = Request.blank('/a/c/manifest')
resp = controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 8)
self.assertEqual(resp.content_type, 'text/html')
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['GET', '/a/c/manifest', {}]])
# iterating over body will retrieve manifest and sub manifest's
# objects
self.assertEqual(resp.body, 'AaBbCcDd')
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['GET', '/a/c/manifest', {}],
['GET', '/a/d1/seg01', {}],
['GET', '/a/d2/sub_manifest', {}],
['GET', '/a/d1/seg02', {}],
['GET', '/a/d2/seg03', {}],
['GET', '/a/d1/seg04', {}]])
def test_GET_bad_404_manifest_slo(self):
listing = [{"hash": "98568d540134639be4655198a36614a4",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg01",
"content_type": "application/octet-stream"},
{"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "/d2/seg02",
"content_type": "application/octet-stream"},
{"hash": "invalidhash",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "/d2/seg03",
"content_type": "application/octet-stream"}]
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
simplejson.dumps(listing), # GET manifest
'Aa', # GET seg01
'') # GET seg02
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
requested = []
def capture_requested_paths(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
qs_dict = dict(urlparse.parse_qsl(query_string or ''))
requested.append([method, path, qs_dict])
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # GET listing1
200, # GET seg01
404, # GET seg02
headers=[{}, {}, {"X-Static-Large-Object": "True",
'content-type': 'text/html; swift_bytes=4'}, {}, {}],
body_iter=response_bodies,
give_connect=capture_requested_paths)
req = Request.blank('/a/c/manifest')
resp = controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 6) # content incomplete
self.assertEqual(resp.content_type, 'text/html')
self.assertRaises(SegmentError, lambda: resp.body)
# dropped connection, exception is caught by eventlet as it is
# iterating over response
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['GET', '/a/c/manifest', {}],
['GET', '/a/d1/seg01', {}],
['GET', '/a/d2/seg02', {}],
['GET', '/a/d2/seg02', {}],
['GET', '/a/d2/seg02', {}]]) # 2nd segment not found
def test_HEAD_manifest_slo(self):
listing = [{"hash": "454dfc73af632012ce3e6217dc464241",
"last_modified": "2012-11-08T04:05:37.866820",
"bytes": 2,
"name": "/d1/seg01",
"content_type": "application/octet-stream"},
{"hash": "474bab96c67528d42d5c0c52b35228eb",
"last_modified": "2012-11-08T04:05:37.846710",
"bytes": 2,
"name": "/d2/seg02",
"content_type": "application/octet-stream"}]
response_bodies = (
'', # HEAD /a
'', # HEAD /a/c
'', # HEAD manifest
simplejson.dumps(listing)) # GET manifest
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'manifest')
requested = []
def capture_requested_paths(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
qs_dict = dict(urlparse.parse_qsl(query_string or ''))
requested.append([method, path, qs_dict])
set_http_connect(
200, # HEAD /a
200, # HEAD /a/c
200, # HEAD listing1
200, # GET listing1
headers={"X-Static-Large-Object": "True"},
body_iter=response_bodies,
give_connect=capture_requested_paths)
req = Request.blank('/a/c/manifest',
environ={'REQUEST_METHOD': 'HEAD'})
resp = controller.HEAD(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(
requested,
[['HEAD', '/a', {}],
['HEAD', '/a/c', {}],
['HEAD', '/a/c/manifest', {}],
['GET', '/a/c/manifest', {}]])
def test_PUT_auto_content_type(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_content_type(filename, expected):
                # The five responses here are for account_info() (HEAD to the
                # account server), container_info() (HEAD to the container
                # server) and three calls to _connect_put_node() (PUT to
                # three object servers)
set_http_connect(201, 201, 201, 201, 201,
give_content_type=lambda content_type:
self.assertEquals(content_type,
expected.next()))
                # We need to include a transfer-encoding to get past
# constraints.check_object_creation()
req = Request.blank('/a/c/%s' % filename, {},
headers={'transfer-encoding': 'chunked'})
self.app.update_request(req)
self.app.memcache.store = {}
res = controller.PUT(req)
# If we don't check the response here we could miss problems
# in PUT()
self.assertEquals(res.status_int, 201)
test_content_type('test.jpg', iter(['', '', 'image/jpeg',
'image/jpeg', 'image/jpeg']))
test_content_type('test.html', iter(['', '', 'text/html',
'text/html', 'text/html']))
test_content_type('test.css', iter(['', '', 'text/css',
'text/css', 'text/css']))
def test_custom_mime_types_files(self):
swift_dir = mkdtemp()
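        # The proxy loads extra extension-to-type mappings from
        # <swift_dir>/mime.types at startup, so '.foo' resolves to 'foo/bar'
        # here while stock types like '.jpg' keep working.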
try:
with open(os.path.join(swift_dir, 'mime.types'), 'w') as fp:
fp.write('foo/bar foo\n')
proxy_server.Application({'swift_dir': swift_dir},
FakeMemcache(), FakeLogger(),
FakeRing(), FakeRing(),
FakeRing())
self.assertEquals(proxy_server.mimetypes.guess_type('blah.foo')[0],
'foo/bar')
self.assertEquals(proxy_server.mimetypes.guess_type('blah.jpg')[0],
'image/jpeg')
finally:
rmtree(swift_dir, ignore_errors=True)
def test_PUT(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected):
set_http_connect(*statuses)
req = Request.blank('/a/c/o.jpg', {})
req.content_length = 0
self.app.update_request(req)
self.app.memcache.store = {}
res = controller.PUT(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((200, 200, 201, 201, 201), 201)
test_status_map((200, 200, 201, 201, 500), 201)
test_status_map((200, 200, 204, 404, 404), 404)
test_status_map((200, 200, 204, 500, 404), 503)
def test_PUT_connect_exceptions(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
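            # A negative status makes the fake connection fail instead of
            # answering; with three object replicas, one failed node still
            # yields a 201 while two failures push the PUT to 503.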
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/a/c/o.jpg', {})
req.content_length = 0
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((200, 200, 201, 201, -1), 201)
test_status_map((200, 200, 201, 201, -2), 201) # expect timeout
test_status_map((200, 200, 201, 201, -3), 201) # error limited
test_status_map((200, 200, 201, -1, -1), 503)
test_status_map((200, 200, 503, 503, -1), 503)
def test_PUT_send_exceptions(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected):
self.app.memcache.store = {}
set_http_connect(*statuses)
req = Request.blank('/a/c/o.jpg',
environ={'REQUEST_METHOD': 'PUT'},
body='some data')
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((200, 200, 201, -1, 201), 201)
test_status_map((200, 200, 201, -1, -1), 503)
test_status_map((200, 200, 503, 503, -1), 503)
def test_PUT_max_size(self):
with save_globals():
set_http_connect(201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', {}, headers={
'Content-Length': str(MAX_FILE_SIZE + 1),
'Content-Type': 'foo/bar'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status_int, 413)
def test_PUT_bad_content_type(self):
with save_globals():
set_http_connect(201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', {}, headers={
'Content-Length': 0, 'Content-Type': 'foo/bar;swift_hey=45'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status_int, 400)
def test_PUT_getresponse_exceptions(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected):
self.app.memcache.store = {}
set_http_connect(*statuses)
req = Request.blank('/a/c/o.jpg', {})
req.content_length = 0
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEquals(res.status[:len(str(expected))],
str(expected))
test_status_map((200, 200, 201, 201, -1), 201)
test_status_map((200, 200, 201, -1, -1), 503)
test_status_map((200, 200, 503, 503, -1), 503)
def test_POST(self):
with save_globals():
self.app.object_post_as_copy = False
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar'})
self.app.update_request(req)
res = controller.POST(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((200, 200, 202, 202, 202), 202)
test_status_map((200, 200, 202, 202, 500), 202)
test_status_map((200, 200, 202, 500, 500), 503)
test_status_map((200, 200, 202, 404, 500), 503)
test_status_map((200, 200, 202, 404, 404), 404)
test_status_map((200, 200, 404, 500, 500), 503)
test_status_map((200, 200, 404, 404, 404), 404)
def test_POST_as_copy(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar'})
self.app.update_request(req)
res = controller.POST(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((200, 200, 200, 200, 200, 202, 202, 202), 202)
test_status_map((200, 200, 200, 200, 200, 202, 202, 500), 202)
test_status_map((200, 200, 200, 200, 200, 202, 500, 500), 503)
test_status_map((200, 200, 200, 200, 200, 202, 404, 500), 503)
test_status_map((200, 200, 200, 200, 200, 202, 404, 404), 404)
test_status_map((200, 200, 200, 200, 200, 404, 500, 500), 503)
test_status_map((200, 200, 200, 200, 200, 404, 404, 404), 404)
def test_DELETE(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {})
self.app.update_request(req)
res = controller.DELETE(req)
self.assertEquals(res.status[:len(str(expected))],
str(expected))
test_status_map((200, 200, 204, 204, 204), 204)
test_status_map((200, 200, 204, 204, 500), 204)
test_status_map((200, 200, 204, 404, 404), 404)
test_status_map((200, 200, 204, 500, 404), 503)
test_status_map((200, 200, 404, 404, 404), 404)
test_status_map((200, 200, 404, 404, 500), 404)
def test_HEAD(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {})
self.app.update_request(req)
res = controller.HEAD(req)
self.assertEquals(res.status[:len(str(expected))],
str(expected))
if expected < 400:
self.assert_('x-works' in res.headers)
self.assertEquals(res.headers['x-works'], 'yes')
self.assert_('accept-ranges' in res.headers)
self.assertEquals(res.headers['accept-ranges'], 'bytes')
test_status_map((200, 200, 200, 404, 404), 200)
test_status_map((200, 200, 200, 500, 404), 200)
test_status_map((200, 200, 304, 500, 404), 304)
test_status_map((200, 200, 404, 404, 404), 404)
test_status_map((200, 200, 404, 404, 500), 404)
test_status_map((200, 200, 500, 500, 500), 503)
def test_HEAD_newest(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
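            # With 'x-newest: true' the proxy consults every replica and
            # returns the response carrying the newest timestamp, rather
            # than the first 2xx it happens to get back.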
def test_status_map(statuses, expected, timestamps,
expected_timestamp):
set_http_connect(*statuses, timestamps=timestamps)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {}, headers={'x-newest': 'true'})
self.app.update_request(req)
res = controller.HEAD(req)
self.assertEquals(res.status[:len(str(expected))],
str(expected))
self.assertEquals(res.headers.get('last-modified'),
expected_timestamp)
# acct cont obj obj obj
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'2', '3'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '2'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
None, None), None)
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
None, '1'), '1')
def test_GET_newest(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected, timestamps,
expected_timestamp):
set_http_connect(*statuses, timestamps=timestamps)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {}, headers={'x-newest': 'true'})
self.app.update_request(req)
res = controller.GET(req)
self.assertEquals(res.status[:len(str(expected))],
str(expected))
self.assertEquals(res.headers.get('last-modified'),
expected_timestamp)
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'2', '3'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '2'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
None, None), None)
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
None, '1'), '1')
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
def test_status_map(statuses, expected, timestamps,
expected_timestamp):
set_http_connect(*statuses, timestamps=timestamps)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {})
self.app.update_request(req)
res = controller.HEAD(req)
self.assertEquals(res.status[:len(str(expected))],
str(expected))
self.assertEquals(res.headers.get('last-modified'),
expected_timestamp)
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'2', '3'), '1')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '2'), '1')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '1'), '1')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
'1', '2'), None)
def test_POST_meta_val_len(self):
with save_globals():
limit = MAX_META_VALUE_LENGTH
self.app.object_post_as_copy = False
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 202, 202, 202)
# acct cont obj obj obj
req = Request.blank('/a/c/o', {}, headers={
'Content-Type': 'foo/bar',
'X-Object-Meta-Foo': 'x' * limit})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 202)
set_http_connect(202, 202, 202)
req = Request.blank('/a/c/o', {}, headers={
'Content-Type': 'foo/bar',
'X-Object-Meta-Foo': 'x' * (limit + 1)})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 400)
def test_POST_as_copy_meta_val_len(self):
with save_globals():
limit = MAX_META_VALUE_LENGTH
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
# acct cont objc objc objc obj obj obj
req = Request.blank('/a/c/o', {}, headers={
'Content-Type': 'foo/bar',
'X-Object-Meta-Foo': 'x' * limit})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 202)
set_http_connect(202, 202, 202)
req = Request.blank('/a/c/o', {}, headers={
'Content-Type': 'foo/bar',
'X-Object-Meta-Foo': 'x' * (limit + 1)})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 400)
def test_POST_meta_key_len(self):
with save_globals():
limit = MAX_META_NAME_LENGTH
self.app.object_post_as_copy = False
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 202, 202, 202)
# acct cont obj obj obj
req = Request.blank('/a/c/o', {}, headers={
'Content-Type': 'foo/bar',
('X-Object-Meta-' + 'x' * limit): 'x'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 202)
set_http_connect(202, 202, 202)
req = Request.blank('/a/c/o', {}, headers={
'Content-Type': 'foo/bar',
('X-Object-Meta-' + 'x' * (limit + 1)): 'x'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 400)
def test_POST_as_copy_meta_key_len(self):
with save_globals():
limit = MAX_META_NAME_LENGTH
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
# acct cont objc objc objc obj obj obj
req = Request.blank('/a/c/o', {}, headers={
'Content-Type': 'foo/bar',
('X-Object-Meta-' + 'x' * limit): 'x'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 202)
set_http_connect(202, 202, 202)
req = Request.blank('/a/c/o', {}, headers={
'Content-Type': 'foo/bar',
('X-Object-Meta-' + 'x' * (limit + 1)): 'x'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 400)
def test_POST_meta_count(self):
with save_globals():
limit = MAX_META_COUNT
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
headers = dict(
(('X-Object-Meta-' + str(i), 'a') for i in xrange(limit + 1)))
headers.update({'Content-Type': 'foo/bar'})
set_http_connect(202, 202, 202)
req = Request.blank('/a/c/o', {}, headers=headers)
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 400)
def test_POST_meta_size(self):
with save_globals():
limit = MAX_META_OVERALL_SIZE
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
count = limit / 256 # enough to cause the limit to be reached
headers = dict(
(('X-Object-Meta-' + str(i), 'a' * 256)
for i in xrange(count + 1)))
headers.update({'Content-Type': 'foo/bar'})
set_http_connect(202, 202, 202)
req = Request.blank('/a/c/o', {}, headers=headers)
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status_int, 400)
def test_PUT_not_autodetect_content_type(self):
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'o.html')
headers = {'Content-Type': 'something/right', 'Content-Length': 0}
it_worked = []
def verify_content_type(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
if path == '/a/c/o.html':
it_worked.append(
headers['Content-Type'].startswith('something/right'))
set_http_connect(204, 204, 201, 201, 201,
give_connect=verify_content_type)
req = Request.blank('/a/c/o.html', {}, headers=headers)
self.app.update_request(req)
res = controller.PUT(req)
self.assertNotEquals(it_worked, [])
self.assertTrue(all(it_worked))
def test_PUT_autodetect_content_type(self):
with save_globals():
controller = proxy_server.ObjectController(
self.app, 'a', 'c', 'o.html')
headers = {'Content-Type': 'something/wrong', 'Content-Length': 0,
'X-Detect-Content-Type': 'True'}
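            # 'X-Detect-Content-Type: True' tells the proxy to ignore the
            # client-supplied Content-Type and guess from the object name,
            # so '.html' should go out as text/html.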
it_worked = []
def verify_content_type(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
if path == '/a/c/o.html':
it_worked.append(
headers['Content-Type'].startswith('text/html'))
set_http_connect(204, 204, 201, 201, 201,
give_connect=verify_content_type)
req = Request.blank('/a/c/o.html', {}, headers=headers)
self.app.update_request(req)
res = controller.PUT(req)
self.assertNotEquals(it_worked, [])
self.assertTrue(all(it_worked))
def test_client_timeout(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.object_ring.get_nodes('account')
for dev in self.app.object_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
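            # SlowBody dribbles the 4-byte request body out one byte per
            # read(), sleeping 0.1s first, so the PUT succeeds with the
            # default client_timeout but returns 408 once client_timeout is
            # dropped to 0.1s below.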
class SlowBody():
def __init__(self):
self.sent = 0
def read(self, size=-1):
if self.sent < 4:
sleep(0.1)
self.sent += 1
return ' '
return ''
req = Request.blank('/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': SlowBody()},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'})
self.app.update_request(req)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
# acct cont obj obj obj
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.app.client_timeout = 0.1
req = Request.blank('/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': SlowBody()},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'})
self.app.update_request(req)
set_http_connect(201, 201, 201)
# obj obj obj
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 408)
def test_client_disconnect(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.object_ring.get_nodes('account')
for dev in self.app.object_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
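            # This body raises as soon as it is read, simulating a client
            # that drops the connection mid-upload; the proxy reports that
            # as a 499.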
class SlowBody():
def __init__(self):
self.sent = 0
def read(self, size=-1):
raise Exception('Disconnected')
req = Request.blank('/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': SlowBody()},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'})
self.app.update_request(req)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
# acct cont obj obj obj
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 499)
def test_node_read_timeout(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.object_ring.get_nodes('account')
for dev in self.app.object_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
self.app.update_request(req)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, slow=True)
req.sent_size = 0
resp = controller.GET(req)
got_exc = False
try:
resp.body
except ChunkReadTimeout:
got_exc = True
self.assert_(not got_exc)
self.app.node_timeout = 0.1
set_http_connect(200, 200, 200, slow=True)
resp = controller.GET(req)
got_exc = False
try:
resp.body
except ChunkReadTimeout:
got_exc = True
self.assert_(got_exc)
def test_node_write_timeout(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.object_ring.get_nodes('account')
for dev in self.app.object_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1
req = Request.blank('/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'},
body=' ')
self.app.update_request(req)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201, slow=True)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.app.node_timeout = 0.1
set_http_connect(201, 201, 201, slow=True)
req = Request.blank('/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'},
body=' ')
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 503)
def test_iter_nodes(self):
with save_globals():
try:
self.app.object_ring.max_more_nodes = 2
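                # iter_nodes yields the primary nodes first, then up to
                # max_more_nodes handoffs; with three primaries and
                # max_more_nodes = 2 that makes five nodes in total.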
controller = proxy_server.ObjectController(self.app, 'account',
'container',
'object')
partition, nodes = self.app.object_ring.get_nodes('account',
'container',
'object')
collected_nodes = []
for node in controller.iter_nodes(self.app.object_ring,
partition):
collected_nodes.append(node)
self.assertEquals(len(collected_nodes), 5)
self.app.object_ring.max_more_nodes = 20
self.app.request_node_count = lambda r: 20
controller = proxy_server.ObjectController(self.app, 'account',
'container',
'object')
partition, nodes = self.app.object_ring.get_nodes('account',
'container',
'object')
collected_nodes = []
for node in controller.iter_nodes(self.app.object_ring,
partition):
collected_nodes.append(node)
self.assertEquals(len(collected_nodes), 9)
self.app.log_handoffs = True
self.app.logger = FakeLogger()
self.app.object_ring.max_more_nodes = 2
controller = proxy_server.ObjectController(self.app, 'account',
'container',
'object')
partition, nodes = self.app.object_ring.get_nodes('account',
'container',
'object')
collected_nodes = []
for node in controller.iter_nodes(self.app.object_ring,
partition):
collected_nodes.append(node)
self.assertEquals(len(collected_nodes), 5)
self.assertEquals(
self.app.logger.log_dict['warning'],
[(('Handoff requested (1)',), {}),
(('Handoff requested (2)',), {})])
self.app.log_handoffs = False
self.app.logger = FakeLogger()
self.app.object_ring.max_more_nodes = 2
controller = proxy_server.ObjectController(self.app, 'account',
'container',
'object')
partition, nodes = self.app.object_ring.get_nodes('account',
'container',
'object')
collected_nodes = []
for node in controller.iter_nodes(self.app.object_ring,
partition):
collected_nodes.append(node)
self.assertEquals(len(collected_nodes), 5)
self.assertEquals(self.app.logger.log_dict['warning'], [])
finally:
self.app.object_ring.max_more_nodes = 0
def test_iter_nodes_calls_sort_nodes(self):
with mock.patch.object(self.app, 'sort_nodes') as sort_nodes:
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
for node in controller.iter_nodes(self.app.object_ring, 0):
pass
sort_nodes.assert_called_once_with(
self.app.object_ring.get_part_nodes(0))
def test_iter_nodes_skips_error_limited(self):
with mock.patch.object(self.app, 'sort_nodes', lambda n: n):
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
first_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
second_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
self.assertTrue(first_nodes[0] in second_nodes)
controller.error_limit(first_nodes[0], 'test')
second_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
self.assertTrue(first_nodes[0] not in second_nodes)
def test_iter_nodes_gives_extra_if_error_limited_inline(self):
with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
mock.patch.object(self.app, 'request_node_count',
lambda r: 6),
mock.patch.object(self.app.object_ring, 'max_more_nodes', 99)):
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
first_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
second_nodes = []
for node in controller.iter_nodes(self.app.object_ring, 0):
if not second_nodes:
controller.error_limit(node, 'test')
second_nodes.append(node)
self.assertEquals(len(first_nodes), 6)
self.assertEquals(len(second_nodes), 7)
def test_iter_nodes_with_custom_node_iter(self):
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
node_list = [dict(id=n) for n in xrange(10)]
with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
mock.patch.object(self.app, 'request_node_count',
lambda r: 3)):
got_nodes = list(controller.iter_nodes(self.app.object_ring, 0,
node_iter=iter(node_list)))
self.assertEqual(node_list[:3], got_nodes)
with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
mock.patch.object(self.app, 'request_node_count',
lambda r: 1000000)):
got_nodes = list(controller.iter_nodes(self.app.object_ring, 0,
node_iter=iter(node_list)))
self.assertEqual(node_list, got_nodes)
def test_best_response_sets_headers(self):
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3,
'Object', headers=[{'X-Test': '1'},
{'X-Test': '2'},
{'X-Test': '3'}])
self.assertEquals(resp.headers['X-Test'], '1')
def test_best_response_sets_etag(self):
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3,
'Object')
self.assertEquals(resp.etag, None)
resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3,
'Object',
etag='68b329da9893e34099c7d8ad5cb9c940'
)
self.assertEquals(resp.etag, '68b329da9893e34099c7d8ad5cb9c940')
def test_proxy_passes_content_type(self):
with save_globals():
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
self.app.update_request(req)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200)
resp = controller.GET(req)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_type, 'x-application/test')
set_http_connect(200, 200, 200)
resp = controller.GET(req)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_length, 0)
set_http_connect(200, 200, 200, slow=True)
resp = controller.GET(req)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_length, 4)
def test_proxy_passes_content_length_on_head(self):
with save_globals():
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200)
resp = controller.HEAD(req)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_length, 0)
set_http_connect(200, 200, 200, slow=True)
resp = controller.HEAD(req)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_length, 4)
def test_error_limiting(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
controller.app.sort_nodes = lambda l: l
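            # 503s bump the failing node's error counter; once it passes
            # error_suppression_limit the node is skipped outright, and a
            # negative error_suppression_interval (used further down)
            # expires the suppression immediately.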
self.assert_status_map(controller.HEAD, (200, 200, 503, 200, 200),
200)
self.assertEquals(controller.app.object_ring.devs[0]['errors'], 2)
self.assert_('last_error' in controller.app.object_ring.devs[0])
for _junk in xrange(self.app.error_suppression_limit):
self.assert_status_map(controller.HEAD, (200, 200, 503, 503,
503), 503)
self.assertEquals(controller.app.object_ring.devs[0]['errors'],
self.app.error_suppression_limit + 1)
self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200),
503)
self.assert_('last_error' in controller.app.object_ring.devs[0])
self.assert_status_map(controller.PUT, (200, 200, 200, 201, 201,
201), 503)
self.assert_status_map(controller.POST,
(200, 200, 200, 200, 200, 200, 202, 202,
202), 503)
self.assert_status_map(controller.DELETE,
(200, 200, 200, 204, 204, 204), 503)
self.app.error_suppression_interval = -300
self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200),
200)
self.assertRaises(BaseException,
self.assert_status_map, controller.DELETE,
(200, 200, 200, 204, 204, 204), 503,
raise_exc=True)
def test_acc_or_con_missing_returns_404(self):
with save_globals():
self.app.memcache = FakeMemcacheReturnsNone()
for dev in self.app.account_ring.devs.values():
del dev['errors']
del dev['last_error']
for dev in self.app.container_ring.devs.values():
del dev['errors']
del dev['last_error']
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 200)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
self.app.update_request(req)
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 200)
set_http_connect(404, 404, 404)
# acct acct acct
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(503, 404, 404)
# acct acct acct
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(503, 503, 404)
# acct acct acct
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(503, 503, 503)
# acct acct acct
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(200, 200, 204, 204, 204)
# acct cont obj obj obj
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 204)
set_http_connect(200, 404, 404, 404)
# acct cont cont cont
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(200, 503, 503, 503)
# acct cont cont cont
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 404)
for dev in self.app.account_ring.devs.values():
dev['errors'] = self.app.error_suppression_limit + 1
dev['last_error'] = time.time()
set_http_connect(200)
# acct [isn't actually called since everything
# is error limited]
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 404)
for dev in self.app.account_ring.devs.values():
dev['errors'] = 0
for dev in self.app.container_ring.devs.values():
dev['errors'] = self.app.error_suppression_limit + 1
dev['last_error'] = time.time()
set_http_connect(200, 200)
# acct cont [isn't actually called since
# everything is error limited]
# make sure to use a fresh request without cached env
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEquals(resp.status_int, 404)
def test_PUT_POST_requires_container_exist(self):
with save_globals():
self.app.object_post_as_copy = False
self.app.memcache = FakeMemcacheReturnsNone()
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 404, 404, 404, 200, 200, 200)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(200, 404, 404, 404, 200, 200)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'text/plain'})
self.app.update_request(req)
resp = controller.POST(req)
self.assertEquals(resp.status_int, 404)
def test_PUT_POST_as_copy_requires_container_exist(self):
with save_globals():
self.app.memcache = FakeMemcacheReturnsNone()
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 404, 404, 404, 200, 200, 200)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(200, 404, 404, 404, 200, 200, 200, 200, 200, 200)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'text/plain'})
self.app.update_request(req)
resp = controller.POST(req)
self.assertEquals(resp.status_int, 404)
def test_bad_metadata(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
# acct cont obj obj obj
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Object-Meta-' + ('a' *
MAX_META_NAME_LENGTH): 'v'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Object-Meta-' + ('a' *
(MAX_META_NAME_LENGTH + 1)): 'v'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Object-Meta-Too-Long': 'a' *
MAX_META_VALUE_LENGTH})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Object-Meta-Too-Long': 'a' *
(MAX_META_VALUE_LENGTH + 1)})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {'Content-Length': '0'}
for x in xrange(MAX_META_COUNT):
headers['X-Object-Meta-%d' % x] = 'v'
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers = {'Content-Length': '0'}
for x in xrange(MAX_META_COUNT + 1):
headers['X-Object-Meta-%d' % x] = 'v'
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {'Content-Length': '0'}
header_value = 'a' * MAX_META_VALUE_LENGTH
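            # Each 'X-Object-Meta-%04d' header counts its 4-character name
            # suffix plus its value length toward MAX_META_OVERALL_SIZE,
            # hence the "4 +" in the size bookkeeping below.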
size = 0
x = 0
while size < MAX_META_OVERALL_SIZE - 4 - \
MAX_META_VALUE_LENGTH:
size += 4 + MAX_META_VALUE_LENGTH
headers['X-Object-Meta-%04d' % x] = header_value
x += 1
if MAX_META_OVERALL_SIZE - size > 1:
headers['X-Object-Meta-a'] = \
'a' * (MAX_META_OVERALL_SIZE - size - 1)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers['X-Object-Meta-a'] = \
'a' * (MAX_META_OVERALL_SIZE - size)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
def test_copy_from(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
# initial source object PUT
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0'})
self.app.update_request(req)
set_http_connect(200, 200, 201, 201, 201)
# acct cont obj obj obj
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
# basic copy
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o'})
self.app.update_request(req)
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
# non-zero content length
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5',
'X-Copy-From': 'c/o'})
self.app.update_request(req)
set_http_connect(200, 200, 200, 200, 200, 200, 200)
# acct cont acct cont objc objc objc
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
# extra source path parsing
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o/o2'})
req.account = 'a'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
            # space in source path
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o%20o2'})
req.account = 'a'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o%20o2')
# repeat tests with leading /
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
self.app.update_request(req)
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o/o2'})
req.account = 'a'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
# negative tests
# invalid x-copy-from path
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c'})
self.app.update_request(req)
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int // 100, 4) # client error
# server error
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
self.app.update_request(req)
set_http_connect(200, 200, 503, 503, 503)
# acct cont objc objc objc
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 503)
# not found
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
self.app.update_request(req)
set_http_connect(200, 200, 404, 404, 404)
# acct cont objc objc objc
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 404)
# some missing containers
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
self.app.update_request(req)
set_http_connect(200, 200, 404, 404, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
# test object meta data
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Object-Meta-Ours': 'okay'})
self.app.update_request(req)
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers.get('x-object-meta-test'),
'testing')
self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEquals(resp.headers.get('x-delete-at'), '9876543210')
# copy-from object is too large to fit in target object
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
self.app.update_request(req)
class LargeResponseBody(object):
def __len__(self):
return MAX_FILE_SIZE + 1
def __getitem__(self, key):
return ''
copy_from_obj_body = LargeResponseBody()
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201,
body=copy_from_obj_body)
self.app.memcache.store = {}
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 413)
def test_COPY(self):
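        """Test basic COPY requests, destination parsing and error paths."""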
with save_globals():
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0'})
req.account = 'a'
set_http_connect(200, 200, 201, 201, 201)
# acct cont obj obj obj
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c/o'})
req.account = 'a'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
req = Request.blank('/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c/o'})
req.account = 'a'
controller.object_name = 'o/o2'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o')
req = Request.blank('/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
req.account = 'a'
controller.object_name = 'o/o2'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200)
# acct cont
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 412)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 503, 503, 503)
# acct cont objc objc objc
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 503)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 404, 404, 404)
# acct cont objc objc objc
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 404, 404, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o',
'X-Object-Meta-Ours': 'okay'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers.get('x-object-meta-test'),
'testing')
self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEquals(resp.headers.get('x-delete-at'), '9876543210')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
self.app.update_request(req)
class LargeResponseBody(object):
def __len__(self):
return MAX_FILE_SIZE + 1
def __getitem__(self, key):
return ''
copy_from_obj_body = LargeResponseBody()
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201,
body=copy_from_obj_body)
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 413)
def test_COPY_newest(self):
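        """Test that COPY reads from the source node with the newest timestamp."""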
with save_globals():
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201,
                             # acct cont objc objc objc obj obj obj
timestamps=('1', '1', '1', '3', '2', '4', '4',
'4'))
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-copied-from-last-modified'],
'3')
def test_COPY_delete_at(self):
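        """Test that COPY propagates X-Delete-At* headers to the backend PUT."""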
with save_globals():
given_headers = {}
def fake_connect_put_node(nodes, part, path, headers,
logger_thread_locals):
given_headers.update(headers)
controller = proxy_server.ObjectController(self.app, 'a',
'c', 'o')
controller._connect_put_node = fake_connect_put_node
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
self.app.update_request(req)
controller.COPY(req)
self.assertEquals(given_headers.get('X-Delete-At'), '9876543210')
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
def test_chunked_put(self):
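        """Test PUT of a chunked body, including the MAX_FILE_SIZE limit."""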
        class ChunkedFile(object):
def __init__(self, bytes):
self.bytes = bytes
self.read_bytes = 0
@property
def bytes_left(self):
return self.bytes - self.read_bytes
def read(self, amt=None):
if self.read_bytes >= self.bytes:
raise StopIteration()
if not amt:
amt = self.bytes_left
data = 'a' * min(amt, self.bytes_left)
self.read_bytes += len(data)
return data
with save_globals():
set_http_connect(201, 201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Transfer-Encoding': 'chunked',
                                         'Content-Type': 'foo/bar'})
req.body_file = ChunkedFile(10)
self.app.memcache.store = {}
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status_int // 100, 2) # success
            # test 413 Request Entity Too Large
set_http_connect(201, 201, 201, 201)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Transfer-Encoding': 'chunked',
                                         'Content-Type': 'foo/bar'})
req.body_file = ChunkedFile(11)
self.app.memcache.store = {}
self.app.update_request(req)
try:
swift.proxy.controllers.obj.MAX_FILE_SIZE = 10
res = controller.PUT(req)
self.assertEquals(res.status_int, 413)
finally:
swift.proxy.controllers.obj.MAX_FILE_SIZE = MAX_FILE_SIZE
def test_chunked_put_bad_version(self):
# Check bad version
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v0 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEquals(headers[:len(exp)], exp)
def test_chunked_put_bad_path(self):
# Check bad path
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET invalid HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 404'
self.assertEquals(headers[:len(exp)], exp)
def test_chunked_put_bad_utf8(self):
# Check invalid utf-8
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a%80 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEquals(headers[:len(exp)], exp)
def test_chunked_put_bad_path_no_controller(self):
# Check bad path, no controller
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEquals(headers[:len(exp)], exp)
def test_chunked_put_bad_method(self):
# Check bad method
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('LICK /v1/a HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 405'
self.assertEquals(headers[:len(exp)], exp)
def test_chunked_put_unhandled_exception(self):
# Check unhandled exception
(prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv,
obj2srv) = _test_servers
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
orig_update_request = prosrv.update_request
def broken_update_request(*args, **kwargs):
raise Exception('fake: this should be printed')
prosrv.update_request = broken_update_request
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 500'
self.assertEquals(headers[:len(exp)], exp)
prosrv.update_request = orig_update_request
def test_chunked_put_head_account(self):
        # HEAD the account; mostly a double check, but mainly here to
        # exercise the part of Application.log_request that 'enforces' a
        # content_length on the response.
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 204'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('\r\nContent-Length: 0\r\n' in headers)
def test_chunked_put_utf8_all_the_way_down(self):
# Test UTF-8 Unicode all the way through the system
ustr = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xba \xe1\xbc\xb0\xce' \
'\xbf\xe1\xbd\xbb\xce\x87 \xcf\x84\xe1\xbd\xb0 \xcf' \
'\x80\xe1\xbd\xb1\xce\xbd\xcf\x84\xca\xbc \xe1\xbc' \
'\x82\xce\xbd \xe1\xbc\x90\xce\xbe\xe1\xbd\xb5\xce' \
'\xba\xce\xbf\xce\xb9 \xcf\x83\xce\xb1\xcf\x86\xe1' \
'\xbf\x86.Test'
ustr_short = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xbatest'
# Create ustr container
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n' % quote(ustr))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# List account with ustr container (test plain)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
containers = fd.read().split('\n')
self.assert_(ustr in containers)
# List account with ustr container (test json)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a?format=json HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
listing = simplejson.loads(fd.read())
self.assert_(ustr.decode('utf8') in [l['name'] for l in listing])
# List account with ustr container (test xml)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a?format=xml HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('<name>%s</name>' % ustr in fd.read())
# Create ustr object with ustr metadata in ustr container
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'X-Object-Meta-%s: %s\r\nContent-Length: 0\r\n\r\n' %
(quote(ustr), quote(ustr), quote(ustr_short),
quote(ustr)))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# List ustr container with ustr object (test plain)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n' % quote(ustr))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
objects = fd.read().split('\n')
self.assert_(ustr in objects)
# List ustr container with ustr object (test json)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s?format=json HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' %
quote(ustr))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
listing = simplejson.loads(fd.read())
self.assertEquals(listing[0]['name'], ustr.decode('utf8'))
# List ustr container with ustr object (test xml)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s?format=xml HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' %
quote(ustr))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('<name>%s</name>' % ustr in fd.read())
# Retrieve ustr object with ustr metadata
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n' %
(quote(ustr), quote(ustr)))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('\r\nX-Object-Meta-%s: %s\r\n' %
(quote(ustr_short).lower(), quote(ustr)) in headers)
def test_chunked_put_chunked_put(self):
# Do chunked object put
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
# Also happens to assert that x-storage-token is taken as a
# replacement for x-auth-token.
fd.write('PUT /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\noh\r\n4\r\n hai\r\nf\r\n123456789abcdef\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Ensure we get what we put
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
body = fd.read()
self.assertEquals(body, 'oh hai123456789abcdef')
def test_version_manifest(self):
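        """Test object versioning end-to-end via X-Versions-Location."""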
versions_to_create = 3
# Create a container for our versioned object testing
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/versions HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\nX-Versions-Location: vers\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# check that the header was set
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/versions HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEquals(headers[:len(exp)], exp)
self.assert_('X-Versions-Location: vers' in headers)
# make the container for the object versions
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/vers HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Create the versioned file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n'
'X-Object-Meta-Foo: barbaz\r\n\r\n00000\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Create the object versions
for segment in xrange(1, versions_to_create):
sleep(.01) # guarantee that the timestamp changes
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\nContent-Type: text/jibberish%s'
'\r\n\r\n%05d\r\n' % (segment, segment))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Ensure retrieving the manifest file gets the latest version
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('Content-Type: text/jibberish%s' % segment in headers)
self.assert_('X-Object-Meta-Foo: barbaz' not in headers)
body = fd.read()
self.assertEquals(body, '%05d' % segment)
# Ensure we have the right number of versions saved
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
body = fd.read()
versions = [x for x in body.split('\n') if x]
self.assertEquals(len(versions), versions_to_create - 1)
# copy a version and make sure the version info is stripped
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('COPY /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: '
't\r\nDestination: versions/copied_name\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response to the COPY
self.assertEquals(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/versions/copied_name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
body = fd.read()
self.assertEquals(body, '%05d' % segment)
# post and make sure it's updated
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('POST /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: '
't\r\nContent-Type: foo/bar\r\nContent-Length: 0\r\n'
'X-Object-Meta-Bar: foo\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response to the POST
self.assertEquals(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('Content-Type: foo/bar' in headers)
self.assert_('X-Object-Meta-Bar: foo' in headers)
body = fd.read()
self.assertEquals(body, '%05d' % segment)
# Delete the object versions
for segment in xrange(versions_to_create - 1, 0, -1):
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('DELETE /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r'
'\nConnection: close\r\nX-Storage-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEquals(headers[:len(exp)], exp)
# Ensure retrieving the manifest file gets the latest version
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('Content-Type: text/jibberish%s' % (segment - 1)
in headers)
body = fd.read()
self.assertEquals(body, '%05d' % (segment - 1))
# Ensure we have the right number of versions saved
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r'
'\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEquals(headers[:len(exp)], exp)
body = fd.read()
versions = [x for x in body.split('\n') if x]
self.assertEquals(len(versions), segment - 1)
# there is now one segment left (in the manifest)
# Ensure we have no saved versions
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 204 No Content'
self.assertEquals(headers[:len(exp)], exp)
        # delete the last version
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('DELETE /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEquals(headers[:len(exp)], exp)
# Ensure it's all gone
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 404'
self.assertEquals(headers[:len(exp)], exp)
# make sure manifest files don't get versioned
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 0\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\nX-Object-Manifest: vers/foo_\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Ensure we have no saved versions
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 204 No Content'
self.assertEquals(headers[:len(exp)], exp)
# DELETE v1/a/c/obj shouldn't delete v1/a/c/obj/sub versions
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\n\r\n00000\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\n\r\n00001\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/versions/name/sub HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\n\r\nsub1\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/versions/name/sub HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\n\r\nsub2\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('DELETE /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEquals(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/vers?prefix=008name/sub/ HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEquals(headers[:len(exp)], exp)
body = fd.read()
versions = [x for x in body.split('\n') if x]
self.assertEquals(len(versions), 1)
# Check for when the versions target container doesn't exist
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/whoops HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\nX-Versions-Location: none\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Create the versioned file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/whoops/foo HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\n\r\n00000\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
        # Attempt another version; fails since the versions container is missing
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/whoops/foo HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\n\r\n00001\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEquals(headers[:len(exp)], exp)
# Delete the object
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('DELETE /v1/a/whoops/foo HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx response
self.assertEquals(headers[:len(exp)], exp)
def test_chunked_put_lobjects_with_nonzero_size_manifest_file(self):
# Create a container for our segmented/manifest object testing
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \
_test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented_nonzero HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Create the object segments
segment_etags = []
for segment in xrange(5):
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented_nonzero/name/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\n\r\n1234 ' % str(segment))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
segment_etags.append(md5('1234 ').hexdigest())
# Create the nonzero size manifest file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\n\r\nabcd ')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Create the object manifest file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('POST /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: t\r\n'
'X-Object-Manifest: segmented_nonzero/name/\r\n'
'Foo: barbaz\r\nContent-Type: text/jibberish\r\n'
'\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 202'
self.assertEquals(headers[:len(exp)], exp)
# Ensure retrieving the manifest file gets the whole object
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: '
't\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('X-Object-Manifest: segmented_nonzero/name/' in headers)
self.assert_('Content-Type: text/jibberish' in headers)
self.assert_('Foo: barbaz' in headers)
expected_etag = md5(''.join(segment_etags)).hexdigest()
self.assert_('Etag: "%s"' % expected_etag in headers)
body = fd.read()
self.assertEquals(body, '1234 1234 1234 1234 1234 ')
        # Get the large object with a Range within the manifest file's size
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n'
'Range: bytes=0-4\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 206'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('X-Object-Manifest: segmented_nonzero/name/' in headers)
self.assert_('Content-Type: text/jibberish' in headers)
self.assert_('Foo: barbaz' in headers)
expected_etag = md5(''.join(segment_etags)).hexdigest()
body = fd.read()
self.assertEquals(body, '1234 ')
        # Get the large object with a Range beyond the manifest file's size
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n'
'Range: bytes=11-15\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 206'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('X-Object-Manifest: segmented_nonzero/name/' in headers)
self.assert_('Content-Type: text/jibberish' in headers)
self.assert_('Foo: barbaz' in headers)
expected_etag = md5(''.join(segment_etags)).hexdigest()
body = fd.read()
self.assertEquals(body, '234 1')
def test_chunked_put_lobjects(self):
# Create a container for our segmented/manifest object testing
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented%20object HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 0\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Create the object segments
segment_etags = []
for segment in xrange(5):
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented%%20object/object%%20name/%s '
'HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 5\r\n'
'\r\n'
'1234 ' % str(segment))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
segment_etags.append(md5('1234 ').hexdigest())
# Create the object manifest file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented%20object/object%20name HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 0\r\n'
'X-Object-Manifest: segmented%20object/object%20name/\r\n'
'Content-Type: text/jibberish\r\n'
'Foo: barbaz\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Check retrieving the listing the manifest would retrieve
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/segmented%20object?prefix=object%20name/ '
'HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
body = fd.read()
self.assertEquals(
body,
'object name/0\n'
'object name/1\n'
'object name/2\n'
'object name/3\n'
'object name/4\n')
# Ensure retrieving the manifest file gets the whole object
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/segmented%20object/object%20name HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('X-Object-Manifest: segmented%20object/object%20name/' in
headers)
self.assert_('Content-Type: text/jibberish' in headers)
self.assert_('Foo: barbaz' in headers)
expected_etag = md5(''.join(segment_etags)).hexdigest()
self.assert_('Etag: "%s"' % expected_etag in headers)
body = fd.read()
self.assertEquals(body, '1234 1234 1234 1234 1234 ')
# Do it again but exceeding the container listing limit
swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = 2
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/segmented%20object/object%20name HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('X-Object-Manifest: segmented%20object/object%20name/' in
headers)
self.assert_('Content-Type: text/jibberish' in headers)
body = fd.read()
        # A somewhat fragile test, as it assumes the whole body is
        # sent back in a single chunk.
self.assertEquals(
body, '19\r\n1234 1234 1234 1234 1234 \r\n0\r\n\r\n')
# Make a copy of the manifested object, which should
# error since the number of segments exceeds
# CONTAINER_LISTING_LIMIT.
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented%20object/copy HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'X-Copy-From: segmented%20object/object%20name\r\n'
'Content-Length: 0\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 413'
self.assertEquals(headers[:len(exp)], exp)
body = fd.read()
# After adjusting the CONTAINER_LISTING_LIMIT, make a copy of
# the manifested object which should consolidate the segments.
swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = 10000
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented%20object/copy HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'X-Copy-From: segmented%20object/object%20name\r\n'
'Content-Length: 0\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
body = fd.read()
# Retrieve and validate the copy.
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/segmented%20object/copy HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('x-object-manifest:' not in headers.lower())
self.assert_('Content-Length: 25\r' in headers)
body = fd.read()
self.assertEquals(body, '1234 1234 1234 1234 1234 ')
# Create an object manifest file pointing to nothing
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/segmented%20object/empty HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 0\r\n'
'X-Object-Manifest: segmented%20object/empty/\r\n'
'Content-Type: text/jibberish\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Ensure retrieving the manifest file gives a zero-byte file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/segmented%20object/empty HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('X-Object-Manifest: segmented%20object/empty/' in headers)
self.assert_('Content-Type: text/jibberish' in headers)
body = fd.read()
self.assertEquals(body, '')
# Check copy content type
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/obj HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 0\r\n'
'Content-Type: text/jibberish\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/obj2 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 0\r\n'
'X-Copy-From: c/obj\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
# Ensure getting the copied file gets original content-type
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/obj2 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('Content-Type: text/jibberish' in headers)
# Check set content type
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/obj3 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 0\r\n'
'Content-Type: foo/bar\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
        # Ensure getting the file returns the content type that was set
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/obj3 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('Content-Type: foo/bar' in
headers.split('\r\n'), repr(headers.split('\r\n')))
# Check set content type with charset
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/obj4 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 0\r\n'
'Content-Type: foo/bar; charset=UTF-8\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEquals(headers[:len(exp)], exp)
        # Ensure getting the file returns the content type with its charset
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/obj4 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEquals(headers[:len(exp)], exp)
self.assert_('Content-Type: foo/bar; charset=UTF-8' in
headers.split('\r\n'), repr(headers.split('\r\n')))
def test_mismatched_etags(self):
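        """Test PUT error handling when object servers report mismatched etags."""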
with save_globals():
# no etag supplied, object servers return success w/ diff values
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0'})
self.app.update_request(req)
set_http_connect(200, 201, 201, 201,
etags=[None,
'68b329da9893e34099c7d8ad5cb9c940',
'68b329da9893e34099c7d8ad5cb9c940',
'68b329da9893e34099c7d8ad5cb9c941'])
resp = controller.PUT(req)
self.assertEquals(resp.status_int // 100, 5) # server error
# req supplies etag, object servers return 422 - mismatch
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'Content-Length': '0',
'ETag': '68b329da9893e34099c7d8ad5cb9c940',
})
self.app.update_request(req)
set_http_connect(200, 422, 422, 503,
etags=['68b329da9893e34099c7d8ad5cb9c940',
'68b329da9893e34099c7d8ad5cb9c941',
None,
None])
resp = controller.PUT(req)
self.assertEquals(resp.status_int // 100, 4) # client error
def test_response_get_accept_ranges_header(self):
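        """Test that GET responses include Accept-Ranges: bytes."""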
with save_globals():
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
self.app.update_request(req)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200)
resp = controller.GET(req)
self.assert_('accept-ranges' in resp.headers)
self.assertEquals(resp.headers['accept-ranges'], 'bytes')
def test_response_head_accept_ranges_header(self):
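        """Test that HEAD responses include Accept-Ranges: bytes."""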
with save_globals():
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200)
resp = controller.HEAD(req)
self.assert_('accept-ranges' in resp.headers)
self.assertEquals(resp.headers['accept-ranges'], 'bytes')
def test_GET_calls_authorize(self):
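        """Test that GET calls the swift.authorize callback."""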
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.GET(req)
self.assert_(called[0])
def test_HEAD_calls_authorize(self):
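        """Test that HEAD calls the swift.authorize callback."""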
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', {'REQUEST_METHOD': 'HEAD'})
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.HEAD(req)
self.assert_(called[0])
def test_POST_calls_authorize(self):
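        """Test that POST calls the swift.authorize callback."""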
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
self.app.object_post_as_copy = False
set_http_connect(200, 200, 201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Length': '5'}, body='12345')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.POST(req)
self.assert_(called[0])
def test_POST_as_copy_calls_authorize(self):
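        """Test that POST-as-copy calls the swift.authorize callback."""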
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Length': '5'}, body='12345')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.POST(req)
self.assert_(called[0])
def test_PUT_calls_authorize(self):
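        """Test that PUT calls the swift.authorize callback."""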
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5'}, body='12345')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.PUT(req)
self.assert_(called[0])
def test_COPY_calls_authorize(self):
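        """Test that COPY calls the swift.authorize callback."""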
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c/o'})
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.COPY(req)
self.assert_(called[0])
def test_POST_converts_delete_after_to_delete_at(self):
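        """Test that POST converts X-Delete-After into X-Delete-At."""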
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
self.app.memcache.store = {}
orig_time = time.time
try:
t = time.time()
time.time = lambda: t
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '60'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status, '202 Fake')
self.assertEquals(req.headers.get('x-delete-at'),
str(int(t + 60)))
self.app.object_post_as_copy = False
controller = proxy_server.ObjectController(self.app, 'account',
'container',
'object')
set_http_connect(200, 200, 202, 202, 202)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '60'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status, '202 Fake')
self.assertEquals(req.headers.get('x-delete-at'),
str(int(t + 60)))
finally:
time.time = orig_time
def test_POST_non_int_delete_after(self):
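        """Test that POST returns 400 for a non-integer X-Delete-After."""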
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '60.1'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status, '400 Bad Request')
self.assertTrue('Non-integer X-Delete-After' in res.body)
def test_POST_negative_delete_after(self):
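        """Test that POST returns 400 for a negative X-Delete-After."""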
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '-60'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEquals(res.status, '400 Bad Request')
self.assertTrue('X-Delete-At in past' in res.body)
def test_POST_delete_at(self):
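        """Test POST X-Delete-At handling, including non-integer and past values."""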
with save_globals():
given_headers = {}
def fake_make_requests(req, ring, part, method, path, headers,
query_string=''):
given_headers.update(headers[0])
self.app.object_post_as_copy = False
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
controller.make_requests = fake_make_requests
set_http_connect(200, 200)
self.app.memcache.store = {}
t = str(int(time.time() + 100))
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
controller.POST(req)
self.assertEquals(given_headers.get('X-Delete-At'), t)
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
t = str(int(time.time() + 100)) + '.1'
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
resp = controller.POST(req)
self.assertEquals(resp.status_int, 400)
self.assertTrue('Non-integer X-Delete-At' in resp.body)
t = str(int(time.time() - 100))
req = Request.blank('/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
resp = controller.POST(req)
self.assertEquals(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_PUT_converts_delete_after_to_delete_at(self):
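        """Test that PUT converts X-Delete-After into X-Delete-At."""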
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
self.app.memcache.store = {}
orig_time = time.time
try:
t = time.time()
time.time = lambda: t
req = Request.blank('/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-After': '60'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status, '201 Fake')
self.assertEquals(req.headers.get('x-delete-at'),
str(int(t + 60)))
finally:
time.time = orig_time
def test_PUT_non_int_delete_after(self):
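        """Test that PUT returns 400 for a non-integer X-Delete-After."""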
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-After': '60.1'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status, '400 Bad Request')
self.assertTrue('Non-integer X-Delete-After' in res.body)
def test_PUT_negative_delete_after(self):
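        """Test that PUT returns 400 for a negative X-Delete-After."""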
with save_globals():
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
self.app.memcache.store = {}
req = Request.blank('/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-After': '-60'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status, '400 Bad Request')
self.assertTrue('X-Delete-At in past' in res.body)
def test_PUT_delete_at(self):
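        """Test PUT X-Delete-At handling, including non-integer and past values."""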
with save_globals():
given_headers = {}
def fake_connect_put_node(nodes, part, path, headers,
logger_thread_locals):
given_headers.update(headers)
controller = proxy_server.ObjectController(self.app, 'account',
'container', 'object')
controller._connect_put_node = fake_connect_put_node
set_http_connect(200, 200)
self.app.memcache.store = {}
t = str(int(time.time() + 100))
req = Request.blank('/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
controller.PUT(req)
self.assertEquals(given_headers.get('X-Delete-At'), t)
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
t = str(int(time.time() + 100)) + '.1'
req = Request.blank('/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
self.assertTrue('Non-integer X-Delete-At' in resp.body)
t = str(int(time.time() - 100))
req = Request.blank('/a/c/o', {},
headers={'Content-Length': '0',
'Content-Type': 'foo/bar',
'X-Delete-At': t})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEquals(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_leak_1(self):
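        """Test that Request objects aren't leaked on early client disconnect."""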
global _request_instances
prolis = _test_sockets[0]
prosrv = _test_servers[0]
obj_len = prosrv.client_chunk_size * 2
# PUT test file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/test_leak_1 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (obj_len, 'a' * obj_len))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# Remember Request instance count
before_request_instances = _request_instances
# GET test file, but disconnect early
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/test_leak_1 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
fd.read(1)
fd.close()
sock.close()
self.assertEquals(before_request_instances, _request_instances)
def test_OPTIONS(self):
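        """Test OPTIONS responses for various CORS container configurations."""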
with save_globals():
controller = proxy_server.ObjectController(self.app, 'a',
'c', 'o.jpg')
def my_empty_container_info(*args):
return {}
controller.container_info = my_empty_container_info
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEquals(401, resp.status_int)
def my_empty_origin_container_info(*args):
return {'cors': {'allow_origin': None}}
controller.container_info = my_empty_origin_container_info
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEquals(401, resp.status_int)
def my_container_info(*args):
return {
'cors': {
'allow_origin': 'http://foo.bar:8080 https://foo.bar',
'max_age': '999',
}
}
controller.container_info = my_container_info
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://foo.bar',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
self.assertEquals(
'https://foo.bar',
resp.headers['access-control-allow-origin'])
for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['access-control-allow-methods'])
self.assertEquals(
len(resp.headers['access-control-allow-methods'].split(', ')),
7)
self.assertEquals('999', resp.headers['access-control-max-age'])
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://foo.bar'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(401, resp.status_int)
req = Request.blank('/a/c/o.jpg', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEquals(len(resp.headers['Allow'].split(', ')), 7)
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com'})
resp = controller.OPTIONS(req)
self.assertEquals(401, resp.status_int)
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.bar',
'Access-Control-Request-Method': 'GET'})
controller.app.cors_allow_origin = ['http://foo.bar', ]
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
def my_container_info_wildcard(*args):
return {
'cors': {
'allow_origin': '*',
'max_age': '999',
}
}
controller.container_info = my_container_info_wildcard
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://bar.baz',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
self.assertEquals(
'https://bar.baz',
resp.headers['access-control-allow-origin'])
for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['access-control-allow-methods'])
self.assertEquals(
len(resp.headers['access-control-allow-methods'].split(', ')),
7)
self.assertEquals('999', resp.headers['access-control-max-age'])
def test_CORS_valid(self):
with save_globals():
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
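            # cors_validation wraps the GET handler and decorates the
            # response with the Access-Control-* headers for the origin.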
def stubContainerInfo(*args):
return {
'cors': {
'allow_origin': 'http://foo.bar'
}
}
controller.container_info = stubContainerInfo
def objectGET(controller, req):
return Response(headers={
'X-Object-Meta-Color': 'red',
'X-Super-Secret': 'hush',
})
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'GET'},
headers={'Origin': 'http://foo.bar'})
resp = cors_validation(objectGET)(controller, req)
self.assertEquals(200, resp.status_int)
self.assertEquals('http://foo.bar',
resp.headers['access-control-allow-origin'])
self.assertEquals('red', resp.headers['x-object-meta-color'])
# X-Super-Secret is in the response, but not "exposed"
self.assertEquals('hush', resp.headers['x-super-secret'])
self.assertTrue('access-control-expose-headers' in resp.headers)
exposed = set(
h.strip() for h in
resp.headers['access-control-expose-headers'].split(','))
expected_exposed = set(['cache-control', 'content-language',
'content-type', 'expires', 'last-modified',
'pragma', 'etag', 'x-timestamp',
'x-trans-id', 'x-object-meta-color'])
self.assertEquals(expected_exposed, exposed)
def _gather_x_container_headers(self, controller_call, req, *connect_args,
**kwargs):
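        # Capture the X-Container-* backend headers the object controller
        # attaches to each object-server request, so the tests can assert
        # exactly which container replicas each object node must update.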
header_list = kwargs.pop('header_list', ['X-Container-Device',
'X-Container-Host',
'X-Container-Partition'])
seen_headers = []
def capture_headers(ipaddr, port, device, partition, method,
path, headers=None, query_string=None):
captured = {}
for header in header_list:
captured[header] = headers.get(header)
seen_headers.append(captured)
with save_globals():
self.app.allow_account_management = True
set_http_connect(*connect_args, give_connect=capture_headers,
**kwargs)
resp = controller_call(req)
self.assertEqual(2, resp.status_int // 100) # sanity check
# don't care about the account/container HEADs, so chuck
# the first two requests
return sorted(seen_headers[2:],
key=lambda d: d.get(header_list[0]) or 'z')
def test_PUT_x_container_headers_with_equal_replicas(self):
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5'}, body='12345')
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
self.assertEqual(seen_headers, [
{'X-Container-Host': '10.0.0.0:1000',
'X-Container-Partition': '1',
'X-Container-Device': 'sda'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '1',
'X-Container-Device': 'sdb'},
{'X-Container-Host': '10.0.0.2:1002',
'X-Container-Partition': '1',
'X-Container-Device': 'sdc'}])
def test_PUT_x_container_headers_with_fewer_container_replicas(self):
self.app.container_ring.set_replicas(2)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5'}, body='12345')
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
self.assertEqual(seen_headers, [
{'X-Container-Host': '10.0.0.0:1000',
'X-Container-Partition': '1',
'X-Container-Device': 'sda'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '1',
'X-Container-Device': 'sdb'},
{'X-Container-Host': None,
'X-Container-Partition': None,
'X-Container-Device': None}])
def test_PUT_x_container_headers_with_more_container_replicas(self):
self.app.container_ring.set_replicas(4)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5'}, body='12345')
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
self.assertEqual(seen_headers, [
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Container-Partition': '1',
'X-Container-Device': 'sda,sdd'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '1',
'X-Container-Device': 'sdb'},
{'X-Container-Host': '10.0.0.2:1002',
'X-Container-Partition': '1',
'X-Container-Device': 'sdc'}])
def test_POST_x_container_headers_with_more_container_replicas(self):
self.app.container_ring.set_replicas(4)
self.app.object_post_as_copy = False
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'application/stuff'})
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.POST, req,
200, 200, 200, 200, 200) # HEAD HEAD POST POST POST
self.assertEqual(seen_headers, [
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Container-Partition': '1',
'X-Container-Device': 'sda,sdd'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '1',
'X-Container-Device': 'sdb'},
{'X-Container-Host': '10.0.0.2:1002',
'X-Container-Partition': '1',
'X-Container-Device': 'sdc'}])
def test_DELETE_x_container_headers_with_more_container_replicas(self):
self.app.container_ring.set_replicas(4)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'Content-Type': 'application/stuff'})
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.DELETE, req,
200, 200, 200, 200, 200) # HEAD HEAD DELETE DELETE DELETE
self.assertEqual(seen_headers, [
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Container-Partition': '1',
'X-Container-Device': 'sda,sdd'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '1',
'X-Container-Device': 'sdb'},
{'X-Container-Host': '10.0.0.2:1002',
'X-Container-Partition': '1',
'X-Container-Device': 'sdc'}
])
@mock.patch('time.time', new=lambda: STATIC_TIME)
def test_PUT_x_delete_at_with_fewer_container_replicas(self):
self.app.container_ring.set_replicas(2)
delete_at_timestamp = int(time.time()) + 100000
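        # The expiring-objects container name is the delete-at timestamp
        # rounded down to a multiple of the divisor (integer division).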
delete_at_container = str(
delete_at_timestamp /
self.app.expiring_objects_container_divisor *
self.app.expiring_objects_container_divisor)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Type': 'application/stuff',
'Content-Length': '0',
'X-Delete-At': str(delete_at_timestamp)})
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201, # HEAD HEAD PUT PUT PUT
header_list=('X-Delete-At-Host', 'X-Delete-At-Device',
'X-Delete-At-Partition', 'X-Delete-At-Container'))
self.assertEqual(seen_headers, [
{'X-Delete-At-Host': '10.0.0.0:1000',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '1',
'X-Delete-At-Device': 'sda'},
{'X-Delete-At-Host': '10.0.0.1:1001',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '1',
'X-Delete-At-Device': 'sdb'},
{'X-Delete-At-Host': None,
'X-Delete-At-Container': None,
'X-Delete-At-Partition': None,
'X-Delete-At-Device': None}
])
@mock.patch('time.time', new=lambda: STATIC_TIME)
def test_PUT_x_delete_at_with_more_container_replicas(self):
self.app.container_ring.set_replicas(4)
self.app.expiring_objects_account = 'expires'
self.app.expiring_objects_container_divisor = 60
delete_at_timestamp = int(time.time()) + 100000
delete_at_container = str(
delete_at_timestamp /
self.app.expiring_objects_container_divisor *
self.app.expiring_objects_container_divisor)
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Type': 'application/stuff',
'Content-Length': 0,
'X-Delete-At': str(delete_at_timestamp)})
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201, # HEAD HEAD PUT PUT PUT
header_list=('X-Delete-At-Host', 'X-Delete-At-Device',
'X-Delete-At-Partition', 'X-Delete-At-Container'))
self.assertEqual(seen_headers, [
{'X-Delete-At-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '1',
'X-Delete-At-Device': 'sda,sdd'},
{'X-Delete-At-Host': '10.0.0.1:1001',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '1',
'X-Delete-At-Device': 'sdb'},
{'X-Delete-At-Host': '10.0.0.2:1002',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '1',
'X-Delete-At-Device': 'sdc'}
])
class TestContainerController(unittest.TestCase):
"Test swift.proxy_server.ContainerController"
def setUp(self):
self.app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing(),
object_ring=FakeRing())
def test_transfer_headers(self):
src_headers = {'x-remove-versions-location': 'x',
'x-container-read': '*:user'}
dst_headers = {'x-versions-location': 'backup'}
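        # x-remove-versions-location must be transferred as an empty
        # x-versions-location, clearing the destination's setting, while
        # ordinary headers such as x-container-read pass through intact.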
controller = swift.proxy.controllers.ContainerController(self.app,
'a', 'c')
controller.transfer_headers(src_headers, dst_headers)
expected_headers = {'x-versions-location': '',
'x-container-read': '*:user'}
self.assertEqual(dst_headers, expected_headers)
def assert_status_map(self, method, statuses, expected,
raise_exc=False, missing_container=False):
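        # Drive the given controller method against canned backend
        # statuses and check the proxy's resulting status; every case is
        # run twice, without and with a trailing slash on the path.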
with save_globals():
kwargs = {}
if raise_exc:
kwargs['raise_exc'] = raise_exc
kwargs['missing_container'] = missing_container
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a/c', headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.app.update_request(req)
res = method(req)
self.assertEquals(res.status_int, expected)
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a/c/', headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.app.update_request(req)
res = method(req)
self.assertEquals(res.status_int, expected)
def test_HEAD_GET(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
def test_status_map(statuses, expected,
c_expected=None, a_expected=None, **kwargs):
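                # c_expected/a_expected describe what should end up cached
                # in the request environ for the container and account.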
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a/c', {})
self.app.update_request(req)
res = controller.HEAD(req)
self.assertEquals(res.status[:len(str(expected))],
str(expected))
if expected < 400:
self.assert_('x-works' in res.headers)
self.assertEquals(res.headers['x-works'], 'yes')
if c_expected:
self.assertTrue('swift.container/a/c' in res.environ)
self.assertEquals(res.environ['swift.container/a/c']['status'],
c_expected)
else:
self.assertTrue('swift.container/a/c' not in res.environ)
if a_expected:
self.assertTrue('swift.account/a' in res.environ)
self.assertEquals(res.environ['swift.account/a']['status'],
a_expected)
else:
self.assertTrue('swift.account/a' not in res.environ)
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a/c', {})
self.app.update_request(req)
res = controller.GET(req)
self.assertEquals(res.status[:len(str(expected))],
str(expected))
if expected < 400:
self.assert_('x-works' in res.headers)
self.assertEquals(res.headers['x-works'], 'yes')
if c_expected:
self.assertTrue('swift.container/a/c' in res.environ)
self.assertEquals(res.environ['swift.container/a/c']['status'],
c_expected)
else:
self.assertTrue('swift.container/a/c' not in res.environ)
if a_expected:
self.assertTrue('swift.account/a' in res.environ)
self.assertEquals(res.environ['swift.account/a']['status'],
a_expected)
else:
self.assertTrue('swift.account/a' not in res.environ)
            # In all the following tests the account lookup returns 200 and
            # is cached; what gets cached for the container varies per case.
            # return 200 and cache 200 for account and container
test_status_map((200, 200, 404, 404), 200, 200, 200)
test_status_map((200, 200, 500, 404), 200, 200, 200)
            # return 304, don't cache container
test_status_map((200, 304, 500, 404), 304, None, 200)
# return 404 and cache 404 for container
test_status_map((200, 404, 404, 404), 404, 404, 200)
test_status_map((200, 404, 404, 500), 404, 404, 200)
            # return 503, don't cache container
test_status_map((200, 500, 500, 500), 503, None, 200)
self.assertFalse(self.app.account_autocreate)
# In all the following tests cache 404 for account
            # return 404 (as account is not found) and don't cache container
test_status_map((404, 404, 404), 404, None, 404)
self.app.account_autocreate = True # This should make no difference
test_status_map((404, 404, 404), 404, None, 404)
def test_PUT(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a/c', {})
req.content_length = 0
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((200, 201, 201, 201), 201, missing_container=True)
test_status_map((200, 201, 201, 500), 201, missing_container=True)
test_status_map((200, 204, 404, 404), 404, missing_container=True)
test_status_map((200, 204, 500, 404), 503, missing_container=True)
self.assertFalse(self.app.account_autocreate)
test_status_map((404, 404, 404), 404, missing_container=True)
self.app.account_autocreate = True
            # fail to retrieve account info
test_status_map(
(503, 503, 503), # account_info fails on 503
404, missing_container=True)
# account fail after creation
test_status_map(
(404, 404, 404, # account_info fails on 404
201, 201, 201, # PUT account
404, 404, 404), # account_info fail
404, missing_container=True)
test_status_map(
(503, 503, 404, # account_info fails on 404
503, 503, 503, # PUT account
503, 503, 404), # account_info fail
404, missing_container=True)
            # put fails
test_status_map(
(404, 404, 404, # account_info fails on 404
201, 201, 201, # PUT account
200, # account_info success
503, 503, 201), # put container fail
503, missing_container=True)
# all goes according to plan
test_status_map(
(404, 404, 404, # account_info fails on 404
201, 201, 201, # PUT account
200, # account_info success
201, 201, 201), # put container success
201, missing_container=True)
test_status_map(
(503, 404, 404, # account_info fails on 404
503, 201, 201, # PUT account
503, 200, # account_info success
503, 201, 201), # put container success
201, missing_container=True)
def test_POST(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a/c', {})
req.content_length = 0
self.app.update_request(req)
res = controller.POST(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((200, 201, 201, 201), 201, missing_container=True)
test_status_map((200, 201, 201, 500), 201, missing_container=True)
test_status_map((200, 204, 404, 404), 404, missing_container=True)
test_status_map((200, 204, 500, 404), 503, missing_container=True)
self.assertFalse(self.app.account_autocreate)
test_status_map((404, 404, 404), 404, missing_container=True)
self.app.account_autocreate = True
test_status_map((404, 404, 404), 404, missing_container=True)
def test_PUT_max_containers_per_account(self):
with save_globals():
self.app.max_containers_per_account = 12346
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.PUT,
(200, 200, 200, 201, 201, 201), 201,
missing_container=True)
self.app.max_containers_per_account = 12345
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.PUT, (201, 201, 201), 403,
missing_container=True)
self.app.max_containers_per_account = 12345
self.app.max_containers_whitelist = ['account']
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.PUT,
(200, 200, 200, 201, 201, 201), 201,
missing_container=True)
def test_PUT_max_container_name_length(self):
with save_globals():
limit = MAX_CONTAINER_NAME_LENGTH
controller = proxy_server.ContainerController(self.app, 'account',
'1' * limit)
self.assert_status_map(controller.PUT,
(200, 200, 200, 201, 201, 201), 201,
missing_container=True)
controller = proxy_server.ContainerController(self.app, 'account',
'2' * (limit + 1))
self.assert_status_map(controller.PUT, (201, 201, 201), 400,
missing_container=True)
def test_PUT_connect_exceptions(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.PUT, (200, 201, 201, -1), 201,
missing_container=True)
self.assert_status_map(controller.PUT, (200, 201, -1, -1), 503,
missing_container=True)
self.assert_status_map(controller.PUT, (200, 503, 503, -1), 503,
missing_container=True)
def test_acc_missing_returns_404(self):
for meth in ('DELETE', 'PUT'):
with save_globals():
self.app.memcache = FakeMemcacheReturnsNone()
for dev in self.app.account_ring.devs.values():
del dev['errors']
del dev['last_error']
controller = proxy_server.ContainerController(self.app,
'account',
'container')
if meth == 'PUT':
set_http_connect(200, 200, 200, 200, 200, 200,
missing_container=True)
else:
set_http_connect(200, 200, 200, 200)
self.app.memcache.store = {}
req = Request.blank('/a/c', environ={'REQUEST_METHOD': meth})
self.app.update_request(req)
resp = getattr(controller, meth)(req)
self.assertEquals(resp.status_int, 200)
set_http_connect(404, 404, 404, 200, 200, 200)
                # Make sure it is a blank request without env caching
req = Request.blank('/a/c', environ={'REQUEST_METHOD': meth})
resp = getattr(controller, meth)(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(503, 404, 404)
                # Make sure it is a blank request without env caching
req = Request.blank('/a/c', environ={'REQUEST_METHOD': meth})
resp = getattr(controller, meth)(req)
self.assertEquals(resp.status_int, 404)
set_http_connect(503, 404, raise_exc=True)
                # Make sure it is a blank request without env caching
req = Request.blank('/a/c', environ={'REQUEST_METHOD': meth})
resp = getattr(controller, meth)(req)
self.assertEquals(resp.status_int, 404)
for dev in self.app.account_ring.devs.values():
dev['errors'] = self.app.error_suppression_limit + 1
dev['last_error'] = time.time()
set_http_connect(200, 200, 200, 200, 200, 200)
                # Make sure it is a blank request without env caching
req = Request.blank('/a/c', environ={'REQUEST_METHOD': meth})
resp = getattr(controller, meth)(req)
self.assertEquals(resp.status_int, 404)
def test_put_locking(self):
class MockMemcache(FakeMemcache):
def __init__(self, allow_lock=None):
self.allow_lock = allow_lock
super(MockMemcache, self).__init__()
@contextmanager
def soft_lock(self, key, timeout=0, retries=5):
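                # Emulate memcache's container-creation soft lock: yield
                # when locking is allowed, fail loudly otherwise.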
if self.allow_lock:
yield True
else:
raise NotImplementedError
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.app.memcache = MockMemcache(allow_lock=True)
set_http_connect(200, 200, 200, 201, 201, 201,
missing_container=True)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'PUT'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEquals(res.status_int, 201)
def test_error_limiting(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
controller.app.sort_nodes = lambda l: l
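            # Pin node ordering so the injected 503 always hits devs[0]
            # and the error-suppression counters can be asserted exactly.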
self.assert_status_map(controller.HEAD, (200, 503, 200, 200), 200,
missing_container=False)
self.assertEquals(
controller.app.container_ring.devs[0]['errors'], 2)
self.assert_('last_error' in controller.app.container_ring.devs[0])
for _junk in xrange(self.app.error_suppression_limit):
self.assert_status_map(controller.HEAD,
(200, 503, 503, 503), 503)
self.assertEquals(controller.app.container_ring.devs[0]['errors'],
self.app.error_suppression_limit + 1)
self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 503)
self.assert_('last_error' in controller.app.container_ring.devs[0])
self.assert_status_map(controller.PUT, (200, 201, 201, 201), 503,
missing_container=True)
self.assert_status_map(controller.DELETE,
(200, 204, 204, 204), 503)
self.app.error_suppression_interval = -300
self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 200)
self.assert_status_map(controller.DELETE, (200, 204, 204, 204),
404, raise_exc=True)
def test_DELETE(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.DELETE,
(200, 204, 204, 204), 204)
self.assert_status_map(controller.DELETE,
(200, 204, 204, 503), 204)
self.assert_status_map(controller.DELETE,
(200, 204, 503, 503), 503)
self.assert_status_map(controller.DELETE,
(200, 204, 404, 404), 404)
self.assert_status_map(controller.DELETE,
(200, 404, 404, 404), 404)
self.assert_status_map(controller.DELETE,
(200, 204, 503, 404), 503)
self.app.memcache = FakeMemcacheReturnsNone()
# 200: Account check, 404x3: Container check
self.assert_status_map(controller.DELETE,
(200, 404, 404, 404), 404)
def test_response_get_accept_ranges_header(self):
with save_globals():
set_http_connect(200, 200, body='{}')
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c?format=json')
self.app.update_request(req)
res = controller.GET(req)
self.assert_('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
def test_response_head_accept_ranges_header(self):
with save_globals():
set_http_connect(200, 200, body='{}')
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c?format=json')
self.app.update_request(req)
res = controller.HEAD(req)
self.assert_('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
def test_PUT_metadata(self):
self.metadata_helper('PUT')
def test_POST_metadata(self):
self.metadata_helper('POST')
def metadata_helper(self, method):
for test_header, test_value in (
('X-Container-Meta-TestHeader', 'TestValue'),
('X-Container-Meta-TestHeader', ''),
('X-Remove-Container-Meta-TestHeader', 'anything'),
('X-Container-Read', '.r:*'),
('X-Remove-Container-Read', 'anything'),
('X-Container-Write', 'anyone'),
('X-Remove-Container-Write', 'anything')):
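            # For each header, verify what actually reached the backend:
            # an X-Remove-* request must arrive as the bare header with an
            # empty value, everything else must pass through unchanged.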
test_errors = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c':
find_header = test_header
find_value = test_value
if find_header.lower().startswith('x-remove-'):
find_header = \
find_header.lower().replace('-remove', '', 1)
find_value = ''
for k, v in headers.iteritems():
if k.lower() == find_header.lower() and \
v == find_value:
break
else:
test_errors.append('%s: %s not in %s' %
(find_header, find_value, headers))
with save_globals():
controller = \
proxy_server.ContainerController(self.app, 'a', 'c')
set_http_connect(200, 201, 201, 201, give_connect=test_connect)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={test_header: test_value})
self.app.update_request(req)
getattr(controller, method)(req)
self.assertEquals(test_errors, [])
def test_PUT_bad_metadata(self):
self.bad_metadata_helper('PUT')
def test_POST_bad_metadata(self):
self.bad_metadata_helper('POST')
def bad_metadata_helper(self, method):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
set_http_connect(200, 201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Container-Meta-' +
('a' * MAX_META_NAME_LENGTH): 'v'})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Container-Meta-' +
('a' * (MAX_META_NAME_LENGTH + 1)): 'v'})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Container-Meta-Too-Long':
'a' * MAX_META_VALUE_LENGTH})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Container-Meta-Too-Long':
'a' * (MAX_META_VALUE_LENGTH + 1)})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {}
for x in xrange(MAX_META_COUNT):
headers['X-Container-Meta-%d' % x] = 'v'
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers = {}
for x in xrange(MAX_META_COUNT + 1):
headers['X-Container-Meta-%d' % x] = 'v'
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {}
header_value = 'a' * MAX_META_VALUE_LENGTH
size = 0
x = 0
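            # Each header counts its name (minus the meta prefix) plus its
            # value toward MAX_META_OVERALL_SIZE; the '%04d' suffix is 4
            # chars, hence the 4 + MAX_META_VALUE_LENGTH increments below.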
while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH:
size += 4 + MAX_META_VALUE_LENGTH
headers['X-Container-Meta-%04d' % x] = header_value
x += 1
if MAX_META_OVERALL_SIZE - size > 1:
headers['X-Container-Meta-a'] = \
'a' * (MAX_META_OVERALL_SIZE - size - 1)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers['X-Container-Meta-a'] = \
'a' * (MAX_META_OVERALL_SIZE - size)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 400)
def test_POST_calls_clean_acl(self):
called = [False]
def clean_acl(header, value):
called[0] = True
raise ValueError('fake error')
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Container-Read': '.r:*'})
req.environ['swift.clean_acl'] = clean_acl
self.app.update_request(req)
controller.POST(req)
self.assert_(called[0])
called[0] = False
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Container-Write': '.r:*'})
req.environ['swift.clean_acl'] = clean_acl
self.app.update_request(req)
controller.POST(req)
self.assert_(called[0])
def test_PUT_calls_clean_acl(self):
called = [False]
def clean_acl(header, value):
called[0] = True
raise ValueError('fake error')
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Container-Read': '.r:*'})
req.environ['swift.clean_acl'] = clean_acl
self.app.update_request(req)
controller.PUT(req)
self.assert_(called[0])
called[0] = False
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Container-Write': '.r:*'})
req.environ['swift.clean_acl'] = clean_acl
self.app.update_request(req)
controller.PUT(req)
self.assert_(called[0])
def test_GET_no_content(self):
with save_globals():
set_http_connect(200, 204, 204, 204)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c')
self.app.update_request(req)
res = controller.GET(req)
self.assertEquals(res.environ['swift.container/a/c']['status'], 204)
self.assertEquals(res.content_length, 0)
self.assertTrue('transfer-encoding' not in res.headers)
def test_GET_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
res = controller.GET(req)
self.assertEquals(res.environ['swift.container/a/c']['status'], 201)
self.assert_(called[0])
def test_HEAD_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/a/c', {'REQUEST_METHOD': 'HEAD'})
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.HEAD(req)
self.assert_(called[0])
def test_OPTIONS(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
def my_empty_container_info(*args):
return {}
controller.container_info = my_empty_container_info
req = Request.blank(
'/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEquals(401, resp.status_int)
def my_empty_origin_container_info(*args):
return {'cors': {'allow_origin': None}}
controller.container_info = my_empty_origin_container_info
req = Request.blank(
'/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEquals(401, resp.status_int)
def my_container_info(*args):
return {
'cors': {
'allow_origin': 'http://foo.bar:8080 https://foo.bar',
'max_age': '999',
}
}
controller.container_info = my_container_info
req = Request.blank(
'/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://foo.bar',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
self.assertEquals(
'https://foo.bar',
resp.headers['access-control-allow-origin'])
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['access-control-allow-methods'])
self.assertEquals(
len(resp.headers['access-control-allow-methods'].split(', ')),
6)
self.assertEquals('999', resp.headers['access-control-max-age'])
req = Request.blank(
'/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://foo.bar'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(401, resp.status_int)
req = Request.blank('/a/c', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEquals(len(resp.headers['Allow'].split(', ')), 6)
req = Request.blank(
'/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.bar',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEquals(401, resp.status_int)
req = Request.blank(
'/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.bar',
'Access-Control-Request-Method': 'GET'})
controller.app.cors_allow_origin = ['http://foo.bar', ]
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
def my_container_info_wildcard(*args):
return {
'cors': {
'allow_origin': '*',
'max_age': '999',
}
}
controller.container_info = my_container_info_wildcard
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://bar.baz',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
self.assertEquals(
'https://bar.baz',
resp.headers['access-control-allow-origin'])
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['access-control-allow-methods'])
self.assertEquals(
len(resp.headers['access-control-allow-methods'].split(', ')),
6)
self.assertEquals('999', resp.headers['access-control-max-age'])
req = Request.blank(
'/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://bar.baz',
'Access-Control-Request-Headers':
'x-foo, x-bar, x-auth-token',
'Access-Control-Request-Method': 'GET'}
)
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
self.assertEquals(
sortHeaderNames('x-foo, x-bar, x-auth-token'),
sortHeaderNames(resp.headers['access-control-allow-headers']))
def test_CORS_valid(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
def stubContainerInfo(*args):
return {
'cors': {
'allow_origin': 'http://foo.bar'
}
}
controller.container_info = stubContainerInfo
def containerGET(controller, req):
return Response(headers={
'X-Container-Meta-Color': 'red',
'X-Super-Secret': 'hush',
})
req = Request.blank(
'/a/c',
{'REQUEST_METHOD': 'GET'},
headers={'Origin': 'http://foo.bar'})
resp = cors_validation(containerGET)(controller, req)
self.assertEquals(200, resp.status_int)
self.assertEquals('http://foo.bar',
resp.headers['access-control-allow-origin'])
self.assertEquals('red', resp.headers['x-container-meta-color'])
# X-Super-Secret is in the response, but not "exposed"
self.assertEquals('hush', resp.headers['x-super-secret'])
self.assertTrue('access-control-expose-headers' in resp.headers)
exposed = set(
h.strip() for h in
resp.headers['access-control-expose-headers'].split(','))
expected_exposed = set(['cache-control', 'content-language',
'content-type', 'expires', 'last-modified',
'pragma', 'etag', 'x-timestamp',
'x-trans-id', 'x-container-meta-color'])
self.assertEquals(expected_exposed, exposed)
def _gather_x_account_headers(self, controller_call, req, *connect_args,
**kwargs):
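        # Like _gather_x_container_headers, but for the X-Account-*
        # headers the container controller sends with each backend request.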
seen_headers = []
to_capture = ('X-Account-Partition', 'X-Account-Host',
'X-Account-Device')
def capture_headers(ipaddr, port, device, partition, method,
path, headers=None, query_string=None):
captured = {}
for header in to_capture:
captured[header] = headers.get(header)
seen_headers.append(captured)
with save_globals():
self.app.allow_account_management = True
set_http_connect(*connect_args, give_connect=capture_headers,
**kwargs)
resp = controller_call(req)
self.assertEqual(2, resp.status_int // 100) # sanity check
# don't care about the account HEAD, so throw away the
# first element
return sorted(seen_headers[1:],
key=lambda d: d['X-Account-Host'] or 'Z')
def test_PUT_x_account_headers_with_fewer_account_replicas(self):
self.app.account_ring.set_replicas(2)
req = Request.blank('/a/c', headers={'': ''})
controller = proxy_server.ContainerController(self.app, 'a', 'c')
seen_headers = self._gather_x_account_headers(
controller.PUT, req,
200, 201, 201, 201) # HEAD PUT PUT PUT
self.assertEqual(seen_headers, [
{'X-Account-Host': '10.0.0.0:1000',
'X-Account-Partition': '1',
'X-Account-Device': 'sda'},
{'X-Account-Host': '10.0.0.1:1001',
'X-Account-Partition': '1',
'X-Account-Device': 'sdb'},
{'X-Account-Host': None,
'X-Account-Partition': None,
'X-Account-Device': None}
])
def test_PUT_x_account_headers_with_more_account_replicas(self):
self.app.account_ring.set_replicas(4)
req = Request.blank('/a/c', headers={'': ''})
controller = proxy_server.ContainerController(self.app, 'a', 'c')
seen_headers = self._gather_x_account_headers(
controller.PUT, req,
200, 201, 201, 201) # HEAD PUT PUT PUT
self.assertEqual(seen_headers, [
{'X-Account-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Account-Partition': '1',
'X-Account-Device': 'sda,sdd'},
{'X-Account-Host': '10.0.0.1:1001',
'X-Account-Partition': '1',
'X-Account-Device': 'sdb'},
{'X-Account-Host': '10.0.0.2:1002',
'X-Account-Partition': '1',
'X-Account-Device': 'sdc'}
])
def test_DELETE_x_account_headers_with_fewer_account_replicas(self):
self.app.account_ring.set_replicas(2)
req = Request.blank('/a/c', headers={'': ''})
controller = proxy_server.ContainerController(self.app, 'a', 'c')
seen_headers = self._gather_x_account_headers(
controller.DELETE, req,
200, 204, 204, 204) # HEAD DELETE DELETE DELETE
self.assertEqual(seen_headers, [
{'X-Account-Host': '10.0.0.0:1000',
'X-Account-Partition': '1',
'X-Account-Device': 'sda'},
{'X-Account-Host': '10.0.0.1:1001',
'X-Account-Partition': '1',
'X-Account-Device': 'sdb'},
{'X-Account-Host': None,
'X-Account-Partition': None,
'X-Account-Device': None}
])
def test_DELETE_x_account_headers_with_more_account_replicas(self):
self.app.account_ring.set_replicas(4)
req = Request.blank('/a/c', headers={'': ''})
controller = proxy_server.ContainerController(self.app, 'a', 'c')
seen_headers = self._gather_x_account_headers(
controller.DELETE, req,
200, 204, 204, 204) # HEAD DELETE DELETE DELETE
self.assertEqual(seen_headers, [
{'X-Account-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Account-Partition': '1',
'X-Account-Device': 'sda,sdd'},
{'X-Account-Host': '10.0.0.1:1001',
'X-Account-Partition': '1',
'X-Account-Device': 'sdb'},
{'X-Account-Host': '10.0.0.2:1002',
'X-Account-Partition': '1',
'X-Account-Device': 'sdc'}
])
class TestAccountController(unittest.TestCase):
def setUp(self):
self.app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing(),
                                            object_ring=FakeRing())
def assert_status_map(self, method, statuses, expected, env_expected=None):
with save_globals():
set_http_connect(*statuses)
req = Request.blank('/a', {})
self.app.update_request(req)
res = method(req)
self.assertEquals(res.status_int, expected)
if env_expected:
self.assertEquals(res.environ['swift.account/a']['status'],
env_expected)
set_http_connect(*statuses)
req = Request.blank('/a/', {})
self.app.update_request(req)
res = method(req)
self.assertEquals(res.status_int, expected)
if env_expected:
self.assertEquals(res.environ['swift.account/a']['status'],
env_expected)
def test_OPTIONS(self):
with save_globals():
self.app.allow_account_management = False
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/account', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
for verb in 'OPTIONS GET POST HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEquals(len(resp.headers['Allow'].split(', ')), 4)
# Test a CORS OPTIONS request (i.e. including Origin and
# Access-Control-Request-Method headers)
self.app.allow_account_management = False
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/account', {'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
for verb in 'OPTIONS GET POST HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEquals(len(resp.headers['Allow'].split(', ')), 4)
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/account', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEquals(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEquals(len(resp.headers['Allow'].split(', ')), 6)
def test_GET(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
# GET returns after the first successful call to an Account Server
self.assert_status_map(controller.GET, (200,), 200, 200)
self.assert_status_map(controller.GET, (503, 200), 200, 200)
self.assert_status_map(controller.GET, (503, 503, 200), 200, 200)
self.assert_status_map(controller.GET, (204,), 204, 204)
self.assert_status_map(controller.GET, (503, 204), 204, 204)
self.assert_status_map(controller.GET, (503, 503, 204), 204, 204)
self.assert_status_map(controller.GET, (404, 200), 200, 200)
self.assert_status_map(controller.GET, (404, 404, 200), 200, 200)
self.assert_status_map(controller.GET, (404, 503, 204), 204, 204)
            # If the account servers fail and autocreate is False, return
            # the majority response
self.assert_status_map(controller.GET, (404, 404, 404), 404, 404)
self.assert_status_map(controller.GET, (404, 404, 503), 404, 404)
self.assert_status_map(controller.GET, (404, 503, 503), 503)
self.app.memcache = FakeMemcacheReturnsNone()
self.assert_status_map(controller.GET, (404, 404, 404), 404, 404)
def test_GET_autocreate(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.app.memcache = FakeMemcacheReturnsNone()
self.assertFalse(self.app.account_autocreate)
            # Repeat the all-404 cases from test_GET with autocreate False
self.assert_status_map(controller.GET,
(404, 404, 404), 404)
self.assert_status_map(controller.GET,
(404, 503, 404), 404)
            # When autocreate is True, if none of the nodes responds 2xx
            # and a quorum of them responded 404, ALL nodes are asked to
            # create the account; if that succeeds, the GET is retried.
controller.app.account_autocreate = True
self.assert_status_map(controller.GET,
(404, 404, 404), 204)
self.assert_status_map(controller.GET,
(404, 503, 404), 204)
            # We always return 503 if there is no majority among 2xx, 3xx
            # or 4xx responses
self.assert_status_map(controller.GET,
(500, 500, 400), 503)
def test_HEAD(self):
# Same behaviour as GET
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.assert_status_map(controller.HEAD, (200,), 200, 200)
self.assert_status_map(controller.HEAD, (503, 200), 200, 200)
self.assert_status_map(controller.HEAD, (503, 503, 200), 200, 200)
self.assert_status_map(controller.HEAD, (204,), 204, 204)
self.assert_status_map(controller.HEAD, (503, 204), 204, 204)
self.assert_status_map(controller.HEAD, (204, 503, 503), 204, 204)
self.assert_status_map(controller.HEAD, (204,), 204, 204)
self.assert_status_map(controller.HEAD, (404, 404, 404), 404, 404)
self.assert_status_map(controller.HEAD, (404, 404, 200), 200, 200)
self.assert_status_map(controller.HEAD, (404, 200), 200, 200)
self.assert_status_map(controller.HEAD, (404, 404, 503), 404, 404)
self.assert_status_map(controller.HEAD, (404, 503, 503), 503)
self.assert_status_map(controller.HEAD, (404, 503, 204), 204, 204)
def test_HEAD_autocreate(self):
# Same behaviour as GET
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.app.memcache = FakeMemcacheReturnsNone()
self.assertFalse(self.app.account_autocreate)
self.assert_status_map(controller.HEAD,
(404, 404, 404), 404)
controller.app.account_autocreate = True
self.assert_status_map(controller.HEAD,
(404, 404, 404), 204)
self.assert_status_map(controller.HEAD,
(500, 404, 404), 204)
            # We always return 503 if there is no majority among 2xx, 3xx
            # or 4xx responses
self.assert_status_map(controller.HEAD,
(500, 500, 400), 503)
def test_POST_autocreate(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.app.memcache = FakeMemcacheReturnsNone()
# first test with autocreate being False
self.assertFalse(self.app.account_autocreate)
self.assert_status_map(controller.POST,
(404, 404, 404), 404)
            # next turn it on and test the account being created, then updated
controller.app.account_autocreate = True
self.assert_status_map(controller.POST,
(404, 404, 404, 202, 202, 202, 201, 201, 201), 201)
# account_info PUT account POST account
self.assert_status_map(controller.POST,
(404, 404, 503, 201, 201, 503, 204, 204, 504), 204)
# what if create fails
self.assert_status_map(controller.POST,
(404, 404, 404, 403, 403, 403, 400, 400, 400), 400)
def test_connection_refused(self):
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = 1 # can't connect on this port
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/account', environ={'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
resp = controller.HEAD(req)
self.assertEquals(resp.status_int, 503)
def test_other_socket_error(self):
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs.values():
dev['ip'] = '127.0.0.1'
dev['port'] = -1 # invalid port number
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/account', environ={'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
resp = controller.HEAD(req)
self.assertEquals(resp.status_int, 503)
def test_response_get_accept_ranges_header(self):
with save_globals():
set_http_connect(200, 200, body='{}')
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/a?format=json')
self.app.update_request(req)
res = controller.GET(req)
self.assert_('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
def test_response_head_accept_ranges_header(self):
with save_globals():
set_http_connect(200, 200, body='{}')
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/a?format=json')
self.app.update_request(req)
res = controller.HEAD(req)
res.body
self.assert_('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
def test_PUT(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a', {})
req.content_length = 0
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((201, 201, 201), 405)
self.app.allow_account_management = True
test_status_map((201, 201, 201), 201)
test_status_map((201, 201, 500), 201)
test_status_map((201, 500, 500), 503)
test_status_map((204, 500, 404), 503)
def test_PUT_max_account_name_length(self):
with save_globals():
self.app.allow_account_management = True
limit = MAX_ACCOUNT_NAME_LENGTH
controller = proxy_server.AccountController(self.app, '1' * limit)
self.assert_status_map(controller.PUT, (201, 201, 201), 201)
controller = proxy_server.AccountController(
self.app, '2' * (limit + 1))
self.assert_status_map(controller.PUT, (201, 201, 201), 400)
def test_PUT_connect_exceptions(self):
with save_globals():
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'account')
self.assert_status_map(controller.PUT, (201, 201, -1), 201)
self.assert_status_map(controller.PUT, (201, -1, -1), 503)
self.assert_status_map(controller.PUT, (503, 503, -1), 503)
def test_PUT_metadata(self):
self.metadata_helper('PUT')
def test_POST_metadata(self):
self.metadata_helper('POST')
def metadata_helper(self, method):
for test_header, test_value in (
('X-Account-Meta-TestHeader', 'TestValue'),
('X-Account-Meta-TestHeader', ''),
('X-Remove-Account-Meta-TestHeader', 'anything')):
test_errors = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a':
find_header = test_header
find_value = test_value
if find_header.lower().startswith('x-remove-'):
find_header = \
find_header.lower().replace('-remove', '', 1)
find_value = ''
for k, v in headers.iteritems():
if k.lower() == find_header.lower() and \
v == find_value:
break
else:
test_errors.append('%s: %s not in %s' %
(find_header, find_value, headers))
with save_globals():
self.app.allow_account_management = True
controller = \
proxy_server.AccountController(self.app, 'a')
set_http_connect(201, 201, 201, give_connect=test_connect)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={test_header: test_value})
self.app.update_request(req)
getattr(controller, method)(req)
self.assertEquals(test_errors, [])
def test_PUT_bad_metadata(self):
self.bad_metadata_helper('PUT')
def test_POST_bad_metadata(self):
self.bad_metadata_helper('POST')
def bad_metadata_helper(self, method):
with save_globals():
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'a')
set_http_connect(200, 201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Account-Meta-' +
('a' * MAX_META_NAME_LENGTH): 'v'})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Account-Meta-' +
('a' * (MAX_META_NAME_LENGTH + 1)): 'v'})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Account-Meta-Too-Long':
'a' * MAX_META_VALUE_LENGTH})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Account-Meta-Too-Long':
'a' * (MAX_META_VALUE_LENGTH + 1)})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {}
for x in xrange(MAX_META_COUNT):
headers['X-Account-Meta-%d' % x] = 'v'
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers = {}
for x in xrange(MAX_META_COUNT + 1):
headers['X-Account-Meta-%d' % x] = 'v'
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {}
header_value = 'a' * MAX_META_VALUE_LENGTH
size = 0
x = 0
while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH:
size += 4 + MAX_META_VALUE_LENGTH
headers['X-Account-Meta-%04d' % x] = header_value
x += 1
if MAX_META_OVERALL_SIZE - size > 1:
headers['X-Account-Meta-a'] = \
'a' * (MAX_META_OVERALL_SIZE - size - 1)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers['X-Account-Meta-a'] = \
'a' * (MAX_META_OVERALL_SIZE - size)
req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEquals(resp.status_int, 400)
def test_DELETE(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a', {'REQUEST_METHOD': 'DELETE'})
req.content_length = 0
self.app.update_request(req)
res = controller.DELETE(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((201, 201, 201), 405)
self.app.allow_account_management = True
test_status_map((201, 201, 201), 201)
test_status_map((201, 201, 500), 201)
test_status_map((201, 500, 500), 503)
test_status_map((204, 500, 404), 503)
def test_DELETE_with_query_string(self):
# Extra safety in case someone typos a query string for an
# account-level DELETE request that was really meant to be caught by
# some middleware.
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/a?whoops', {'REQUEST_METHOD': 'DELETE'})
req.content_length = 0
self.app.update_request(req)
res = controller.DELETE(req)
expected = str(expected)
self.assertEquals(res.status[:len(expected)], expected)
test_status_map((201, 201, 201), 400)
self.app.allow_account_management = True
test_status_map((201, 201, 201), 400)
test_status_map((201, 201, 500), 400)
test_status_map((201, 500, 500), 400)
test_status_map((204, 500, 404), 400)
class TestAccountControllerFakeGetResponse(unittest.TestCase):
"""
Test all the faked-out GET responses for accounts that don't exist. They
have to match the responses for empty accounts that really exist.
"""
def setUp(self):
self.app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing(),
                                            object_ring=FakeRing())
self.app.memcache = FakeMemcacheReturnsNone()
self.controller = proxy_server.AccountController(self.app, 'acc')
self.controller.app.account_autocreate = True
def test_GET_autocreate_accept_json(self):
with save_globals():
set_http_connect(404) # however many backends we ask, they all 404
req = Request.blank('/a', headers={'Accept': 'application/json'})
resp = self.controller.GET(req)
self.assertEqual(200, resp.status_int)
self.assertEqual('application/json; charset=utf-8',
resp.headers['Content-Type'])
self.assertEqual("[]", resp.body)
def test_GET_autocreate_format_json(self):
with save_globals():
set_http_connect(404) # however many backends we ask, they all 404
req = Request.blank('/a?format=json')
resp = self.controller.GET(req)
self.assertEqual(200, resp.status_int)
self.assertEqual('application/json; charset=utf-8',
resp.headers['Content-Type'])
self.assertEqual("[]", resp.body)
def test_GET_autocreate_accept_xml(self):
with save_globals():
set_http_connect(404) # however many backends we ask, they all 404
req = Request.blank('/a', headers={"Accept": "text/xml"})
resp = self.controller.GET(req)
self.assertEqual(200, resp.status_int)
self.assertEqual('text/xml; charset=utf-8',
resp.headers['Content-Type'])
empty_xml_listing = ('<?xml version="1.0" encoding="UTF-8"?>\n'
'<account name="acc">\n</account>')
self.assertEqual(empty_xml_listing, resp.body)
def test_GET_autocreate_format_xml(self):
with save_globals():
set_http_connect(404) # however many backends we ask, they all 404
req = Request.blank('/a?format=xml')
resp = self.controller.GET(req)
self.assertEqual(200, resp.status_int)
self.assertEqual('application/xml; charset=utf-8',
resp.headers['Content-Type'])
empty_xml_listing = ('<?xml version="1.0" encoding="UTF-8"?>\n'
'<account name="acc">\n</account>')
self.assertEqual(empty_xml_listing, resp.body)
class FakeObjectController(object):
def __init__(self):
self.app = self
self.logger = self
self.account_name = 'a'
self.container_name = 'c'
self.object_name = 'o'
self.trans_id = 'tx1'
self.object_ring = FakeRing()
self.node_timeout = 1
self.rate_limit_after_segment = 3
self.rate_limit_segments_per_sec = 2
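        # SegmentedIterable reads these: the first three segments load
        # unthrottled, then roughly two segments per second (the ~0.5s
        # sleeps asserted in the rate-limiting tests below).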
self.GETorHEAD_base_args = []
def exception(self, *args):
self.exception_args = args
self.exception_info = sys.exc_info()
def GETorHEAD_base(self, *args):
self.GETorHEAD_base_args.append(args)
req = args[0]
path = args[4]
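        # Fake a segment body from the object name: segment 'oN' yields the
        # digit N repeated N times, e.g. 'o2' -> '22'.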
body = data = path[-1] * int(path[-1])
if req.range:
r = req.range.ranges_for_length(len(data))
if r:
(start, stop) = r[0]
body = data[start:stop]
resp = Response(app_iter=iter(body))
return resp
def iter_nodes(self, ring, partition):
for node in ring.get_part_nodes(partition):
yield node
for node in ring.get_more_nodes(partition):
yield node
def sort_nodes(self, nodes):
return nodes
def set_node_timing(self, node, timing):
return
class Stub(object):
pass
class TestSegmentedIterable(unittest.TestCase):
def setUp(self):
self.controller = FakeObjectController()
def test_load_next_segment_unexpected_error(self):
# Iterator value isn't a dict
self.assertRaises(Exception,
SegmentedIterable(self.controller, None,
[None])._load_next_segment)
self.assert_(self.controller.exception_args[0].startswith(
'ERROR: While processing manifest'))
def test_load_next_segment_with_no_segments(self):
self.assertRaises(StopIteration,
SegmentedIterable(self.controller, 'lc',
[])._load_next_segment)
def test_load_next_segment_with_one_segment(self):
segit = SegmentedIterable(self.controller, 'lc', [{'name':
'o1'}])
segit._load_next_segment()
self.assertEquals(
self.controller.GETorHEAD_base_args[0][4], '/a/lc/o1')
data = ''.join(segit.segment_iter)
self.assertEquals(data, '1')
def test_load_next_segment_with_two_segments(self):
segit = SegmentedIterable(self.controller, 'lc', [{'name':
'o1'}, {'name': 'o2'}])
segit._load_next_segment()
self.assertEquals(
self.controller.GETorHEAD_base_args[-1][4], '/a/lc/o1')
data = ''.join(segit.segment_iter)
self.assertEquals(data, '1')
segit._load_next_segment()
self.assertEquals(
self.controller.GETorHEAD_base_args[-1][4], '/a/lc/o2')
data = ''.join(segit.segment_iter)
self.assertEquals(data, '22')
def test_load_next_segment_rate_limiting(self):
sleep_calls = []
def _stub_sleep(sleepy_time):
sleep_calls.append(sleepy_time)
orig_sleep = swift.proxy.controllers.obj.sleep
try:
swift.proxy.controllers.obj.sleep = _stub_sleep
segit = SegmentedIterable(
self.controller, 'lc', [
{'name': 'o1'}, {'name': 'o2'}, {'name': 'o3'},
{'name': 'o4'}, {'name': 'o5'}])
# rate_limit_after_segment == 3, so the first 3 segments should
# invoke no sleeping.
for _ in xrange(3):
segit._load_next_segment()
self.assertEquals([], sleep_calls)
self.assertEquals(self.controller.GETorHEAD_base_args[-1][4],
'/a/lc/o3')
# Loading of next (4th) segment starts rate-limiting.
segit._load_next_segment()
self.assertAlmostEqual(0.5, sleep_calls[0], places=2)
self.assertEquals(self.controller.GETorHEAD_base_args[-1][4],
'/a/lc/o4')
sleep_calls = []
segit._load_next_segment()
self.assertAlmostEqual(0.5, sleep_calls[0], places=2)
self.assertEquals(self.controller.GETorHEAD_base_args[-1][4],
'/a/lc/o5')
finally:
swift.proxy.controllers.obj.sleep = orig_sleep
def test_load_next_segment_range_req_rate_limiting(self):
sleep_calls = []
def _stub_sleep(sleepy_time):
sleep_calls.append(sleepy_time)
orig_sleep = swift.proxy.controllers.obj.sleep
try:
swift.proxy.controllers.obj.sleep = _stub_sleep
segit = SegmentedIterable(
self.controller, 'lc', [
{'name': 'o0', 'bytes': 5}, {'name': 'o1', 'bytes': 5},
{'name': 'o2', 'bytes': 1}, {'name': 'o3'}, {'name': 'o4'},
{'name': 'o5'}, {'name': 'o6'}])
            # This tests a range request which skips over the whole first
            # segment; after that, 3 segments will be read in (because
            # rate_limit_after_segment == 3) before sleeping starts.
segit_iter = segit.app_iter_range(10, None)
segit_iter.next()
for _ in xrange(2):
# this is set to 2 instead of 3 because o2 was loaded after
# o0 and o1 were skipped.
segit._load_next_segment()
self.assertEquals([], sleep_calls)
self.assertEquals(self.controller.GETorHEAD_base_args[-1][4],
'/a/lc/o4')
# Loading of next (5th) segment starts rate-limiting.
segit._load_next_segment()
self.assertAlmostEqual(0.5, sleep_calls[0], places=2)
self.assertEquals(self.controller.GETorHEAD_base_args[-1][4],
'/a/lc/o5')
sleep_calls = []
segit._load_next_segment()
self.assertAlmostEqual(0.5, sleep_calls[0], places=2)
self.assertEquals(self.controller.GETorHEAD_base_args[-1][4],
'/a/lc/o6')
finally:
swift.proxy.controllers.obj.sleep = orig_sleep
def test_load_next_segment_with_two_segments_skip_first(self):
segit = SegmentedIterable(self.controller, 'lc', [{'name':
'o1'}, {'name': 'o2'}])
segit.ratelimit_index = 0
segit.listing.next()
segit._load_next_segment()
self.assertEquals(self.controller.GETorHEAD_base_args[-1][4], '/a/lc/o2')
data = ''.join(segit.segment_iter)
self.assertEquals(data, '22')
def test_load_next_segment_with_seek(self):
segit = SegmentedIterable(self.controller, 'lc',
[{'name': 'o1', 'bytes': 1},
{'name': 'o2', 'bytes': 2}])
segit.ratelimit_index = 0
segit.listing.next()
segit.seek = 1
segit._load_next_segment()
self.assertEquals(self.controller.GETorHEAD_base_args[-1][4], '/a/lc/o2')
self.assertEquals(str(self.controller.GETorHEAD_base_args[-1][0].range),
'bytes=1-')
data = ''.join(segit.segment_iter)
self.assertEquals(data, '2')
def test_fetching_only_what_you_need(self):
segit = SegmentedIterable(self.controller, 'lc',
[{'name': 'o7', 'bytes': 7},
{'name': 'o8', 'bytes': 8},
{'name': 'o9', 'bytes': 9}])
body = ''.join(segit.app_iter_range(10, 20))
self.assertEqual('8888899999', body)
GoH_args = self.controller.GETorHEAD_base_args
self.assertEquals(2, len(GoH_args))
# Either one is fine, as they both indicate "from byte 3 to (the last)
# byte 8".
self.assert_(str(GoH_args[0][0].range) in ['bytes=3-', 'bytes=3-8'])
# This one must ask only for the bytes it needs; otherwise we waste
# bandwidth pulling bytes from the object server and then throwing
# them out
self.assertEquals(str(GoH_args[1][0].range), 'bytes=0-4')
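        # Worked offsets for the range above: o7 covers bytes 0-6 of the
        # concatenation, o8 covers 7-14 and o9 covers 15-23. Bytes 10-19
        # therefore start at offset 3 within o8 ('88888') and need only the
        # first five bytes of o9 ('99999'), hence 'bytes=0-4'.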
def test_load_next_segment_with_get_error(self):
def local_GETorHEAD_base(*args):
return HTTPNotFound()
self.controller.GETorHEAD_base = local_GETorHEAD_base
self.assertRaises(Exception,
SegmentedIterable(self.controller, 'lc',
[{'name': 'o1'}])._load_next_segment)
self.assert_(self.controller.exception_args[0].startswith(
'ERROR: While processing manifest'))
self.assertEquals(str(self.controller.exception_info[1]),
'Could not load object segment /a/lc/o1: 404')
def test_iter_unexpected_error(self):
# Iterator value isn't a dict
self.assertRaises(Exception, ''.join,
SegmentedIterable(self.controller, None, [None]))
self.assert_(self.controller.exception_args[0].startswith(
'ERROR: While processing manifest'))
def test_iter_with_no_segments(self):
segit = SegmentedIterable(self.controller, 'lc', [])
self.assertEquals(''.join(segit), '')
def test_iter_with_one_segment(self):
segit = SegmentedIterable(self.controller, 'lc', [{'name':
'o1'}])
segit.response = Stub()
self.assertEquals(''.join(segit), '1')
def test_iter_with_two_segments(self):
segit = SegmentedIterable(self.controller, 'lc', [{'name':
'o1'}, {'name': 'o2'}])
segit.response = Stub()
self.assertEquals(''.join(segit), '122')
def test_iter_with_get_error(self):
def local_GETorHEAD_base(*args):
return HTTPNotFound()
self.controller.GETorHEAD_base = local_GETorHEAD_base
self.assertRaises(Exception, ''.join,
SegmentedIterable(self.controller, 'lc', [{'name':
'o1'}]))
self.assert_(self.controller.exception_args[0].startswith(
'ERROR: While processing manifest'))
self.assertEquals(str(self.controller.exception_info[1]),
'Could not load object segment /a/lc/o1: 404')
def test_app_iter_range_unexpected_error(self):
# Iterator value isn't a dict
self.assertRaises(Exception,
SegmentedIterable(self.controller, None,
[None]).app_iter_range(None,
None).next)
self.assert_(self.controller.exception_args[0].startswith(
'ERROR: While processing manifest'))
def test_app_iter_range_with_no_segments(self):
self.assertEquals(''.join(SegmentedIterable(
self.controller, 'lc', []).app_iter_range(None, None)), '')
self.assertEquals(''.join(SegmentedIterable(
self.controller, 'lc', []).app_iter_range(3, None)), '')
self.assertEquals(''.join(SegmentedIterable(
self.controller, 'lc', []).app_iter_range(3, 5)), '')
self.assertEquals(''.join(SegmentedIterable(
self.controller, 'lc', []).app_iter_range(None, 5)), '')
def test_app_iter_range_with_one_segment(self):
listing = [{'name': 'o1', 'bytes': 1}]
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(None, None)), '1')
segit = SegmentedIterable(self.controller, 'lc', listing)
self.assertEquals(''.join(segit.app_iter_range(3, None)), '')
segit = SegmentedIterable(self.controller, 'lc', listing)
self.assertEquals(''.join(segit.app_iter_range(3, 5)), '')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(None, 5)), '1')
def test_app_iter_range_with_two_segments(self):
listing = [{'name': 'o1', 'bytes': 1}, {'name': 'o2', 'bytes': 2}]
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(None, None)), '122')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(1, None)), '22')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(1, 5)), '22')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(None, 2)), '12')
def test_app_iter_range_with_many_segments(self):
listing = [{'name': 'o1', 'bytes': 1}, {'name': 'o2', 'bytes': 2},
{'name': 'o3', 'bytes': 3}, {'name': 'o4', 'bytes': 4},
{'name': 'o5', 'bytes': 5}]
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(None, None)),
'122333444455555')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(3, None)),
'333444455555')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(5, None)), '3444455555')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(None, 6)), '122333')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(None, 7)), '1223334')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(3, 7)), '3334')
segit = SegmentedIterable(self.controller, 'lc', listing)
segit.response = Stub()
self.assertEquals(''.join(segit.app_iter_range(5, 7)), '34')
if __name__ == '__main__':
setup()
try:
unittest.main()
finally:
teardown()
|
{
"content_hash": "f4eeae09dcbb80ccf7a0e9a5c5803e22",
"timestamp": "",
"source": "github",
"line_count": 6500,
"max_line_length": 99,
"avg_line_length": 46.21738461538462,
"alnum_prop": 0.5008538245681778,
"repo_name": "orion/swift-config",
"id": "b07166500889164a310da73f63269ceae180f92c",
"size": "301003",
"binary": false,
"copies": "2",
"ref": "refs/heads/dynamic-pipeline",
"path": "test/unit/proxy/test_server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "15048"
},
{
"name": "Python",
"bytes": "3063773"
},
{
"name": "Shell",
"bytes": "685"
}
],
"symlink_target": ""
}
|
"""Tests for Adam."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.xla.experimental.xla_sharding import xla_sharding
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import adam
def adam_update_numpy(param,
g_t,
t,
m,
v,
alpha=0.001,
beta1=0.9,
beta2=0.999,
epsilon=1e-8):
alpha_t = alpha * np.sqrt(1 - beta2**t) / (1 - beta1**t)
m_t = beta1 * m + (1 - beta1) * g_t
v_t = beta2 * v + (1 - beta2) * g_t * g_t
param_t = param - alpha_t * m_t / (np.sqrt(v_t) + epsilon)
return param_t, m_t, v_t
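# Illustrative sanity check of the reference update above (not part of the
# original test suite): at the first step (t=1, m=v=0) the bias-corrected
# update reduces to alpha * sign(g_t) up to epsilon, so the parameter moves
# by ~alpha regardless of the gradient magnitude. For example:
#   adam_update_numpy(np.array([1.0]), np.array([0.1]), 1, 0.0, 0.0)
#   returns roughly (array([0.999]), array([0.01]), array([1e-05])).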
class AdamOptimizerTest(test.TestCase):
def doTestSparse(self, use_resource=False):
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.cached_session():
# Initialize variables for numpy implementation.
m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
if use_resource:
var0 = resource_variable_ops.ResourceVariable(var0_np)
var1 = resource_variable_ops.ResourceVariable(var1_np)
else:
var0 = variables.RefVariable(var0_np)
var1 = variables.RefVariable(var1_np)
grads0_np_indices = np.array([0, 1], dtype=np.int32)
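        # ops.IndexedSlices(values, indices, dense_shape) represents a
        # sparse gradient: `values` are the rows at `indices` of a
        # dense_shape-shaped tensor.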
grads0 = ops.IndexedSlices(
constant_op.constant(grads0_np),
constant_op.constant(grads0_np_indices), constant_op.constant([2]))
grads1_np_indices = np.array([0, 1], dtype=np.int32)
grads1 = ops.IndexedSlices(
constant_op.constant(grads1_np),
constant_op.constant(grads1_np_indices), constant_op.constant([2]))
opt = adam.AdamOptimizer()
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
self.evaluate(variables.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], self.evaluate(var0))
self.assertAllClose([3.0, 4.0], self.evaluate(var1))
beta1_power, beta2_power = opt._get_beta_accumulators()
# Run 3 steps of Adam
for t in range(1, 4):
self.assertAllCloseAccordingToType(0.9**t, self.evaluate(beta1_power))
self.assertAllCloseAccordingToType(0.999**t,
self.evaluate(beta2_power))
update.run()
var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, self.evaluate(var0))
self.assertAllCloseAccordingToType(var1_np, self.evaluate(var1))
def testSparse(self):
with ops.Graph().as_default():
self.doTestSparse(use_resource=False)
def testResourceSparse(self):
with ops.Graph().as_default():
self.doTestSparse(use_resource=True)
def testSparseDevicePlacement(self):
with ops.Graph().as_default():
for index_dtype in [dtypes.int32, dtypes.int64]:
with self.cached_session(force_gpu=test.is_gpu_available()):
# If a GPU is available, tests that all optimizer ops can be placed on
# it (i.e. they have GPU kernels).
var = variables.Variable([[1.0], [2.0]])
indices = constant_op.constant([0, 1], dtype=index_dtype)
gathered_sum = math_ops.reduce_sum(array_ops.gather(var, indices))
optimizer = adam.AdamOptimizer(3.0)
minimize_op = optimizer.minimize(gathered_sum)
self.evaluate(variables.global_variables_initializer())
minimize_op.run()
def testSparseRepeatedIndices(self):
with ops.Graph().as_default():
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.cached_session():
repeated_index_update_var = variables.Variable(
[[1.0], [2.0]], dtype=dtype)
aggregated_update_var = variables.Variable(
[[1.0], [2.0]], dtype=dtype)
grad_repeated_index = ops.IndexedSlices(
constant_op.constant(
[0.1, 0.1], shape=[2, 1], dtype=dtype),
constant_op.constant([1, 1]),
constant_op.constant([2, 1]))
grad_aggregated = ops.IndexedSlices(
constant_op.constant(
[0.2], shape=[1, 1], dtype=dtype),
constant_op.constant([1]),
constant_op.constant([2, 1]))
repeated_update = adam.AdamOptimizer().apply_gradients(
[(grad_repeated_index, repeated_index_update_var)])
aggregated_update = adam.AdamOptimizer().apply_gradients(
[(grad_aggregated, aggregated_update_var)])
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(aggregated_update_var,
self.evaluate(repeated_index_update_var))
for _ in range(3):
repeated_update.run()
aggregated_update.run()
self.assertAllClose(aggregated_update_var,
self.evaluate(repeated_index_update_var))
def doTestBasic(self, use_resource=False, use_callable_params=False):
if context.executing_eagerly() and not use_resource:
self.skipTest(
"Skipping test with use_resource=False and executing eagerly.")
for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]):
with self.session(graph=ops.Graph()):
# Initialize variables for numpy implementation.
m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
if use_resource:
var0 = resource_variable_ops.ResourceVariable(
var0_np, name="var0_%d" % i)
var1 = resource_variable_ops.ResourceVariable(
var1_np, name="var1_%d" % i)
else:
var0 = variables.RefVariable(var0_np)
var1 = variables.RefVariable(var1_np)
grads0 = constant_op.constant(grads0_np)
grads1 = constant_op.constant(grads1_np)
learning_rate = lambda: 0.001
beta1 = lambda: 0.9
beta2 = lambda: 0.999
epsilon = lambda: 1e-8
if not use_callable_params:
learning_rate = learning_rate()
beta1 = beta1()
beta2 = beta2()
epsilon = epsilon()
opt = adam.AdamOptimizer(learning_rate=learning_rate)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
opt_variables = opt.variables()
beta1_power, beta2_power = opt._get_beta_accumulators()
self.assertTrue(beta1_power is not None)
self.assertTrue(beta2_power is not None)
self.assertIn(beta1_power, opt_variables)
self.assertIn(beta2_power, opt_variables)
# Ensure that non-slot variables are the same type as the requested
# variables.
self.assertEqual(
use_resource,
resource_variable_ops.is_resource_variable(beta1_power))
self.assertEqual(
use_resource,
resource_variable_ops.is_resource_variable(beta2_power))
if not context.executing_eagerly():
with ops.Graph().as_default():
# Shouldn't return non-slot variables from other graphs.
self.assertEqual(0, len(opt.variables()))
self.evaluate(variables.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], self.evaluate(var0))
self.assertAllClose([3.0, 4.0], self.evaluate(var1))
beta1_power, beta2_power = opt._get_beta_accumulators()
# Run 3 steps of Adam
for t in range(1, 4):
if not context.executing_eagerly():
self.evaluate(update)
elif t > 1:
opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
self.assertAllCloseAccordingToType(0.9**(t + 1),
self.evaluate(beta1_power))
self.assertAllCloseAccordingToType(0.999**(t + 1),
self.evaluate(beta2_power))
var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, self.evaluate(var0))
self.assertAllCloseAccordingToType(var1_np, self.evaluate(var1))
if use_resource:
self.assertEqual("var0_%d/Adam:0" % (i,),
opt.get_slot(var=var0, name="m").name)
def testBasic(self):
with self.cached_session():
self.doTestBasic(use_resource=False)
@test_util.run_in_graph_and_eager_modes
@test_util.disable_tfrt("b/168527439: invalid runtime fallback "
"resource variable reference on GPU.")
def testResourceBasic(self):
self.doTestBasic(use_resource=True)
@test_util.disable_tfrt("b/153089059: cannot create half tensor on GPU.")
def testBasicCallableParams(self):
with context.eager_mode():
self.doTestBasic(use_resource=True, use_callable_params=True)
def testTensorLearningRate(self):
with ops.Graph().as_default():
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.cached_session():
# Initialize variables for numpy implementation.
m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
var0 = variables.Variable(var0_np)
var1 = variables.Variable(var1_np)
grads0 = constant_op.constant(grads0_np)
grads1 = constant_op.constant(grads1_np)
opt = adam.AdamOptimizer(constant_op.constant(0.001))
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
self.evaluate(variables.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], self.evaluate(var0))
self.assertAllClose([3.0, 4.0], self.evaluate(var1))
beta1_power, beta2_power = opt._get_beta_accumulators()
# Run 3 steps of Adam
for t in range(1, 4):
self.assertAllCloseAccordingToType(0.9**t,
self.evaluate(beta1_power))
self.assertAllCloseAccordingToType(0.999**t,
self.evaluate(beta2_power))
update.run()
var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, self.evaluate(var0))
self.assertAllCloseAccordingToType(var1_np, self.evaluate(var1))
def testSharing(self):
with ops.Graph().as_default():
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.cached_session():
# Initialize variables for numpy implementation.
m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
var0 = variables.Variable(var0_np)
var1 = variables.Variable(var1_np)
grads0 = constant_op.constant(grads0_np)
grads1 = constant_op.constant(grads1_np)
opt = adam.AdamOptimizer()
update1 = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
update2 = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
self.evaluate(variables.global_variables_initializer())
beta1_power, beta2_power = opt._get_beta_accumulators()
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], self.evaluate(var0))
self.assertAllClose([3.0, 4.0], self.evaluate(var1))
# Run 3 steps of intertwined Adam1 and Adam2.
for t in range(1, 4):
self.assertAllCloseAccordingToType(0.9**t,
self.evaluate(beta1_power))
self.assertAllCloseAccordingToType(0.999**t,
self.evaluate(beta2_power))
if t % 2 == 0:
update1.run()
else:
update2.run()
var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, self.evaluate(var0))
self.assertAllCloseAccordingToType(var1_np, self.evaluate(var1))
@test_util.disable_tfrt("b/168527439: invalid runtime fallback "
"resource variable reference on GPU.")
def testTwoSessions(self):
optimizer = adam.AdamOptimizer()
with context.eager_mode():
var0 = variables.Variable(np.array([1.0, 2.0]), name="v0")
grads0 = constant_op.constant(np.array([0.1, 0.1]))
optimizer.apply_gradients([(grads0, var0)])
g = ops.Graph()
with g.as_default():
with session.Session():
var0 = variables.Variable(np.array([1.0, 2.0]), name="v0")
grads0 = constant_op.constant(np.array([0.1, 0.1]))
optimizer.apply_gradients([(grads0, var0)])
gg = ops.Graph()
with gg.as_default():
with session.Session():
var0 = variables.Variable(np.array([1.0, 2.0]), name="v0")
grads0 = constant_op.constant(np.array([0.1, 0.1]))
# If the optimizer saves any state not keyed by graph the following line
# fails.
optimizer.apply_gradients([(grads0, var0)])
@test_util.disable_tfrt("b/168527439: invalid runtime fallback "
"resource variable reference on GPU.")
def testSlotsUniqueEager(self):
with context.eager_mode():
v1 = resource_variable_ops.ResourceVariable(1.)
v2 = resource_variable_ops.ResourceVariable(1.)
opt = adam.AdamOptimizer(1.)
opt.minimize(lambda: v1 + v2)
      # There should be two non-slot variables (the beta accumulators) and
      # two unique slot variables (m and v) for each of v1 and v2, giving
      # six variables in total.
self.assertEqual(6, len({id(v) for v in opt.variables()}))
@test_util.deprecated_graph_mode_only
def testXlaSharding(self):
dtype = dtypes.float32
with self.session(graph=ops.Graph()):
# Initialize variables for numpy implementation.
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
var0 = resource_variable_ops.ResourceVariable(var0_np, name="var0")
var1 = resource_variable_ops.ResourceVariable(var1_np, name="var1")
var0, var1 = [
xla_sharding.mesh_split(
v, np.array([0, 1]), [0], use_sharding_op=False)
for v in (var0, var1)
]
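      # mesh_split here annotates each variable as split along its first
      # dimension across a two-device mesh.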
grads0 = constant_op.constant(grads0_np)
grads1 = constant_op.constant(grads1_np)
learning_rate = lambda: 0.001
opt = adam.AdamOptimizer(learning_rate=learning_rate)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
self.evaluate(variables.global_variables_initializer())
self.evaluate(update)
# The beta accumulators are not sharded.
beta1_power, beta2_power = opt._get_beta_accumulators()
self.assertIsNone(xla_sharding.get_tensor_sharding(beta1_power))
self.assertIsNone(xla_sharding.get_tensor_sharding(beta2_power))
# Variables and slots are sharded.
for v in (var0, var1):
self.assertIsNotNone(xla_sharding.get_tensor_sharding(v))
for slot_name in ("m", "v"):
slot = opt.get_slot(v, slot_name)
self.assertIsNotNone(xla_sharding.get_tensor_sharding(slot))
if __name__ == "__main__":
test.main()
|
{
"content_hash": "043a0f2edb2f18f2863a20f6ac196ed8",
"timestamp": "",
"source": "github",
"line_count": 408,
"max_line_length": 80,
"avg_line_length": 43.06617647058823,
"alnum_prop": 0.6072505833475613,
"repo_name": "sarvex/tensorflow",
"id": "06cbc386e532419f8b4cebe1b0172b4252aff68d",
"size": "18260",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "tensorflow/python/training/adam_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "148184"
},
{
"name": "C++",
"bytes": "6224499"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "650478"
},
{
"name": "Java",
"bytes": "53519"
},
{
"name": "JavaScript",
"bytes": "6659"
},
{
"name": "Jupyter Notebook",
"bytes": "777935"
},
{
"name": "Objective-C",
"bytes": "1288"
},
{
"name": "Protocol Buffer",
"bytes": "61743"
},
{
"name": "Python",
"bytes": "3474762"
},
{
"name": "Shell",
"bytes": "45640"
},
{
"name": "TypeScript",
"bytes": "283668"
}
],
"symlink_target": ""
}
|
from pathlib import Path
from genopandas import GenomicDataFrame
import pytest
from pyim.model import Insertion, CisSite
from pyim.vendor.frozendict import frozendict
@pytest.fixture(scope='session')
def gtf_path():
"""Path to example GTF file."""
return Path(str(pytest.helpers.data_path('reference.gtf.gz')))
@pytest.fixture(scope='session')
def genes(gtf_path):
"""Genes from example GTF file."""
genes = GenomicDataFrame.from_gtf(
gtf_path, filter_=lambda rec: rec['feature'] == 'gene')
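    # Encode strand as +1/-1 integers, matching the convention used by the
    # insertion fixtures below.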
genes['strand'] = genes['strand'].map({'+': 1, '-': -1})
return genes
@pytest.fixture(scope='session')
def insertions():
"""Example insertions."""
# Trp53bp2 location: 1: 182,409,172-182,462,432.
# Myh9 location: 15: 77,760,587-77,842,175.
return [
# 1000 bp upstream of Trp53bp2.
Insertion(id='INS1', chromosome='1', position=182408171,
strand=1, support=2, sample='s1', metadata=frozendict()),
# 2000 bp downstream of Myh9.
Insertion(id='INS2', chromosome='15', position=77758586,
strand=1, support=2, sample='s1', metadata=frozendict()),
# Different chromosome.
Insertion(id='INS3', chromosome='4', position=77843175,
strand=1, support=2, sample='s1', metadata=frozendict())
] # yapf: disable
@pytest.fixture(scope='session')
def cis_insertions():
"""Example insertions with CIS annotations."""
return [
# 1000 bp upstream of Trp53bp2.
Insertion(id='INS1', chromosome='1', position=182408172,
strand=1, support=2, sample='s1',
metadata=frozendict({'cis_id': 'CIS1'})),
# Different chromosome.
Insertion(id='INS2', chromosome='4', position=77843175,
strand=1, support=2, sample='s1',
metadata=frozendict({'cis_id': 'CIS2'}))
] # yapf: disable
@pytest.fixture(scope='session')
def cis_sites():
"""Example CIS sites."""
return [
CisSite(id='CIS1', chromosome='1', position=182408172,
strand=1, metadata=frozendict()),
CisSite(id='CIS2', chromosome='4', position=132408091,
strand=1, metadata=frozendict())
] # yapf: disable
|
{
"content_hash": "15624b3d98f84a289c03c5a3dad47a53",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 75,
"avg_line_length": 32.3,
"alnum_prop": 0.6130030959752322,
"repo_name": "jrderuiter/pyim",
"id": "b6eb9f19806c4ce091520b67fa1f7232bfb592ea",
"size": "2261",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/pyim/annotate/annotators/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2585"
},
{
"name": "Python",
"bytes": "97613"
},
{
"name": "Shell",
"bytes": "69"
}
],
"symlink_target": ""
}
|
"""
Messaging module
"""
module = "msg"
if deployment_settings.has_module(module):
# Settings
resourcename = "setting"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("outgoing_sms_handler"),
# Moved to deployment_settings
#Field("default_country_code", "integer", default=44),
migrate=migrate)
table.outgoing_sms_handler.requires = IS_IN_SET(["Modem", "Gateway", "Tropo"], zero=None)
#------------------------------------------------------------------------
resourcename = "email_settings"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("inbound_mail_server"),
Field("inbound_mail_type"),
Field("inbound_mail_ssl", "boolean"),
Field("inbound_mail_port", "integer"),
Field("inbound_mail_username"),
Field("inbound_mail_password"),
Field("inbound_mail_delete", "boolean"),
                            # Also needs to be used by Auth (order issues); DB calls are overhead
                            # - it is as easy for an admin to edit the source in 000_config.py as to edit the DB (although an admin panel can be nice)
#Field("outbound_mail_server"),
#Field("outbound_mail_from"),
migrate=migrate)
table.inbound_mail_type.requires = IS_IN_SET(["imap", "pop3"], zero=None)
#------------------------------------------------------------------------
resourcename = "modem_settings"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
#Field("account_name"), # Nametag to remember account - To be used later
Field("modem_port"),
Field("modem_baud", "integer", default = 115200),
Field("enabled", "boolean", default = False),
#Field("preference", "integer", default = 5), To be used later
migrate=migrate)
#------------------------------------------------------------------------
resourcename = "gateway_settings"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("url",
default = "https://api.clickatell.com/http/sendmsg"),
Field("parameters",
default="user=yourusername&password=yourpassword&api_id=yourapiid"),
Field("message_variable", "string", default = "text"),
Field("to_variable", "string", default = "to"),
Field("enabled", "boolean", default = False),
#Field("preference", "integer", default = 5), To be used later
migrate=migrate)
#------------------------------------------------------------------------
resourcename = "tropo_settings"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("token_messaging"),
#Field("token_voice"),
migrate=migrate)
#------------------------------------------------------------------------
resourcename = "twitter_settings"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("pin"),
Field("oauth_key"),
Field("oauth_secret"),
Field("twitter_account"),
migrate=migrate)
table.oauth_key.writable = False
table.oauth_secret.writable = False
### comment these 2 when debugging
table.oauth_key.readable = False
table.oauth_secret.readable = False
table.twitter_account.writable = False
def twitter_settings_onvalidation(form):
""" Complete oauth: take tokens from session + pin from form, and do the 2nd API call to Twitter """
if form.vars.pin and session.s3.twitter_request_key and session.s3.twitter_request_secret:
try:
import tweepy
except:
raise HTTP(501, body=T("Can't import tweepy"))
oauth = tweepy.OAuthHandler(deployment_settings.twitter.oauth_consumer_key,
deployment_settings.twitter.oauth_consumer_secret)
oauth.set_request_token(session.s3.twitter_request_key, session.s3.twitter_request_secret)
try:
oauth.get_access_token(form.vars.pin)
form.vars.oauth_key = oauth.access_token.key
form.vars.oauth_secret = oauth.access_token.secret
twitter = tweepy.API(oauth)
form.vars.twitter_account = twitter.me().screen_name
form.vars.pin = "" # we won't need it anymore
return
except tweepy.TweepError:
session.error = T("Settings were reset because authenticating with Twitter failed")
# Either user asked to reset, or error - clear everything
for k in ["oauth_key", "oauth_secret", "twitter_account"]:
form.vars[k] = None
for k in ["twitter_request_key", "twitter_request_secret"]:
session.s3[k] = ""
s3xrc.model.configure(table, onvalidation=twitter_settings_onvalidation)
#------------------------------------------------------------------------
# Message priority
msg_priority_opts = {
3:T("High"),
2:T("Medium"),
1:T("Low")
}
#------------------------------------------------------------------------
# Message Log - This is where all the messages / logs go into
resourcename = "log"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
super_link(db.pr_pentity), # pe_id, Sender
Field("sender"), # The name to go out incase of the email, if set used
Field("fromaddress"), # From address if set changes sender to this
Field("recipient"),
Field("subject", length=78),
Field("message", "text"),
#Field("attachment", "upload", autodelete = True), #TODO
Field("verified", "boolean", default = False),
Field("verified_comments", "text"),
Field("actionable", "boolean", default = True),
Field("actioned", "boolean", default = False),
Field("actioned_comments", "text"),
# Hide until actually wired-up for something
#Field("priority", "integer", default = 1),
Field("inbound", "boolean", default = False),
migrate=migrate,
*(s3_timestamp() + s3_uid() + s3_deletion_status()))
table.uuid.requires = IS_NOT_ONE_OF(db, "%s.uuid" % tablename)
#table.priority.requires = IS_NULL_OR(IS_IN_SET(msg_priority_opts))
#table.priority.label = T("Priority")
table.inbound.label = T("Direction")
table.inbound.represent = lambda direction: (direction and ["In"] or ["Out"])[0]
#@ToDo More Labels for i18n
s3xrc.model.configure(table,
list_fields=["id",
"inbound",
"pe_id",
"fromaddress",
"recipient",
"subject",
"message",
"verified",
#"verified_comments",
"actionable",
"actioned",
#"actioned_comments",
#"priority"
])
# Reusable Message ID
message_id = S3ReusableField("message_id", db.msg_log,
requires = IS_NULL_OR(IS_ONE_OF(db, "msg_log.id")),
# FIXME: Subject works for Email but not SMS
represent = lambda id: db(db.msg_log.id == id).select(db.msg_log.subject,
limitby=(0, 1)).first().subject,
ondelete = "RESTRICT"
)
#------------------------------------------------------------------------
# Message Tag - Used to tag a message to a resource
resourcename = "tag"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
message_id(),
Field("resource"),
Field("record_uuid", # null in this field implies subscription to the entire resource
type=s3uuid,
length=128),
migrate=migrate,
*(s3_timestamp() + s3_uid() + s3_deletion_status()))
table.uuid.requires = IS_NOT_ONE_OF(db, "%s.uuid" % tablename)
s3xrc.model.configure(table,
list_fields=[ "id",
"message_id",
"record_uuid",
"resource",
])
#------------------------------------------------------------------------
# Twitter Search Queries
resourcename = "twitter_search"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("search_query", length = 140),
migrate = migrate
)
#------------------------------------------------------------------------
# Twitter Search Results
resourcename = "twitter_search_results"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("tweet", length=140),
Field("posted_by"),
Field("posted_at"),
Field("twitter_search", db.msg_twitter_search),
migrate = migrate
)
#table.twitter_search.requires = IS_ONE_OF(db, "twitter_search.search_query")
#table.twitter_search.represent = lambda id: db(db.msg_twitter_search.id == id).select(db.msg_twitter_search.search_query, limitby = (0,1)).first().search_query
s3xrc.model.add_component(module, resourcename,
multiple=True,
joinby=dict(msg_twitter_search="twitter_search"))
s3xrc.model.configure(table,
list_fields=[ "id",
"tweet",
"posted_by",
"posted_at",
"twitter_search",
])
#------------------------------------------------------------------------
# The following was added to show only the supported messaging methods
msg_contact_method_opts = { # pr_contact_method dependency
1:T("Email"),
2:T("Mobile Phone"),
#3:T("XMPP"),
4:T("Twitter"),
}
# Channel - For inbound messages this tells which channel the message came in from.
resourcename = "channel"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
message_id(),
Field("pr_message_method", "integer",
requires = IS_IN_SET(msg_contact_method_opts, zero=None),
default = 1),
Field("log"),
migrate=migrate,
*(s3_timestamp() + s3_uid() + s3_deletion_status()))
#------------------------------------------------------------------------
# Status
resourcename = "email_inbound_status"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("status"),
migrate=migrate)
# Valid message outbox statuses
msg_status_type_opts = {
1:T("Unsent"),
2:T("Sent"),
3:T("Draft"),
4:T("Invalid")
}
opt_msg_status = db.Table(None, "opt_msg_status",
Field("status", "integer", notnull=True,
requires = IS_IN_SET(msg_status_type_opts, zero=None),
default = 1,
label = T("Status"),
represent = lambda opt: msg_status_type_opts.get(opt, UNKNOWN_OPT)))
    # Outbox - needs to be separate from Log since a single message sent needs a different outbox entry for each recipient
resourcename = "outbox"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
message_id(),
super_link(db.pr_pentity), # pe_id, Person/Group to send the message out to
Field("address"), # If set used instead of picking up from pe_id
Field("pr_message_method", "integer",
requires = IS_IN_SET(msg_contact_method_opts, zero=None),
default = 1,
label = T("Contact Method"),
represent = lambda opt: msg_contact_method_opts.get(opt, UNKNOWN_OPT)),
opt_msg_status,
Field("system_generated", "boolean", default = False),
Field("log"),
migrate=migrate,
*(s3_timestamp() + s3_uid() + s3_deletion_status()))
s3xrc.model.add_component(module, resourcename,
multiple=True,
joinby=dict(msg_log="message_id"))
table.uuid.requires = IS_NOT_ONE_OF(db, "%s.uuid" % tablename)
s3xrc.model.configure(table,
list_fields=[ "id",
"message_id",
"pe_id",
"status",
"log",
])
# Message Read Status - To replace Message Outbox #TODO
resourcename = "read_status"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
message_id(),
person_id(),
migrate=migrate,
*(s3_timestamp() + s3_uid() + s3_deletion_status()))
table.uuid.requires = IS_NOT_ONE_OF(db, "%s.uuid" % tablename)
s3xrc.model.configure(table,
list_fields=[ "id",
"message_id",
"person_id",
])
#------------------------------------------------------------------------
# Tropo Scratch pad for outbound messaging
resourcename = "tropo_scratch"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("row_id","integer"),
Field("message_id","integer"),
Field("recipient"),
Field("message"),
Field("network"),
migrate=migrate,
*(s3_timestamp() + s3_uid() + s3_deletion_status()))
table.uuid.requires = IS_NOT_ONE_OF(db, "%s.uuid" % tablename)
# SMS store for persistence and scratch pad for combining incoming xform chunks
resourcename = "xforms_store"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("sender", "string", length = 20),
Field("fileno", "integer"),
Field("totalno", "integer"),
Field("partno", "integer"),
Field("message", "string", length = 160),
migrate=migrate)
#------------------------------------------------------------------------
# CAP: Common Alerting Protocol
# http://docs.oasis-open.org/emergency/cap/v1.2/CAP-v1.2.html
# CAP alert Status Code (status)
cap_alert_status_code_opts = {
"Actual":T("Actionable by all targeted recipients"),
"Exercise":T("Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>"),
"System":T("For messages that support alert network internal functions"),
"Test":T("Technical testing only, all recipients disregard"),
"Draft":T("preliminary template or draft, not actionable in its current form"),
}
# CAP info Event Category (category)
cap_info_category_opts = {
"Geo":T("Geophysical (inc. landslide)"),
"Met":T("Meteorological (inc. flood)"),
"Safety":T("General emergency and public safety"),
"Security":T("Law enforcement, military, homeland and local/private security"),
"Rescue":T("Rescue and recovery"),
"Fire":T("Fire suppression and rescue"),
"Health":T("Medical and public health"),
"Env":T("Pollution and other environmental"),
"Transport":T("Public and private transportation"),
"Infra":T("Utility, telecommunication, other non-transport infrastructure"),
"CBRNE":T("Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack"),
"Other":T("Other events"),
}
# CAP info Response Type (responseType)
cap_info_responseType_opts = {
"Shelter":T("Take shelter in place or per <instruction>"),
"Evacuate":T("Relocate as instructed in the <instruction>"),
"Prepare":T("Make preparations per the <instruction>"),
"Execute":T("Execute a pre-planned activity identified in <instruction>"),
"Avoid":T("Avoid the subject event as per the <instruction>"),
"Monitor":T("Attend to information sources as described in <instruction>"),
"Assess":T("Evaluate the information in this message. (This value SHOULD NOT be used in public warning applications.)"),
"AllClear":T("The subject event no longer poses a threat or concern and any follow on action is described in <instruction>"),
"None":T("No action recommended"),
}
# Reports
# Verified reports ready to be sent out as alerts or displayed on a map
msg_report_type_opts = {
"Shelter":T("Take shelter in place or per <instruction>"),
"Evacuate":T("Relocate as instructed in the <instruction>"),
"Prepare":T("Make preparations per the <instruction>"),
"Execute":T("Execute a pre-planned activity identified in <instruction>"),
"Avoid":T("Avoid the subject event as per the <instruction>"),
"Monitor":T("Attend to information sources as described in <instruction>"),
"Assess":T("Evaluate the information in this message. (This value SHOULD NOT be used in public warning applications.)"),
"AllClear":T("The subject event no longer poses a threat or concern and any follow on action is described in <instruction>"),
"None":T("No action recommended"),
}
resourcename = "report"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
message_id(),
location_id(),
Field("image", "upload", autodelete = True),
Field("url", requires=IS_NULL_OR(IS_URL())),
migrate=migrate,
*(s3_timestamp() + s3_uid() + s3_deletion_status()))
table.uuid.requires = IS_NOT_ONE_OF(db, "%s.uuid" % tablename)
#------------------------------------------------------------------------
def shn_msg_compose( redirect_module = "msg",
redirect_function = "compose",
redirect_vars = None,
title_name = "Send Message" ):
"""
Form to Compose a Message
@param redirect_module: Redirect to the specified module's url after login.
@param redirect_function: Redirect to the specified function
@param redirect_vars: Dict with vars to include in redirects
@param title_name: Title of the page
"""
resourcename1 = "log"
tablename1 = "msg" + "_" + resourcename1
table1 = db[tablename1]
resourcename2 = "outbox"
tablename2 = "msg" + "_" + resourcename2
table2 = db[tablename2]
if auth.is_logged_in() or auth.basic():
pass
else:
redirect(URL(r=request, c="default", f="user", args="login",
vars={"_next":URL(r=request, c=redirect_module, f=redirect_function, vars=redirect_vars)}))
# Model options
table1.sender.writable = table1.sender.readable = False
table1.fromaddress.writable = table1.fromaddress.readable = False
table1.pe_id.writable = table1.pe_id.readable = False
table1.verified.writable = table1.verified.readable = False
table1.verified_comments.writable = table1.verified_comments.readable = False
table1.actioned.writable = table1.actioned.readable = False
table1.actionable.writable = table1.actionable.readable = False
table1.actioned_comments.writable = table1.actioned_comments.readable = False
table1.subject.label = T("Subject")
table1.message.label = T("Message")
#table1.priority.label = T("Priority")
table2.pe_id.writable = table2.pe_id.readable = True
table2.pe_id.label = T("Recipients")
table2.pe_id.comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Recipients"),
T("Please enter the first few letters of the Person/Group for the autocomplete.")))
def compose_onvalidation(form):
""" Set the sender and use msg.send_by_pe_id to route the message """
if not request.vars.pe_id:
session.error = T("Please enter the recipient")
redirect(URL(r=request,c=redirect_module, f=redirect_function, vars=redirect_vars))
sender_pe_id = db(db.pr_person.uuid == auth.user.person_uuid).select(db.pr_person.pe_id,
limitby=(0, 1)).first().pe_id
if msg.send_by_pe_id(request.vars.pe_id,
request.vars.subject,
request.vars.message,
sender_pe_id,
request.vars.pr_message_method):
session.flash = T("Check outbox for the message status")
redirect(URL(r=request, c=redirect_module, f=redirect_function, vars=redirect_vars))
else:
session.error = T("Error in message")
redirect(URL(r=request,c=redirect_module, f=redirect_function, vars=redirect_vars))
logform = crud.create(table1,
onvalidation = compose_onvalidation)
outboxform = crud.create(table2)
return dict(logform = logform, outboxform = outboxform, title = T(title_name))
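    # Illustrative call from a controller (hypothetical controller code,
    # shown only to demonstrate the helper above):
    #
    #   def compose():
    #       return shn_msg_compose(redirect_module="msg",
    #                              redirect_function="compose",
    #                              title_name="Send Message")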
|
{
"content_hash": "d05c27081f82c34c8be527e881c5e6c3",
"timestamp": "",
"source": "github",
"line_count": 486,
"max_line_length": 164,
"avg_line_length": 50.135802469135804,
"alnum_prop": 0.4814495608634983,
"repo_name": "ptressel/sahana-eden-madpub",
"id": "0842a6f931eb694efef4707b220175d539c66327",
"size": "24391",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "models/msg.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "14896489"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "14827014"
},
{
"name": "Shell",
"bytes": "1171"
}
],
"symlink_target": ""
}
|
CMarketRspInfoField = {
"ErrorID": "int",
"ErrorMsg": "string",
}
CMarketReqUserLoginField = {
"UserId": "string",
"UserPwd": "string",
"UserType": "string",
"MacAddress": "string",
"ComputerName": "string",
"SoftwareName": "string",
"SoftwareVersion": "string",
"AuthorCode": "string",
"ErrorDescription": "string",
}
CMarketRspUserLoginField = {
"UserName": "string",
"UserPwd": "string",
"UserType": "string",
}
CMarketReqUserLogoutField = {
"BrokerID": "string",
"UserId": "string",
"ErrorDescription": "string",
}
CMarketReqMarketDataField = {
"MarketType": "char",
"SubscMode": "char",
"MarketCount": "int",
"MarketTrcode[MAX_SUB_COUNT]": "string",
"ErrorDescription": "string",
}
CMarketRspMarketDataField = {
"ExchangeCode": "string",
"TreatyCode": "string",
"BuyPrice": "string",
"BuyNumber": "string",
"SalePrice": "string",
"SaleNumber": "string",
"CurrPrice": "string",
"CurrNumber": "string",
"High": "string",
"Low": "string",
"Open": "string",
"IntradaySettlePrice": "string",
"Close": "string",
"Time": "string",
"FilledNum": "string",
"HoldNum": "string",
"BuyPrice2": "string",
"BuyPrice3": "string",
"BuyPrice4": "string",
"BuyPrice5": "string",
"BuyNumber2": "string",
"BuyNumber3": "string",
"BuyNumber4": "string",
"BuyNumber5": "string",
"SalePrice2": "string",
"SalePrice3": "string",
"SalePrice4": "string",
"SalePrice5": "string",
"SaleNumber2": "string",
"SaleNumber3": "string",
"SaleNumber4": "string",
"SaleNumber5": "string",
"HideBuyPrice": "string",
"HideBuyNumber": "string",
"HideSalePrice": "string",
"HideSaleNumber": "string",
"LimitDownPrice": "string",
"LimitUpPrice": "string",
"TradeDay": "string",
"BuyPrice6": "string",
"BuyPrice7": "string",
"BuyPrice8": "string",
"BuyPrice9": "string",
"BuyPrice10": "string",
"BuyNumber6": "string",
"BuyNumber7": "string",
"BuyNumber8": "string",
"BuyNumber9": "string",
"BuyNumber10": "string",
"SalePrice6": "string",
"SalePrice7": "string",
"SalePrice8": "string",
"SalePrice9": "string",
"SalePrice10": "string",
"SaleNumber6": "string",
"SaleNumber7": "string",
"SaleNumber8": "string",
"SaleNumber9": "string",
"SaleNumber10": "string",
"TradeFlag": "string",
"DataTimestamp": "string",
"DataSourceId": "string",
"CanSellVol": "string",
"QuoteType": "string",
"AggressorSide": "string",
"PreSettlementPrice": "string",
}
CMarketReqBrokerDataField = {
"ContCode": "string",
"ErrorDescription": "string",
}
CMarketRspBrokerDataField = {
"BrokerData": "string",
}
CMarketRspTradeDateField = {
"TradeDate": "string",
"TradeProduct": "string",
}
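# Illustrative note (not part of the generated definitions): these
# name -> type maps are typically consumed by the API wrapper layer to copy
# C structs into Python dicts, roughly along the lines of:
#
#   def struct_to_dict(obj, field_map):
#       return {name.split("[")[0]: getattr(obj, name.split("[")[0])
#               for name in field_map}
#
# The "MarketTrcode[MAX_SUB_COUNT]" key marks a fixed-size C array field,
# so the bracket suffix must be stripped before attribute lookup.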
|
{
"content_hash": "20dbbc03de9f34e127478d90d088b9e2",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 44,
"avg_line_length": 24.445378151260503,
"alnum_prop": 0.5905809556548642,
"repo_name": "bigdig/vnpy",
"id": "63144a7d88b14f7dd7f990e80ac5846f044ea90e",
"size": "2909",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vnpy/api/da/generator/da_market_struct.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "390"
},
{
"name": "C",
"bytes": "1652953"
},
{
"name": "C++",
"bytes": "13737810"
},
{
"name": "Objective-C",
"bytes": "1200"
},
{
"name": "Python",
"bytes": "2979947"
},
{
"name": "Shell",
"bytes": "6050"
}
],
"symlink_target": ""
}
|
import sys
from ctypes import *
try:
import _idaapi
except:
print("Please try me from inside IDA")
sys.exit(0)
try:
import pywraps
pywraps_there = True
print("Choose2: using pywraps")
_idaapi.choose2_create = pywraps.py_choose2_create
_idaapi.choose2_activate = pywraps.py_choose2_activate
_idaapi.choose2_refresh = pywraps.py_choose2_refresh
_idaapi.choose2_close = pywraps.py_choose2_close
_idaapi.choose2_add_command = pywraps.py_choose2_add_command
_idaapi.choose2_get_embedded = pywraps.py_choose2_get_embedded
_idaapi.choose2_get_embedded_selection = pywraps.py_choose2_get_embedded_selection
try:
# Get function address
# void test_embedded(chooser_info_t *)
TEST_EMBEDDED = CFUNCTYPE(c_void_p, c_void_p)
test_embedded = TEST_EMBEDDED(pywraps.py_choose2_get_test_embedded())
except Exception as e:
test_embedded = None
print("Choose2: Exception: %s" % str(e))
except Exception as e:
pywraps_there = False
print("Choose2: Not using pywraps: %s" % str(e))
# -----------------------------------------------------------------------
#<pycode(py_choose2)>
class Choose2(object):
"""
Choose2 wrapper class.
Some constants are defined in this class. Please refer to kernwin.hpp for more information.
"""
CH_MODAL = 0x01
"""Modal chooser"""
CH_MULTI = 0x02
"""Allow multi selection"""
CH_MULTI_EDIT = 0x04
CH_NOBTNS = 0x08
CH_ATTRS = 0x10
CH_NOIDB = 0x20
"""use the chooser even without an open database, same as x0=-2"""
CH_UTF8 = 0x40
"""string encoding is utf-8"""
CH_BUILTIN_MASK = 0xF80000
# column flags (are specified in the widths array)
CHCOL_PLAIN = 0x00000000
CHCOL_PATH = 0x00010000
CHCOL_HEX = 0x00020000
CHCOL_DEC = 0x00030000
CHCOL_FORMAT = 0x00070000
def __init__(self, title, cols, flags=0, popup_names=None,
icon=-1, x1=-1, y1=-1, x2=-1, y2=-1, deflt=-1,
embedded=False, width=None, height=None):
"""
Constructs a chooser window.
@param title: The chooser title
        @param cols: a list of columns; each list item is a list of two items
example: [ ["Address", 10 | Choose2.CHCOL_HEX], ["Name", 30 | Choose2.CHCOL_PLAIN] ]
@param flags: One of CH_XXXX constants
@param deflt: Default starting item
        @param popup_names: list of new captions to replace the default list ["Insert", "Delete", "Edit", "Refresh"]
@param icon: Icon index (the icon should exist in ida resources or an index to a custom loaded icon)
@param x1, y1, x2, y2: The default location
@param embedded: Create as embedded chooser
@param width: Embedded chooser width
@param height: Embedded chooser height
"""
self.title = title
self.flags = flags
self.cols = cols
self.deflt = deflt
self.popup_names = popup_names
self.icon = icon
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
self.embedded = embedded
if embedded:
self.x1 = width
self.y1 = height
def Embedded(self):
"""
Creates an embedded chooser (as opposed to Show())
@return: Returns 1 on success
"""
return _idaapi.choose2_create(self, True)
def GetEmbSelection(self):
"""
Returns the selection associated with an embedded chooser
@return:
- None if chooser is not embedded
- A list with selection indices (0-based)
"""
return _idaapi.choose2_get_embedded_selection(self)
def Show(self, modal=False):
"""
Activates or creates a chooser window
@param modal: Display as modal dialog
@return: For modal choosers it will return the selected item index (0-based)
"""
if modal:
self.flags |= Choose2.CH_MODAL
# Disable the timeout
old = _idaapi.set_script_timeout(0)
n = _idaapi.choose2_create(self, False)
_idaapi.set_script_timeout(old)
# Delete the modal chooser instance
self.Close()
return n
else:
self.flags &= ~Choose2.CH_MODAL
return _idaapi.choose2_create(self, False)
def Activate(self):
"""Activates a visible chooser"""
return _idaapi.choose2_activate(self)
def Refresh(self):
"""Causes the refresh callback to trigger"""
return _idaapi.choose2_refresh(self)
def Close(self):
"""Closes the chooser"""
return _idaapi.choose2_close(self)
def AddCommand(self,
caption,
flags = _idaapi.CHOOSER_POPUP_MENU,
menu_index = -1,
icon = -1,
emb=None):
"""
Deprecated: Use
- register_action()
- attach_action_to_menu()
- attach_action_to_popup()
"""
        # Use 'emb' as a sentinel; the EmbeddedChooserControl passes in the correct value.
if self.embedded and ((emb is None) or (emb != 2002)):
raise RuntimeError("Please add a command through EmbeddedChooserControl.AddCommand()")
return _idaapi.choose2_add_command(self, caption, flags, menu_index, icon)
#
# Implement these methods in the subclass:
#
#<pydoc>
# def OnClose(self):
# """
# Called when the window is being closed.
# This callback is mandatory.
# @return: nothing
# """
# pass
#
# def OnGetLine(self, n):
# """Called when the chooser window requires lines.
# This callback is mandatory.
# @param n: Line number (0-based)
# @return: The user should return a list with ncols elements.
# example: a list [col1, col2, col3, ...] describing the n-th line
# """
# return ["col1 val", "col2 val"]
#
# def OnGetSize(self):
# """Returns the element count.
# This callback is mandatory.
# @return: Number of elements
# """
# return len(self.the_list)
#
# def OnEditLine(self, n):
# """
# Called when an item is being edited.
# @param n: Line number (0-based)
# @return: Nothing
# """
# pass
#
# def OnInsertLine(self):
# """
# Called when 'Insert' is selected either via the hotkey or popup menu.
# @return: Nothing
# """
# pass
#
# def OnSelectLine(self, n):
# """
# Called when a line is selected and then Ok or double click was pressed
# @param n: Line number (0-based)
# """
# pass
#
# def OnSelectionChange(self, sel_list):
# """
# Called when the selection changes
# @param sel_list: A list of selected item indices
# """
# pass
#
# def OnDeleteLine(self, n):
# """
# Called when a line is about to be deleted
# @param n: Line number (0-based)
# """
# return self.n
#
# def OnRefresh(self, n):
# """
# Triggered when the 'Refresh' is called from the popup menu item.
#
# @param n: The currently selected line (0-based) at the time of the refresh call
# @return: Return the number of elements
# """
# return self.n
#
# def OnRefreshed(self):
# """
# Triggered when a refresh happens (for example due to column sorting)
# @return: Return the number of elements
# """
# return self.n
#
# def OnCommand(self, n, cmd_id):
# """Return int ; check add_chooser_command()"""
# return 0
#
# def OnGetIcon(self, n):
# """
# Return icon number for a given item (or -1 if no icon is avail)
# @param n: Line number (0-based)
# """
# return -1
#
# def OnGetLineAttr(self, n):
# """
# Return list [bgcolor, flags=CHITEM_XXXX] or None; check chooser_item_attrs_t
# @param n: Line number (0-based)
# """
# return [0x0, CHITEM_BOLD]
#</pydoc>
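    #
    # A minimal usage sketch (illustrative; the column layout, item data and
    # class name below are assumptions, not part of this module):
    #
    # class MyChoose2(Choose2):
    #     def __init__(self):
    #         Choose2.__init__(self, "My chooser", [["Address", 10], ["Name", 30]])
    #         self.items = [["00401000", "start"], ["00401020", "main"]]
    #     def OnClose(self):
    #         pass
    #     def OnGetLine(self, n):
    #         return self.items[n]
    #     def OnGetSize(self):
    #         return len(self.items)
    #
    # MyChoose2().Show(modal=True)  # returns the selected index (negative if cancelled)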
#</pycode(py_choose2)>
|
{
"content_hash": "95fe30a4fbf687e2e4ab93fe16eeeb79",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 109,
"avg_line_length": 30.364963503649633,
"alnum_prop": 0.5667067307692307,
"repo_name": "nihilus/src",
"id": "ceb374564a0a95f00f04bdf0d307e0ade804a2f3",
"size": "8424",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pywraps/py_choose2.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "120"
},
{
"name": "C++",
"bytes": "510891"
},
{
"name": "CSS",
"bytes": "5162"
},
{
"name": "Makefile",
"bytes": "14977"
},
{
"name": "Python",
"bytes": "572378"
}
],
"symlink_target": ""
}
|
"""Support for AquaLogic devices."""
from datetime import timedelta
import logging
import threading
import time
from aqualogic.core import AquaLogic
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_PORT,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
_LOGGER = logging.getLogger(__name__)
DOMAIN = "aqualogic"
UPDATE_TOPIC = f"{DOMAIN}_update"
CONF_UNIT = "unit"
RECONNECT_INTERVAL = timedelta(seconds=10)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.port}
)
},
extra=vol.ALLOW_EXTRA,
)
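# A minimal configuration.yaml sketch matching CONFIG_SCHEMA above
# (host and port values are illustrative assumptions):
#
#   aqualogic:
#     host: 192.168.1.10
#     port: 23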
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up AquaLogic platform."""
host = config[DOMAIN][CONF_HOST]
port = config[DOMAIN][CONF_PORT]
processor = AquaLogicProcessor(hass, host, port)
hass.data[DOMAIN] = processor
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, processor.start_listen)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, processor.shutdown)
_LOGGER.debug("AquaLogicProcessor %s:%i initialized", host, port)
return True
class AquaLogicProcessor(threading.Thread):
"""AquaLogic event processor thread."""
def __init__(self, hass, host, port):
"""Initialize the data object."""
super().__init__(daemon=True)
self._hass = hass
self._host = host
self._port = port
self._shutdown = False
self._panel = None
def start_listen(self, event):
"""Start event-processing thread."""
_LOGGER.debug("Event processing thread started")
self.start()
    def shutdown(self, event):
        """Signal the event-processing thread to shut down."""
_LOGGER.debug("Event processing signaled exit")
self._shutdown = True
    def data_changed(self, panel):
        """AquaLogic data-changed callback."""
self._hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC)
def run(self):
"""Event thread."""
while True:
self._panel = AquaLogic()
self._panel.connect(self._host, self._port)
self._panel.process(self.data_changed)
if self._shutdown:
return
_LOGGER.error("Connection to %s:%d lost", self._host, self._port)
time.sleep(RECONNECT_INTERVAL.total_seconds())
@property
def panel(self):
"""Retrieve the AquaLogic object."""
return self._panel
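# Illustrative consumption sketch (an assumption about the companion
# platforms, not something enforced here): a sensor platform would
# subscribe via hass.helpers.dispatcher.async_dispatcher_connect(
# UPDATE_TOPIC, callback) and read hass.data[DOMAIN].panel for values.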
|
{
"content_hash": "05703e399615b97e9f4b79211e12a245",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 82,
"avg_line_length": 28.72826086956522,
"alnum_prop": 0.6492622020431328,
"repo_name": "GenericStudent/home-assistant",
"id": "0c4ecaa16833b31a02db4d2c8f9433db010bb386",
"size": "2643",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/aqualogic/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
}
|
from collections import defaultdict
from devito.logger import info
__all__ = ['Executor']
class Executor(object):
"""
Abstract container class for a single benchmark data point.
"""
def setup(self, **kwargs):
"""
Prepares a single benchmark invocation.
"""
pass
def teardown(self, **kwargs):
"""
Cleans up a single benchmark invocation.
"""
pass
def postprocess(self, **kwargs):
"""
Global post-processing method to collect meta-data.
"""
pass
def reset(self):
"""
Reset the data dictionaries.
"""
self.meta = {}
self.timings = defaultdict(lambda: defaultdict(lambda: defaultdict(float)))
def run(self, **kwargs):
"""
This method needs to be overridden by the user.
"""
raise NotImplementedError("No custom executor function specified")
def register(self, value, event='execute', measure='time', rank=0):
"""
Register a single timing value for a given event key.
Parameters
----------
        value : float
            measured value to store
        event : str
            key for the measured event, e.g. 'assembly' or 'solve'
        measure : str
            name of the value type, e.g. 'time' or 'flops'
        rank : int
            rank under which the value is stored (defaults to 0)
"""
self.timings[rank][event][measure] += value
def execute(self, warmups=1, repeats=3, **params):
"""
Execute a single benchmark repeatedly, including
setup, teardown and postprocessing methods.
"""
info("Running %d repeats - parameters: %s" % (repeats,
', '.join(['%s: %s' % (k, v) for k, v in params.items()])))
self.reset()
for i in range(warmups):
info("--- Warmup %d ---" % i)
self.setup(**params)
self.run(**params)
self.teardown(**params)
info("--- Warmup %d finished ---" % i)
self.reset()
for i in range(repeats):
info("--- Run %d ---" % i)
self.setup(**params)
self.run(**params)
self.teardown(**params)
info("--- Run %d finished ---" % i)
info("")
# Average timings across repeats
for rank in self.timings.keys():
for event in self.timings[rank].keys():
for measure in self.timings[rank][event].keys():
self.timings[rank][event][measure] /= repeats
# Collect meta-information via post-processing methods
self.postprocess(**params)
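# A minimal usage sketch (illustrative): time a stand-in kernel with a
# custom Executor subclass. The 'n' parameter and the kernel itself are
# assumptions, not part of this module.
if __name__ == '__main__':
    from timeit import default_timer as timer
    class DemoExecutor(Executor):
        def run(self, **params):
            tic = timer()
            sum(i * i for i in range(params.get('n', 10 ** 6)))  # stand-in kernel
            self.register(timer() - tic, event='execute', measure='time')
    DemoExecutor().execute(warmups=1, repeats=3, n=10 ** 6)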
|
{
"content_hash": "0af8aaf5d8c13825a81d3171ee6aa9b1",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 83,
"avg_line_length": 27.872340425531913,
"alnum_prop": 0.5320610687022901,
"repo_name": "opesci/devito",
"id": "878032f7a045386921c822a58eb568ace6a7db9a",
"size": "2620",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchmarks/user/tools/executor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "812"
},
{
"name": "Python",
"bytes": "1683413"
},
{
"name": "Shell",
"bytes": "3900"
}
],
"symlink_target": ""
}
|
"""
Cool/specialized/interactive utilities which use
both the STDIN and STDOUT helper modules.
"""
# import sys
# import functools
from . import stdin
from . import stdout
from . import settings
from .utils import conversion
# def console_intermission(enable_on_call=True, clear=80*24):
# """
# Decorator to allow functions to exist as brief intermissions
# in terms console echoing. Simply put, if you normally have
# echoing DISABLED but want one function to be an exception, wrap
# it with this decorator. Vice versa is also true -- just pass
# enable_on_call=False instead.
#     Can also clear the console on entrance/exit if provided the window size.
# """
# def toggle(to_enable):
# """Toggle console echoing on/off"""
# if to_enable:
# stdout.enable_echoing()
# else:
# stdout.disable_echoing()
# def maybe_clear():
# if clear:
# stdout.printsl(stdout.clear(clear))
# def _console_intermission(func):
# @functools.wraps(func)
# def wrapped(*args, **kwargs):
# maybe_clear()
# toggle(enable_on_call)
# try:
# out = func(*args, **kwargs)
# toggle(not enable_on_call)
# maybe_clear()
# return out
# except:
# # SOURCE: http://stackoverflow.com/questions/9005941/python-exception-decorator-how-to-preserve-stacktrace
#             (errtype, errvalue, errtraceback) = sys.exc_info() # type/value/traceback
#             toggle(not enable_on_call)
#             maybe_clear()
#             raise errtype, errvalue, errtraceback
# return wrapped
# return _console_intermission
####################################
def getch_until_enter_echo(echo=True, hidden=True, can_delete=True, strip_last=True, max_chars=0):
"""
    Runs getch, accumulating chars until an 'enter' key is pressed.
Warning: running with settings (1,0,0,-)
    (i.e. echoing, non-hidden, and non-deleting)
can cause visual problems if the delete key is pressed
(since we're basically printing deletion characters).
Warning: Printing tabs and other such characters is usually a bad idea.
Perhaps a "banned-for-print" list is sensible.
"""
buff = []
while True:
char = stdin.getch()
prev_len = len(buff)
buff.append(char)
clear_len = len(buff)
if char in settings.ENTER_KEYS:
break
elif can_delete and char in settings.DELETE_KEYS:
            buff = buff[:-2] # drop the DEL that was just appended, plus the char before it
clear_len = prev_len
elif can_delete and char == conversion.chars(settings.KEYS['FN-DEL']):
buff = []
clear_len = prev_len
elif char == settings.CONTROL_C: # arbitrary decision.
raise KeyboardInterrupt
elif max_chars and len(buff) > max_chars:
buff = buff[:-1]
continue
if echo:
echo_str = "*" * len(buff) if hidden else ''.join(buff)
stdout.printsl(stdout.back(prev_len))
stdout.printsl(stdout.clear(clear_len))
stdout.printsl(echo_str)
if strip_last:
buff = buff[:-1]
return ''.join(buff)
def getpass():
    """Runs getch until a carriage return is received.
NOTE: respects the BS/DEL keys"""
return getch_until_enter_echo(strip_last=True) # ignore trailing carriage return
def getpass_cool():
# stdout.disable_echoing()
stdout.print_and_drop_cursor_formatted("Password: [{} ]")
out = getch_until_enter_echo(max_chars=8)
# stdout.enable_echoing()
return out
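# An illustrative usage sketch (import path assumed from this package's
# layout; requires an interactive TTY):
#
#     from pygetch.special import getpass
#     pw = getpass()  # echoes '*' per keystroke; Enter finishes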
|
{
"content_hash": "78ddddf3334b64e54425d24ea8ebbad3",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 124,
"avg_line_length": 30.796747967479675,
"alnum_prop": 0.5828933474128828,
"repo_name": "MattCCS/PyGetch",
"id": "97b9b786c3cf543dc8783fb712081b24650206b9",
"size": "3788",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pygetch/special.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24612"
},
{
"name": "Shell",
"bytes": "1429"
}
],
"symlink_target": ""
}
|
import sys
from oslo.config import cfg
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.common import constants as q_const
from neutron.common import exceptions as q_exc
from neutron.common import rpc as q_rpc
from neutron.common import topics
from neutron.common import utils
from neutron.db import agents_db
from neutron.db import agentschedulers_db
from neutron.db import allowedaddresspairs_db as addr_pair_db
from neutron.db import db_base_plugin_v2
from neutron.db import dhcp_rpc_base
from neutron.db import external_net_db
from neutron.db import extradhcpopt_db
from neutron.db import extraroute_db
from neutron.db import l3_agentschedulers_db
from neutron.db import l3_gwmode_db
from neutron.db import l3_rpc_base
from neutron.db import portbindings_db
from neutron.db import quota_db # noqa
from neutron.db import securitygroups_rpc_base as sg_db_rpc
from neutron.extensions import allowedaddresspairs as addr_pair
from neutron.extensions import extra_dhcp_opt as edo_ext
from neutron.extensions import portbindings
from neutron.extensions import providernet as provider
from neutron.extensions import nat
from neutron import manager
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import rpc
from neutron.openstack.common.rpc import proxy
from neutron.plugins.common import constants as svc_constants
from neutron.plugins.common import utils as plugin_utils
from neutron.plugins.openvswitch.common import config # noqa
from neutron.plugins.openvswitch.common import constants
from neutron.plugins.openvswitch import ovs_db_v2
LOG = logging.getLogger(__name__)
class OVSRpcCallbacks(dhcp_rpc_base.DhcpRpcCallbackMixin,
l3_rpc_base.L3RpcCallbackMixin,
sg_db_rpc.SecurityGroupServerRpcCallbackMixin):
# history
# 1.0 Initial version
# 1.1 Support Security Group RPC
RPC_API_VERSION = '1.1'
def __init__(self, notifier, tunnel_type):
self.notifier = notifier
self.tunnel_type = tunnel_type
def create_rpc_dispatcher(self):
'''Get the rpc dispatcher for this manager.
If a manager would like to set an rpc API version, or support more than
one class as the target of rpc messages, override this method.
'''
return q_rpc.PluginRpcDispatcher([self,
agents_db.AgentExtRpcCallback()])
@classmethod
def get_port_from_device(cls, device):
port = ovs_db_v2.get_port_from_device(device)
if port:
port['device'] = device
return port
def get_device_details(self, rpc_context, **kwargs):
"""Agent requests device details."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s details requested from %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port = ovs_db_v2.get_port(device)
if port:
binding = ovs_db_v2.get_network_binding(None, port['network_id'])
entry = {'device': device,
'network_id': port['network_id'],
'port_id': port['id'],
'admin_state_up': port['admin_state_up'],
'network_type': binding.network_type,
'segmentation_id': binding.segmentation_id,
'physical_network': binding.physical_network}
new_status = (q_const.PORT_STATUS_ACTIVE if port['admin_state_up']
else q_const.PORT_STATUS_DOWN)
if port['status'] != new_status:
ovs_db_v2.set_port_status(port['id'], new_status)
else:
entry = {'device': device}
LOG.debug(_("%s can not be found in database"), device)
return entry
def update_device_down(self, rpc_context, **kwargs):
"""Device no longer exists on agent."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
host = kwargs.get('host')
port = ovs_db_v2.get_port(device)
LOG.debug(_("Device %(device)s no longer exists on %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
if port:
entry = {'device': device,
'exists': True}
plugin = manager.NeutronManager.get_plugin()
if (host and
not plugin.get_port_host(rpc_context, port['id']) == host):
LOG.debug(_("Device %(device)s not bound to the"
" agent host %(host)s"),
{'device': device, 'host': host})
elif port['status'] != q_const.PORT_STATUS_DOWN:
# Set port status to DOWN
ovs_db_v2.set_port_status(port['id'],
q_const.PORT_STATUS_DOWN)
else:
entry = {'device': device,
'exists': False}
LOG.debug(_("%s can not be found in database"), device)
return entry
def update_device_up(self, rpc_context, **kwargs):
"""Device is up on agent."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
host = kwargs.get('host')
port = ovs_db_v2.get_port(device)
LOG.debug(_("Device %(device)s up on %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
plugin = manager.NeutronManager.get_plugin()
if port:
if (host and
not plugin.get_port_host(rpc_context, port['id']) == host):
LOG.debug(_("Device %(device)s not bound to the"
" agent host %(host)s"),
{'device': device, 'host': host})
return
elif port['status'] != q_const.PORT_STATUS_ACTIVE:
ovs_db_v2.set_port_status(port['id'],
q_const.PORT_STATUS_ACTIVE)
else:
LOG.debug(_("%s can not be found in database"), device)
def tunnel_sync(self, rpc_context, **kwargs):
"""Update new tunnel.
        Updates the database with the tunnel IP. All listening agents will also
be notified about the new tunnel IP.
"""
tunnel_ip = kwargs.get('tunnel_ip')
# Update the database with the IP
tunnel = ovs_db_v2.add_tunnel_endpoint(tunnel_ip)
tunnels = ovs_db_v2.get_tunnel_endpoints()
entry = dict()
entry['tunnels'] = tunnels
# Notify all other listening agents
self.notifier.tunnel_update(rpc_context, tunnel.ip_address,
tunnel.id, self.tunnel_type)
        # Return the list of tunnel IPs to the agent
return entry
class AgentNotifierApi(proxy.RpcProxy,
sg_rpc.SecurityGroupAgentRpcApiMixin):
'''Agent side of the openvswitch rpc API.
API version history:
1.0 - Initial version.
'''
BASE_RPC_API_VERSION = '1.0'
def __init__(self, topic):
super(AgentNotifierApi, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
self.topic_network_delete = topics.get_topic_name(topic,
topics.NETWORK,
topics.DELETE)
self.topic_port_update = topics.get_topic_name(topic,
topics.PORT,
topics.UPDATE)
self.topic_tunnel_update = topics.get_topic_name(topic,
constants.TUNNEL,
topics.UPDATE)
def network_delete(self, context, network_id):
self.fanout_cast(context,
self.make_msg('network_delete',
network_id=network_id),
topic=self.topic_network_delete)
def port_update(self, context, port, network_type, segmentation_id,
physical_network):
self.fanout_cast(context,
self.make_msg('port_update',
port=port,
network_type=network_type,
segmentation_id=segmentation_id,
physical_network=physical_network),
topic=self.topic_port_update)
def tunnel_update(self, context, tunnel_ip, tunnel_id, tunnel_type):
self.fanout_cast(context,
self.make_msg('tunnel_update',
tunnel_ip=tunnel_ip,
tunnel_id=tunnel_id,
tunnel_type=tunnel_type),
topic=self.topic_tunnel_update)
class OVSNeutronPluginV2(db_base_plugin_v2.NeutronDbPluginV2,
external_net_db.External_net_db_mixin,
extraroute_db.ExtraRoute_db_mixin,
l3_gwmode_db.L3_NAT_db_mixin,
sg_db_rpc.SecurityGroupServerRpcMixin,
l3_agentschedulers_db.L3AgentSchedulerDbMixin,
agentschedulers_db.DhcpAgentSchedulerDbMixin,
portbindings_db.PortBindingMixin,
extradhcpopt_db.ExtraDhcpOptMixin,
addr_pair_db.AllowedAddressPairsMixin):
"""Implement the Neutron abstractions using Open vSwitch.
Depending on whether tunneling is enabled, either a GRE, VXLAN tunnel or
a new VLAN is created for each network. An agent is relied upon to
perform the actual OVS configuration on each host.
The provider extension is also supported. As discussed in
https://bugs.launchpad.net/neutron/+bug/1023156, this class could
be simplified, and filtering on extended attributes could be
handled, by adding support for extended attributes to the
NeutronDbPluginV2 base class. When that occurs, this class should
be updated to take advantage of it.
    The port binding extension enables an external application to relay
information to and from the plugin.
"""
# This attribute specifies whether the plugin supports or not
# bulk/pagination/sorting operations. Name mangling is used in
# order to ensure it is qualified by class
__native_bulk_support = True
__native_pagination_support = True
__native_sorting_support = True
_supported_extension_aliases = ["provider", "external-net", "router",
"ext-gw-mode", "binding", "quotas",
"security-group", "agent", "extraroute",
"l3_agent_scheduler",
"dhcp_agent_scheduler",
"extra_dhcp_opt",
"allowed-address-pairs",
"nat"]
@property
def supported_extension_aliases(self):
if not hasattr(self, '_aliases'):
aliases = self._supported_extension_aliases[:]
sg_rpc.disable_security_group_extension_if_noop_driver(aliases)
self._aliases = aliases
return self._aliases
def __init__(self, configfile=None):
self.base_binding_dict = {
portbindings.VIF_TYPE: portbindings.VIF_TYPE_OVS,
portbindings.CAPABILITIES: {
portbindings.CAP_PORT_FILTER:
'security-group' in self.supported_extension_aliases}}
ovs_db_v2.initialize()
self._parse_network_vlan_ranges()
ovs_db_v2.sync_vlan_allocations(self.network_vlan_ranges)
self.tenant_network_type = cfg.CONF.OVS.tenant_network_type
if self.tenant_network_type not in [constants.TYPE_LOCAL,
constants.TYPE_VLAN,
constants.TYPE_GRE,
constants.TYPE_VXLAN,
constants.TYPE_NONE]:
LOG.error(_("Invalid tenant_network_type: %s. "
"Server terminated!"),
self.tenant_network_type)
sys.exit(1)
self.enable_tunneling = cfg.CONF.OVS.enable_tunneling
self.tunnel_type = None
if self.enable_tunneling:
self.tunnel_type = cfg.CONF.OVS.tunnel_type or constants.TYPE_GRE
elif cfg.CONF.OVS.tunnel_type:
self.tunnel_type = cfg.CONF.OVS.tunnel_type
self.enable_tunneling = True
self.tunnel_id_ranges = []
if self.enable_tunneling:
self._parse_tunnel_id_ranges()
ovs_db_v2.sync_tunnel_allocations(self.tunnel_id_ranges)
elif self.tenant_network_type in constants.TUNNEL_NETWORK_TYPES:
LOG.error(_("Tunneling disabled but tenant_network_type is '%s'. "
"Server terminated!"), self.tenant_network_type)
sys.exit(1)
self.setup_rpc()
self.network_scheduler = importutils.import_object(
cfg.CONF.network_scheduler_driver
)
self.router_scheduler = importutils.import_object(
cfg.CONF.router_scheduler_driver
)
def setup_rpc(self):
# RPC support
self.service_topics = {svc_constants.CORE: topics.PLUGIN,
svc_constants.L3_ROUTER_NAT: topics.L3PLUGIN}
self.conn = rpc.create_connection(new=True)
self.notifier = AgentNotifierApi(topics.AGENT)
self.agent_notifiers[q_const.AGENT_TYPE_DHCP] = (
dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
)
self.agent_notifiers[q_const.AGENT_TYPE_L3] = (
l3_rpc_agent_api.L3AgentNotify
)
self.callbacks = OVSRpcCallbacks(self.notifier, self.tunnel_type)
self.dispatcher = self.callbacks.create_rpc_dispatcher()
for svc_topic in self.service_topics.values():
self.conn.create_consumer(svc_topic, self.dispatcher, fanout=False)
# Consume from all consumers in a thread
self.conn.consume_in_thread()
def _parse_network_vlan_ranges(self):
try:
self.network_vlan_ranges = plugin_utils.parse_network_vlan_ranges(
cfg.CONF.OVS.network_vlan_ranges)
except Exception as ex:
LOG.error(_("%s. Server terminated!"), ex)
sys.exit(1)
LOG.info(_("Network VLAN ranges: %s"), self.network_vlan_ranges)
def _parse_tunnel_id_ranges(self):
for entry in cfg.CONF.OVS.tunnel_id_ranges:
entry = entry.strip()
try:
tun_min, tun_max = entry.split(':')
self.tunnel_id_ranges.append((int(tun_min), int(tun_max)))
except ValueError as ex:
LOG.error(_("Invalid tunnel ID range: "
"'%(range)s' - %(e)s. Server terminated!"),
{'range': entry, 'e': ex})
sys.exit(1)
LOG.info(_("Tunnel ID ranges: %s"), self.tunnel_id_ranges)
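    # Illustrative ovs config sketch consumed by the two parsers above
    # (all values are assumptions):
    #
    #   [OVS]
    #   network_vlan_ranges = default:1000:2999
    #   tunnel_id_ranges = 1:1000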
def _extend_network_dict_provider(self, context, network):
binding = ovs_db_v2.get_network_binding(context.session,
network['id'])
network[provider.NETWORK_TYPE] = binding.network_type
if binding.network_type in constants.TUNNEL_NETWORK_TYPES:
network[provider.PHYSICAL_NETWORK] = None
network[provider.SEGMENTATION_ID] = binding.segmentation_id
elif binding.network_type == constants.TYPE_FLAT:
network[provider.PHYSICAL_NETWORK] = binding.physical_network
network[provider.SEGMENTATION_ID] = None
elif binding.network_type == constants.TYPE_VLAN:
network[provider.PHYSICAL_NETWORK] = binding.physical_network
network[provider.SEGMENTATION_ID] = binding.segmentation_id
elif binding.network_type == constants.TYPE_LOCAL:
network[provider.PHYSICAL_NETWORK] = None
network[provider.SEGMENTATION_ID] = None
def _process_provider_create(self, context, attrs):
network_type = attrs.get(provider.NETWORK_TYPE)
physical_network = attrs.get(provider.PHYSICAL_NETWORK)
segmentation_id = attrs.get(provider.SEGMENTATION_ID)
network_type_set = attributes.is_attr_set(network_type)
physical_network_set = attributes.is_attr_set(physical_network)
segmentation_id_set = attributes.is_attr_set(segmentation_id)
if not (network_type_set or physical_network_set or
segmentation_id_set):
return (None, None, None)
if not network_type_set:
msg = _("provider:network_type required")
raise q_exc.InvalidInput(error_message=msg)
elif network_type == constants.TYPE_FLAT:
if segmentation_id_set:
msg = _("provider:segmentation_id specified for flat network")
raise q_exc.InvalidInput(error_message=msg)
else:
segmentation_id = constants.FLAT_VLAN_ID
elif network_type == constants.TYPE_VLAN:
if not segmentation_id_set:
msg = _("provider:segmentation_id required")
raise q_exc.InvalidInput(error_message=msg)
if not utils.is_valid_vlan_tag(segmentation_id):
msg = (_("provider:segmentation_id out of range "
"(%(min_id)s through %(max_id)s)") %
{'min_id': q_const.MIN_VLAN_TAG,
'max_id': q_const.MAX_VLAN_TAG})
raise q_exc.InvalidInput(error_message=msg)
elif network_type in constants.TUNNEL_NETWORK_TYPES:
if not self.enable_tunneling:
msg = _("%s networks are not enabled") % network_type
raise q_exc.InvalidInput(error_message=msg)
if physical_network_set:
msg = _("provider:physical_network specified for %s "
"network") % network_type
raise q_exc.InvalidInput(error_message=msg)
else:
physical_network = None
if not segmentation_id_set:
msg = _("provider:segmentation_id required")
raise q_exc.InvalidInput(error_message=msg)
elif network_type == constants.TYPE_LOCAL:
if physical_network_set:
msg = _("provider:physical_network specified for local "
"network")
raise q_exc.InvalidInput(error_message=msg)
else:
physical_network = None
if segmentation_id_set:
msg = _("provider:segmentation_id specified for local "
"network")
raise q_exc.InvalidInput(error_message=msg)
else:
segmentation_id = None
else:
msg = _("provider:network_type %s not supported") % network_type
raise q_exc.InvalidInput(error_message=msg)
if network_type in [constants.TYPE_VLAN, constants.TYPE_FLAT]:
if physical_network_set:
if physical_network not in self.network_vlan_ranges:
msg = _("Unknown provider:physical_network "
"%s") % physical_network
raise q_exc.InvalidInput(error_message=msg)
elif 'default' in self.network_vlan_ranges:
physical_network = 'default'
else:
msg = _("provider:physical_network required")
raise q_exc.InvalidInput(error_message=msg)
return (network_type, physical_network, segmentation_id)
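    # Illustrative request body accepted by the validation above (all
    # values are assumptions):
    #
    #   {'network': {'name': 'vlan-net',
    #                'provider:network_type': 'vlan',
    #                'provider:physical_network': 'default',
    #                'provider:segmentation_id': 100}}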
def create_network(self, context, network):
(network_type, physical_network,
segmentation_id) = self._process_provider_create(context,
network['network'])
session = context.session
#set up default security groups
tenant_id = self._get_tenant_id_for_create(
context, network['network'])
self._ensure_default_security_group(context, tenant_id)
with session.begin(subtransactions=True):
if not network_type:
# tenant network
network_type = self.tenant_network_type
if network_type == constants.TYPE_NONE:
raise q_exc.TenantNetworksDisabled()
elif network_type == constants.TYPE_VLAN:
(physical_network,
segmentation_id) = ovs_db_v2.reserve_vlan(session)
elif network_type in constants.TUNNEL_NETWORK_TYPES:
segmentation_id = ovs_db_v2.reserve_tunnel(session)
# no reservation needed for TYPE_LOCAL
else:
# provider network
if network_type in [constants.TYPE_VLAN, constants.TYPE_FLAT]:
ovs_db_v2.reserve_specific_vlan(session, physical_network,
segmentation_id)
elif network_type in constants.TUNNEL_NETWORK_TYPES:
ovs_db_v2.reserve_specific_tunnel(session, segmentation_id)
# no reservation needed for TYPE_LOCAL
net = super(OVSNeutronPluginV2, self).create_network(context,
network)
ovs_db_v2.add_network_binding(session, net['id'], network_type,
physical_network, segmentation_id)
self._process_l3_create(context, net, network['network'])
self._extend_network_dict_provider(context, net)
            # note - an exception will roll back the entire transaction
LOG.debug(_("Created network: %s"), net['id'])
return net
def update_network(self, context, id, network):
provider._raise_if_updates_provider_attributes(network['network'])
session = context.session
with session.begin(subtransactions=True):
net = super(OVSNeutronPluginV2, self).update_network(context, id,
network)
self._process_l3_update(context, net, network['network'])
self._extend_network_dict_provider(context, net)
return net
def delete_network(self, context, id):
session = context.session
with session.begin(subtransactions=True):
binding = ovs_db_v2.get_network_binding(session, id)
super(OVSNeutronPluginV2, self).delete_network(context, id)
if binding.network_type in constants.TUNNEL_NETWORK_TYPES:
ovs_db_v2.release_tunnel(session, binding.segmentation_id,
self.tunnel_id_ranges)
elif binding.network_type in [constants.TYPE_VLAN,
constants.TYPE_FLAT]:
ovs_db_v2.release_vlan(session, binding.physical_network,
binding.segmentation_id,
self.network_vlan_ranges)
# the network_binding record is deleted via cascade from
# the network record, so explicit removal is not necessary
self.notifier.network_delete(context, id)
def get_network(self, context, id, fields=None):
session = context.session
with session.begin(subtransactions=True):
net = super(OVSNeutronPluginV2, self).get_network(context,
id, None)
self._extend_network_dict_provider(context, net)
return self._fields(net, fields)
def get_networks(self, context, filters=None, fields=None,
sorts=None,
limit=None, marker=None, page_reverse=False):
session = context.session
with session.begin(subtransactions=True):
nets = super(OVSNeutronPluginV2,
self).get_networks(context, filters, None, sorts,
limit, marker, page_reverse)
for net in nets:
self._extend_network_dict_provider(context, net)
return [self._fields(net, fields) for net in nets]
def create_port(self, context, port):
        # Set port status to 'DOWN'; the agent will update it later
port['port']['status'] = q_const.PORT_STATUS_DOWN
port_data = port['port']
session = context.session
with session.begin(subtransactions=True):
self._ensure_default_security_group_on_port(context, port)
sgids = self._get_security_groups_on_port(context, port)
dhcp_opts = port['port'].get(edo_ext.EXTRADHCPOPTS, [])
port = super(OVSNeutronPluginV2, self).create_port(context, port)
self._process_portbindings_create_and_update(context,
port_data, port)
self._process_port_create_security_group(context, port, sgids)
self._process_port_create_extra_dhcp_opts(context, port,
dhcp_opts)
port[addr_pair.ADDRESS_PAIRS] = (
self._process_create_allowed_address_pairs(
context, port,
port_data.get(addr_pair.ADDRESS_PAIRS)))
self.notify_security_groups_member_updated(context, port)
return port
def _extend_port_dict_nat(self, context, port):
forward = ovs_db_v2.get_port_forwarding(context.session, port['id'])
if forward:
port[nat.FORWARD_PORTS] = forward
else:
port[nat.FORWARD_PORTS] = None
def _process_nat_update(self, context, attrs, id):
forward_ports = attrs.get(nat.FORWARD_PORTS)
forward_ports_set = attributes.is_attr_set(forward_ports)
if not forward_ports_set:
return None
# LOG.info("forward ports %s" % forward_ports)
valid_protocols = ["tcp", "udp"]
for entry in forward_ports:
if not isinstance(entry, dict):
msg = _("nat:forward_ports: must specify a list of dicts (ex: 'l4_protocol=tcp,l4_port=80')")
raise q_exc.InvalidInput(error_message=msg)
if not ("l4_protocol" in entry and "l4_port" in entry):
msg = _("nat:forward_ports: dict is missing l4_protocol and l4_port (ex: 'l4_protocol=tcp,l4_port=80')")
raise q_exc.InvalidInput(error_message=msg)
if entry['l4_protocol'] not in valid_protocols:
msg = _("nat:forward_ports: invalid protocol (only tcp and udp allowed)")
raise q_exc.InvalidInput(error_message=msg)
l4_port = entry['l4_port']
if ":" in l4_port:
try:
(first, last) = l4_port.split(":")
first = int(first)
last = int(last)
                except ValueError:
msg = _("nat:forward_ports: l4_port range must be integer:integer")
raise q_exc.InvalidInput(error_message=msg)
else:
try:
l4_port = int(l4_port)
                except (ValueError, TypeError):
msg = _("nat:forward_ports: l4_port must be an integer")
raise q_exc.InvalidInput(error_message=msg)
return forward_ports
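    # Illustrative nat:forward_ports payload accepted by the validation
    # above (port numbers are assumptions):
    #
    #   [{'l4_protocol': 'tcp', 'l4_port': '80'},
    #    {'l4_protocol': 'udp', 'l4_port': '5000:5010'}]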
def get_port(self, context, id, fields=None):
session = context.session
with session.begin(subtransactions=True):
port = super(OVSNeutronPluginV2, self).get_port(context, id, None)
self._extend_port_dict_nat(context, port)
return self._fields(port, fields)
def get_ports(self, context, filters=None, fields=None):
session = context.session
with session.begin(subtransactions=True):
ports = super(OVSNeutronPluginV2, self).get_ports(context, filters,
None)
for port in ports:
self._extend_port_dict_nat(context, port)
return [self._fields(port, fields) for port in ports]
def update_port(self, context, id, port):
forward_ports = self._process_nat_update(context, port['port'], id)
session = context.session
need_port_update_notify = False
changed_fixed_ips = 'fixed_ips' in port['port']
with session.begin(subtransactions=True):
original_port = super(OVSNeutronPluginV2, self).get_port(
context, id)
updated_port = super(OVSNeutronPluginV2, self).update_port(
context, id, port)
if addr_pair.ADDRESS_PAIRS in port['port']:
self._delete_allowed_address_pairs(context, id)
self._process_create_allowed_address_pairs(
context, updated_port,
port['port'][addr_pair.ADDRESS_PAIRS])
need_port_update_notify = True
elif changed_fixed_ips:
self._check_fixed_ips_and_address_pairs_no_overlap(
context, updated_port)
if forward_ports:
ovs_db_v2.clear_port_forwarding(session, updated_port['id'])
ovs_db_v2.add_port_forwarding(session, updated_port['id'], forward_ports)
self._extend_port_dict_nat(context, updated_port)
need_port_update_notify |= self.update_security_group_on_port(
context, id, port, original_port, updated_port)
self._process_portbindings_create_and_update(context,
port['port'],
updated_port)
need_port_update_notify |= self._update_extra_dhcp_opts_on_port(
context, id, port, updated_port)
need_port_update_notify |= self.is_security_group_member_updated(
context, original_port, updated_port)
if original_port['admin_state_up'] != updated_port['admin_state_up']:
need_port_update_notify = True
if need_port_update_notify:
binding = ovs_db_v2.get_network_binding(None,
updated_port['network_id'])
self.notifier.port_update(context, updated_port,
binding.network_type,
binding.segmentation_id,
binding.physical_network)
return updated_port
def delete_port(self, context, id, l3_port_check=True):
# if needed, check to see if this is a port owned by
        # an l3-router. If so, we should prevent deletion.
if l3_port_check:
self.prevent_l3_port_deletion(context, id)
session = context.session
with session.begin(subtransactions=True):
self.disassociate_floatingips(context, id)
port = self.get_port(context, id)
self._delete_port_security_group_bindings(context, id)
super(OVSNeutronPluginV2, self).delete_port(context, id)
self.notify_security_groups_member_updated(context, port)
|
{
"content_hash": "5fdffde6e1d1d031ca50449d4f201031",
"timestamp": "",
"source": "github",
"line_count": 685,
"max_line_length": 120,
"avg_line_length": 46.575182481751824,
"alnum_prop": 0.5652896188565697,
"repo_name": "wathsalav/xos",
"id": "abf0f806c241b20414f7f302105c4e826223d5aa",
"size": "32918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xos/neutron_extension/1:2013.2.2-0ubuntu1~cloud0/ovs_neutron_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "370"
},
{
"name": "CSS",
"bytes": "37088"
},
{
"name": "HTML",
"bytes": "636864"
},
{
"name": "JavaScript",
"bytes": "760492"
},
{
"name": "Makefile",
"bytes": "2717"
},
{
"name": "Python",
"bytes": "1160110"
},
{
"name": "Shell",
"bytes": "10483"
}
],
"symlink_target": ""
}
|
"""Runs all tests available in VisTrails modules by importing all of
them, stealing the classes that look like unit tests, and running
all of them.
runtestsuite.py also reports all VisTrails modules that don't export
any unit tests, as a crude measure of code coverage.
"""
import atexit
from distutils.version import LooseVersion
#import doctest
import locale
import os
import sys
import traceback
from optparse import OptionParser
import platform
import re
import shutil
import tempfile
# This makes sure we use unittest2 everywhere.
# If we are running Python 2.6, VisTrails might choke on missing unittest
# features, since our tests are in the same files as our code.
try:
import unittest2
except ImportError:
pass
else:
sys.modules['unittest'] = unittest2
import unittest
if 'vistrails' not in sys.modules:
# Makes sure we can import modules as if we were running VisTrails
# from the root directory
_this_dir = os.path.dirname(os.path.realpath(__file__))
_root_directory = os.path.realpath(os.path.join(_this_dir, '..'))
sys.path.insert(0, os.path.realpath(os.path.join(_root_directory, '..')))
# Use a different temporary directory
test_temp_dir = tempfile.mkdtemp(prefix='vt_testsuite_')
tempfile.tempdir = test_temp_dir
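# Note: @apply (a Python 2 builtin) calls the class object immediately,
# so clean_tempdir below ends up bound to an instance, not the class.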
@apply
class clean_tempdir(object):
def __init__(self):
atexit.register(self.clean)
self.listdir = os.listdir
self.isdir = os.path.isdir
self.join = os.path.join
self.test_temp_dir = test_temp_dir
self.rmtree = shutil.rmtree
self.out = sys.stdout.write
def clean(self):
nb_dirs = 0
nb_files = 0
for f in self.listdir(self.test_temp_dir):
if self.isdir(self.join(self.test_temp_dir,f)):
nb_dirs += 1
else:
nb_files += 1
if nb_dirs > 0 or nb_files > 0:
self.out("Warning: %d dirs and %d files were left behind in "
"tempdir, cleaning up\n" % (nb_dirs, nb_files))
self.rmtree(self.test_temp_dir, ignore_errors=True)
# Parse the command-line
usage = "Usage: %prog [options] [module1 module2 ...]"
parser = OptionParser(usage=usage)
parser.add_option("-V", "--verbose", action="store", type="int",
default=0, dest="verbose",
help="set verboseness level(0--2, default=0, "
"higher means more verbose)")
parser.add_option("-v", "--vistrails-verbose", action="store", type="int",
default=0, dest="debugLevel",
help="set the debugLevel in VisTrails (0--2, default=0)")
parser.add_option("-e", "--examples", action="store_true",
default=False,
help="run vistrails examples")
parser.add_option("-i", "--images", action="store_true",
default=False,
help="perform image comparisons")
parser.add_option("--installbundles", action='store_true',
default=False,
help=("Attempt to install missing Python packages "
"automatically"))
parser.add_option("-S", "--startup", action="store", type="str", default=None,
dest="dotVistrails",
help="Set startup file (default is temporary directory)")
parser.add_option('-L', '--locale', action='store', type='str', default='',
dest='locale',
help="set locale to this string")
parser.add_option('-D', '--debug', action='store_true',
default=False,
help="start interactive debugger on unexpected error")
parser.add_option('--no-unbuffered', action='store_false', dest='unbuffered',
default=True,
help="Don't make output stream unbuffered")
(options, test_modules) = parser.parse_args()
# remove empty strings
test_modules = filter(len, test_modules)
verbose = options.verbose
locale.setlocale(locale.LC_ALL, options.locale or '')
test_examples = options.examples
test_images = options.images
installbundles = options.installbundles
dotVistrails = options.dotVistrails
debug_mode = options.debug
vistrails_verbose = options.debugLevel
# Makes stdout unbuffered, so python -u is not needed
class Unbuffered(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
if options.unbuffered:
sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
# Use PyQt API v2
def setNewPyQtAPI():
try:
import sip
# We now use the new PyQt API - IPython needs it
sip.setapi('QString', 2)
sip.setapi('QVariant', 2)
except Exception:
print "Could not set PyQt API, is PyQt4 installed?"
setNewPyQtAPI()
# Start debugger on test failure
if debug_mode:
from vistrails.tests.utils import DebugTestCaseMetaBase
unittest.TestCase = DebugTestCaseMetaBase
# Log to the console
import vistrails.core.debug
vistrails.core.debug.DebugPrint.getInstance().log_to_console()
# Disable usage reporting
os.environ['VISTRAILS_USAGE_STATS'] = 'off'
from vistrails.core import reportusage
reportusage.setup_usage_report()
import vistrails.tests
import vistrails.core
import vistrails.core.db.io
import vistrails.core.db.locator
from vistrails.core import debug
import vistrails.gui.application
from vistrails.core.system import vistrails_root_directory, \
vistrails_examples_directory
from vistrails.core.packagemanager import get_package_manager
# reinitialize arguments and options so VisTrails does not try to parse them
sys.argv = sys.argv[:1]
vistrails.gui.application.VistrailsApplicationSingleton.use_event_filter = \
False
root_directory = os.path.realpath(vistrails_root_directory())
###############################################################################
# Testing Examples
EXAMPLES_PATH = vistrails_examples_directory()
# dictionary of example files that will be run, mapped to the workflow tags that will be ignored
VT_EXAMPLES = { 'EMBOSS_webservices.vt': ["ProphetOutput"],
'KEGGPathway.vt': [],
'KEGG_SearchEntities_webservice.vt': [],
'KEGG_webservices.vt': [],
'brain_vistrail.vt': [],
'chebi_webservice.vt': [],
'head.vt': [],
'infovis.vt': [],
'noaa_webservices.vt': [],
'offscreen.vt': [],
'plot.vt': [],
'spx.vt': [],
'structure_or_id_webservice.vt': [],
'terminator.vt': ["Isosurface Script"],
'triangle_area.vt': [],
'vtk.vt': [],
'vtk_book_3rd_p189.vt': ["quadric", "SmapleFunction",
"Almost there"],
'vtk_book_3rd_p193.vt': ["modules", "connections",
"lookup table"],
'vtk_http.vt': [],
}
###############################################################################
# Utility
def sub_print(s, overline=False):
"""Prints line with underline (and optionally overline) ASCII dashes."""
if overline:
print "-" * len(s)
print s
print "-" * len(s)
###############################################################################
if len(test_modules) > 0:
test_modules = test_modules
else:
test_modules = None
if os.path.exists(EXAMPLES_PATH):
test_images = True
def module_filter(name):
if test_modules is None:
return True
for mod in test_modules:
if name.startswith(mod):
return True
return False
###############################################################################
# creates the app so that testing can happen
# We need the windows so we can test events, etc.
optionsDict = {
'batch': False,
'executionLog': False,
'singleInstance': False,
'installBundles': installbundles,
'enablePackagesSilently': True,
'handlerDontAsk': True,
'developerDebugger': debug_mode,
'debugLevel': vistrails_verbose,
'dontUnloadModules': True,
'showVistrailsNews': False,
}
if dotVistrails:
optionsDict['dotVistrails'] = dotVistrails
else:
optionsDict['spawned'] = True
v = vistrails.gui.application.start_application(optionsDict)
if v != 0:
app = vistrails.gui.application.get_vistrails_application()
if app:
app.finishSession()
sys.exit(v)
# make sure that fixedCellSize is turned on
spreadsheet_conf = get_package_manager().get_package_configuration("spreadsheet")
spreadsheet_conf.fixedCellSize = True
# disable first vistrail
app = vistrails.gui.application.get_vistrails_application()
app.builderWindow.auto_view = False
app.builderWindow.close_all_vistrails(True)
print "Test Suite for VisTrails"
print "Locale settings: %s" % ', '.join('%s: %s' % (s, locale.setlocale(getattr(locale, s), None)) for s in ('LC_ALL', 'LC_TIME'))
print "Running on %s" % ', '.join(platform.uname())
print "Python is %s" % sys.version
try:
from PyQt4 import QtCore
print "Using PyQt4 %s with Qt %s" % (QtCore.PYQT_VERSION_STR, QtCore.qVersion())
except ImportError:
print "PyQt4 not available"
for pkg in ('numpy', 'scipy', 'matplotlib'):
try:
ipkg = __import__(pkg, globals(), locals(), [], -1)
print "Using %s %s" % (pkg, ipkg.__version__)
except ImportError:
print "%s not available" % pkg
try:
import vtk
print "Using vtk %s" % vtk.vtkVersion().GetVTKVersion()
except ImportError:
print "vtk not available"
print ""
tests_passed = True
main_test_suite = unittest.TestSuite()
test_loader = unittest.TestLoader()
import_skip_regex = re.compile(r'(?i)# *pragma[: ]*no *testimport')
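# Matches opt-out markers such as "# pragma: no testimport" (case-insensitive)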
if test_modules:
sub_print("Trying to import some of the modules")
else:
sub_print("Trying to import all modules")
for (p, subdirs, files) in os.walk(root_directory):
    # skip version-control subdirectories (.svn, .git)
if p.find('.svn') != -1 or p.find('.git') != -1 :
continue
for filename in files:
# skip files that don't look like VisTrails python modules
if not filename.endswith('.py'):
continue
module_file = os.path.join(p, filename)
module = os.path.join("vistrails", p[len(root_directory)+1:],
filename[:-3])
if (module.startswith(os.sep) or
('#' in module)):
continue
# use qualified import names with periods instead of
# slashes to avoid duplicates in sys.modules
module = module.replace('/','.')
module = module.replace('\\','.')
if module.endswith('__init__'):
module = module[:-9]
if not module_filter(module):
continue
if module.startswith('vistrails.tests.resources'):
continue
if ('.system.' in module and not
module.endswith('__init__')):
continue
with open(module_file) as fp:
l = fp.readline()
if l.startswith('#!'): # shebang
l = fp.readline()
if import_skip_regex.match(l):
if verbose >= 1:
print >>sys.stderr, ("Skipping %s, not an importable "
"module" % module)
continue
m = None
try:
if '.' in module:
m = __import__(module, globals(), locals(), ['foo'])
else:
m = __import__(module)
except BaseException:
print >>sys.stderr, "ERROR: Could not import module: %s" % module
if verbose >= 1:
traceback.print_exc(file=sys.stderr)
continue
# Load the unittest TestCases
suite = test_loader.loadTestsFromModule(m)
# Load the doctests
#try:
# suite.addTests(doctest.DocTestSuite(m))
#except ValueError:
# pass # No doctest is fine, we check that some tests exist later
# The doctests are currently opt-in; a load_tests method can be
# defined to build a DocTestSuite
# This is because some modules have interpreter-formatted examples that
# are NOT doctests, and because mining the codebase for doctests is
# painfully slow
main_test_suite.addTests(suite)
if suite.countTestCases() == 0 and verbose >= 1:
print >>sys.stderr, "WARNING: module has no tests: %s" % module
elif verbose >= 2:
print >>sys.stderr, "OK: module has %d test cases: %s" % (
suite.countTestCases(),
module)
sub_print("Imported modules. Running %d tests%s..." % (
main_test_suite.countTestCases(),
", and thumbnails comparison" if test_images else ''),
overline=True)
############## TEST VISTRAIL IMAGES ####################
# Compares thumbnails with the generated images to detect broken visualizations
image_tests = [("terminator.vt", [("terminator_isosurface", "Isosurface"),
("terminator_VRSW", "Volume Rendering SW"),
("terminator_CPSW", "Clipping Plane SW"),
("terminator_CRSW", "Combined Rendering SW"),
("terminator_ISSW", "Image Slices SW")])
]
compare_use_vtk = False
try:
import vtk
if LooseVersion(vtk.vtkVersion().GetVTKVersion()) >= LooseVersion('5.8.0'):
compare_use_vtk = True
except ImportError:
pass
if compare_use_vtk:
def compare_thumbnails(prev, next):
#vtkImageDifference assumes RGB, so strip alpha
def removeAlpha(file):
freader = vtk.vtkPNGReader()
freader.SetFileName(file)
removealpha = vtk.vtkImageExtractComponents()
removealpha.SetComponents(0,1,2)
removealpha.SetInputConnection(freader.GetOutputPort())
removealpha.Update()
return removealpha.GetOutput()
#do the image comparison
a = removeAlpha(prev)
b = removeAlpha(next)
idiff = vtk.vtkImageDifference()
if LooseVersion(vtk.vtkVersion().GetVTKVersion()) >= \
LooseVersion('6.0.0'):
idiff.SetInputData(a)
idiff.SetImageData(b)
else:
idiff.SetInput(a)
idiff.SetImage(b)
idiff.Update()
return idiff.GetThresholdedError()
else:
try:
from scipy.misc import imread
except ImportError:
imread = None
if test_images:
print "Warning: old VTK version detected, NOT comparing thumbnails"
if imread is not None:
def compare_thumbnails(prev, next):
prev_img = imread(prev)
next_img = imread(next)
assert len(prev_img.shape) == 3
assert len(next_img.shape) == 3
if prev_img.shape[:2] == next_img.shape[:2]:
return 0
else:
return float('Inf')
else:
def compare_thumbnails(prev, next):
if os.path.isfile(prev) and os.path.isfile(next):
return 0
else:
return float('Inf')
def image_test_generator(vtfile, version):
from vistrails.core.db.locator import FileLocator
from vistrails.core.db.io import load_vistrail
import vistrails.core.console_mode
def test(self):
try:
errs = []
filename = os.path.join(EXAMPLES_PATH, vtfile)
locator = FileLocator(os.path.abspath(filename))
(v, abstractions, thumbnails, mashups) = load_vistrail(locator)
errs = vistrails.core.console_mode.run(
[(locator, version)],
update_vistrail=False,
extra_info={'compare_thumbnails': compare_thumbnails})
if len(errs) > 0:
for err in errs:
print(" *** Error in %s:%s:%s -- %s" % err)
self.fail(str(err))
except Exception, e:
self.fail(debug.format_exception(e))
return test
class TestVistrailImages(unittest.TestCase):
pass
if test_images:
for vt, t in image_tests:
for name, version in t:
test_name = 'test_%s' % name
test = image_test_generator(vt, version)
setattr(TestVistrailImages, test_name, test)
main_test_suite.addTest(TestVistrailImages(test_name))
############## RUN TEST SUITE ####################
class TestResult(unittest.TextTestResult):
def addSkip(self, test, reason):
self.stream.writeln("skipped '{0}': {1}".format(str(test), reason))
super(TestResult, self).addSkip(test, reason)
runner = unittest.TextTestRunner(
verbosity=max(verbose, 1),
resultclass=TestResult)
result = runner.run(main_test_suite)
if not result.wasSuccessful():
tests_passed = False
sub_print("Tests finished.", overline=True)
if test_examples:
import vistrails.core.console_mode
sub_print("Testing examples:")
summary = {}
nworkflows = 0
nvtfiles = 0
for vtfile in VT_EXAMPLES.keys():
try:
errs = []
filename = os.path.join(EXAMPLES_PATH,
vtfile)
print filename
locator = vistrails.core.db.locator.FileLocator(os.path.abspath(filename))
(v, abstractions, thumbnails, mashups) = vistrails.core.db.io.load_vistrail(locator)
w_list = []
for version,tag in v.get_tagMap().iteritems():
if tag not in VT_EXAMPLES[vtfile]:
w_list.append((locator,version))
nworkflows += 1
if len(w_list) > 0:
errs = vistrails.core.console_mode.run(w_list, update_vistrail=False)
summary[vtfile] = errs
except Exception, e:
errs.append((vtfile,"None", "None", debug.format_exception(e)))
summary[vtfile] = errs
nvtfiles += 1
print "-" * 79
print "Summary of Examples: %s workflows in %s vistrail files" % (
nworkflows, nvtfiles)
print ""
errors = False
for vtfile, errs in summary.iteritems():
print vtfile
if len(errs) > 0:
for err in errs:
print(" *** Error in %s:%s:%s -- %s" % err)
errors = True
else:
print " Ok."
print "-" * 79
if errors:
tests_passed = False
sub_print("There were errors. See summary for more information")
else:
sub_print("Examples ran successfully.")
vistrails.gui.application.get_vistrails_application().finishSession()
vistrails.gui.application.stop_application()
# Test Runners can use the return value to know if the tests passed
sys.exit(0 if tests_passed else 1)
|
{
"content_hash": "eb758fb7559c95855a767e155558700d",
"timestamp": "",
"source": "github",
"line_count": 541,
"max_line_length": 130,
"avg_line_length": 35.30314232902033,
"alnum_prop": 0.5875176710822556,
"repo_name": "minesense/VisTrails",
"id": "3e0e673a1929aa026fecb14604f5e49730cb6ec4",
"size": "21059",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vistrails/tests/runtestsuite.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1129"
},
{
"name": "Makefile",
"bytes": "768"
},
{
"name": "Mako",
"bytes": "66613"
},
{
"name": "PHP",
"bytes": "49302"
},
{
"name": "Python",
"bytes": "21260847"
},
{
"name": "R",
"bytes": "782836"
},
{
"name": "Ruby",
"bytes": "875"
},
{
"name": "SQLPL",
"bytes": "2323"
},
{
"name": "Shell",
"bytes": "26542"
},
{
"name": "TeX",
"bytes": "147247"
},
{
"name": "XSLT",
"bytes": "1090"
}
],
"symlink_target": ""
}
|
import os
import shutil
from ....utils import catkin_failure
from ....utils import catkin_success
from ....utils import in_temporary_directory
from ....utils import redirected_stdio
TEST_DIR = os.path.dirname(__file__)
RESOURCES_DIR = os.path.join(os.path.dirname(__file__), '..', '..', 'resources')
BUILD = ['build', '--no-notify', '--no-status']
TEST = ['test', '--no-notify', '--no-status']
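# BUILD/TEST mirror CLI invocations such as
# `catkin build --no-notify --no-status <pkg>` (illustrative).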
@in_temporary_directory
def test_catkin_success():
"""Test running working unit tests"""
cwd = os.getcwd()
source_space = os.path.join(cwd, 'src')
shutil.copytree(os.path.join(RESOURCES_DIR, 'catkin_pkgs', 'python_tests'), source_space)
with redirected_stdio():
assert catkin_success(BUILD + ['python_tests'])
assert catkin_success(TEST + ['python_tests'])
@in_temporary_directory
def test_catkin_failure():
"""Test running broken unit tests"""
cwd = os.getcwd()
source_space = os.path.join(cwd, 'src')
shutil.copytree(os.path.join(RESOURCES_DIR, 'catkin_pkgs', 'python_tests_err'), source_space)
with redirected_stdio():
assert catkin_success(BUILD + ['python_tests_err'])
assert catkin_failure(TEST + ['python_tests_err'])
@in_temporary_directory
def test_cmake_success():
"""Test vanilla cmake package"""
cwd = os.getcwd()
source_space = os.path.join(cwd, 'src')
shutil.copytree(os.path.join(RESOURCES_DIR, 'cmake_pkgs', 'test_pkg'), source_space)
with redirected_stdio():
assert catkin_success(BUILD + ['test_pkg'])
assert catkin_success(TEST + ['test_pkg'])
@in_temporary_directory
def test_cmake_failure():
"""Test failing vanilla cmake package"""
cwd = os.getcwd()
source_space = os.path.join(cwd, 'src')
shutil.copytree(os.path.join(RESOURCES_DIR, 'cmake_pkgs', 'test_err_pkg'), source_space)
with redirected_stdio():
assert catkin_success(BUILD + ['test_err_pkg'])
assert catkin_failure(TEST + ['test_err_pkg'])
@in_temporary_directory
def test_skip_missing_test():
"""Test to skip packages without tests"""
cwd = os.getcwd()
source_space = os.path.join(cwd, 'src')
shutil.copytree(os.path.join(RESOURCES_DIR, 'cmake_pkgs', 'cmake_pkg'), source_space)
with redirected_stdio():
assert catkin_success(BUILD + ['cmake_pkg'])
assert catkin_success(TEST)
@in_temporary_directory
def test_other_target():
"""Test with a manually specified target"""
cwd = os.getcwd()
source_space = os.path.join(cwd, 'src')
shutil.copytree(os.path.join(RESOURCES_DIR, 'catkin_pkgs', 'python_tests_targets'), source_space)
with redirected_stdio():
assert catkin_success(BUILD + ['python_tests_targets'])
assert catkin_success(TEST + ['--test-target', 'run_tests_python_tests_targets_nosetests_test_good.py'])
assert catkin_failure(TEST + ['--test-target', 'run_tests_python_tests_targets_nosetests_test_bad.py'])
|
{
"content_hash": "86c54d4c068c87e1c4c9ef39bbe04b7a",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 112,
"avg_line_length": 34.94047619047619,
"alnum_prop": 0.6616695059625213,
"repo_name": "rhaschke/catkin_tools",
"id": "ac7b9f9f58197b04c3d30b83c5a715fe625a2133",
"size": "2935",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/system/verbs/catkin_test/test_unit_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "860"
},
{
"name": "CMake",
"bytes": "5822"
},
{
"name": "Objective-C",
"bytes": "3354"
},
{
"name": "Python",
"bytes": "461125"
},
{
"name": "Rich Text Format",
"bytes": "872"
},
{
"name": "Shell",
"bytes": "8476"
}
],
"symlink_target": ""
}
|
import csv
import logging
SCIENCE_CSV = "missions_mission_titles_phases.csv" # Exported from CMS DB
COMBINED_TLES = "COMBINED.txt"
OUTPUT_CSV = "output.csv" # will be gratuitously, silently overwritten
OUTPUT_KEYS = [ # sucks these have to be hand-coded
'title', 'slug', 'tle_name',
'full_name', 'home_page',
'noradid', 'intldesig',
'noradid_url', 'intldesig_url', 'mission_url',
'launch_date', 'phase_id', # are these needed in output?
]
PHASE_NAMES = {'1': 'Under Study',
'2': 'Development',
'3': 'Operating',
'4': 'Past',
'14': 'Studied',
'15': 'Extended'}
PHASES_WITH_TLES = ['3', '15']
logging.basicConfig(level=logging.INFO)
# cache the TLE info
tles = {}
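# TLE line 1 layout relied on by the slicing below (sample line is
# illustrative, not real data):
#   1 25544U 98067A   20029.54791435  .00016717  00000-0  10270-3 0  9005
#     ^^^^^ tle1[2:7] -> NORAD catalog number
#            ^^^^^^^^ tle1[9:17] -> international designator (YYNNNPPP)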
with open(COMBINED_TLES, 'r') as tle_file:
while True:
name = tle_file.readline().strip()
if not name: # end of file
break
tle1 = tle_file.readline().strip()
tle2 = tle_file.readline().strip()
tles[name] = {'noradid' : tle1[2:7].strip(),
'intldesig': tle1[9:17].strip() # YYNNNPPP
}
logging.info('Read %d TLE entries' % len(tles))
# Try to find each sat title/slug in the TLE data.
# Don't know what to do yet for ones we can't find.
# Store sats like:
# sats['science_name'] = {'tle_name': 'ACTS 1', 'noradid': '02608', 'intldesig': '<YYNNNPPP>'}
# CSV exported from CMS has: title, slug, full_name, launch_date, phase_id, home_page
sats = {}
num_with_ids = 0
num_sans_ids = 0
num_with_tles = 0
with open(SCIENCE_CSV) as science_csv:
reader = csv.DictReader(science_csv)
for linenum, row in enumerate(reader):
if row['phase_id'] not in PHASES_WITH_TLES:
logging.info('Skip phase_id=%s (%s) for %s' %
(row['phase_id'], PHASE_NAMES[row['phase_id']], row['slug']))
num_sans_ids += 1
continue
num_with_ids += 1
title = row['title']
slug = row['slug']
upslug = slug.upper()
satName = False
# Try to find a match by a couple attribute heuristics; don't be too sloppy
if title in tles:
satName = title
logging.info('Found by title="%s"' % title)
elif upslug in tles:
satName = upslug
logging.info('Found by upslug="%s"' % upslug)
# If we got one, store the other attrs:
if satName:
num_with_tles += 1
tle = tles[satName]
# TODO: add intldesig field
sats[satName] = {'tle_name' : satName,
'noradid_url' : '/sot/noradid/%s' % tle['noradid'],
'intldesig_url' : '/sot/intldesig/%s' % tle['intldesig'],
'mission_url' : '/missions/%s' % row['slug'],
}
sats[satName].update(tle) # noradid, intldesig
sats[satName].update(row) # title, slug, full_name, launch_date, phase_id, home_page
# TODO: if we did NOT find one, store what we know, leave blank what we don't -- by what key?
else:
logging.warning('Can\'t find title="%s" slug="%s"' %
(title, slug))
# picking 'title' as key arbitrarily, could be 'slug' I suppose
sats[title] = row # title, slug, full_name, launch_date, phase_id, home_page
# TODO: add intldesig field
sats[title].update({'mission_url' : '/missions/%s' % row['slug']})
logging.info('Num with IDs=%d sans=%d' % (num_with_ids, num_sans_ids))
logging.info('Num with TLEs=%d' % num_with_tles)
logging.info('Found %d sats of %d in TLEs' % (len(sats), linenum + 1))
# Save it as a CSV so we can manually update it.
# Really want URLs to be clickable to assist humans; may have to do as HTML rather than CSV.
with open(OUTPUT_CSV, 'w', newline='') as output_csv:  # newline='' per the csv module docs
writer = csv.DictWriter(output_csv, OUTPUT_KEYS)
writer.writeheader()
writer.writerows(sats.values())
|
{
"content_hash": "2843a60c861e62e56a7e20cc64927f56",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 101,
"avg_line_length": 38.53703703703704,
"alnum_prop": 0.5559827006246997,
"repo_name": "koansys/isat",
"id": "710cbb846c9c382bf98fb56531255c6f3e4b0f47",
"size": "5101",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "science-sats/find_science_ids.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8240"
},
{
"name": "C++",
"bytes": "147111"
},
{
"name": "CSS",
"bytes": "43232"
},
{
"name": "FORTRAN",
"bytes": "297501"
},
{
"name": "Java",
"bytes": "196811"
},
{
"name": "JavaScript",
"bytes": "11397933"
},
{
"name": "M",
"bytes": "8483077"
},
{
"name": "Matlab",
"bytes": "1486226"
},
{
"name": "Python",
"bytes": "27953"
},
{
"name": "Shell",
"bytes": "3986"
}
],
"symlink_target": ""
}
|
from setuptools import setup
__version__ = '0.5.0'
setup(name='pql',
version=__version__,
description='A python expression to MongoDB query translator',
author='Alon Horev',
author_email='alon@horev.net',
url='https://github.com/alonho/pql',
classifiers = ["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.5",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X"],
license='BSD',
# I know it's bad practice to not specify a pymongo version, but we only
      # require the bson.ObjectId type. It's safe to assume it won't change (famous last words).
install_requires=['pymongo',
'python-dateutil'],
packages=['pql'])
|
{
"content_hash": "d4dee3c04152eea337f1958b296a8d64",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 95,
"avg_line_length": 42.22727272727273,
"alnum_prop": 0.5618945102260495,
"repo_name": "alonho/pql",
"id": "43d78e67c79706253898414786a7b5bf3fb2f703",
"size": "929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "36418"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import logging
from sentry.utils.services import Service
from sentry.tasks.post_process import post_process_group
logger = logging.getLogger(__name__)
class ForwarderNotRequired(NotImplementedError):
"""
Exception raised if this backend does not require a forwarder process to
enqueue post-processing tasks.
"""
class EventStream(Service):
__all__ = (
"insert",
"start_delete_groups",
"end_delete_groups",
"start_merge",
"end_merge",
"start_unmerge",
"end_unmerge",
"start_delete_tag",
"end_delete_tag",
"requires_post_process_forwarder",
"run_post_process_forwarder",
)
def _dispatch_post_process_group_task(
self,
event,
is_new,
is_sample,
is_regression,
is_new_group_environment,
primary_hash,
skip_consume=False,
):
if skip_consume:
logger.info("post_process.skip.raw_event", extra={"event_id": event.id})
else:
post_process_group.delay(
event=event,
is_new=is_new,
is_sample=is_sample,
is_regression=is_regression,
is_new_group_environment=is_new_group_environment,
primary_hash=primary_hash,
)
def insert(
self,
group,
event,
is_new,
is_sample,
is_regression,
is_new_group_environment,
primary_hash,
skip_consume=False,
):
self._dispatch_post_process_group_task(
event,
is_new,
is_sample,
is_regression,
is_new_group_environment,
primary_hash,
skip_consume,
)
def start_delete_groups(self, project_id, group_ids):
pass
def end_delete_groups(self, state):
pass
def start_merge(self, project_id, previous_group_ids, new_group_id):
pass
def end_merge(self, state):
pass
def start_unmerge(self, project_id, hashes, previous_group_id, new_group_id):
pass
def end_unmerge(self, state):
pass
def start_delete_tag(self, project_id, tag):
pass
def end_delete_tag(self, state):
pass
def requires_post_process_forwarder(self):
return False
def run_post_process_forwarder(
self,
consumer_group,
commit_log_topic,
synchronize_commit_group,
commit_batch_size=100,
initial_offset_reset="latest",
):
assert not self.requires_post_process_forwarder()
raise ForwarderNotRequired
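# A minimal sketch (illustrative addition, not part of sentry): a backend
# that consumes events from an external queue would flip
# ``requires_post_process_forwarder`` and supply its own forwarder loop,
# since the base ``run_post_process_forwarder`` asserts it is not required.
class _QueueBackedEventStream(EventStream):
    def requires_post_process_forwarder(self):
        return True

    def run_post_process_forwarder(
        self,
        consumer_group,
        commit_log_topic,
        synchronize_commit_group,
        commit_batch_size=100,
        initial_offset_reset="latest",
    ):
        # Hypothetical body: consume the commit log here and dispatch
        # tasks via _dispatch_post_process_group_task for each event.
        raise NotImplementedError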
|
{
"content_hash": "73fefeedd5065b0b565209fe79333c8b",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 84,
"avg_line_length": 24.053097345132745,
"alnum_prop": 0.565121412803532,
"repo_name": "mvaled/sentry",
"id": "9ab3810e2231bc50904e71d8152f6ce2ca88bfbc",
"size": "2718",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/eventstream/base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "226439"
},
{
"name": "Dockerfile",
"bytes": "6431"
},
{
"name": "HTML",
"bytes": "173429"
},
{
"name": "JavaScript",
"bytes": "9314175"
},
{
"name": "Lua",
"bytes": "65885"
},
{
"name": "Makefile",
"bytes": "9225"
},
{
"name": "Python",
"bytes": "50385401"
},
{
"name": "Ruby",
"bytes": "168"
},
{
"name": "Shell",
"bytes": "5685"
},
{
"name": "TypeScript",
"bytes": "773664"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
from datetime import datetime
import boto3
from airflow.decorators import task
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
DAG_ID = "example_dynamodb_to_s3"
sys_test_context_task = SystemTestContextBuilder().build()
TABLE_ATTRIBUTES = [
{"AttributeName": "ID", "AttributeType": "S"},
{"AttributeName": "Value", "AttributeType": "S"},
]
TABLE_KEY_SCHEMA = [
{"AttributeName": "ID", "KeyType": "HASH"},
{"AttributeName": "Value", "KeyType": "RANGE"},
]
TABLE_THROUGHPUT = {"ReadCapacityUnits": 1, "WriteCapacityUnits": 1}
S3_KEY_PREFIX = "dynamodb-segmented-file"
@task
def set_up_table(table_name: str):
dynamo_resource = boto3.resource("dynamodb")
table = dynamo_resource.create_table(
AttributeDefinitions=TABLE_ATTRIBUTES,
TableName=table_name,
KeySchema=TABLE_KEY_SCHEMA,
ProvisionedThroughput=TABLE_THROUGHPUT,
)
boto3.client("dynamodb").get_waiter("table_exists").wait(
TableName=table_name, WaiterConfig={"Delay": 10, "MaxAttempts": 10}
)
table.put_item(Item={"ID": "123", "Value": "Testing"})
@task
def wait_for_bucket(s3_bucket_name):
waiter = boto3.client("s3").get_waiter("bucket_exists")
waiter.wait(Bucket=s3_bucket_name)
@task(trigger_rule=TriggerRule.ALL_DONE)
def delete_dynamodb_table(table_name: str):
boto3.resource("dynamodb").Table(table_name).delete()
boto3.client("dynamodb").get_waiter("table_not_exists").wait(
TableName=table_name, WaiterConfig={"Delay": 10, "MaxAttempts": 10}
)
with DAG(
dag_id=DAG_ID,
schedule="@once",
start_date=datetime(2021, 1, 1),
catchup=False,
tags=["example"],
) as dag:
test_context = sys_test_context_task()
env_id = test_context[ENV_ID_KEY]
table_name = f"{env_id}-dynamodb-table"
bucket_name = f"{env_id}-dynamodb-bucket"
create_table = set_up_table(table_name=table_name)
create_bucket = S3CreateBucketOperator(task_id="create_bucket", bucket_name=bucket_name)
# [START howto_transfer_dynamodb_to_s3]
backup_db = DynamoDBToS3Operator(
task_id="backup_db",
dynamodb_table_name=table_name,
s3_bucket_name=bucket_name,
        # Max output file size in bytes. If the table is too large, multiple files will be created.
file_size=20,
)
# [END howto_transfer_dynamodb_to_s3]
# [START howto_transfer_dynamodb_to_s3_segmented]
    # Segmenting allows the transfer to be parallelized across TotalSegments parallel tasks.
backup_db_segment_1 = DynamoDBToS3Operator(
task_id="backup_db_segment_1",
dynamodb_table_name=table_name,
s3_bucket_name=bucket_name,
        # Max output file size in bytes. If the table is too large, multiple files will be created.
file_size=1000,
s3_key_prefix=f"{S3_KEY_PREFIX}-1-",
dynamodb_scan_kwargs={
"TotalSegments": 2,
"Segment": 0,
},
)
backup_db_segment_2 = DynamoDBToS3Operator(
task_id="backup_db_segment_2",
dynamodb_table_name=table_name,
s3_bucket_name=bucket_name,
        # Max output file size in bytes. If the table is too large, multiple files will be created.
file_size=1000,
s3_key_prefix=f"{S3_KEY_PREFIX}-2-",
dynamodb_scan_kwargs={
"TotalSegments": 2,
"Segment": 1,
},
)
# [END howto_transfer_dynamodb_to_s3_segmented]
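    # A hedged generalization (hypothetical, not part of this system test):
    # ``Segment`` is DynamoDB's zero-based parallel-scan worker index and
    # ``TotalSegments`` the worker count, so the two operators above could
    # equally be built in a loop:
    #
    #     backup_db_segments = [
    #         DynamoDBToS3Operator(
    #             task_id=f"backup_db_segment_{i + 1}",
    #             dynamodb_table_name=table_name,
    #             s3_bucket_name=bucket_name,
    #             file_size=1000,
    #             s3_key_prefix=f"{S3_KEY_PREFIX}-{i + 1}-",
    #             dynamodb_scan_kwargs={"TotalSegments": 2, "Segment": i},
    #         )
    #         for i in range(2)
    #     ]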
delete_table = delete_dynamodb_table(table_name=table_name)
delete_bucket = S3DeleteBucketOperator(
task_id="delete_bucket",
bucket_name=bucket_name,
trigger_rule=TriggerRule.ALL_DONE,
force_delete=True,
)
chain(
# TEST SETUP
test_context,
create_table,
create_bucket,
wait_for_bucket(s3_bucket_name=bucket_name),
# TEST BODY
backup_db,
backup_db_segment_1,
backup_db_segment_2,
# TEST TEARDOWN
delete_table,
delete_bucket,
)
from tests.system.utils.watcher import watcher
# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()
from tests.system.utils import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
|
{
"content_hash": "5d8aa17dd18a1c2726c4691f4a5c624b",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 100,
"avg_line_length": 32.9041095890411,
"alnum_prop": 0.664654454621149,
"repo_name": "nathanielvarona/airflow",
"id": "b56efaf2ce1ba28c20ebe31b9e96c2e5327c5125",
"size": "5589",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/system/providers/amazon/aws/example_dynamodb_to_s3.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25980"
},
{
"name": "Dockerfile",
"bytes": "70681"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "173025"
},
{
"name": "JavaScript",
"bytes": "142848"
},
{
"name": "Jinja",
"bytes": "38895"
},
{
"name": "Jupyter Notebook",
"bytes": "5482"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "23169682"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "211967"
},
{
"name": "TypeScript",
"bytes": "484556"
}
],
"symlink_target": ""
}
|
from django.db import migrations
import saleor.webhook.models
class Migration(migrations.Migration):
dependencies = [
("webhook", "0005_drop_manage_webhooks_permission"),
]
operations = [
migrations.AlterField(
model_name="webhook",
name="target_url",
field=saleor.webhook.models.WebhookURLField(max_length=255),
),
]
|
{
"content_hash": "d3818fe80ca84508907a2fb2931743c2",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 72,
"avg_line_length": 22.166666666666668,
"alnum_prop": 0.6190476190476191,
"repo_name": "mociepka/saleor",
"id": "195ef0542f2f65fbffe8c6e24c01785fdbc5296e",
"size": "448",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "saleor/webhook/migrations/0006_auto_20200731_1440.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "2228"
},
{
"name": "HTML",
"bytes": "249248"
},
{
"name": "Procfile",
"bytes": "290"
},
{
"name": "Python",
"bytes": "12686831"
},
{
"name": "Shell",
"bytes": "439"
}
],
"symlink_target": ""
}
|
"""
Functions for generating sigma algebras on finite sets.
Chetan Jhurani
http://users.ices.utexas.edu/~chetan/Publications.html
http://users.ices.utexas.edu/~chetan/reports/2009-03-ices-set_algebra_algorithms.pdf
"""
from collections import defaultdict
import numpy as np
from dit.utils import powerset
__all__ = ['is_sigma_algebra', 'sigma_algebra', 'atom_set']
def sets2matrix(C, X=None):
"""Returns the sets in C as binary strings representing elements in X.
    Parameters
    ----------
C : set of frozensets
The set of subsets of X.
X : frozenset, None
The underlying set. If None, then X is taken to be the union of the
sets in C.
Returns
-------
Cmatrix : NumPy array, shape ( len(C), len(X) )
The 0-1 matrix whose rows represent sets in C. The columns tell us
if the corresponding element in X is present in the subset of C.
Xset : frozenset
The underlying set that was used to construct Cmatrix.
"""
# make sure C consists of frozensets and that X is frozen
C = set([frozenset(c) for c in C])
if X is None:
Xset = frozenset().union(*C)
else:
Xset = frozenset(X)
for cet in C:
if not Xset.issuperset(cet):
msg = "Set {0} is not a subset of {1}".format(cet, Xset)
raise Exception(msg)
# Each element of C will be represented as a binary string of 0s and 1s.
# Note, X is frozen, so its iterating order is fixed.
Cmatrix = [[1 if x in cet else 0 for x in Xset] for cet in C]
Cmatrix = np.array(Cmatrix, dtype=int)
return Cmatrix, Xset
def unique_columns(Cmatrix):
"""Returns a dictionary mapping columns to identical column indexes.
Parameters
----------
Cmatrix : NumPy array
A 0-1 matrix whose rows represent subsets of an underlying set. The
columns express membership of the underlying set's elements in
each of the subsets.
Returns
-------
unique_cols : defaultdict(set)
A dictionary mapping columns in Cmatrix to sets of column indexes.
All indexes that mapped from the same set represent identical columns.
"""
unique_cols = defaultdict(set)
for idx, col in enumerate(Cmatrix.transpose()):
unique_cols[tuple(col)].add(idx)
return unique_cols
def sigma_algebra(C, X=None):
"""Returns the sigma algebra generated by the subsets in C.
Let X be a set and C be a collection of subsets of X. The sigma algebra
generated by the subsets in C is the smallest sigma-algebra which contains
every subset in C.
Parameters
----------
C : set of frozensets
The set of subsets of X.
X : frozenset, None
The underlying set. If None, then X is taken to be the union of the
sets in C.
Returns
-------
sC : frozenset of frozensets
The sigma-algebra generated by C.
Notes
-----
The algorithm run time is generally exponential in |X|, the size of X.
"""
from itertools import product
Cmatrix, X = sets2matrix(C, X)
unique_cols = unique_columns(Cmatrix)
# Create a lookup from column IDs representing identical columns to the
# index of a unique representative in the list of unique representatives.
# This will be used to repopulate the larger binary string representation.
lookups = {}
for i, indexes in enumerate(unique_cols.values()):
for index in indexes:
lookups[index] = i
# The total number of elements is given by the powerset on all unique
# indexes. That is, we just generate all binary strings. Then, for each
# binary string, we construct the subset in the sigma algebra.
sC = set([])
for word in product([0, 1], repeat=len(unique_cols)):
subset = [x for i, x in enumerate(X) if word[lookups[i]] == 1]
sC.add(frozenset(subset))
sC = frozenset(sC)
return sC
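# Illustrative example (added annotation, not from the original module):
# generating from C = {frozenset('a')} on X = {'a', 'b'} yields the four
# sets {}, {'a'}, {'b'} and {'a', 'b'}, i.e. the full power set, which is
# the smallest sigma algebra containing {'a'}:
#
#     sigma_algebra([frozenset('a')], X='ab')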
def is_sigma_algebra(F, X=None):
"""Returns True if F is a sigma algebra on X.
Parameters
----------
F : set of frozensets
The candidate sigma algebra.
X : frozenset, None
The universal set. If None, then X is taken to be the union of the
sets in F.
Returns
-------
issa : bool
True if F is a sigma algebra and False if not.
Notes
-----
The time complexity of this algorithm is O ( len(F) * len(X) ).
"""
# The idea is to construct the matrix representing F. Then count the number
# of redundant columns. Denote this number by q. If F is a sigma algebra
# on a finite set X, then we must have:
    #     m + 2 == 2**(len(X) - q),
    # where m is the number of elements in F not equal to the empty set
    # or the universal set X. Note that len(X) - q is exactly the number
    # of unique columns computed below.
Fmatrix, X = sets2matrix(F, X)
unique_cols = unique_columns(Fmatrix)
m = len(F)
emptyset = set([])
if emptyset in F:
m -= 1
if X in F:
m -= 1
if m + 2 == 2**len(unique_cols):
return True
else:
return False
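# Worked check of the counting identity above (added for illustration):
# for the trivial sigma algebra F = {{}, X} on X = {'a', 'b'}, both columns
# of Fmatrix are identical, so there is 1 unique column, and m = 0 after
# discarding the empty set and X; 0 + 2 == 2**1, so this returns True:
#
#     is_sigma_algebra([frozenset(), frozenset('ab')], X='ab')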
def is_sigma_algebra__brute(F, X=None):
"""Returns True if F is a sigma algebra on X.
Parameters
----------
F : set of frozensets
The candidate sigma algebra.
X : frozenset, None
The universal set. If None, then X is taken to be the union of the
sets in F.
Returns
-------
issa : bool
True if F is a sigma algebra and False if not.
Notes
-----
This is a brute force check against the definition of a sigma algebra
on a finite set. Its time complexity is O( len(F)**2 ).
"""
    # This works because it's not necessary to test countable unions if the
# base set X is finite. One need only consider pairwise unions.
if X is None:
X = frozenset().union(*F)
else:
X = frozenset(X)
for subset1 in F:
if X.difference(subset1) not in F:
return False
for subset2 in F:
if subset1.union(subset2) not in F:
return False
else:
return True
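# Quick negative example (added for illustration): F = {{}, {'a'}, {'a','b'}}
# is rejected because it is not closed under complement, since
# X - {'a'} = {'b'} is missing from F:
#
#     is_sigma_algebra__brute([frozenset(), frozenset('a'), frozenset('ab')])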
def atom_set(F, X=None):
"""
Returns the atoms of the sigma-algebra F.
Parameters
----------
F : set of frozensets
The candidate sigma algebra.
X : frozenset, None
The universal set. If None, then X is taken to be the union of the
sets in F.
Returns
-------
atoms : frozenset
A frozenset of frozensets, representing the atoms of the sigma algebra.
"""
if not isinstance(next(iter(F)), frozenset):
raise Exception('Input to `atom_set` must contain frozensets.')
def method1():
"""
# of ops = len(F) * 2**len(largest element in F)
"""
atoms = []
for cet in F:
if not cet:
# An atom must be nonempty.
continue
# Now look at all nonempty, proper subsets of cet.
#
# If you have a sample space with 64 elements, and then consider
# the trivial sigma algebra, then one element of F will be the
# empty set, while the other will have 64 elements. Taking the
# powerset of this set will require going through a list of 2^64
# elements...in addition to taking forever, we can't even store
# that in memory.
#
subsets = list(powerset(cet))[1:-1] # nonempty and proper
for subset in subsets:
if frozenset(subset) in F:
break
else:
# Then `cet` has no nonempty proper subset that is also in F.
atoms.append(frozenset(cet))
return atoms
def method2():
"""
# of ops = len(F) * len(F)
"""
atoms = []
for cet in F:
if len(cet) == 0:
# An atom must be nonempty.
continue
            # We just go through the elements of F. If another nonempty
            # element is a strict subset of cet, then cet is not an atom.
#
for other_cet in F:
# We need to find an other_cet which is a non-empty proper subset
# of cet. Then, cet cannot be an atom.
L = len(other_cet)
if L == 0 or L == len(cet):
continue
elif other_cet.issubset(cet):
break
else:
atoms.append(frozenset(cet))
return atoms
atoms = method2()
return frozenset(atoms)
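if __name__ == '__main__':
    # Hedged self-check (added for illustration, not part of the original
    # module): the atoms of the power set of {'a', 'b'} are the singletons,
    # because every larger member of F has a nonempty proper subset in F.
    F = sigma_algebra([frozenset('a')], X='ab')
    assert is_sigma_algebra(F)
    assert atom_set(F) == frozenset([frozenset('a'), frozenset('b')])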
|
{
"content_hash": "1b300619dcf31f73c52ce220f430655c",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 88,
"avg_line_length": 29.788194444444443,
"alnum_prop": 0.5880638769087306,
"repo_name": "chebee7i/dit",
"id": "7c98779518699037f8be40609da184c1656f5405",
"size": "8579",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dit/math/sigmaalgebra.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5938"
},
{
"name": "HTML",
"bytes": "265"
},
{
"name": "PHP",
"bytes": "614"
},
{
"name": "Python",
"bytes": "714621"
}
],
"symlink_target": ""
}
|
from .engine import Box2DEngine, Vec2
from nytram.engine import NytramEngine, LoopCallback
from ctypes import byref
class World:
""" Represents a World in the Box2D Engine """
    def __init__(self, gravity=None, velocityIterations=10, positionIterations=10):
        """ Initialize the world """
        self.id = None
        # Avoid a shared mutable default argument; each World gets its own vector
        self.gravity = gravity if gravity is not None else Vec2(0, 0)
self.velocityIterations = velocityIterations
self.positionIterations = positionIterations
self.loopCallback = LoopCallback(self.step)
def step(self, msSinceLastFrame):
""" Step the world """
Box2DEngine.World_Step(self.id, msSinceLastFrame/1000, self.velocityIterations, self.positionIterations)
def start(self):
""" Start the world """
self.id = Box2DEngine.World_Add(byref(self.gravity))
NytramEngine.Loop_AddCallback(self.loopCallback)
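# Hedged usage sketch (illustrative; assumes the underlying engine has been
# loaded and its main loop will run):
#
#     world = World(gravity=Vec2(0, -10))
#     world.start()  # registers World.step as a per-frame loop callback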
|
{
"content_hash": "1c808e4005f8393f31b1a9e7db7513b6",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 112,
"avg_line_length": 37.16,
"alnum_prop": 0.6501614639397201,
"repo_name": "cloew/NytramBox2D",
"id": "d0b3f1f4ad82612b003fa0d349701628df0c9b2c",
"size": "929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nytram_box2d/world.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3852"
},
{
"name": "C++",
"bytes": "9051"
},
{
"name": "Python",
"bytes": "24392"
}
],
"symlink_target": ""
}
|
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.polling.async_base_polling import AsyncLROBasePolling
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._phone_numbers_operations import (
build_cancel_operation_request,
build_get_by_number_request,
build_get_operation_request,
build_get_search_result_request,
build_list_phone_numbers_request,
build_purchase_phone_numbers_request,
build_release_phone_number_request,
build_search_available_phone_numbers_request,
build_update_capabilities_request,
)
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PhoneNumbersOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.communication.phonenumbers.aio.PhoneNumbersClient`'s
:attr:`phone_numbers` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
async def _search_available_phone_numbers_initial(
self, country_code: str, body: Union[_models.PhoneNumberSearchRequest, IO], **kwargs: Any
) -> _models.PhoneNumberSearchResult:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PhoneNumberSearchResult]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(body, (IO, bytes)):
_content = body
else:
_json = self._serialize.body(body, "PhoneNumberSearchRequest")
request = build_search_available_phone_numbers_request(
country_code=country_code,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._search_available_phone_numbers_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
response_headers = {}
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))
response_headers["operation-id"] = self._deserialize("str", response.headers.get("operation-id"))
response_headers["search-id"] = self._deserialize("str", response.headers.get("search-id"))
deserialized = self._deserialize("PhoneNumberSearchResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
_search_available_phone_numbers_initial.metadata = {"url": "/availablePhoneNumbers/countries/{countryCode}/:search"} # type: ignore
@overload
async def begin_search_available_phone_numbers(
self,
country_code: str,
body: _models.PhoneNumberSearchRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.PhoneNumberSearchResult]:
"""Search for available phone numbers to purchase.
Search for available phone numbers to purchase.
        :param country_code: The ISO 3166-1 alpha-2 country code, e.g. US. Required.
:type country_code: str
:param body: The phone number search request. Required.
:type body: ~azure.communication.phonenumbers.models.PhoneNumberSearchRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PhoneNumberSearchResult or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.communication.phonenumbers.models.PhoneNumberSearchResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_search_available_phone_numbers(
self, country_code: str, body: IO, *, content_type: str = "application/json", **kwargs: Any
) -> AsyncLROPoller[_models.PhoneNumberSearchResult]:
"""Search for available phone numbers to purchase.
Search for available phone numbers to purchase.
        :param country_code: The ISO 3166-1 alpha-2 country code, e.g. US. Required.
:type country_code: str
:param body: The phone number search request. Required.
:type body: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PhoneNumberSearchResult or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.communication.phonenumbers.models.PhoneNumberSearchResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_search_available_phone_numbers(
self, country_code: str, body: Union[_models.PhoneNumberSearchRequest, IO], **kwargs: Any
) -> AsyncLROPoller[_models.PhoneNumberSearchResult]:
"""Search for available phone numbers to purchase.
Search for available phone numbers to purchase.
        :param country_code: The ISO 3166-1 alpha-2 country code, e.g. US. Required.
:type country_code: str
        :param body: The phone number search request. Is either a model type or an IO type. Required.
:type body: ~azure.communication.phonenumbers.models.PhoneNumberSearchRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PhoneNumberSearchResult or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.communication.phonenumbers.models.PhoneNumberSearchResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.PhoneNumberSearchResult]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._search_available_phone_numbers_initial( # type: ignore
country_code=country_code,
body=body,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
response_headers = {}
response = pipeline_response.http_response
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)
response_headers["operation-id"] = self._deserialize("str", response.headers.get("operation-id"))
response_headers["search-id"] = self._deserialize("str", response.headers.get("search-id"))
deserialized = self._deserialize("PhoneNumberSearchResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(
lro_delay,
lro_options={"final-state-via": "location"},
path_format_arguments=path_format_arguments,
**kwargs
),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_search_available_phone_numbers.metadata = {"url": "/availablePhoneNumbers/countries/{countryCode}/:search"} # type: ignore
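    # Hedged usage sketch (client construction and the request object are
    # assumptions, not taken from this file): the coroutine returns an
    # AsyncLROPoller, so callers await the call and then await the result.
    #
    #     poller = await client.phone_numbers.begin_search_available_phone_numbers(
    #         "US", search_request
    #     )
    #     search_result = await poller.result()  # PhoneNumberSearchResult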
@distributed_trace_async
async def get_search_result(self, search_id: str, **kwargs: Any) -> _models.PhoneNumberSearchResult:
"""Gets a phone number search result by search id.
Gets a phone number search result by search id.
:param search_id: The search Id. Required.
:type search_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PhoneNumberSearchResult or the result of cls(response)
:rtype: ~azure.communication.phonenumbers.models.PhoneNumberSearchResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PhoneNumberSearchResult]
request = build_get_search_result_request(
search_id=search_id,
api_version=api_version,
template_url=self.get_search_result.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("PhoneNumberSearchResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_search_result.metadata = {"url": "/availablePhoneNumbers/searchResults/{searchId}"} # type: ignore
async def _purchase_phone_numbers_initial( # pylint: disable=inconsistent-return-statements
self, search_id: Optional[str] = None, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
_body = _models.PhoneNumberPurchaseRequest(search_id=search_id)
_json = self._serialize.body(_body, "PhoneNumberPurchaseRequest")
request = build_purchase_phone_numbers_request(
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._purchase_phone_numbers_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
response_headers = {}
response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))
response_headers["operation-id"] = self._deserialize("str", response.headers.get("operation-id"))
response_headers["purchase-id"] = self._deserialize("str", response.headers.get("purchase-id"))
if cls:
return cls(pipeline_response, None, response_headers)
_purchase_phone_numbers_initial.metadata = {"url": "/availablePhoneNumbers/:purchase"} # type: ignore
@distributed_trace_async
async def begin_purchase_phone_numbers(
self, search_id: Optional[str] = None, **kwargs: Any
) -> AsyncLROPoller[None]:
"""Purchases phone numbers.
Purchases phone numbers.
:param search_id: The search id. Default value is None.
:type search_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._purchase_phone_numbers_initial( # type: ignore
search_id=search_id,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_purchase_phone_numbers.metadata = {"url": "/availablePhoneNumbers/:purchase"} # type: ignore
@distributed_trace_async
async def get_operation(self, operation_id: str, **kwargs: Any) -> _models.PhoneNumberOperation:
"""Gets an operation by its id.
Gets an operation by its id.
:param operation_id: The id of the operation. Required.
:type operation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PhoneNumberOperation or the result of cls(response)
:rtype: ~azure.communication.phonenumbers.models.PhoneNumberOperation
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PhoneNumberOperation]
request = build_get_operation_request(
operation_id=operation_id,
api_version=api_version,
template_url=self.get_operation.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
response_headers = {}
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
deserialized = self._deserialize("PhoneNumberOperation", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get_operation.metadata = {"url": "/phoneNumbers/operations/{operationId}"} # type: ignore
@distributed_trace_async
async def cancel_operation( # pylint: disable=inconsistent-return-statements
self, operation_id: str, **kwargs: Any
) -> None:
"""Cancels an operation by its id.
Cancels an operation by its id.
:param operation_id: The id of the operation. Required.
:type operation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_cancel_operation_request(
operation_id=operation_id,
api_version=api_version,
template_url=self.cancel_operation.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
cancel_operation.metadata = {"url": "/phoneNumbers/operations/{operationId}"} # type: ignore
async def _update_capabilities_initial(
self,
phone_number: str,
calling: Optional[Union[str, _models.PhoneNumberCapabilityType]] = None,
sms: Optional[Union[str, _models.PhoneNumberCapabilityType]] = None,
**kwargs: Any
) -> _models.PurchasedPhoneNumber:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop(
"content_type", _headers.pop("Content-Type", "application/merge-patch+json")
) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PurchasedPhoneNumber]
_body = _models.PhoneNumberCapabilitiesRequest(calling=calling, sms=sms)
if _body is not None:
_json = self._serialize.body(_body, "PhoneNumberCapabilitiesRequest")
else:
_json = None
request = build_update_capabilities_request(
phone_number=phone_number,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._update_capabilities_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
response_headers = {}
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))
response_headers["operation-id"] = self._deserialize("str", response.headers.get("operation-id"))
response_headers["capabilities-id"] = self._deserialize("str", response.headers.get("capabilities-id"))
deserialized = self._deserialize("PurchasedPhoneNumber", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
_update_capabilities_initial.metadata = {"url": "/phoneNumbers/{phoneNumber}/capabilities"} # type: ignore
@distributed_trace_async
async def begin_update_capabilities(
self,
phone_number: str,
calling: Optional[Union[str, _models.PhoneNumberCapabilityType]] = None,
sms: Optional[Union[str, _models.PhoneNumberCapabilityType]] = None,
**kwargs: Any
) -> AsyncLROPoller[_models.PurchasedPhoneNumber]:
"""Updates the capabilities of a phone number.
Updates the capabilities of a phone number.
:param phone_number: The phone number id in E.164 format. The leading plus can be either + or
encoded as %2B, e.g. +11234567890. Required.
:type phone_number: str
:param calling: Capability value for calling. Known values are: "none", "inbound", "outbound",
and "inbound+outbound". Default value is None.
:type calling: str or ~azure.communication.phonenumbers.models.PhoneNumberCapabilityType
:param sms: Capability value for SMS. Known values are: "none", "inbound", "outbound", and
"inbound+outbound". Default value is None.
:type sms: str or ~azure.communication.phonenumbers.models.PhoneNumberCapabilityType
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PurchasedPhoneNumber or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.communication.phonenumbers.models.PurchasedPhoneNumber]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop(
"content_type", _headers.pop("Content-Type", "application/merge-patch+json")
) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PurchasedPhoneNumber]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_capabilities_initial( # type: ignore
phone_number=phone_number,
calling=calling,
sms=sms,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
response_headers = {}
response = pipeline_response.http_response
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)
response_headers["operation-id"] = self._deserialize("str", response.headers.get("operation-id"))
response_headers["capabilities-id"] = self._deserialize("str", response.headers.get("capabilities-id"))
deserialized = self._deserialize("PurchasedPhoneNumber", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(
lro_delay,
lro_options={"final-state-via": "location"},
path_format_arguments=path_format_arguments,
**kwargs
),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_capabilities.metadata = {"url": "/phoneNumbers/{phoneNumber}/capabilities"} # type: ignore
@distributed_trace_async
async def get_by_number(self, phone_number: str, **kwargs: Any) -> _models.PurchasedPhoneNumber:
"""Gets the details of the given purchased phone number.
Gets the details of the given purchased phone number.
:param phone_number: The purchased phone number whose details are to be fetched in E.164
format, e.g. +11234567890. Required.
:type phone_number: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PurchasedPhoneNumber or the result of cls(response)
:rtype: ~azure.communication.phonenumbers.models.PurchasedPhoneNumber
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PurchasedPhoneNumber]
request = build_get_by_number_request(
phone_number=phone_number,
api_version=api_version,
template_url=self.get_by_number.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("PurchasedPhoneNumber", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_number.metadata = {"url": "/phoneNumbers/{phoneNumber}"} # type: ignore
async def _release_phone_number_initial( # pylint: disable=inconsistent-return-statements
self, phone_number: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_release_phone_number_request(
phone_number=phone_number,
api_version=api_version,
template_url=self._release_phone_number_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
response_headers = {}
response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))
response_headers["operation-id"] = self._deserialize("str", response.headers.get("operation-id"))
response_headers["release-id"] = self._deserialize("str", response.headers.get("release-id"))
if cls:
return cls(pipeline_response, None, response_headers)
_release_phone_number_initial.metadata = {"url": "/phoneNumbers/{phoneNumber}"} # type: ignore
@distributed_trace_async
async def begin_release_phone_number(self, phone_number: str, **kwargs: Any) -> AsyncLROPoller[None]:
"""Releases a purchased phone number.
Releases a purchased phone number.
:param phone_number: Phone number to be released, e.g. +11234567890. Required.
:type phone_number: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
for this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._release_phone_number_initial( # type: ignore
phone_number=phone_number,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_release_phone_number.metadata = {"url": "/phoneNumbers/{phoneNumber}"} # type: ignore
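    # Usage sketch (illustrative, not generated code): assuming `client` is an
    # initialized instance of this operations class, releasing a number and
    # awaiting the long-running operation might look like:
    #
    #     poller = await client.begin_release_phone_number("+11234567890")
    #     await poller.result()  # completes with None once the release finishes
    #
    # The phone number above is the example value from the docstring; the
    # client wiring is an assumption.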
@distributed_trace
def list_phone_numbers(
self, skip: int = 0, top: int = 100, **kwargs: Any
) -> AsyncIterable["_models.PurchasedPhoneNumber"]:
"""Gets the list of all purchased phone numbers.
Gets the list of all purchased phone numbers.
        :param skip: An optional parameter for how many entries to skip, for pagination purposes.
         Default value is 0.
:type skip: int
        :param top: An optional parameter for how many entries to return, for pagination purposes.
         Default value is 100.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PurchasedPhoneNumber or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.communication.phonenumbers.models.PurchasedPhoneNumber]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.PurchasedPhoneNumbers]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_phone_numbers_request(
skip=skip,
top=top,
api_version=api_version,
template_url=self.list_phone_numbers.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
path_format_arguments = {
"endpoint": self._serialize.url(
"self._config.endpoint", self._config.endpoint, "str", skip_quote=True
),
}
request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PurchasedPhoneNumbers", pipeline_response)
list_of_elem = deserialized.phone_numbers
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.CommunicationErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_phone_numbers.metadata = {"url": "/phoneNumbers"} # type: ignore
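    # Paging sketch (illustrative): the AsyncItemPaged returned above is meant
    # to be consumed with `async for`, with nextLink continuation handled
    # internally by prepare_request/get_next:
    #
    #     async for number in client.list_phone_numbers(skip=0, top=100):
    #         print(number.phone_number)
    #
    # `client` and the PurchasedPhoneNumber attribute access are assumptions
    # based on the models referenced in the docstring.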
|
{
"content_hash": "dc017c802ba2e6db4e1f493ed8c1e56d",
"timestamp": "",
"source": "github",
"line_count": 1050,
"max_line_length": 136,
"avg_line_length": 47.56761904761905,
"alnum_prop": 0.6361670604252593,
"repo_name": "Azure/azure-sdk-for-python",
"id": "d408f01f5c5364c9aa0f2f020847627f2a79b4c4",
"size": "50446",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/communication/azure-communication-phonenumbers/azure/communication/phonenumbers/_generated/aio/operations/_phone_numbers_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
"""
Tests for HDBSCAN clustering algorithm
Shamelessly based on (i.e. ripped off from) the DBSCAN test code
"""
import numpy as np
from scipy.spatial import distance
from scipy import sparse
from scipy import stats
from sklearn.utils.estimator_checks import check_estimator
from sklearn.utils._testing import (
assert_array_equal,
assert_array_almost_equal,
assert_raises,
)
from hdbscan import (
HDBSCAN,
hdbscan,
validity_index,
approximate_predict,
approximate_predict_scores,
membership_vector,
all_points_membership_vectors,
)
# from sklearn.cluster.tests.common import generate_clustered_data
from sklearn.datasets import make_blobs
from sklearn.utils import shuffle
from sklearn.preprocessing import StandardScaler
from scipy.stats import mode
from tempfile import mkdtemp
from functools import wraps
import pytest
from sklearn import datasets
import warnings
n_clusters = 3
# X = generate_clustered_data(n_clusters=n_clusters, n_samples_per_cluster=50)
X, y = make_blobs(n_samples=200, random_state=10)
X, y = shuffle(X, y, random_state=7)
X = StandardScaler().fit_transform(X)
X_missing_data = X.copy()
X_missing_data[0] = [np.nan, 1]
X_missing_data[5] = [np.nan, np.nan]
def test_missing_data():
"""Tests if nan data are treated as infinite distance from all other points and assigned to -1 cluster"""
model = HDBSCAN().fit(X_missing_data)
assert model.labels_[0] == -1
assert model.labels_[5] == -1
assert model.probabilities_[0] == 0
    assert model.probabilities_[5] == 0
clean_indices = list(range(1, 5)) + list(range(6, 200))
clean_model = HDBSCAN().fit(X_missing_data[clean_indices])
assert np.allclose(clean_model.labels_, model.labels_[clean_indices])
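# Rows containing any NaN are labeled -1 with probability 0, and refitting on
# the NaN-free subset above checks that missing rows do not perturb the
# clustering of the remaining points.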
def if_matplotlib(func):
"""Test decorator that skips test if matplotlib not installed.
Parameters
----------
func
"""
@wraps(func)
def run_test(*args, **kwargs):
try:
import matplotlib
matplotlib.use("Agg")
# this fails if no $DISPLAY specified
import matplotlib.pyplot as plt
plt.figure()
except ImportError:
pytest.skip("Matplotlib not available.")
else:
return func(*args, **kwargs)
return run_test
def if_pandas(func):
"""Test decorator that skips test if pandas not installed."""
@wraps(func)
def run_test(*args, **kwargs):
try:
import pandas
except ImportError:
pytest.skip("Pandas not available.")
else:
return func(*args, **kwargs)
return run_test
def if_networkx(func):
"""Test decorator that skips test if networkx not installed."""
@wraps(func)
def run_test(*args, **kwargs):
try:
import networkx
except ImportError:
pytest.skip("NetworkX not available.")
else:
return func(*args, **kwargs)
return run_test
def generate_noisy_data():
blobs, _ = datasets.make_blobs(
n_samples=200, centers=[(-0.75, 2.25), (1.0, 2.0)], cluster_std=0.25
)
moons, _ = datasets.make_moons(n_samples=200, noise=0.05)
noise = np.random.uniform(-1.0, 3.0, (50, 2))
return np.vstack([blobs, moons, noise])
def homogeneity(labels1, labels2):
num_missed = 0.0
for label in set(labels1):
matches = labels2[labels1 == label]
match_mode = mode(matches)[0][0]
num_missed += np.sum(matches != match_mode)
for label in set(labels2):
matches = labels1[labels2 == label]
match_mode = mode(matches)[0][0]
num_missed += np.sum(matches != match_mode)
return num_missed / 2.0
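# Worked example (illustrative): with labels1 = np.array([0, 0, 1]) and
# labels2 = np.array([0, 1, 1]), each direction finds one point disagreeing
# with its cluster's modal label, so homogeneity returns (1 + 1) / 2.0 == 1.0;
# identical labelings return 0.0.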
def test_hdbscan_distance_matrix():
D = distance.squareform(distance.pdist(X))
D /= np.max(D)
labels, p, persist, ctree, ltree, mtree = hdbscan(D, metric="precomputed")
# number of clusters, ignoring noise if present
n_clusters_1 = len(set(labels)) - int(-1 in labels) # ignore noise
assert n_clusters_1 == n_clusters
labels = HDBSCAN(metric="precomputed").fit(D).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
validity = validity_index(D, labels, metric="precomputed", d=2)
assert validity >= 0.6
def test_hdbscan_sparse_distance_matrix():
D = distance.squareform(distance.pdist(X))
D /= np.max(D)
threshold = stats.scoreatpercentile(D.flatten(), 50)
D[D >= threshold] = 0.0
D = sparse.csr_matrix(D)
D.eliminate_zeros()
labels, p, persist, ctree, ltree, mtree = hdbscan(D, metric="precomputed")
# number of clusters, ignoring noise if present
n_clusters_1 = len(set(labels)) - int(-1 in labels) # ignore noise
assert n_clusters_1 == n_clusters
labels = HDBSCAN(metric="precomputed", gen_min_span_tree=True).fit(D).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
def test_hdbscan_feature_vector():
labels, p, persist, ctree, ltree, mtree = hdbscan(X)
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = HDBSCAN().fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
validity = validity_index(X, labels)
assert validity >= 0.4
def test_hdbscan_prims_kdtree():
labels, p, persist, ctree, ltree, mtree = hdbscan(X, algorithm="prims_kdtree")
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = HDBSCAN(algorithm="prims_kdtree", gen_min_span_tree=True).fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
assert_raises(ValueError, hdbscan, X, algorithm="prims_kdtree", metric="russelrao")
def test_hdbscan_prims_balltree():
labels, p, persist, ctree, ltree, mtree = hdbscan(X, algorithm="prims_balltree")
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = HDBSCAN(algorithm="prims_balltree", gen_min_span_tree=True).fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
assert_raises(ValueError, hdbscan, X, algorithm="prims_balltree", metric="cosine")
def test_hdbscan_boruvka_kdtree():
labels, p, persist, ctree, ltree, mtree = hdbscan(X, algorithm="boruvka_kdtree")
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = HDBSCAN(algorithm="boruvka_kdtree", gen_min_span_tree=True).fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
assert_raises(
ValueError, hdbscan, X, algorithm="boruvka_kdtree", metric="russelrao"
)
def test_hdbscan_boruvka_balltree():
labels, p, persist, ctree, ltree, mtree = hdbscan(X, algorithm="boruvka_balltree")
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = (
HDBSCAN(algorithm="boruvka_balltree", gen_min_span_tree=True).fit(X).labels_
)
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
assert_raises(ValueError, hdbscan, X, algorithm="boruvka_balltree", metric="cosine")
def test_hdbscan_generic():
labels, p, persist, ctree, ltree, mtree = hdbscan(X, algorithm="generic")
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = HDBSCAN(algorithm="generic", gen_min_span_tree=True).fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
def test_hdbscan_dbscan_clustering():
clusterer = HDBSCAN().fit(X)
labels = clusterer.dbscan_clustering(0.3)
n_clusters_1 = len(set(labels)) - int(-1 in labels)
    assert n_clusters == n_clusters_1
def test_hdbscan_high_dimensional():
H, y = make_blobs(n_samples=50, random_state=0, n_features=64)
# H, y = shuffle(X, y, random_state=7)
H = StandardScaler().fit_transform(H)
labels, p, persist, ctree, ltree, mtree = hdbscan(H)
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = (
HDBSCAN(algorithm="best", metric="seuclidean", V=np.ones(H.shape[1]))
.fit(H)
.labels_
)
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
def test_hdbscan_best_balltree_metric():
labels, p, persist, ctree, ltree, mtree = hdbscan(
X, metric="seuclidean", V=np.ones(X.shape[1])
)
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = HDBSCAN(metric="seuclidean", V=np.ones(X.shape[1])).fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
def test_hdbscan_no_clusters():
labels, p, persist, ctree, ltree, mtree = hdbscan(X, min_cluster_size=len(X) + 1)
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == 0
labels = HDBSCAN(min_cluster_size=len(X) + 1).fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == 0
def test_hdbscan_min_cluster_size():
for min_cluster_size in range(2, len(X) + 1, 1):
labels, p, persist, ctree, ltree, mtree = hdbscan(
X, min_cluster_size=min_cluster_size
)
true_labels = [label for label in labels if label != -1]
if len(true_labels) != 0:
assert np.min(np.bincount(true_labels)) >= min_cluster_size
labels = HDBSCAN(min_cluster_size=min_cluster_size).fit(X).labels_
true_labels = [label for label in labels if label != -1]
if len(true_labels) != 0:
assert np.min(np.bincount(true_labels)) >= min_cluster_size
def test_hdbscan_callable_metric():
# metric is the function reference, not the string key.
metric = distance.euclidean
labels, p, persist, ctree, ltree, mtree = hdbscan(X, metric=metric)
n_clusters_1 = len(set(labels)) - int(-1 in labels)
assert n_clusters_1 == n_clusters
labels = HDBSCAN(metric=metric).fit(X).labels_
n_clusters_2 = len(set(labels)) - int(-1 in labels)
assert n_clusters_2 == n_clusters
def test_hdbscan_input_lists():
X = [[1.0, 2.0], [3.0, 4.0]]
HDBSCAN().fit(X) # must not raise exception
def test_hdbscan_boruvka_kdtree_matches():
data = generate_noisy_data()
labels_prims, p, persist, ctree, ltree, mtree = hdbscan(data, algorithm="generic")
labels_boruvka, p, persist, ctree, ltree, mtree = hdbscan(
data, algorithm="boruvka_kdtree"
)
num_mismatches = homogeneity(labels_prims, labels_boruvka)
assert (num_mismatches / float(data.shape[0])) < 0.15
labels_prims = HDBSCAN(algorithm="generic").fit_predict(data)
labels_boruvka = HDBSCAN(algorithm="boruvka_kdtree").fit_predict(data)
num_mismatches = homogeneity(labels_prims, labels_boruvka)
assert (num_mismatches / float(data.shape[0])) < 0.15
def test_hdbscan_boruvka_balltree_matches():
data = generate_noisy_data()
labels_prims, p, persist, ctree, ltree, mtree = hdbscan(data, algorithm="generic")
labels_boruvka, p, persist, ctree, ltree, mtree = hdbscan(
data, algorithm="boruvka_balltree"
)
num_mismatches = homogeneity(labels_prims, labels_boruvka)
assert (num_mismatches / float(data.shape[0])) < 0.15
labels_prims = HDBSCAN(algorithm="generic").fit_predict(data)
labels_boruvka = HDBSCAN(algorithm="boruvka_balltree").fit_predict(data)
num_mismatches = homogeneity(labels_prims, labels_boruvka)
assert (num_mismatches / float(data.shape[0])) < 0.15
def test_condensed_tree_plot():
clusterer = HDBSCAN(gen_min_span_tree=True).fit(X)
if_matplotlib(clusterer.condensed_tree_.plot)(
select_clusters=True,
label_clusters=True,
selection_palette=("r", "g", "b"),
cmap="Reds",
)
if_matplotlib(clusterer.condensed_tree_.plot)(
log_size=True, colorbar=False, cmap="none"
)
def test_single_linkage_tree_plot():
clusterer = HDBSCAN(gen_min_span_tree=True).fit(X)
if_matplotlib(clusterer.single_linkage_tree_.plot)(cmap="Reds")
if_matplotlib(clusterer.single_linkage_tree_.plot)(
vary_line_width=False, truncate_mode="lastp", p=10, cmap="none", colorbar=False
)
def test_min_span_tree_plot():
clusterer = HDBSCAN(gen_min_span_tree=True).fit(X)
if_matplotlib(clusterer.minimum_spanning_tree_.plot)(edge_cmap="Reds")
H, y = make_blobs(n_samples=50, random_state=0, n_features=10)
H = StandardScaler().fit_transform(H)
clusterer = HDBSCAN(gen_min_span_tree=True).fit(H)
if_matplotlib(clusterer.minimum_spanning_tree_.plot)(
edge_cmap="Reds", vary_line_width=False, colorbar=False
)
H, y = make_blobs(n_samples=50, random_state=0, n_features=40)
H = StandardScaler().fit_transform(H)
clusterer = HDBSCAN(gen_min_span_tree=True).fit(H)
if_matplotlib(clusterer.minimum_spanning_tree_.plot)(
edge_cmap="Reds", vary_line_width=False, colorbar=False
)
def test_tree_numpy_output_formats():
clusterer = HDBSCAN(gen_min_span_tree=True).fit(X)
clusterer.single_linkage_tree_.to_numpy()
clusterer.condensed_tree_.to_numpy()
clusterer.minimum_spanning_tree_.to_numpy()
def test_tree_pandas_output_formats():
clusterer = HDBSCAN(gen_min_span_tree=True).fit(X)
if_pandas(clusterer.condensed_tree_.to_pandas)()
if_pandas(clusterer.single_linkage_tree_.to_pandas)()
if_pandas(clusterer.minimum_spanning_tree_.to_pandas)()
def test_tree_networkx_output_formats():
clusterer = HDBSCAN(gen_min_span_tree=True).fit(X)
if_networkx(clusterer.condensed_tree_.to_networkx)()
if_networkx(clusterer.single_linkage_tree_.to_networkx)()
if_networkx(clusterer.minimum_spanning_tree_.to_networkx)()
def test_hdbscan_outliers():
clusterer = HDBSCAN(gen_min_span_tree=True).fit(X)
scores = clusterer.outlier_scores_
assert scores is not None
# def test_hdbscan_unavailable_attributes():
# clusterer = HDBSCAN(gen_min_span_tree=False)
# with warnings.catch_warnings(record=True) as w:
# tree = clusterer.condensed_tree_
# assert len(w) > 0
# assert tree is None
# with warnings.catch_warnings(record=True) as w:
# tree = clusterer.single_linkage_tree_
# assert len(w) > 0
# assert tree is None
# with warnings.catch_warnings(record=True) as w:
# scores = clusterer.outlier_scores_
# assert len(w) > 0
# assert scores is None
# with warnings.catch_warnings(record=True) as w:
# tree = clusterer.minimum_spanning_tree_
# assert len(w) > 0
# assert tree is None
# def test_hdbscan_min_span_tree_availability():
# clusterer = HDBSCAN().fit(X)
# tree = clusterer.minimum_spanning_tree_
# assert tree is None
# D = distance.squareform(distance.pdist(X))
# D /= np.max(D)
# HDBSCAN(metric='precomputed').fit(D)
# tree = clusterer.minimum_spanning_tree_
# assert tree is None
def test_hdbscan_approximate_predict():
clusterer = HDBSCAN(prediction_data=True).fit(X)
cluster, prob = approximate_predict(clusterer, np.array([[-1.5, -1.0]]))
assert cluster == 2
cluster, prob = approximate_predict(clusterer, np.array([[1.5, -1.0]]))
assert cluster == 1
cluster, prob = approximate_predict(clusterer, np.array([[0.0, 0.0]]))
assert cluster == -1
def test_hdbscan_approximate_predict_score():
clusterer = HDBSCAN(min_cluster_size=200).fit(X)
# no prediction data error
assert_raises(ValueError, approximate_predict_scores, clusterer, X)
clusterer.generate_prediction_data()
# wrong dimensions error
assert_raises(
ValueError, approximate_predict_scores, clusterer, np.array([[1, 2, 3]])
)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
approximate_predict_scores(clusterer, np.array([[1.5, -1.0]]))
# no clusters warning
assert "Clusterer does not have any defined clusters" in str(w[-1].message)
clusterer = HDBSCAN(prediction_data=True).fit(X)
scores = approximate_predict_scores(clusterer, X)
assert_array_almost_equal(scores, clusterer.outlier_scores_)
assert scores.min() >= 0
assert scores.max() <= 1
# def test_hdbscan_membership_vector():
# clusterer = HDBSCAN(prediction_data=True).fit(X)
# vector = membership_vector(clusterer, np.array([[-1.5, -1.0]]))
# assert_array_almost_equal(
# vector,
# np.array([[ 0.05705305, 0.05974177, 0.12228153]]))
# vector = membership_vector(clusterer, np.array([[1.5, -1.0]]))
# assert_array_almost_equal(
# vector,
# np.array([[ 0.09462176, 0.32061556, 0.10112905]]))
# vector = membership_vector(clusterer, np.array([[0.0, 0.0]]))
# assert_array_almost_equal(
# vector,
# np.array([[ 0.03545607, 0.03363318, 0.04643177]]))
#
# def test_hdbscan_all_points_membership_vectors():
# clusterer = HDBSCAN(prediction_data=True).fit(X)
# vects = all_points_membership_vectors(clusterer)
# assert_array_almost_equal(vects[0], np.array([7.86400992e-002,
# 2.52734246e-001,
# 8.38299608e-002]))
# assert_array_almost_equal(vects[-1], np.array([8.09055344e-001,
# 8.35882503e-002,
# 1.07356406e-001]))
def test_hdbscan_all_points_membership_vectors():
clusterer = HDBSCAN(prediction_data=True, min_cluster_size=200).fit(X)
vects = all_points_membership_vectors(clusterer)
assert_array_equal(vects, np.zeros(clusterer.prediction_data_.raw_data.shape[0]))
def test_hdbscan_badargs():
assert_raises(ValueError, hdbscan, X="fail")
assert_raises(ValueError, hdbscan, X=None)
assert_raises(ValueError, hdbscan, X, min_cluster_size="fail")
assert_raises(ValueError, hdbscan, X, min_samples="fail")
assert_raises(ValueError, hdbscan, X, min_samples=-1)
assert_raises(ValueError, hdbscan, X, metric="imperial")
assert_raises(ValueError, hdbscan, X, metric=None)
assert_raises(ValueError, hdbscan, X, metric="minkowski", p=-1)
assert_raises(
ValueError, hdbscan, X, metric="minkowski", p=-1, algorithm="prims_kdtree"
)
assert_raises(
ValueError, hdbscan, X, metric="minkowski", p=-1, algorithm="prims_balltree"
)
assert_raises(
ValueError, hdbscan, X, metric="minkowski", p=-1, algorithm="boruvka_balltree"
)
assert_raises(
ValueError, hdbscan, X, metric="precomputed", algorithm="boruvka_kdtree"
)
assert_raises(
ValueError, hdbscan, X, metric="precomputed", algorithm="prims_kdtree"
)
assert_raises(
ValueError, hdbscan, X, metric="precomputed", algorithm="prims_balltree"
)
assert_raises(
ValueError, hdbscan, X, metric="precomputed", algorithm="boruvka_balltree"
)
assert_raises(ValueError, hdbscan, X, alpha=-1)
assert_raises(ValueError, hdbscan, X, alpha="fail")
assert_raises(Exception, hdbscan, X, algorithm="something_else")
assert_raises(TypeError, hdbscan, X, metric="minkowski", p=None)
assert_raises(ValueError, hdbscan, X, leaf_size=0)
def test_hdbscan_sparse():
sparse_X = sparse.csr_matrix(X)
labels = HDBSCAN().fit(sparse_X).labels_
n_clusters = len(set(labels)) - int(-1 in labels)
assert n_clusters == 3
def test_hdbscan_caching():
cachedir = mkdtemp()
labels1 = HDBSCAN(memory=cachedir, min_samples=5).fit(X).labels_
labels2 = HDBSCAN(memory=cachedir, min_samples=5, min_cluster_size=6).fit(X).labels_
n_clusters1 = len(set(labels1)) - int(-1 in labels1)
n_clusters2 = len(set(labels2)) - int(-1 in labels2)
assert n_clusters1 == n_clusters2
def test_hdbscan_centroids_medoids():
centers = [(0.0, 0.0), (3.0, 3.0)]
H, y = make_blobs(n_samples=1000, random_state=0, centers=centers, cluster_std=0.5)
clusterer = HDBSCAN().fit(H)
for idx, center in enumerate(centers):
centroid = clusterer.weighted_cluster_centroid(idx)
assert_array_almost_equal(centroid, center, decimal=1)
medoid = clusterer.weighted_cluster_medoid(idx)
assert_array_almost_equal(medoid, center, decimal=1)
def test_hdbscan_no_centroid_medoid_for_noise():
clusterer = HDBSCAN().fit(X)
assert_raises(ValueError, clusterer.weighted_cluster_centroid, -1)
assert_raises(ValueError, clusterer.weighted_cluster_medoid, -1)
def test_hdbscan_allow_single_cluster_with_epsilon():
np.random.seed(0)
no_structure = np.random.rand(150, 2)
# without epsilon we should see many noise points as children of root.
labels = HDBSCAN(
min_cluster_size=5,
cluster_selection_epsilon=0.0,
cluster_selection_method="eom",
allow_single_cluster=True,
).fit_predict(no_structure)
unique_labels, counts = np.unique(labels, return_counts=True)
assert len(unique_labels) == 2
assert counts[unique_labels == -1] == 46
# for this random seed an epsilon of 0.2 will produce exactly 2 noise
# points at that cut in single linkage.
labels = HDBSCAN(
min_cluster_size=5,
cluster_selection_epsilon=0.2,
cluster_selection_method="eom",
allow_single_cluster=True,
).fit_predict(no_structure)
unique_labels, counts = np.unique(labels, return_counts=True)
assert len(unique_labels) == 2
assert counts[unique_labels == -1] == 2
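    # With allow_single_cluster, raising cluster_selection_epsilon pulls points
    # whose merge distance in the single-linkage tree falls below epsilon into
    # the one allowed cluster, which is why the noise count drops from 46 to 2.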
# Disable for now -- need to refactor to meet newer standards
@pytest.mark.skip(reason="need to refactor to meet newer standards")
def test_hdbscan_is_sklearn_estimator():
check_estimator(HDBSCAN)
# Probably not applicable now #
# def test_dbscan_sparse():
# def test_dbscan_balltree():
# def test_pickle():
# def test_dbscan_core_samples_toy():
# def test_boundaries():
|
{
"content_hash": "e87bec0db55047116801e4f180645106",
"timestamp": "",
"source": "github",
"line_count": 654,
"max_line_length": 109,
"avg_line_length": 34.06880733944954,
"alnum_prop": 0.6540550244603025,
"repo_name": "scikit-learn-contrib/hdbscan",
"id": "9e3faa34da216bffcfaee23c34cd75f933470a67",
"size": "22281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hdbscan/tests/test_hdbscan.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Cython",
"bytes": "155467"
},
{
"name": "Jupyter Notebook",
"bytes": "6845240"
},
{
"name": "Python",
"bytes": "242605"
},
{
"name": "Shell",
"bytes": "1322"
},
{
"name": "TeX",
"bytes": "1790"
}
],
"symlink_target": ""
}
|
import asyncio
from pydatacoll.protocols import BaseDevice
import pydatacoll.utils.logger as my_logger
logger = my_logger.get_logger('FORMULADevice')
class FORMULADevice(BaseDevice):
def __init__(self, device_info: dict, io_loop: asyncio.AbstractEventLoop):
super(FORMULADevice, self).__init__(device_info, io_loop)
def disconnect(self, reconnect=False):
pass
def send_frame(self, frame, check=True):
pass
def prepare_ctrl_frame(self, term_item_dict, value):
pass
def prepare_call_frame(self, term_item_dict):
pass
def fresh_task(self, term_dict, term_item_dict, delete=False):
pass
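# Minimal usage sketch (assumptions: BaseDevice needs only the device_info dict
# and an event loop, and the dict keys below are hypothetical):
#
#     loop = asyncio.new_event_loop()
#     device = FORMULADevice({"id": "dev1", "protocol": "formula"}, loop)
#     device.disconnect()
#
# Every protocol hook is a deliberate no-op here: a formula "device" presumably
# derives its values from other terms rather than from a physical connection.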
|
{
"content_hash": "482932856410b0e0e49fff21ba2ff009",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 78,
"avg_line_length": 25.53846153846154,
"alnum_prop": 0.6822289156626506,
"repo_name": "timercrack/pydatacoll",
"id": "ffeaffd4252666c38abc3f71559c362a28ddd9ae",
"size": "1263",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pydatacoll/protocols/formula/device.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "290495"
}
],
"symlink_target": ""
}
|