text
stringlengths 4
1.02M
| meta
dict |
|---|---|
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from django.http import HttpResponse
from .models import Match, Player
from .forms import MatchUploadForm
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from sc2reader.events import *
import json
class PlayerDetail(DetailView):
    """Detail page for one Player, looked up by username in the URL."""
    queryset = Player.objects.all()
    slug_field = 'username'
    slug_url_kwarg = 'username'

    def get_queryset(self):
        # Restrict to players appearing in matches the requester may view:
        # shared-with-user matches when logged in, public matches otherwise.
        if self.request.user.is_authenticated():
            visible_matches = Match.share.available(self.request.user)
        else:
            visible_matches = Match.share.public()
        return Player.objects.filter(match__in=visible_matches)
class PlayerList(ListView):
    """List of players, limited to matches visible to the requester."""
    queryset = Player.objects.all()

    def get_queryset(self):
        user = self.request.user
        matches = (Match.share.available(user) if user.is_authenticated()
                   else Match.share.public())
        return Player.objects.filter(match__in=matches)
class MatchView(DetailView):
    """Detail page for a single match, with player results and messages
    prefetched via select_related."""
    queryset = Match.share.all().select_related('playerresult', 'playerresult__player', 'message')

    def get_queryset(self):
        user = self.request.user
        if user.is_authenticated():
            matches = Match.share.available(user)
        else:
            matches = Match.share.public()
        # Prefetch the relations the match template renders.
        return matches.select_related('playerresult', 'playerresult__player', 'message')
class MatchList(ListView):
    """List of matches the requesting user is allowed to see."""
    queryset = Match.share.all()

    def get_queryset(self):
        user = self.request.user
        if not user.is_authenticated():
            return Match.share.public()
        return Match.share.available(user)
class MatchUpload(CreateView):
    """Replay-upload form view; only authenticated users may upload."""
    success_url = '/sc2/match/upload/'
    form_class = MatchUploadForm
    template_name = 'sc2match/upload.html'
    queryset = Match.objects.all()

    def get_form_kwargs(self):
        # The form needs the uploading user so the match can be owned.
        kwargs = super(MatchUpload, self).get_form_kwargs()
        kwargs['user'] = self.request.user
        return kwargs

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(MatchUpload, self).dispatch(*args, **kwargs)
def match_upload_done(request):
    """Return a small JSON acknowledgement once an upload has completed."""
    payload = '{"upload":"complete"}'
    return HttpResponse(payload, content_type="application/json")
|
{
"content_hash": "5d7d3a46f29b22f565be6c1c9135a7a8",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 125,
"avg_line_length": 36.11940298507463,
"alnum_prop": 0.6958677685950413,
"repo_name": "wraithan/rplay",
"id": "55009dd32e725df0de6b3c5dee6a7bcbce1dff53",
"size": "2420",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "replayswithfriends/sc2match/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "92690"
},
{
"name": "Python",
"bytes": "151290"
}
],
"symlink_target": ""
}
|
import json
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
import datetime
from participantCollection import ParticipantCollection
# Edit Me!
# One participants.txt per monthly challenge, oldest first; the current
# month's file lives in this directory.
participantFileNames = ['../stayclean-2014-november/participants.txt',
                        '../stayclean-2014-december/participants.txt',
                        '../stayclean-2015-january/participants.txt',
                        '../stayclean-2015-february/participants.txt',
                        '../stayclean-2015-march/participants.txt',
                        '../stayclean-2015-april/participants.txt',
                        '../stayclean-2015-may/participants.txt',
                        '../stayclean-2015-june/participants.txt',
                        '../stayclean-2015-july/participants.txt',
                        '../stayclean-2015-august/participants.txt',
                        '../stayclean-2015-september/participants.txt',
                        '../stayclean-2015-october/participants.txt',
                        '../stayclean-2015-november/participants.txt',
                        '../stayclean-2015-december/participants.txt',
                        '../stayclean-2016-january/participants.txt',
                        '../stayclean-2016-february/participants.txt',
                        '../stayclean-2016-march/participants.txt',
                        '../stayclean-2016-april/participants.txt',
                        '../stayclean-2016-may/participants.txt',
                        '../stayclean-2016-june/participants.txt',
                        '../stayclean-2016-july/participants.txt',
                        '../stayclean-2016-august/participants.txt',
                        '../stayclean-2016-september/participants.txt',
                        '../stayclean-2016-october/participants.txt',
                        '../stayclean-2016-november/participants.txt',
                        '../stayclean-2016-december/participants.txt',
                        '../stayclean-2017-january/participants.txt',
                        '../stayclean-2017-february/participants.txt',
                        '../stayclean-2017-march/participants.txt',
                        '../stayclean-2017-april/participants.txt',
                        '../stayclean-2017-may/participants.txt',
                        '../stayclean-2017-june/participants.txt',
                        '../stayclean-2017-july/participants.txt',
                        '../stayclean-2017-august/participants.txt',
                        '../stayclean-2017-september/participants.txt',
                        '../stayclean-2017-october/participants.txt',
                        '../stayclean-2017-november/participants.txt',
                        './participants.txt']
# Gather every relapse date recorded across all months, then sort ascending
# so the first/last elements give the covered date range.
sortedRelapseDates = []
for participantFileName in participantFileNames:
    participants = ParticipantCollection(fileNameString=participantFileName)
    sortedRelapseDates = sortedRelapseDates + participants.allRelapseDates()
sortedRelapseDates.sort()
earliestReportDate = sortedRelapseDates[0]
latestReportDate = sortedRelapseDates[-1]
# NOTE(review): these two lists are never used below.
reportDates = []
numberOfRelapsesPerDate = []
# Maps each calendar date in the covered range to its relapse count.
reportDatesAndNumberOfRelapses = {}
# How many times each weekday index (0=Monday .. 6=Sunday, per
# date.weekday()) occurs in the covered range; denominator for averages.
dayOfWeekIndexesAndNumberOfInstances = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
reportDate = earliestReportDate
while reportDate <= latestReportDate:
    reportDatesAndNumberOfRelapses[reportDate] = 0
    # dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] = dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] + 1
    dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] += 1
    reportDate += datetime.timedelta(days=1)
# Tally relapses per date.
for relapseDate in sortedRelapseDates:
    # reportDatesAndNumberOfRelapses[relapseDate] = reportDatesAndNumberOfRelapses[relapseDate] + 1
    reportDatesAndNumberOfRelapses[relapseDate] += 1
# Total relapses per weekday, summed across every month's file.
dayOfWeekIndexesAndTotalNumberOfRelapses = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
for participantFileName in participantFileNames:
    participants = ParticipantCollection(fileNameString=participantFileName)
    # print participants.relapseDayOfWeekIndexesAndParticipants()
    for index, parts in participants.relapseDayOfWeekIndexesAndParticipants().iteritems():
        # dayOfWeekIndexesAndTotalNumberOfRelapses[index] = dayOfWeekIndexesAndTotalNumberOfRelapses[index] + len(parts)
        dayOfWeekIndexesAndTotalNumberOfRelapses[index] += len(parts)
# Average relapses per weekday = total relapses / number of such weekdays.
dayOfWeekIndexesAndAverageNumberOfRelapses = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
for index, instances in dayOfWeekIndexesAndNumberOfInstances.iteritems():
    # dayOfWeekIndexesAndAverageNumberOfRelapses[index] = int(round(float(dayOfWeekIndexesAndTotalNumberOfRelapses[index]) / float(instances)))
    dayOfWeekIndexesAndAverageNumberOfRelapses[index] = float(dayOfWeekIndexesAndTotalNumberOfRelapses[index]) / float(instances)
spreadsheetTitle = "StayClean monthly challenge relapse data"
# spreadsheetTitle = "Test spreadsheet"
# Authenticate against the Google Sheets API with a service-account key.
# NOTE(review): SignedJwtAssertionCredentials comes from the legacy
# oauth2client API, and the bare `print` statements mark this as Python 2.
json_key = json.load(open('../google-oauth-credentials.json'))
scope = ['https://spreadsheets.google.com/feeds']
credentials = SignedJwtAssertionCredentials(json_key['client_email'], json_key['private_key'].encode(), scope)
gc = gspread.authorize(credentials)
spreadSheet = None
try:
    spreadSheet = gc.open(spreadsheetTitle)
except gspread.exceptions.SpreadsheetNotFound:
    print "No spreadsheet with title " + spreadsheetTitle
    exit(1)
workSheet = spreadSheet.get_worksheet(0)
# Columns A/B: one row per calendar day (date, relapse count), starting at
# row 2 to leave room for a header row.
columnACells = workSheet.range("A2:A" + str(len(reportDatesAndNumberOfRelapses) + 1))
columnBCells = workSheet.range("B2:B" + str(len(reportDatesAndNumberOfRelapses) + 1))
# Columns C/D: one row per weekday (name, average relapse count).
columnCCells = workSheet.range("C2:C8")
columnDCells = workSheet.range("D2:D8")
reportDate = earliestReportDate
rowIndex = 0
while reportDate <= latestReportDate:
    columnACells[rowIndex].value = str(reportDate)
    columnBCells[rowIndex].value = str(reportDatesAndNumberOfRelapses[reportDate])
    rowIndex += 1
    reportDate += datetime.timedelta(days=1)
for weekdayIndex in range(0, 7):
    weekdayName = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'][weekdayIndex]
    # spreadsheetClient.UpdateCell(weekdayIndex + 2,3,weekdayName,spreadsheetId)
    # spreadsheetClient.UpdateCell(weekdayIndex + 2,4,str(dayOfWeekIndexesAndAverageNumberOfRelapses[weekdayIndex]),spreadsheetId)
    columnCCells[weekdayIndex].value = weekdayName
    columnDCells[weekdayIndex].value = str(dayOfWeekIndexesAndAverageNumberOfRelapses[weekdayIndex])
# Push every cell update in one batch call instead of per-cell requests.
allCells = columnACells + columnBCells + columnCCells + columnDCells
workSheet.update_cells(allCells)
exit(0)
|
{
"content_hash": "b935209868cf271490d409e2d6d1adde",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 143,
"avg_line_length": 56.837606837606835,
"alnum_prop": 0.673984962406015,
"repo_name": "foobarbazblarg/stayclean",
"id": "e7b7983943db30335cd401162daf5898f416dc8c",
"size": "6775",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stayclean-2018-january/update-google-chart.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4232161"
},
{
"name": "Shell",
"bytes": "52056"
}
],
"symlink_target": ""
}
|
"""
Function to load configuration files for bots.
"""
import runpy
import os.path as op
import yaml
def load_config_file(conf_file_path):
    """Load a bot configuration file, dispatching on its extension.

    ``.yml``/``.yaml`` files are parsed as YAML; ``.py`` files are executed
    as Python modules.

    Args:
        conf_file_path: Path to the configuration file.

    Returns:
        dict: The configuration mapping produced by the matching loader.

    Raises:
        ValueError: If the extension is not ``.yml``, ``.yaml`` or ``.py``.
        IOError: Propagated from the underlying loader on failure.
    """
    file_extension = op.splitext(conf_file_path)[1]
    ext = file_extension.lower()
    if ext in ('.yml', '.yaml'):
        return load_yml_config(conf_file_path)
    if ext == '.py':
        return load_py_config(conf_file_path)
    # The original wrapped everything in a bare ``try/except: raise`` with an
    # ``else: return`` — the bare except re-raised unchanged and added
    # nothing, so it has been removed; exceptions propagate identically.
    raise ValueError('Was expecting a configuration file with extension '
                     '`.yml`, `.yaml` or `.py`, not {}.'.format(file_extension))
def load_py_config(py_file_path):
    """Execute a Python config file and return its public top-level names.

    Dunder names (``__name__`` etc.) are filtered out of the result.

    Raises:
        IOError: If running the file fails, chained from the original error.
    """
    try:
        namespace = runpy.run_path(py_file_path)
    except Exception as exc:
        raise IOError('Error running Python config file in {}.'.format(py_file_path)) from exc
    return {name: value for name, value in namespace.items()
            if not name.startswith('__')}
def load_yml_config(yml_file_path):
    """Parse a YAML configuration file and return the loaded data.

    Raises:
        IOError: If opening or parsing fails, chained from the original error.
    """
    try:
        with open(yml_file_path, 'r') as yml_file:
            # SECURITY: ``yaml.load`` without an explicit Loader can
            # instantiate arbitrary Python objects via ``!!python`` tags.
            # ``safe_load`` restricts input to plain data types, which is all
            # a configuration file should contain.
            config = yaml.safe_load(yml_file)
    except Exception as exc:
        raise IOError('Error parsing YAML file in {}.'.format(yml_file_path)) from exc
    else:
        return config
|
{
"content_hash": "01de00d76857a7817820718fac8ba4be",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 94,
"avg_line_length": 29.523809523809526,
"alnum_prop": 0.5959677419354839,
"repo_name": "PythonSanSebastian/pyper_the_bot",
"id": "9e5bdcdf886197d7d53506ac577946e26d095b2e",
"size": "1240",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "helbot/config.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1645"
},
{
"name": "Python",
"bytes": "125591"
},
{
"name": "Shell",
"bytes": "1867"
},
{
"name": "TeX",
"bytes": "10667"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import copy
import json
import os
import pickle
import unittest
import uuid
from django.core.exceptions import SuspiciousOperation
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
BadHeaderError, HttpResponse, HttpResponseNotAllowed,
HttpResponseNotModified, HttpResponsePermanentRedirect,
HttpResponseRedirect, JsonResponse, QueryDict, SimpleCookie,
StreamingHttpResponse, parse_cookie,
)
from django.test import TestCase
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import force_text, smart_str
from django.utils.functional import lazy
# lazystr('x') builds a lazily-evaluated text object; the tests below use it
# to check that response classes force evaluation of lazy strings.
lazystr = lazy(force_text, six.text_type)
class QueryDictTests(unittest.TestCase):
    """QueryDict behaviour: immutability by default, list-valued keys,
    urlencoding, copying, pickling and non-default encodings."""

    def test_create_with_no_args(self):
        self.assertEqual(QueryDict(), QueryDict(str('')))

    def test_missing_key(self):
        q = QueryDict()
        self.assertRaises(KeyError, q.__getitem__, 'foo')

    def test_immutability(self):
        # Every mutating operation must fail on a default (immutable) QueryDict.
        q = QueryDict()
        self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
        self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
        self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
        self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
        self.assertRaises(AttributeError, q.pop, 'foo')
        self.assertRaises(AttributeError, q.popitem)
        self.assertRaises(AttributeError, q.clear)

    def test_immutable_get_with_default(self):
        q = QueryDict()
        self.assertEqual(q.get('foo', 'default'), 'default')

    def test_immutable_basic_operations(self):
        q = QueryDict()
        self.assertEqual(q.getlist('foo'), [])
        if six.PY2:
            self.assertEqual(q.has_key('foo'), False)
        self.assertEqual('foo' in q, False)
        self.assertEqual(list(six.iteritems(q)), [])
        self.assertEqual(list(six.iterlists(q)), [])
        self.assertEqual(list(six.iterkeys(q)), [])
        self.assertEqual(list(six.itervalues(q)), [])
        self.assertEqual(len(q), 0)
        self.assertEqual(q.urlencode(), '')

    def test_single_key_value(self):
        """Test QueryDict with one key/value pair"""
        q = QueryDict(str('foo=bar'))
        self.assertEqual(q['foo'], 'bar')
        self.assertRaises(KeyError, q.__getitem__, 'bar')
        self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
        self.assertEqual(q.get('foo', 'default'), 'bar')
        self.assertEqual(q.get('bar', 'default'), 'default')
        self.assertEqual(q.getlist('foo'), ['bar'])
        self.assertEqual(q.getlist('bar'), [])
        self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
        self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
        if six.PY2:
            self.assertTrue(q.has_key('foo'))
        self.assertIn('foo', q)
        if six.PY2:
            self.assertFalse(q.has_key('bar'))
        self.assertNotIn('bar', q)
        self.assertEqual(list(six.iteritems(q)), [('foo', 'bar')])
        self.assertEqual(list(six.iterlists(q)), [('foo', ['bar'])])
        self.assertEqual(list(six.iterkeys(q)), ['foo'])
        self.assertEqual(list(six.itervalues(q)), ['bar'])
        self.assertEqual(len(q), 1)
        self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
        self.assertRaises(AttributeError, q.pop, 'foo')
        self.assertRaises(AttributeError, q.popitem)
        self.assertRaises(AttributeError, q.clear)
        self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
        self.assertEqual(q.urlencode(), 'foo=bar')

    def test_urlencode(self):
        q = QueryDict(mutable=True)
        q['next'] = '/a&b/'
        self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
        self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
        q = QueryDict(mutable=True)
        q['next'] = '/t\xebst&key/'
        self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
        self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')

    def test_mutable_copy(self):
        """A copy of a QueryDict is mutable."""
        q = QueryDict().copy()
        self.assertRaises(KeyError, q.__getitem__, "foo")
        q['name'] = 'john'
        self.assertEqual(q['name'], 'john')

    def test_mutable_delete(self):
        q = QueryDict(mutable=True)
        q['name'] = 'john'
        del q['name']
        self.assertNotIn('name', q)

    def test_basic_mutable_operations(self):
        q = QueryDict(mutable=True)
        q['name'] = 'john'
        self.assertEqual(q.get('foo', 'default'), 'default')
        self.assertEqual(q.get('name', 'default'), 'john')
        self.assertEqual(q.getlist('name'), ['john'])
        self.assertEqual(q.getlist('foo'), [])
        q.setlist('foo', ['bar', 'baz'])
        # __getitem__ returns the *last* value of a multi-valued key.
        self.assertEqual(q.get('foo', 'default'), 'baz')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
        q.appendlist('foo', 'another')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
        self.assertEqual(q['foo'], 'another')
        if six.PY2:
            self.assertTrue(q.has_key('foo'))
        self.assertIn('foo', q)
        self.assertListEqual(sorted(list(six.iteritems(q))),
                             [('foo', 'another'), ('name', 'john')])
        self.assertListEqual(sorted(list(six.iterlists(q))),
                             [('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
        self.assertListEqual(sorted(list(six.iterkeys(q))),
                             ['foo', 'name'])
        self.assertListEqual(sorted(list(six.itervalues(q))),
                             ['another', 'john'])
        q.update({'foo': 'hello'})
        self.assertEqual(q['foo'], 'hello')
        self.assertEqual(q.get('foo', 'not available'), 'hello')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
        # pop returns the whole list for the key.
        self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
        self.assertEqual(q.pop('foo', 'not there'), 'not there')
        self.assertEqual(q.get('foo', 'not there'), 'not there')
        self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
        self.assertEqual(q['foo'], 'bar')
        self.assertEqual(q.getlist('foo'), ['bar'])
        self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])
        q.clear()
        self.assertEqual(len(q), 0)

    def test_multiple_keys(self):
        """Test QueryDict with two key/value pairs with same keys."""
        q = QueryDict(str('vote=yes&vote=no'))
        self.assertEqual(q['vote'], 'no')
        self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
        self.assertEqual(q.get('vote', 'default'), 'no')
        self.assertEqual(q.get('foo', 'default'), 'default')
        self.assertEqual(q.getlist('vote'), ['yes', 'no'])
        self.assertEqual(q.getlist('foo'), [])
        self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
        self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
        self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
        if six.PY2:
            self.assertEqual(q.has_key('vote'), True)
        self.assertEqual('vote' in q, True)
        if six.PY2:
            self.assertEqual(q.has_key('foo'), False)
        self.assertEqual('foo' in q, False)
        self.assertEqual(list(six.iteritems(q)), [('vote', 'no')])
        self.assertEqual(list(six.iterlists(q)), [('vote', ['yes', 'no'])])
        self.assertEqual(list(six.iterkeys(q)), ['vote'])
        self.assertEqual(list(six.itervalues(q)), ['no'])
        self.assertEqual(len(q), 1)
        self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
        self.assertRaises(AttributeError, q.pop, 'foo')
        self.assertRaises(AttributeError, q.popitem)
        self.assertRaises(AttributeError, q.clear)
        self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
        self.assertRaises(AttributeError, q.__delitem__, 'vote')

    if six.PY2:
        def test_invalid_input_encoding(self):
            """
            QueryDicts must be able to handle invalid input encoding (in this
            case, bad UTF-8 encoding), falling back to ISO-8859-1 decoding.
            This test doesn't apply under Python 3 because the URL is a string
            and not a bytestring.
            """
            q = QueryDict(str(b'foo=bar&foo=\xff'))
            self.assertEqual(q['foo'], '\xff')
            self.assertEqual(q.getlist('foo'), ['bar', '\xff'])

    def test_pickle(self):
        q = QueryDict()
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q == q1, True)
        q = QueryDict(str('a=b&c=d'))
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q == q1, True)
        q = QueryDict(str('a=b&c=d&a=1'))
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q == q1, True)

    def test_update_from_querydict(self):
        """Regression test for #8278: QueryDict.update(QueryDict)"""
        x = QueryDict(str("a=1&a=2"), mutable=True)
        y = QueryDict(str("a=3&a=4"))
        x.update(y)
        self.assertEqual(x.getlist('a'), ['1', '2', '3', '4'])

    def test_non_default_encoding(self):
        """#13572 - QueryDict with a non-default encoding"""
        q = QueryDict(str('cur=%A4'), encoding='iso-8859-15')
        self.assertEqual(q.encoding, 'iso-8859-15')
        self.assertEqual(list(six.iteritems(q)), [('cur', '€')])
        self.assertEqual(q.urlencode(), 'cur=%A4')
        # The encoding must survive both QueryDict.copy() and copy/deepcopy.
        q = q.copy()
        self.assertEqual(q.encoding, 'iso-8859-15')
        self.assertEqual(list(six.iteritems(q)), [('cur', '€')])
        self.assertEqual(q.urlencode(), 'cur=%A4')
        self.assertEqual(copy.copy(q).encoding, 'iso-8859-15')
        self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15')
class HttpResponseTests(unittest.TestCase):
    """Header handling, content coercion, and the file/stream interface of
    HttpResponse."""

    def test_headers_type(self):
        r = HttpResponse()
        # The following tests explicitly test types in addition to values
        # because in Python 2 u'foo' == b'foo'.
        # ASCII unicode or bytes values are converted to native strings.
        r['key'] = 'test'
        self.assertEqual(r['key'], str('test'))
        self.assertIsInstance(r['key'], str)
        r['key'] = 'test'.encode('ascii')
        self.assertEqual(r['key'], str('test'))
        self.assertIsInstance(r['key'], str)
        self.assertIn(b'test', r.serialize_headers())
        # Latin-1 unicode or bytes values are also converted to native strings.
        r['key'] = 'café'
        self.assertEqual(r['key'], smart_str('café', 'latin-1'))
        self.assertIsInstance(r['key'], str)
        r['key'] = 'café'.encode('latin-1')
        self.assertEqual(r['key'], smart_str('café', 'latin-1'))
        self.assertIsInstance(r['key'], str)
        self.assertIn('café'.encode('latin-1'), r.serialize_headers())
        # Other unicode values are MIME-encoded (there's no way to pass them as bytes).
        r['key'] = '†'
        self.assertEqual(r['key'], str('=?utf-8?b?4oCg?='))
        self.assertIsInstance(r['key'], str)
        self.assertIn(b'=?utf-8?b?4oCg?=', r.serialize_headers())
        # The response also converts unicode or bytes keys to strings, but requires
        # them to contain ASCII
        r = HttpResponse()
        del r['Content-Type']
        r['foo'] = 'bar'
        l = list(r.items())
        self.assertEqual(len(l), 1)
        self.assertEqual(l[0], ('foo', 'bar'))
        self.assertIsInstance(l[0][0], str)
        r = HttpResponse()
        del r['Content-Type']
        r[b'foo'] = 'bar'
        l = list(r.items())
        self.assertEqual(len(l), 1)
        self.assertEqual(l[0], ('foo', 'bar'))
        self.assertIsInstance(l[0][0], str)
        r = HttpResponse()
        # Non-ASCII header *names* are rejected outright.
        self.assertRaises(UnicodeError, r.__setitem__, 'føø', 'bar')
        self.assertRaises(UnicodeError, r.__setitem__, 'føø'.encode('utf-8'), 'bar')

    def test_long_line(self):
        # Bug #20889: long lines trigger newlines to be added to headers
        # (which is not allowed due to bug #10188)
        h = HttpResponse()
        f = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88'.encode('latin-1')
        f = f.decode('utf-8')
        h['Content-Disposition'] = 'attachment; filename="%s"' % f
        # This one is triggering http://bugs.python.org/issue20747, that is Python
        # will itself insert a newline in the header
        h['Content-Disposition'] = 'attachement; filename="EdelRot_Blu\u0308te (3)-0.JPG"'

    def test_newlines_in_headers(self):
        # Bug #10188: Do not allow newlines in headers (CR or LF)
        r = HttpResponse()
        self.assertRaises(BadHeaderError, r.__setitem__, 'test\rstr', 'test')
        self.assertRaises(BadHeaderError, r.__setitem__, 'test\nstr', 'test')

    def test_dict_behavior(self):
        """
        Test for bug #14020: Make HttpResponse.get work like dict.get
        """
        r = HttpResponse()
        self.assertEqual(r.get('test'), None)

    def test_non_string_content(self):
        # Bug 16494: HttpResponse should behave consistently with non-strings
        r = HttpResponse(12345)
        self.assertEqual(r.content, b'12345')
        # test content via property
        r = HttpResponse()
        r.content = 12345
        self.assertEqual(r.content, b'12345')

    def test_iter_content(self):
        r = HttpResponse(['abc', 'def', 'ghi'])
        self.assertEqual(r.content, b'abcdefghi')
        # test iter content via property
        r = HttpResponse()
        r.content = ['idan', 'alex', 'jacob']
        self.assertEqual(r.content, b'idanalexjacob')
        r = HttpResponse()
        r.content = [1, 2, 3]
        self.assertEqual(r.content, b'123')
        # test odd inputs
        r = HttpResponse()
        r.content = ['1', '2', 3, '\u079e']
        # '\xde\x9e' == unichr(1950).encode('utf-8')
        self.assertEqual(r.content, b'123\xde\x9e')
        # .content can safely be accessed multiple times.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.content, r.content)
        self.assertEqual(r.content, b'helloworld')
        # __iter__ can safely be called multiple times (#20187).
        self.assertEqual(b''.join(r), b'helloworld')
        self.assertEqual(b''.join(r), b'helloworld')
        # Accessing .content still works.
        self.assertEqual(r.content, b'helloworld')
        # Accessing .content also works if the response was iterated first.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(b''.join(r), b'helloworld')
        self.assertEqual(r.content, b'helloworld')
        # Additional content can be written to the response.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.content, b'helloworld')
        r.write('!')
        self.assertEqual(r.content, b'helloworld!')

    def test_iterator_isnt_rewound(self):
        # Regression test for #13222
        r = HttpResponse('abc')
        i = iter(r)
        self.assertEqual(list(i), [b'abc'])
        self.assertEqual(list(i), [])

    def test_lazy_content(self):
        r = HttpResponse(lazystr('helloworld'))
        self.assertEqual(r.content, b'helloworld')

    def test_file_interface(self):
        r = HttpResponse()
        r.write(b"hello")
        self.assertEqual(r.tell(), 5)
        # tell() counts bytes, not characters: "привет" is 12 UTF-8 bytes.
        r.write("привет")
        self.assertEqual(r.tell(), 17)
        r = HttpResponse(['abc'])
        r.write('def')
        self.assertEqual(r.tell(), 6)
        self.assertEqual(r.content, b'abcdef')
        # with Content-Encoding header
        r = HttpResponse()
        r['Content-Encoding'] = 'winning'
        r.write(b'abc')
        r.write(b'def')
        self.assertEqual(r.content, b'abcdef')

    def test_stream_interface(self):
        r = HttpResponse('asdf')
        self.assertEqual(r.getvalue(), b'asdf')
        r = HttpResponse()
        self.assertEqual(r.writable(), True)
        r.writelines(['foo\n', 'bar\n', 'baz\n'])
        self.assertEqual(r.content, b'foo\nbar\nbaz\n')

    def test_unsafe_redirect(self):
        # Redirects to non-HTTP schemes must be rejected.
        bad_urls = [
            'data:text/html,<script>window.alert("xss")</script>',
            'mailto:test@example.com',
            'file:///etc/passwd',
        ]
        for url in bad_urls:
            self.assertRaises(SuspiciousOperation,
                              HttpResponseRedirect, url)
            self.assertRaises(SuspiciousOperation,
                              HttpResponsePermanentRedirect, url)
class HttpResponseSubclassesTests(TestCase):
    """Status codes and constructor behaviour of the HttpResponse
    subclasses (redirects, 304, 405)."""

    def test_redirect(self):
        response = HttpResponseRedirect('/redirected/')
        self.assertEqual(response.status_code, 302)
        # Test that standard HttpResponse init args can be used
        response = HttpResponseRedirect('/redirected/',
                                        content='The resource has temporarily moved',
                                        content_type='text/html')
        self.assertContains(response, 'The resource has temporarily moved', status_code=302)
        # Test that url attribute is right
        self.assertEqual(response.url, response['Location'])

    def test_redirect_lazy(self):
        """Make sure HttpResponseRedirect works with lazy strings."""
        r = HttpResponseRedirect(lazystr('/redirected/'))
        self.assertEqual(r.url, '/redirected/')

    def test_not_modified(self):
        response = HttpResponseNotModified()
        self.assertEqual(response.status_code, 304)
        # 304 responses should not have content/content-type
        with self.assertRaises(AttributeError):
            response.content = "Hello dear"
        self.assertNotIn('content-type', response)

    def test_not_allowed(self):
        response = HttpResponseNotAllowed(['GET'])
        self.assertEqual(response.status_code, 405)
        # Test that standard HttpResponse init args can be used
        response = HttpResponseNotAllowed(['GET'],
                                          content='Only the GET method is allowed',
                                          content_type='text/html')
        self.assertContains(response, 'Only the GET method is allowed', status_code=405)
class JsonResponseTests(TestCase):
    """JsonResponse serialization: non-ASCII data, the ``safe`` flag and
    custom encoders."""

    def test_json_response_non_ascii(self):
        data = {'key': 'łóżko'}
        response = JsonResponse(data)
        self.assertEqual(json.loads(response.content.decode()), data)

    def test_json_response_raises_type_error_with_default_setting(self):
        # Non-dict top-level objects require safe=False.
        with self.assertRaisesMessage(TypeError,
                'In order to allow non-dict objects to be serialized set the '
                'safe parameter to False'):
            JsonResponse([1, 2, 3])

    def test_json_response_text(self):
        response = JsonResponse('foobar', safe=False)
        self.assertEqual(json.loads(response.content.decode()), 'foobar')

    def test_json_response_list(self):
        response = JsonResponse(['foo', 'bar'], safe=False)
        self.assertEqual(json.loads(response.content.decode()), ['foo', 'bar'])

    def test_json_response_uuid(self):
        u = uuid.uuid4()
        response = JsonResponse(u, safe=False)
        # DjangoJSONEncoder serializes a UUID as its string form.
        self.assertEqual(json.loads(response.content.decode()), str(u))

    def test_json_response_custom_encoder(self):
        class CustomDjangoJSONEncoder(DjangoJSONEncoder):
            # Ignore the input entirely so the test can detect that this
            # encoder (and not the default) was used.
            def encode(self, o):
                return json.dumps({'foo': 'bar'})

        response = JsonResponse({}, encoder=CustomDjangoJSONEncoder)
        self.assertEqual(json.loads(response.content.decode()), {'foo': 'bar'})
class StreamingHttpResponseTests(TestCase):
    """Iteration semantics, ``streaming_content`` and the deliberately
    restricted file interface of StreamingHttpResponse."""

    def test_streaming_response(self):
        r = StreamingHttpResponse(iter(['hello', 'world']))
        # iterating over the response itself yields bytestring chunks.
        chunks = list(r)
        self.assertEqual(chunks, [b'hello', b'world'])
        for chunk in chunks:
            self.assertIsInstance(chunk, six.binary_type)
        # and the response can only be iterated once.
        self.assertEqual(list(r), [])
        # even when a sequence that can be iterated many times, like a list,
        # is given as content.
        r = StreamingHttpResponse(['abc', 'def'])
        self.assertEqual(list(r), [b'abc', b'def'])
        self.assertEqual(list(r), [])
        # iterating over Unicode strings still yields bytestring chunks.
        r.streaming_content = iter(['hello', 'café'])
        chunks = list(r)
        # '\xc3\xa9' == unichr(233).encode('utf-8')
        self.assertEqual(chunks, [b'hello', b'caf\xc3\xa9'])
        for chunk in chunks:
            self.assertIsInstance(chunk, six.binary_type)
        # streaming responses don't have a `content` attribute.
        self.assertFalse(hasattr(r, 'content'))
        # and you can't accidentally assign to a `content` attribute.
        with self.assertRaises(AttributeError):
            r.content = 'xyz'
        # but they do have a `streaming_content` attribute.
        self.assertTrue(hasattr(r, 'streaming_content'))
        # that exists so we can check if a response is streaming, and wrap or
        # replace the content iterator.
        r.streaming_content = iter(['abc', 'def'])
        r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
        self.assertEqual(list(r), [b'ABC', b'DEF'])
        # coercing a streaming response to bytes doesn't return a complete HTTP
        # message like a regular response does. it only gives us the headers.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        self.assertEqual(
            six.binary_type(r), b'Content-Type: text/html; charset=utf-8')
        # and this won't consume its content.
        self.assertEqual(list(r), [b'hello', b'world'])
        # additional content cannot be written to the response.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        with self.assertRaises(Exception):
            r.write('!')
        # and we can't tell the current position.
        with self.assertRaises(Exception):
            r.tell()
        r = StreamingHttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.getvalue(), b'helloworld')
class FileCloseTests(TestCase):
    """File objects passed as response content must be closed together with
    the response."""

    def setUp(self):
        # Disable the request_finished signal during this test
        # to avoid interfering with the database connection.
        request_finished.disconnect(close_old_connections)

    def tearDown(self):
        request_finished.connect(close_old_connections)

    def test_response(self):
        filename = os.path.join(os.path.dirname(upath(__file__)), 'abc.txt')
        # file isn't closed until we close the response.
        file1 = open(filename)
        r = HttpResponse(file1)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)
        # don't automatically close file when we finish iterating the response.
        file1 = open(filename)
        r = HttpResponse(file1)
        self.assertFalse(file1.closed)
        list(r)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)
        # when multiple file are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = HttpResponse(file1)
        r.content = file2
        self.assertFalse(file1.closed)
        self.assertFalse(file2.closed)
        r.close()
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)

    def test_streaming_response(self):
        filename = os.path.join(os.path.dirname(upath(__file__)), 'abc.txt')
        # file isn't closed until we close the response.
        file1 = open(filename)
        r = StreamingHttpResponse(file1)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)
        # when multiple file are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = StreamingHttpResponse(file1)
        r.streaming_content = file2
        self.assertFalse(file1.closed)
        self.assertFalse(file2.closed)
        r.close()
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)
class CookieTests(unittest.TestCase):
    """SimpleCookie encoding/decoding round-trips and cookie-parsing edge
    cases."""

    def test_encode(self):
        """
        Test that we don't output tricky characters in encoded value
        """
        c = SimpleCookie()
        c['test'] = "An,awkward;value"
        self.assertNotIn(";", c.output().rstrip(';'))  # IE compat
        self.assertNotIn(",", c.output().rstrip(';'))  # Safari compat

    def test_decode(self):
        """
        Test that we can still preserve semi-colons and commas
        """
        c = SimpleCookie()
        c['test'] = "An,awkward;value"
        c2 = SimpleCookie()
        # c.output() starts with "Set-Cookie: " (12 chars, see test_pickle);
        # strip it so only the cookie string itself is re-parsed.
        c2.load(c.output()[12:])
        self.assertEqual(c['test'].value, c2['test'].value)

    def test_decode_2(self):
        """
        Test that we haven't broken normal encoding
        """
        c = SimpleCookie()
        c['test'] = b"\xf0"
        c2 = SimpleCookie()
        c2.load(c.output()[12:])
        self.assertEqual(c['test'].value, c2['test'].value)

    def test_nonstandard_keys(self):
        """
        Test that a single non-standard cookie name doesn't affect all cookies. Ticket #13007.
        """
        self.assertIn('good_cookie', parse_cookie('good_cookie=yes;bad:cookie=yes').keys())

    def test_repeated_nonstandard_keys(self):
        """
        Test that a repeated non-standard name doesn't affect all cookies. Ticket #15852
        """
        self.assertIn('good_cookie', parse_cookie('a:=b; a:=c; good_cookie=yes').keys())

    def test_httponly_after_load(self):
        """
        Test that we can use httponly attribute on cookies that we load
        """
        c = SimpleCookie()
        c.load("name=val")
        c['name']['httponly'] = True
        self.assertTrue(c['name']['httponly'])

    def test_load_dict(self):
        c = SimpleCookie()
        c.load({'name': 'val'})
        self.assertEqual(c['name'].value, 'val')

    @unittest.skipUnless(six.PY2, "PY3 throws an exception on invalid cookie keys.")
    def test_bad_cookie(self):
        """
        Regression test for #18403
        """
        r = HttpResponse()
        r.set_cookie("a:.b/", 1)
        self.assertEqual(len(r.cookies.bad_cookies), 1)

    def test_pickle(self):
        rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
        expected_output = 'Set-Cookie: %s' % rawdata
        C = SimpleCookie()
        C.load(rawdata)
        self.assertEqual(C.output(), expected_output)
        # Cookies must survive pickling at every supported protocol.
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            C1 = pickle.loads(pickle.dumps(C, protocol=proto))
            self.assertEqual(C1.output(), expected_output)
|
{
"content_hash": "ea894c880617dfb98e3a311396de3429",
"timestamp": "",
"source": "github",
"line_count": 701,
"max_line_length": 94,
"avg_line_length": 38.4793152639087,
"alnum_prop": 0.6004671164825387,
"repo_name": "liangazhou/django-rdp",
"id": "c7bf63b4caed9410cbce8c70b7770ad22e8177ce",
"size": "27025",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "packages/Django-1.8.6/tests/httpwrappers/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "22310"
},
{
"name": "CSS",
"bytes": "5463444"
},
{
"name": "CoffeeScript",
"bytes": "83631"
},
{
"name": "Groff",
"bytes": "450"
},
{
"name": "HTML",
"bytes": "439341404"
},
{
"name": "JavaScript",
"bytes": "19561573"
},
{
"name": "PHP",
"bytes": "94083"
},
{
"name": "Perl",
"bytes": "9844"
},
{
"name": "Python",
"bytes": "8069"
},
{
"name": "Shell",
"bytes": "11480"
},
{
"name": "XSLT",
"bytes": "224454"
}
],
"symlink_target": ""
}
|
"""Sphinx build configuration for the MasterHA Toolkit documentation."""
import sys
import os

# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)

# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)

# Imported so that the package's __version__ can be reused below.
import masterha_toolkit

# -- General configuration ---------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'MasterHA Toolkit'
copyright = u'2014, Tyler Mitchell'

# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = masterha_toolkit.__version__
# The full version, including alpha/beta/rc tags.
release = masterha_toolkit.__version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False

# -- Options for HTML output -------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'masterha_toolkitdoc'

# -- Options for LaTeX output ------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'masterha_toolkit.tex',
     u'MasterHA Toolkit Documentation',
     u'Tyler Mitchell', 'manual'),
]

# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'masterha_toolkit',
     u'MasterHA Toolkit Documentation',
     [u'Tyler Mitchell'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ----------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'masterha_toolkit',
     u'MasterHA Toolkit Documentation',
     u'Tyler Mitchell',
     'masterha_toolkit',
     'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
{
"content_hash": "8003fcc0a586d2a0cd3428a36ff0465b",
"timestamp": "",
"source": "github",
"line_count": 260,
"max_line_length": 76,
"avg_line_length": 31.003846153846155,
"alnum_prop": 0.7047512715543978,
"repo_name": "zastari/masterha_toolkit",
"id": "01431338ab0d5c07bc6d2a57f52d5e64fe7ade4d",
"size": "8512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "8337"
},
{
"name": "Python",
"bytes": "20175"
},
{
"name": "Shell",
"bytes": "6479"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import.importer import upload
from frappe.utils.background_jobs import enqueue
class DataImport(Document):
    """Frappe document controller for a single Data Import run."""

    def autoname(self):
        # Fall back to a timestamp-based name when no explicit name was given.
        if not self.name:
            self.name = "Import on " + format_datetime(self.creation)

    def validate(self):
        if not self.import_file:
            self.db_set("total_rows", 0)
        # A running import must not be modified concurrently.
        if self.import_status == "In Progress":
            frappe.throw(_("Can't save the form as data import is in progress."))

        # validate the template just after the upload
        # if there is total_rows in the doc, it means that the template is already validated and error free
        if self.import_file and not self.total_rows:
            upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
@frappe.whitelist()
def import_data(data_import):
    """Mark the given Data Import document as running and enqueue the actual
    upload as a background job, unless a job for it is already queued."""
    frappe.db.set_value("Data Import", data_import, "import_status", "In Progress",
        update_modified=False)
    frappe.publish_realtime("data_import_progress", {"progress": "0",
        "data_import": data_import, "reload": True}, user=frappe.session.user)

    # Imported lazily to avoid a circular import at module load time.
    from frappe.core.page.background_jobs.background_jobs import get_info
    already_queued = any(job.get("job_name") == data_import for job in get_info())
    if not already_queued:
        enqueue(upload, queue='default', timeout=6000, event='data_import',
            job_name=data_import, data_import_doc=data_import,
            from_data_import="Yes", user=frappe.session.user)
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
	insert=False, submit=False, pre_process=None):
    """Import document fixture files from *path*.

    *path* may be a single file or a directory; ``.json`` files go through
    frappe's module import (force mode), ``.csv`` files go through the
    data-import pipeline. Other files are ignored.

    :param path: file or directory to import from
    :param overwrite: passed through to the CSV importer
    :param ignore_links: passed through to the CSV importer
    :param ignore_insert: unused, kept for backward compatibility
    :param insert: unused, kept for backward compatibility
    :param submit: submit documents after CSV import
    :param pre_process: optional callable applied to each document before import
    """
    if os.path.isdir(path):
        files = [os.path.join(path, f) for f in os.listdir(path)]
    else:
        files = [path]

    for f in files:
        if f.endswith(".json"):
            frappe.flags.mute_emails = True
            try:
                frappe.modules.import_file.import_file_by_path(f, data_import=True,
                    force=True, pre_process=pre_process, reset_permissions=True)
            finally:
                # BUGFIX: restore the flag even if the import raises, so a
                # failed import doesn't leave emails muted for the rest of
                # the process.
                frappe.flags.mute_emails = False
            frappe.db.commit()
        elif f.endswith(".csv"):
            import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite,
                submit=submit, pre_process=pre_process)
            frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
    """Import rows from the CSV file at *path* through the data-import
    upload pipeline."""
    # Imported lazily; csvutils pulls in optional dependencies.
    from frappe.utils.csvutils import read_csv_content

    print("Importing " + path)
    with open(path, "r") as source:
        parsed_rows = read_csv_content(source.read())
        upload(rows=parsed_rows, ignore_links=ignore_links, no_email=no_email,
            overwrite=overwrite, submit_after_import=submit,
            pre_process=pre_process)
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
    """Write the documents of *doctype* (optionally filtered, or a single
    *name*) to *path* as JSON, stripping bookkeeping fields so the output
    is re-importable."""

    def strip_metadata(docs):
        # Fields that must never survive an export; child rows additionally
        # drop their identity fields.
        unwanted = ('parent', 'parentfield', 'parenttype', 'modified_by', 'creation', 'owner', 'idx')
        child_unwanted = unwanted + ('docstatus', 'doctype', 'modified', 'name')
        for doc in docs:
            for field in unwanted:
                if field in doc:
                    del doc[field]
            for value in doc.values():
                if isinstance(value, list):
                    for row in value:
                        for field in child_unwanted:
                            if field in row:
                                del row[field]

    docs = []
    if name:
        docs.append(frappe.get_doc(doctype, name).as_dict())
    elif frappe.db.get_value("DocType", doctype, "issingle"):
        # Single doctypes have exactly one document.
        docs.append(frappe.get_doc(doctype).as_dict())
    else:
        for row in frappe.get_all(doctype, fields=["name"], filters=filters,
                or_filters=or_filters, limit_page_length=0, order_by=order_by):
            docs.append(frappe.get_doc(doctype, row.name).as_dict())
    strip_metadata(docs)

    # If the target directory doesn't exist, retry relative to the parent dir.
    if not os.path.exists(os.path.dirname(path)):
        path = os.path.join('..', path)
    with open(path, "w") as outfile:
        outfile.write(frappe.as_json(docs))
def export_csv(doctype, path):
    """Dump *doctype* (with all child doctypes and data) to *path* as a
    CSV import template."""
    from frappe.core.doctype.data_export.exporter import export_data

    with open(path, "wb") as outfile:
        # export_data places its result in frappe.response.result.
        export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
        outfile.write(frappe.response.result.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
    """Export *doctype* as a fixture JSON file into *app*'s ``fixtures``
    directory.

    Only the Administrator account may export fixtures.

    :raises frappe.PermissionError: when called by any other user
    """
    if frappe.session.user != "Administrator":
        raise frappe.PermissionError

    # BUGFIX: makedirs(..., exist_ok=True) replaces the racy
    # os.path.exists() + os.mkdir() pair, which could raise FileExistsError
    # if the directory appeared between the check and the create.
    os.makedirs(frappe.get_app_path(app, "fixtures"), exist_ok=True)

    export_json(doctype,
        frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"),
        order_by="name asc")
|
{
"content_hash": "173a8aa21491266c140192c40426d20a",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 134,
"avg_line_length": 38.283185840707965,
"alnum_prop": 0.7182154415164124,
"repo_name": "chdecultot/frappe",
"id": "02624cdf65e038a21bd186cbafc56e1111119ad8",
"size": "4460",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "frappe/core/doctype/data_import/data_import.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "440872"
},
{
"name": "HTML",
"bytes": "196064"
},
{
"name": "JavaScript",
"bytes": "1884702"
},
{
"name": "Makefile",
"bytes": "99"
},
{
"name": "Python",
"bytes": "2207816"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
}
|
import datetime
from typing import Dict, List, Optional, TYPE_CHECKING, Union
from ... import _serialization
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
class AddressSpace(_serialization.Model):
    """AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network.

    :ivar address_prefixes: A list of address blocks reserved for this virtual network in CIDR
     notation.
    :vartype address_prefixes: list[str]
    """

    # Maps attribute names to their wire (JSON) keys and types; consumed by
    # the _serialization.Model (de)serialization machinery.
    _attribute_map = {
        "address_prefixes": {"key": "addressPrefixes", "type": "[str]"},
    }

    def __init__(self, *, address_prefixes: Optional[List[str]] = None, **kwargs):
        """
        :keyword address_prefixes: A list of address blocks reserved for this virtual network in CIDR
         notation.
        :paramtype address_prefixes: list[str]
        """
        super().__init__(**kwargs)
        self.address_prefixes = address_prefixes
class Resource(_serialization.Model):
    """Resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    """

    # "readonly" fields are populated by the service and ignored on input.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    # Attribute name -> wire key/type mapping used by _serialization.Model.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        """
        super().__init__(**kwargs)
        self.id = id
        # name and type are read-only; the server fills them in on responses.
        self.name = None
        self.type = None
        self.location = location
        self.tags = tags
class ApplicationGateway(Resource):  # pylint: disable=too-many-instance-attributes
    """Application gateway resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar sku: SKU of the application gateway resource.
    :vartype sku: ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySku
    :ivar ssl_policy: SSL policy of the application gateway resource.
    :vartype ssl_policy: ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySslPolicy
    :ivar operational_state: Operational state of the application gateway resource. Known values
     are: "Stopped", "Starting", "Running", and "Stopping".
    :vartype operational_state: str or
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayOperationalState
    :ivar gateway_ip_configurations: Subnets of application the gateway resource.
    :vartype gateway_ip_configurations:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayIPConfiguration]
    :ivar authentication_certificates: Authentication certificates of the application gateway
     resource.
    :vartype authentication_certificates:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayAuthenticationCertificate]
    :ivar ssl_certificates: SSL certificates of the application gateway resource.
    :vartype ssl_certificates:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySslCertificate]
    :ivar frontend_ip_configurations: Frontend IP addresses of the application gateway resource.
    :vartype frontend_ip_configurations:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFrontendIPConfiguration]
    :ivar frontend_ports: Frontend ports of the application gateway resource.
    :vartype frontend_ports:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFrontendPort]
    :ivar probes: Probes of the application gateway resource.
    :vartype probes: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayProbe]
    :ivar backend_address_pools: Backend address pool of the application gateway resource.
    :vartype backend_address_pools:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendAddressPool]
    :ivar backend_http_settings_collection: Backend http settings of the application gateway
     resource.
    :vartype backend_http_settings_collection:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHttpSettings]
    :ivar http_listeners: Http listeners of the application gateway resource.
    :vartype http_listeners:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayHttpListener]
    :ivar url_path_maps: URL path map of the application gateway resource.
    :vartype url_path_maps:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayUrlPathMap]
    :ivar request_routing_rules: Request routing rules of the application gateway resource.
    :vartype request_routing_rules:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayRequestRoutingRule]
    :ivar web_application_firewall_configuration: Web application firewall configuration.
    :vartype web_application_firewall_configuration:
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayWebApplicationFirewallConfiguration
    :ivar resource_guid: Resource GUID property of the application gateway resource.
    :vartype resource_guid: str
    :ivar provisioning_state: Provisioning state of the application gateway resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    # "readonly" fields are populated by the service and ignored on input.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "operational_state": {"readonly": True},
    }

    # Attribute name -> wire key/type mapping; "properties.*" keys are nested
    # under the resource's "properties" object on the wire.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "sku": {"key": "properties.sku", "type": "ApplicationGatewaySku"},
        "ssl_policy": {"key": "properties.sslPolicy", "type": "ApplicationGatewaySslPolicy"},
        "operational_state": {"key": "properties.operationalState", "type": "str"},
        "gateway_ip_configurations": {
            "key": "properties.gatewayIPConfigurations",
            "type": "[ApplicationGatewayIPConfiguration]",
        },
        "authentication_certificates": {
            "key": "properties.authenticationCertificates",
            "type": "[ApplicationGatewayAuthenticationCertificate]",
        },
        "ssl_certificates": {"key": "properties.sslCertificates", "type": "[ApplicationGatewaySslCertificate]"},
        "frontend_ip_configurations": {
            "key": "properties.frontendIPConfigurations",
            "type": "[ApplicationGatewayFrontendIPConfiguration]",
        },
        "frontend_ports": {"key": "properties.frontendPorts", "type": "[ApplicationGatewayFrontendPort]"},
        "probes": {"key": "properties.probes", "type": "[ApplicationGatewayProbe]"},
        "backend_address_pools": {
            "key": "properties.backendAddressPools",
            "type": "[ApplicationGatewayBackendAddressPool]",
        },
        "backend_http_settings_collection": {
            "key": "properties.backendHttpSettingsCollection",
            "type": "[ApplicationGatewayBackendHttpSettings]",
        },
        "http_listeners": {"key": "properties.httpListeners", "type": "[ApplicationGatewayHttpListener]"},
        "url_path_maps": {"key": "properties.urlPathMaps", "type": "[ApplicationGatewayUrlPathMap]"},
        "request_routing_rules": {
            "key": "properties.requestRoutingRules",
            "type": "[ApplicationGatewayRequestRoutingRule]",
        },
        "web_application_firewall_configuration": {
            "key": "properties.webApplicationFirewallConfiguration",
            "type": "ApplicationGatewayWebApplicationFirewallConfiguration",
        },
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        sku: Optional["_models.ApplicationGatewaySku"] = None,
        ssl_policy: Optional["_models.ApplicationGatewaySslPolicy"] = None,
        gateway_ip_configurations: Optional[List["_models.ApplicationGatewayIPConfiguration"]] = None,
        authentication_certificates: Optional[List["_models.ApplicationGatewayAuthenticationCertificate"]] = None,
        ssl_certificates: Optional[List["_models.ApplicationGatewaySslCertificate"]] = None,
        frontend_ip_configurations: Optional[List["_models.ApplicationGatewayFrontendIPConfiguration"]] = None,
        frontend_ports: Optional[List["_models.ApplicationGatewayFrontendPort"]] = None,
        probes: Optional[List["_models.ApplicationGatewayProbe"]] = None,
        backend_address_pools: Optional[List["_models.ApplicationGatewayBackendAddressPool"]] = None,
        backend_http_settings_collection: Optional[List["_models.ApplicationGatewayBackendHttpSettings"]] = None,
        http_listeners: Optional[List["_models.ApplicationGatewayHttpListener"]] = None,
        url_path_maps: Optional[List["_models.ApplicationGatewayUrlPathMap"]] = None,
        request_routing_rules: Optional[List["_models.ApplicationGatewayRequestRoutingRule"]] = None,
        web_application_firewall_configuration: Optional[
            "_models.ApplicationGatewayWebApplicationFirewallConfiguration"
        ] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword sku: SKU of the application gateway resource.
        :paramtype sku: ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySku
        :keyword ssl_policy: SSL policy of the application gateway resource.
        :paramtype ssl_policy: ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySslPolicy
        :keyword gateway_ip_configurations: Subnets of application the gateway resource.
        :paramtype gateway_ip_configurations:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayIPConfiguration]
        :keyword authentication_certificates: Authentication certificates of the application gateway
         resource.
        :paramtype authentication_certificates:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayAuthenticationCertificate]
        :keyword ssl_certificates: SSL certificates of the application gateway resource.
        :paramtype ssl_certificates:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySslCertificate]
        :keyword frontend_ip_configurations: Frontend IP addresses of the application gateway resource.
        :paramtype frontend_ip_configurations:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFrontendIPConfiguration]
        :keyword frontend_ports: Frontend ports of the application gateway resource.
        :paramtype frontend_ports:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFrontendPort]
        :keyword probes: Probes of the application gateway resource.
        :paramtype probes: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayProbe]
        :keyword backend_address_pools: Backend address pool of the application gateway resource.
        :paramtype backend_address_pools:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendAddressPool]
        :keyword backend_http_settings_collection: Backend http settings of the application gateway
         resource.
        :paramtype backend_http_settings_collection:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHttpSettings]
        :keyword http_listeners: Http listeners of the application gateway resource.
        :paramtype http_listeners:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayHttpListener]
        :keyword url_path_maps: URL path map of the application gateway resource.
        :paramtype url_path_maps:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayUrlPathMap]
        :keyword request_routing_rules: Request routing rules of the application gateway resource.
        :paramtype request_routing_rules:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayRequestRoutingRule]
        :keyword web_application_firewall_configuration: Web application firewall configuration.
        :paramtype web_application_firewall_configuration:
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayWebApplicationFirewallConfiguration
        :keyword resource_guid: Resource GUID property of the application gateway resource.
        :paramtype resource_guid: str
        :keyword provisioning_state: Provisioning state of the application gateway resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.sku = sku
        self.ssl_policy = ssl_policy
        # operational_state is read-only; the server fills it in on responses.
        self.operational_state = None
        self.gateway_ip_configurations = gateway_ip_configurations
        self.authentication_certificates = authentication_certificates
        self.ssl_certificates = ssl_certificates
        self.frontend_ip_configurations = frontend_ip_configurations
        self.frontend_ports = frontend_ports
        self.probes = probes
        self.backend_address_pools = backend_address_pools
        self.backend_http_settings_collection = backend_http_settings_collection
        self.http_listeners = http_listeners
        self.url_path_maps = url_path_maps
        self.request_routing_rules = request_routing_rules
        self.web_application_firewall_configuration = web_application_firewall_configuration
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
class SubResource(_serialization.Model):
    """SubResource.

    :ivar id: Resource ID.
    :vartype id: str
    """

    # Attribute name -> wire key/type mapping used by _serialization.Model.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
    }

    def __init__(self, *, id: Optional[str] = None, **kwargs):  # pylint: disable=redefined-builtin
        """
        :keyword id: Resource ID.
        :paramtype id: str
        """
        super().__init__(**kwargs)
        self.id = id
class ApplicationGatewayAuthenticationCertificate(SubResource):
    """Authentication certificates of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar data: Certificate public data.
    :vartype data: str
    :ivar provisioning_state: Provisioning state of the authentication certificate resource.
     Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    # "properties.*" keys are nested under the "properties" object on the wire.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "data": {"key": "properties.data", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        data: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can be
         used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword data: Certificate public data.
        :paramtype data: str
        :keyword provisioning_state: Provisioning state of the authentication certificate resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.data = data
        self.provisioning_state = provisioning_state
class ApplicationGatewayAvailableWafRuleSetsResult(_serialization.Model):
    """Response for ApplicationGatewayAvailableWafRuleSets API service call.

    :ivar value: The list of application gateway rule sets.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallRuleSet]
    """

    # Attribute name -> wire key/type mapping used by _serialization.Model.
    _attribute_map = {
        "value": {"key": "value", "type": "[ApplicationGatewayFirewallRuleSet]"},
    }

    def __init__(self, *, value: Optional[List["_models.ApplicationGatewayFirewallRuleSet"]] = None, **kwargs):
        """
        :keyword value: The list of application gateway rule sets.
        :paramtype value:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallRuleSet]
        """
        super().__init__(**kwargs)
        self.value = value
class ApplicationGatewayBackendAddress(_serialization.Model):
    """Backend address of an application gateway.

    :ivar fqdn: Fully qualified domain name (FQDN).
    :vartype fqdn: str
    :ivar ip_address: IP address.
    :vartype ip_address: str
    """

    # Attribute name -> wire key/type mapping used by _serialization.Model.
    _attribute_map = {
        "fqdn": {"key": "fqdn", "type": "str"},
        "ip_address": {"key": "ipAddress", "type": "str"},
    }

    def __init__(self, *, fqdn: Optional[str] = None, ip_address: Optional[str] = None, **kwargs):
        """
        :keyword fqdn: Fully qualified domain name (FQDN).
        :paramtype fqdn: str
        :keyword ip_address: IP address.
        :paramtype ip_address: str
        """
        super().__init__(**kwargs)
        self.fqdn = fqdn
        self.ip_address = ip_address
class ApplicationGatewayBackendAddressPool(SubResource):
    """A backend address pool belonging to an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource that is unique within a resource group. This name can be used to access
     the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar backend_ip_configurations: Collection of references to IPs defined in network interfaces.
    :vartype backend_ip_configurations:
     list[~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceIPConfiguration]
    :ivar backend_addresses: Backend addresses.
    :vartype backend_addresses:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendAddress]
    :ivar provisioning_state: Provisioning state of the backend address pool resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "backend_ip_configurations": {
            "key": "properties.backendIPConfigurations",
            "type": "[NetworkInterfaceIPConfiguration]",
        },
        "backend_addresses": {"key": "properties.backendAddresses", "type": "[ApplicationGatewayBackendAddress]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        backend_ip_configurations: Optional[List["_models.NetworkInterfaceIPConfiguration"]] = None,
        backend_addresses: Optional[List["_models.ApplicationGatewayBackendAddress"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Resource that is unique within a resource group. This name can be used to access
         the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword backend_ip_configurations: Collection of references to IPs defined in network
         interfaces.
        :paramtype backend_ip_configurations:
         list[~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceIPConfiguration]
        :keyword backend_addresses: Backend addresses.
        :paramtype backend_addresses:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendAddress]
        :keyword provisioning_state: Provisioning state of the backend address pool resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The SubResource base consumes the resource id.
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.backend_addresses = backend_addresses
        self.backend_ip_configurations = backend_ip_configurations
        self.etag = etag
        self.name = name
class ApplicationGatewayBackendHealth(_serialization.Model):
    """Collection of ApplicationGatewayBackendHealthPool resources.

    :ivar backend_address_pools:
    :vartype backend_address_pools:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHealthPool]
    """

    _attribute_map = {
        "backend_address_pools": {"key": "backendAddressPools", "type": "[ApplicationGatewayBackendHealthPool]"},
    }

    def __init__(
        self, *, backend_address_pools: Optional[List["_models.ApplicationGatewayBackendHealthPool"]] = None, **kwargs
    ):
        """
        :keyword backend_address_pools:
        :paramtype backend_address_pools:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHealthPool]
        """
        super().__init__(**kwargs)
        self.backend_address_pools = backend_address_pools
class ApplicationGatewayBackendHealthHttpSettings(_serialization.Model):
    """BackendHealthHttp settings of an application gateway.

    :ivar backend_http_settings: Reference of an ApplicationGatewayBackendHttpSettings resource.
    :vartype backend_http_settings:
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHttpSettings
    :ivar servers: List of ApplicationGatewayBackendHealthServer resources.
    :vartype servers:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHealthServer]
    """

    _attribute_map = {
        "backend_http_settings": {"key": "backendHttpSettings", "type": "ApplicationGatewayBackendHttpSettings"},
        "servers": {"key": "servers", "type": "[ApplicationGatewayBackendHealthServer]"},
    }

    def __init__(
        self,
        *,
        backend_http_settings: Optional["_models.ApplicationGatewayBackendHttpSettings"] = None,
        servers: Optional[List["_models.ApplicationGatewayBackendHealthServer"]] = None,
        **kwargs
    ):
        """
        :keyword backend_http_settings: Reference of an ApplicationGatewayBackendHttpSettings resource.
        :paramtype backend_http_settings:
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHttpSettings
        :keyword servers: List of ApplicationGatewayBackendHealthServer resources.
        :paramtype servers:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHealthServer]
        """
        super().__init__(**kwargs)
        self.servers = servers
        self.backend_http_settings = backend_http_settings
class ApplicationGatewayBackendHealthPool(_serialization.Model):
    """BackendHealth pool of an application gateway.

    :ivar backend_address_pool: Reference of an ApplicationGatewayBackendAddressPool resource.
    :vartype backend_address_pool:
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendAddressPool
    :ivar backend_http_settings_collection: List of ApplicationGatewayBackendHealthHttpSettings
     resources.
    :vartype backend_http_settings_collection:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHealthHttpSettings]
    """

    _attribute_map = {
        "backend_address_pool": {"key": "backendAddressPool", "type": "ApplicationGatewayBackendAddressPool"},
        "backend_http_settings_collection": {
            "key": "backendHttpSettingsCollection",
            "type": "[ApplicationGatewayBackendHealthHttpSettings]",
        },
    }

    def __init__(
        self,
        *,
        backend_address_pool: Optional["_models.ApplicationGatewayBackendAddressPool"] = None,
        backend_http_settings_collection: Optional[List["_models.ApplicationGatewayBackendHealthHttpSettings"]] = None,
        **kwargs
    ):
        """
        :keyword backend_address_pool: Reference of an ApplicationGatewayBackendAddressPool resource.
        :paramtype backend_address_pool:
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendAddressPool
        :keyword backend_http_settings_collection: List of ApplicationGatewayBackendHealthHttpSettings
         resources.
        :paramtype backend_http_settings_collection:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHealthHttpSettings]
        """
        super().__init__(**kwargs)
        self.backend_http_settings_collection = backend_http_settings_collection
        self.backend_address_pool = backend_address_pool
class ApplicationGatewayBackendHealthServer(_serialization.Model):
    """Backend-health http settings of an application gateway.

    :ivar address: IP address or FQDN of backend server.
    :vartype address: str
    :ivar ip_configuration: Reference of IP configuration of backend server.
    :vartype ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar health: Health of backend server. Known values are: "Unknown", "Up", "Down", "Partial",
     and "Draining".
    :vartype health: str or
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHealthServerHealth
    """

    _attribute_map = {
        "address": {"key": "address", "type": "str"},
        "ip_configuration": {"key": "ipConfiguration", "type": "SubResource"},
        "health": {"key": "health", "type": "str"},
    }

    def __init__(
        self,
        *,
        address: Optional[str] = None,
        ip_configuration: Optional["_models.SubResource"] = None,
        health: Optional[Union[str, "_models.ApplicationGatewayBackendHealthServerHealth"]] = None,
        **kwargs
    ):
        """
        :keyword address: IP address or FQDN of backend server.
        :paramtype address: str
        :keyword ip_configuration: Reference of IP configuration of backend server.
        :paramtype ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword health: Health of backend server. Known values are: "Unknown", "Up", "Down",
         "Partial", and "Draining".
        :paramtype health: str or
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendHealthServerHealth
        """
        super().__init__(**kwargs)
        self.health = health
        self.ip_configuration = ip_configuration
        self.address = address
class ApplicationGatewayBackendHttpSettings(SubResource):  # pylint: disable=too-many-instance-attributes
    """Settings applied to a backend address pool of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar port: Port.
    :vartype port: int
    :ivar protocol: Protocol. Known values are: "Http" and "Https".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayProtocol
    :ivar cookie_based_affinity: Cookie based affinity. Known values are: "Enabled" and "Disabled".
    :vartype cookie_based_affinity: str or
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayCookieBasedAffinity
    :ivar request_timeout: Request timeout in seconds. Application Gateway will fail the request if
     response is not received within RequestTimeout. Acceptable values are from 1 second to 86400
     seconds.
    :vartype request_timeout: int
    :ivar probe: Probe resource of an application gateway.
    :vartype probe: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar authentication_certificates: Array of references to application gateway authentication
     certificates.
    :vartype authentication_certificates: list[~azure.mgmt.network.v2017_03_01.models.SubResource]
    :ivar provisioning_state: Provisioning state of the backend http settings resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    :ivar connection_draining: Connection draining of the backend http settings resource.
    :vartype connection_draining:
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayConnectionDraining
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "port": {"key": "properties.port", "type": "int"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "cookie_based_affinity": {"key": "properties.cookieBasedAffinity", "type": "str"},
        "request_timeout": {"key": "properties.requestTimeout", "type": "int"},
        "probe": {"key": "properties.probe", "type": "SubResource"},
        "authentication_certificates": {"key": "properties.authenticationCertificates", "type": "[SubResource]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
        "connection_draining": {"key": "properties.connectionDraining", "type": "ApplicationGatewayConnectionDraining"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        port: Optional[int] = None,
        protocol: Optional[Union[str, "_models.ApplicationGatewayProtocol"]] = None,
        cookie_based_affinity: Optional[Union[str, "_models.ApplicationGatewayCookieBasedAffinity"]] = None,
        request_timeout: Optional[int] = None,
        probe: Optional["_models.SubResource"] = None,
        authentication_certificates: Optional[List["_models.SubResource"]] = None,
        provisioning_state: Optional[str] = None,
        connection_draining: Optional["_models.ApplicationGatewayConnectionDraining"] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can be
         used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword port: Port.
        :paramtype port: int
        :keyword protocol: Protocol. Known values are: "Http" and "Https".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayProtocol
        :keyword cookie_based_affinity: Cookie based affinity. Known values are: "Enabled" and
         "Disabled".
        :paramtype cookie_based_affinity: str or
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayCookieBasedAffinity
        :keyword request_timeout: Request timeout in seconds. Application Gateway will fail the request
         if response is not received within RequestTimeout. Acceptable values are from 1 second to 86400
         seconds.
        :paramtype request_timeout: int
        :keyword probe: Probe resource of an application gateway.
        :paramtype probe: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword authentication_certificates: Array of references to application gateway authentication
         certificates.
        :paramtype authentication_certificates:
         list[~azure.mgmt.network.v2017_03_01.models.SubResource]
        :keyword provisioning_state: Provisioning state of the backend http settings resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        :keyword connection_draining: Connection draining of the backend http settings resource.
        :paramtype connection_draining:
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayConnectionDraining
        """
        # The SubResource base consumes the resource id.
        super().__init__(id=id, **kwargs)
        self.connection_draining = connection_draining
        self.provisioning_state = provisioning_state
        self.authentication_certificates = authentication_certificates
        self.probe = probe
        self.request_timeout = request_timeout
        self.cookie_based_affinity = cookie_based_affinity
        self.protocol = protocol
        self.port = port
        self.etag = etag
        self.name = name
class ApplicationGatewayConnectionDraining(_serialization.Model):
    """Connection draining lets open connections to a backend server stay active for a specified time after the backend server is removed from the configuration.

    All required parameters must be populated in order to send to Azure.

    :ivar enabled: Whether connection draining is enabled or not. Required.
    :vartype enabled: bool
    :ivar drain_timeout_in_sec: The number of seconds connection draining is active. Acceptable
     values are from 1 second to 3600 seconds. Required.
    :vartype drain_timeout_in_sec: int
    """

    _validation = {
        "enabled": {"required": True},
        "drain_timeout_in_sec": {"required": True, "maximum": 3600, "minimum": 1},
    }

    _attribute_map = {
        "enabled": {"key": "enabled", "type": "bool"},
        "drain_timeout_in_sec": {"key": "drainTimeoutInSec", "type": "int"},
    }

    def __init__(self, *, enabled: bool, drain_timeout_in_sec: int, **kwargs):
        """
        :keyword enabled: Whether connection draining is enabled or not. Required.
        :paramtype enabled: bool
        :keyword drain_timeout_in_sec: The number of seconds connection draining is active. Acceptable
         values are from 1 second to 3600 seconds. Required.
        :paramtype drain_timeout_in_sec: int
        """
        super().__init__(**kwargs)
        self.drain_timeout_in_sec = drain_timeout_in_sec
        self.enabled = enabled
class ApplicationGatewayFirewallDisabledRuleGroup(_serialization.Model):
    """Allows disabling individual rules within a rule group, or an entire rule group.

    All required parameters must be populated in order to send to Azure.

    :ivar rule_group_name: The name of the rule group that will be disabled. Required.
    :vartype rule_group_name: str
    :ivar rules: The list of rules that will be disabled. If null, all rules of the rule group will
     be disabled.
    :vartype rules: list[int]
    """

    _validation = {
        "rule_group_name": {"required": True},
    }

    _attribute_map = {
        "rule_group_name": {"key": "ruleGroupName", "type": "str"},
        "rules": {"key": "rules", "type": "[int]"},
    }

    def __init__(self, *, rule_group_name: str, rules: Optional[List[int]] = None, **kwargs):
        """
        :keyword rule_group_name: The name of the rule group that will be disabled. Required.
        :paramtype rule_group_name: str
        :keyword rules: The list of rules that will be disabled. If null, all rules of the rule group
         will be disabled.
        :paramtype rules: list[int]
        """
        super().__init__(**kwargs)
        self.rules = rules
        self.rule_group_name = rule_group_name
class ApplicationGatewayFirewallRule(_serialization.Model):
    """A single web application firewall rule.

    All required parameters must be populated in order to send to Azure.

    :ivar rule_id: The identifier of the web application firewall rule. Required.
    :vartype rule_id: int
    :ivar description: The description of the web application firewall rule.
    :vartype description: str
    """

    _validation = {
        "rule_id": {"required": True},
    }

    _attribute_map = {
        "rule_id": {"key": "ruleId", "type": "int"},
        "description": {"key": "description", "type": "str"},
    }

    def __init__(self, *, rule_id: int, description: Optional[str] = None, **kwargs):
        """
        :keyword rule_id: The identifier of the web application firewall rule. Required.
        :paramtype rule_id: int
        :keyword description: The description of the web application firewall rule.
        :paramtype description: str
        """
        super().__init__(**kwargs)
        self.description = description
        self.rule_id = rule_id
class ApplicationGatewayFirewallRuleGroup(_serialization.Model):
    """A group of web application firewall rules.

    All required parameters must be populated in order to send to Azure.

    :ivar rule_group_name: The name of the web application firewall rule group. Required.
    :vartype rule_group_name: str
    :ivar description: The description of the web application firewall rule group.
    :vartype description: str
    :ivar rules: The rules of the web application firewall rule group. Required.
    :vartype rules: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallRule]
    """

    _validation = {
        "rule_group_name": {"required": True},
        "rules": {"required": True},
    }

    _attribute_map = {
        "rule_group_name": {"key": "ruleGroupName", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "rules": {"key": "rules", "type": "[ApplicationGatewayFirewallRule]"},
    }

    def __init__(
        self,
        *,
        rule_group_name: str,
        rules: List["_models.ApplicationGatewayFirewallRule"],
        description: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword rule_group_name: The name of the web application firewall rule group. Required.
        :paramtype rule_group_name: str
        :keyword description: The description of the web application firewall rule group.
        :paramtype description: str
        :keyword rules: The rules of the web application firewall rule group. Required.
        :paramtype rules: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallRule]
        """
        super().__init__(**kwargs)
        self.rules = rules
        self.description = description
        self.rule_group_name = rule_group_name
class ApplicationGatewayFirewallRuleSet(Resource):
    """A complete web application firewall rule set.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar provisioning_state: The provisioning state of the web application firewall rule set.
    :vartype provisioning_state: str
    :ivar rule_set_type: The type of the web application firewall rule set.
    :vartype rule_set_type: str
    :ivar rule_set_version: The version of the web application firewall rule set type.
    :vartype rule_set_version: str
    :ivar rule_groups: The rule groups of the web application firewall rule set.
    :vartype rule_groups:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallRuleGroup]
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
        "rule_set_type": {"key": "properties.ruleSetType", "type": "str"},
        "rule_set_version": {"key": "properties.ruleSetVersion", "type": "str"},
        "rule_groups": {"key": "properties.ruleGroups", "type": "[ApplicationGatewayFirewallRuleGroup]"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        provisioning_state: Optional[str] = None,
        rule_set_type: Optional[str] = None,
        rule_set_version: Optional[str] = None,
        rule_groups: Optional[List["_models.ApplicationGatewayFirewallRuleGroup"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword provisioning_state: The provisioning state of the web application firewall rule set.
        :paramtype provisioning_state: str
        :keyword rule_set_type: The type of the web application firewall rule set.
        :paramtype rule_set_type: str
        :keyword rule_set_version: The version of the web application firewall rule set type.
        :paramtype rule_set_version: str
        :keyword rule_groups: The rule groups of the web application firewall rule set.
        :paramtype rule_groups:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallRuleGroup]
        """
        # The Resource base consumes id, location and tags.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.rule_groups = rule_groups
        self.rule_set_version = rule_set_version
        self.rule_set_type = rule_set_type
        self.provisioning_state = provisioning_state
class ApplicationGatewayFrontendIPConfiguration(SubResource):
    """A frontend IP configuration of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar private_ip_address: PrivateIPAddress of the network interface IP Configuration.
    :vartype private_ip_address: str
    :ivar private_ip_allocation_method: PrivateIP allocation method. Known values are: "Static" and
     "Dynamic".
    :vartype private_ip_allocation_method: str or
     ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
    :ivar subnet: Reference of the subnet resource.
    :vartype subnet: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar public_ip_address: Reference of the PublicIP resource.
    :vartype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar provisioning_state: Provisioning state of the public IP resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"},
        "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"},
        "subnet": {"key": "properties.subnet", "type": "SubResource"},
        "public_ip_address": {"key": "properties.publicIPAddress", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        private_ip_address: Optional[str] = None,
        private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None,
        subnet: Optional["_models.SubResource"] = None,
        public_ip_address: Optional["_models.SubResource"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can be
         used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword private_ip_address: PrivateIPAddress of the network interface IP Configuration.
        :paramtype private_ip_address: str
        :keyword private_ip_allocation_method: PrivateIP allocation method. Known values are: "Static"
         and "Dynamic".
        :paramtype private_ip_allocation_method: str or
         ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
        :keyword subnet: Reference of the subnet resource.
        :paramtype subnet: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword public_ip_address: Reference of the PublicIP resource.
        :paramtype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword provisioning_state: Provisioning state of the public IP resource. Possible values are:
         'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The SubResource base consumes the resource id.
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.public_ip_address = public_ip_address
        self.subnet = subnet
        self.private_ip_allocation_method = private_ip_allocation_method
        self.private_ip_address = private_ip_address
        self.etag = etag
        self.name = name
class ApplicationGatewayFrontendPort(SubResource):
    """A frontend port of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar port: Frontend port.
    :vartype port: int
    :ivar provisioning_state: Provisioning state of the frontend port resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "port": {"key": "properties.port", "type": "int"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        port: Optional[int] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can be
         used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword port: Frontend port.
        :paramtype port: int
        :keyword provisioning_state: Provisioning state of the frontend port resource. Possible values
         are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The SubResource base consumes the resource id.
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.port = port
        self.etag = etag
        self.name = name
class ApplicationGatewayHttpListener(SubResource):
    """An HTTP listener of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar frontend_ip_configuration: Frontend IP configuration resource of an application gateway.
    :vartype frontend_ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar frontend_port: Frontend port resource of an application gateway.
    :vartype frontend_port: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar protocol: Protocol. Known values are: "Http" and "Https".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayProtocol
    :ivar host_name: Host name of HTTP listener.
    :vartype host_name: str
    :ivar ssl_certificate: SSL certificate resource of an application gateway.
    :vartype ssl_certificate: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar require_server_name_indication: Applicable only if protocol is https. Enables SNI for
     multi-hosting.
    :vartype require_server_name_indication: bool
    :ivar provisioning_state: Provisioning state of the HTTP listener resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "frontend_ip_configuration": {"key": "properties.frontendIPConfiguration", "type": "SubResource"},
        "frontend_port": {"key": "properties.frontendPort", "type": "SubResource"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "host_name": {"key": "properties.hostName", "type": "str"},
        "ssl_certificate": {"key": "properties.sslCertificate", "type": "SubResource"},
        "require_server_name_indication": {"key": "properties.requireServerNameIndication", "type": "bool"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        frontend_ip_configuration: Optional["_models.SubResource"] = None,
        frontend_port: Optional["_models.SubResource"] = None,
        protocol: Optional[Union[str, "_models.ApplicationGatewayProtocol"]] = None,
        host_name: Optional[str] = None,
        ssl_certificate: Optional["_models.SubResource"] = None,
        require_server_name_indication: Optional[bool] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can be
         used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword frontend_ip_configuration: Frontend IP configuration resource of an application
         gateway.
        :paramtype frontend_ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword frontend_port: Frontend port resource of an application gateway.
        :paramtype frontend_port: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword protocol: Protocol. Known values are: "Http" and "Https".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayProtocol
        :keyword host_name: Host name of HTTP listener.
        :paramtype host_name: str
        :keyword ssl_certificate: SSL certificate resource of an application gateway.
        :paramtype ssl_certificate: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword require_server_name_indication: Applicable only if protocol is https. Enables SNI for
         multi-hosting.
        :paramtype require_server_name_indication: bool
        :keyword provisioning_state: Provisioning state of the HTTP listener resource. Possible values
         are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The SubResource base consumes the resource id.
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.require_server_name_indication = require_server_name_indication
        self.ssl_certificate = ssl_certificate
        self.host_name = host_name
        self.protocol = protocol
        self.frontend_port = frontend_port
        self.frontend_ip_configuration = frontend_ip_configuration
        self.etag = etag
        self.name = name
class ApplicationGatewayIPConfiguration(SubResource):
    """IP configuration of an application gateway. Currently 1 public and 1 private IP configuration is allowed.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar subnet: Reference of the subnet resource. A subnet from where application gateway gets
     its private address.
    :vartype subnet: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar provisioning_state: Provisioning state of the application gateway subnet resource.
     Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "subnet": {"key": "properties.subnet", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        subnet: Optional["_models.SubResource"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can be
         used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword subnet: Reference of the subnet resource. A subnet from where application gateway gets
         its private address.
        :paramtype subnet: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword provisioning_state: Provisioning state of the application gateway subnet resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The SubResource base consumes the resource id.
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.subnet = subnet
        self.etag = etag
        self.name = name
class ApplicationGatewayListResult(_serialization.Model):
    """Response for the ListApplicationGateways API service call.

    :ivar value: Application gateways contained in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGateway]
    :ivar next_link: URL that retrieves the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ApplicationGateway]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.ApplicationGateway"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: Application gateways contained in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGateway]
        :keyword next_link: URL that retrieves the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class ApplicationGatewayPathRule(SubResource):
    """Path rule of the URL path map of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource, unique within its resource group and usable to address the
     resource.
    :vartype name: str
    :ivar etag: Read-only opaque string that changes on every update of the resource.
    :vartype etag: str
    :ivar paths: Path patterns of the URL path map.
    :vartype paths: list[str]
    :ivar backend_address_pool: Backend address pool resource of the URL path map.
    :vartype backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar backend_http_settings: Backend HTTP settings resource of the URL path map.
    :vartype backend_http_settings: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar provisioning_state: Provisioning state of the path rule resource; possible values are
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "paths": {"key": "properties.paths", "type": "[str]"},
        "backend_address_pool": {"key": "properties.backendAddressPool", "type": "SubResource"},
        "backend_http_settings": {"key": "properties.backendHttpSettings", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        paths: Optional[List[str]] = None,
        backend_address_pool: Optional["_models.SubResource"] = None,
        backend_http_settings: Optional["_models.SubResource"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource, unique within its resource group and usable to
         address the resource.
        :paramtype name: str
        :keyword etag: Read-only opaque string that changes on every update of the resource.
        :paramtype etag: str
        :keyword paths: Path patterns of the URL path map.
        :paramtype paths: list[str]
        :keyword backend_address_pool: Backend address pool resource of the URL path map.
        :paramtype backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword backend_http_settings: Backend HTTP settings resource of the URL path map.
        :paramtype backend_http_settings: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword provisioning_state: Provisioning state of the path rule resource; possible values
         are 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.backend_http_settings = backend_http_settings
        self.backend_address_pool = backend_address_pool
        self.paths = paths
        self.etag = etag
        self.name = name
class ApplicationGatewayProbe(SubResource):
    """Health probe of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource, unique within its resource group and usable to address the
     resource.
    :vartype name: str
    :ivar etag: Read-only opaque string that changes on every update of the resource.
    :vartype etag: str
    :ivar protocol: Probe protocol. Known values are: "Http" and "Https".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayProtocol
    :ivar host: Host name the probe is sent to.
    :vartype host: str
    :ivar path: Relative path of the probe; a valid path starts with '/'. The probe target is
     :code:`<Protocol>`://:code:`<host>`::code:`<port>`:code:`<path>`.
    :vartype path: str
    :ivar interval: Probing interval in seconds, i.e. the time between two consecutive probes.
     Acceptable values range from 1 to 86400 seconds.
    :vartype interval: int
    :ivar timeout: Probe timeout in seconds; the probe is marked failed if no valid response
     arrives within this period. Acceptable values range from 1 to 86400 seconds.
    :vartype timeout: int
    :ivar unhealthy_threshold: Probe retry count. A backend server is marked down once the
     consecutive probe failure count reaches this threshold. Acceptable values range from 1 to 20.
    :vartype unhealthy_threshold: int
    :ivar provisioning_state: Provisioning state of the probe resource; possible values are
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "host": {"key": "properties.host", "type": "str"},
        "path": {"key": "properties.path", "type": "str"},
        "interval": {"key": "properties.interval", "type": "int"},
        "timeout": {"key": "properties.timeout", "type": "int"},
        "unhealthy_threshold": {"key": "properties.unhealthyThreshold", "type": "int"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        protocol: Optional[Union[str, "_models.ApplicationGatewayProtocol"]] = None,
        host: Optional[str] = None,
        path: Optional[str] = None,
        interval: Optional[int] = None,
        timeout: Optional[int] = None,
        unhealthy_threshold: Optional[int] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource, unique within its resource group and usable to
         address the resource.
        :paramtype name: str
        :keyword etag: Read-only opaque string that changes on every update of the resource.
        :paramtype etag: str
        :keyword protocol: Probe protocol. Known values are: "Http" and "Https".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayProtocol
        :keyword host: Host name the probe is sent to.
        :paramtype host: str
        :keyword path: Relative path of the probe; a valid path starts with '/'. The probe target
         is :code:`<Protocol>`://:code:`<host>`::code:`<port>`:code:`<path>`.
        :paramtype path: str
        :keyword interval: Probing interval in seconds, i.e. the time between two consecutive
         probes. Acceptable values range from 1 to 86400 seconds.
        :paramtype interval: int
        :keyword timeout: Probe timeout in seconds; the probe is marked failed if no valid
         response arrives within this period. Acceptable values range from 1 to 86400 seconds.
        :paramtype timeout: int
        :keyword unhealthy_threshold: Probe retry count. A backend server is marked down once the
         consecutive probe failure count reaches this threshold. Acceptable values range from 1
         to 20.
        :paramtype unhealthy_threshold: int
        :keyword provisioning_state: Provisioning state of the probe resource; possible values are
         'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.unhealthy_threshold = unhealthy_threshold
        self.timeout = timeout
        self.interval = interval
        self.path = path
        self.host = host
        self.protocol = protocol
        self.etag = etag
        self.name = name
class ApplicationGatewayRequestRoutingRule(SubResource):
    """Request routing rule of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar rule_type: Rule type. Known values are: "Basic" and "PathBasedRouting".
    :vartype rule_type: str or
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayRequestRoutingRuleType
    :ivar backend_address_pool: Backend address pool resource of the application gateway.
    :vartype backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar backend_http_settings: Backend http settings resource of the application gateway.
    :vartype backend_http_settings: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar http_listener: Http listener resource of the application gateway.
    :vartype http_listener: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar url_path_map: URL path map resource of the application gateway.
    :vartype url_path_map: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar provisioning_state: Provisioning state of the request routing rule resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "rule_type": {"key": "properties.ruleType", "type": "str"},
        "backend_address_pool": {"key": "properties.backendAddressPool", "type": "SubResource"},
        "backend_http_settings": {"key": "properties.backendHttpSettings", "type": "SubResource"},
        "http_listener": {"key": "properties.httpListener", "type": "SubResource"},
        "url_path_map": {"key": "properties.urlPathMap", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        rule_type: Optional[Union[str, "_models.ApplicationGatewayRequestRoutingRuleType"]] = None,
        backend_address_pool: Optional["_models.SubResource"] = None,
        backend_http_settings: Optional["_models.SubResource"] = None,
        http_listener: Optional["_models.SubResource"] = None,
        url_path_map: Optional["_models.SubResource"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword rule_type: Rule type. Known values are: "Basic" and "PathBasedRouting".
        :paramtype rule_type: str or
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayRequestRoutingRuleType
        :keyword backend_address_pool: Backend address pool resource of the application gateway.
        :paramtype backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword backend_http_settings: Backend http settings resource of the application gateway.
        :paramtype backend_http_settings: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword http_listener: Http listener resource of the application gateway.
        :paramtype http_listener: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword url_path_map: URL path map resource of the application gateway.
        :paramtype url_path_map: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword provisioning_state: Provisioning state of the request routing rule resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.rule_type = rule_type
        self.backend_address_pool = backend_address_pool
        self.backend_http_settings = backend_http_settings
        self.http_listener = http_listener
        self.url_path_map = url_path_map
        self.provisioning_state = provisioning_state
class ApplicationGatewaySku(_serialization.Model):
    """SKU of an application gateway.

    :ivar name: SKU name of the application gateway. Known values are: "Standard_Small",
     "Standard_Medium", "Standard_Large", "WAF_Medium", and "WAF_Large".
    :vartype name: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySkuName
    :ivar tier: SKU tier of the application gateway. Known values are: "Standard" and "WAF".
    :vartype tier: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayTier
    :ivar capacity: Instance count (capacity) of the application gateway.
    :vartype capacity: int
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "tier": {"key": "tier", "type": "str"},
        "capacity": {"key": "capacity", "type": "int"},
    }

    def __init__(
        self,
        *,
        name: Optional[Union[str, "_models.ApplicationGatewaySkuName"]] = None,
        tier: Optional[Union[str, "_models.ApplicationGatewayTier"]] = None,
        capacity: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword name: SKU name of the application gateway. Known values are: "Standard_Small",
         "Standard_Medium", "Standard_Large", "WAF_Medium", and "WAF_Large".
        :paramtype name: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySkuName
        :keyword tier: SKU tier of the application gateway. Known values are: "Standard" and
         "WAF".
        :paramtype tier: str or ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayTier
        :keyword capacity: Instance count (capacity) of the application gateway.
        :paramtype capacity: int
        """
        super().__init__(**kwargs)
        self.capacity = capacity
        self.tier = tier
        self.name = name
class ApplicationGatewaySslCertificate(SubResource):
    """SSL certificate of an application gateway.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource, unique within its resource group and usable to address the
     resource.
    :vartype name: str
    :ivar etag: Read-only opaque string that changes on every update of the resource.
    :vartype etag: str
    :ivar data: Base-64 encoded pfx certificate. Only applicable in a PUT request.
    :vartype data: str
    :ivar password: Password for the pfx file given in ``data``. Only applicable in a PUT request.
    :vartype password: str
    :ivar public_cert_data: Base-64 encoded public certificate data corresponding to the pfx
     given in ``data``. Only applicable in a GET request.
    :vartype public_cert_data: str
    :ivar provisioning_state: Provisioning state of the SSL certificate resource; possible values
     are 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "data": {"key": "properties.data", "type": "str"},
        "password": {"key": "properties.password", "type": "str"},
        "public_cert_data": {"key": "properties.publicCertData", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        data: Optional[str] = None,
        password: Optional[str] = None,
        public_cert_data: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource, unique within its resource group and usable to
         address the resource.
        :paramtype name: str
        :keyword etag: Read-only opaque string that changes on every update of the resource.
        :paramtype etag: str
        :keyword data: Base-64 encoded pfx certificate. Only applicable in a PUT request.
        :paramtype data: str
        :keyword password: Password for the pfx file given in ``data``. Only applicable in a PUT
         request.
        :paramtype password: str
        :keyword public_cert_data: Base-64 encoded public certificate data corresponding to the
         pfx given in ``data``. Only applicable in a GET request.
        :paramtype public_cert_data: str
        :keyword provisioning_state: Provisioning state of the SSL certificate resource; possible
         values are 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.public_cert_data = public_cert_data
        self.password = password
        self.data = data
        self.etag = etag
        self.name = name
class ApplicationGatewaySslPolicy(_serialization.Model):
    """SSL policy of an application gateway.

    :ivar disabled_ssl_protocols: SSL protocols to disable on the application gateway.
    :vartype disabled_ssl_protocols: list[str or
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySslProtocol]
    """

    _attribute_map = {
        "disabled_ssl_protocols": {"key": "disabledSslProtocols", "type": "[str]"},
    }

    def __init__(
        self,
        *,
        disabled_ssl_protocols: Optional[List[Union[str, "_models.ApplicationGatewaySslProtocol"]]] = None,
        **kwargs
    ):
        """
        :keyword disabled_ssl_protocols: SSL protocols to disable on the application gateway.
        :paramtype disabled_ssl_protocols: list[str or
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewaySslProtocol]
        """
        super().__init__(**kwargs)
        self.disabled_ssl_protocols = disabled_ssl_protocols
class ApplicationGatewayUrlPathMap(SubResource):
    """URL path map of an application gateway.

    A UrlPathMap maps URL paths to backend information for PathBasedRouting.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource, unique within its resource group and usable to address the
     resource.
    :vartype name: str
    :ivar etag: Read-only opaque string that changes on every update of the resource.
    :vartype etag: str
    :ivar default_backend_address_pool: Default backend address pool resource of the URL path map.
    :vartype default_backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar default_backend_http_settings: Default backend HTTP settings resource of the URL path
     map.
    :vartype default_backend_http_settings: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar path_rules: Path rules of the URL path map resource.
    :vartype path_rules: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayPathRule]
    :ivar provisioning_state: Provisioning state of the URL path map resource; possible values
     are 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "default_backend_address_pool": {"key": "properties.defaultBackendAddressPool", "type": "SubResource"},
        "default_backend_http_settings": {"key": "properties.defaultBackendHttpSettings", "type": "SubResource"},
        "path_rules": {"key": "properties.pathRules", "type": "[ApplicationGatewayPathRule]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        default_backend_address_pool: Optional["_models.SubResource"] = None,
        default_backend_http_settings: Optional["_models.SubResource"] = None,
        path_rules: Optional[List["_models.ApplicationGatewayPathRule"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource, unique within its resource group and usable to
         address the resource.
        :paramtype name: str
        :keyword etag: Read-only opaque string that changes on every update of the resource.
        :paramtype etag: str
        :keyword default_backend_address_pool: Default backend address pool resource of the URL
         path map.
        :paramtype default_backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword default_backend_http_settings: Default backend HTTP settings resource of the URL
         path map.
        :paramtype default_backend_http_settings: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword path_rules: Path rules of the URL path map resource.
        :paramtype path_rules: list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayPathRule]
        :keyword provisioning_state: Provisioning state of the URL path map resource; possible
         values are 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.path_rules = path_rules
        self.default_backend_http_settings = default_backend_http_settings
        self.default_backend_address_pool = default_backend_address_pool
        self.etag = etag
        self.name = name
class ApplicationGatewayWebApplicationFirewallConfiguration(_serialization.Model):
    """Web application firewall configuration of an application gateway.

    All required parameters must be populated in order to send to Azure.

    :ivar enabled: Whether the web application firewall is enabled. Required.
    :vartype enabled: bool
    :ivar firewall_mode: Web application firewall mode. Required. Known values are: "Detection"
     and "Prevention".
    :vartype firewall_mode: str or
     ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallMode
    :ivar rule_set_type: Type of the web application firewall rule set; possible values are
     'OWASP'. Required.
    :vartype rule_set_type: str
    :ivar rule_set_version: Version of the rule set type. Required.
    :vartype rule_set_version: str
    :ivar disabled_rule_groups: Rule groups that are disabled.
    :vartype disabled_rule_groups:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
    """

    _validation = {
        "enabled": {"required": True},
        "firewall_mode": {"required": True},
        "rule_set_type": {"required": True},
        "rule_set_version": {"required": True},
    }

    _attribute_map = {
        "enabled": {"key": "enabled", "type": "bool"},
        "firewall_mode": {"key": "firewallMode", "type": "str"},
        "rule_set_type": {"key": "ruleSetType", "type": "str"},
        "rule_set_version": {"key": "ruleSetVersion", "type": "str"},
        "disabled_rule_groups": {"key": "disabledRuleGroups", "type": "[ApplicationGatewayFirewallDisabledRuleGroup]"},
    }

    def __init__(
        self,
        *,
        enabled: bool,
        firewall_mode: Union[str, "_models.ApplicationGatewayFirewallMode"],
        rule_set_type: str,
        rule_set_version: str,
        disabled_rule_groups: Optional[List["_models.ApplicationGatewayFirewallDisabledRuleGroup"]] = None,
        **kwargs
    ):
        """
        :keyword enabled: Whether the web application firewall is enabled. Required.
        :paramtype enabled: bool
        :keyword firewall_mode: Web application firewall mode. Required. Known values are:
         "Detection" and "Prevention".
        :paramtype firewall_mode: str or
         ~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallMode
        :keyword rule_set_type: Type of the web application firewall rule set; possible values are
         'OWASP'. Required.
        :paramtype rule_set_type: str
        :keyword rule_set_version: Version of the rule set type. Required.
        :paramtype rule_set_version: str
        :keyword disabled_rule_groups: Rule groups that are disabled.
        :paramtype disabled_rule_groups:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
        """
        super().__init__(**kwargs)
        self.disabled_rule_groups = disabled_rule_groups
        self.rule_set_version = rule_set_version
        self.rule_set_type = rule_set_type
        self.firewall_mode = firewall_mode
        self.enabled = enabled
class AuthorizationListResult(_serialization.Model):
    """Response for the ListAuthorizations API service call.

    Retrieves all authorizations that belong to an ExpressRouteCircuit.

    :ivar value: Authorizations in an ExpressRoute Circuit.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitAuthorization]
    :ivar next_link: URL that retrieves the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCircuitAuthorization]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteCircuitAuthorization"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: Authorizations in an ExpressRoute Circuit.
        :paramtype value:
         list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitAuthorization]
        :keyword next_link: URL that retrieves the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class AzureAsyncOperationResult(_serialization.Model):
    """Status of an asynchronous Azure operation.

    The response body carries the status of the specified asynchronous operation — succeeded, in
    progress, or failed. This status is distinct from the HTTP status code returned by the Get
    Operation Status call itself: on success the body includes the HTTP status code of the
    successful request; on failure it includes the HTTP status code of the failed request plus
    error information about the failure.

    :ivar status: Status of the Azure async operation. Possible values are: 'InProgress',
     'Succeeded', and 'Failed'. Known values are: "InProgress", "Succeeded", and "Failed".
    :vartype status: str or ~azure.mgmt.network.v2017_03_01.models.NetworkOperationStatus
    :ivar error: Error details, if any.
    :vartype error: ~azure.mgmt.network.v2017_03_01.models.Error
    """

    _attribute_map = {
        "status": {"key": "status", "type": "str"},
        "error": {"key": "error", "type": "Error"},
    }

    def __init__(
        self,
        *,
        status: Optional[Union[str, "_models.NetworkOperationStatus"]] = None,
        error: Optional["_models.Error"] = None,
        **kwargs
    ):
        """
        :keyword status: Status of the Azure async operation. Possible values are: 'InProgress',
         'Succeeded', and 'Failed'. Known values are: "InProgress", "Succeeded", and "Failed".
        :paramtype status: str or ~azure.mgmt.network.v2017_03_01.models.NetworkOperationStatus
        :keyword error: Error details, if any.
        :paramtype error: ~azure.mgmt.network.v2017_03_01.models.Error
        """
        super().__init__(**kwargs)
        self.error = error
        self.status = status
class BackendAddressPool(SubResource):
    """Pool of backend IP addresses.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource, unique within its resource group and usable to address the
     resource.
    :vartype name: str
    :ivar etag: Read-only opaque string that changes on every update of the resource.
    :vartype etag: str
    :ivar backend_ip_configurations: Collection of references to IP addresses defined in network
     interfaces. Server-populated.
    :vartype backend_ip_configurations:
     list[~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceIPConfiguration]
    :ivar load_balancing_rules: Load balancing rules that use this backend address pool.
     Server-populated.
    :vartype load_balancing_rules: list[~azure.mgmt.network.v2017_03_01.models.SubResource]
    :ivar outbound_nat_rule: Outbound NAT rule that uses this backend address pool.
     Server-populated.
    :vartype outbound_nat_rule: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar provisioning_state: Provisioning state of the public IP resource; possible values are
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "backend_ip_configurations": {"readonly": True},
        "load_balancing_rules": {"readonly": True},
        "outbound_nat_rule": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "backend_ip_configurations": {
            "key": "properties.backendIPConfigurations",
            "type": "[NetworkInterfaceIPConfiguration]",
        },
        "load_balancing_rules": {"key": "properties.loadBalancingRules", "type": "[SubResource]"},
        "outbound_nat_rule": {"key": "properties.outboundNatRule", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource, unique within its resource group and usable to
         address the resource.
        :paramtype name: str
        :keyword etag: Read-only opaque string that changes on every update of the resource.
        :paramtype etag: str
        :keyword provisioning_state: Provisioning state of the public IP resource; possible values
         are 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.provisioning_state = provisioning_state
        # Read-only, server-populated fields start out empty on the client.
        self.backend_ip_configurations = None
        self.load_balancing_rules = None
        self.outbound_nat_rule = None
class BGPCommunity(_serialization.Model):
    """BGP community information offered in Service Community resources.

    :ivar service_supported_region: Region the service supports; e.g. for O365 the region is
     Global.
    :vartype service_supported_region: str
    :ivar community_name: Name of the BGP community; e.g. Skype.
    :vartype community_name: str
    :ivar community_value: Value of the BGP community. For more information:
     https://docs.microsoft.com/en-us/azure/expressroute/expressroute-routing.
    :vartype community_value: str
    :ivar community_prefixes: Prefixes that the BGP community contains.
    :vartype community_prefixes: list[str]
    """

    _attribute_map = {
        "service_supported_region": {"key": "serviceSupportedRegion", "type": "str"},
        "community_name": {"key": "communityName", "type": "str"},
        "community_value": {"key": "communityValue", "type": "str"},
        "community_prefixes": {"key": "communityPrefixes", "type": "[str]"},
    }

    def __init__(
        self,
        *,
        service_supported_region: Optional[str] = None,
        community_name: Optional[str] = None,
        community_value: Optional[str] = None,
        community_prefixes: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword service_supported_region: Region the service supports; e.g. for O365 the region
         is Global.
        :paramtype service_supported_region: str
        :keyword community_name: Name of the BGP community; e.g. Skype.
        :paramtype community_name: str
        :keyword community_value: Value of the BGP community. For more information:
         https://docs.microsoft.com/en-us/azure/expressroute/expressroute-routing.
        :paramtype community_value: str
        :keyword community_prefixes: Prefixes that the BGP community contains.
        :paramtype community_prefixes: list[str]
        """
        super().__init__(**kwargs)
        self.community_prefixes = community_prefixes
        self.community_value = community_value
        self.community_name = community_name
        self.service_supported_region = service_supported_region
class BgpPeerStatus(_serialization.Model):
    """Status details for a single BGP peer.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar local_address: The virtual network gateway's local address.
    :vartype local_address: str
    :ivar neighbor: The remote BGP peer.
    :vartype neighbor: str
    :ivar asn: The autonomous system number of the remote BGP peer.
    :vartype asn: int
    :ivar state: The BGP peer state. Known values are: "Unknown", "Stopped", "Idle", "Connecting",
     and "Connected".
    :vartype state: str or ~azure.mgmt.network.v2017_03_01.models.BgpPeerState
    :ivar connected_duration: For how long the peering has been up.
    :vartype connected_duration: str
    :ivar routes_received: The number of routes learned from this peer.
    :vartype routes_received: int
    :ivar messages_sent: The number of BGP messages sent.
    :vartype messages_sent: int
    :ivar messages_received: The number of BGP messages received.
    :vartype messages_received: int
    """

    # Every field is server-populated, hence read-only.
    _validation = {
        "local_address": {"readonly": True},
        "neighbor": {"readonly": True},
        "asn": {"readonly": True},
        "state": {"readonly": True},
        "connected_duration": {"readonly": True},
        "routes_received": {"readonly": True},
        "messages_sent": {"readonly": True},
        "messages_received": {"readonly": True},
    }

    _attribute_map = {
        "local_address": {"key": "localAddress", "type": "str"},
        "neighbor": {"key": "neighbor", "type": "str"},
        "asn": {"key": "asn", "type": "int"},
        "state": {"key": "state", "type": "str"},
        "connected_duration": {"key": "connectedDuration", "type": "str"},
        "routes_received": {"key": "routesReceived", "type": "int"},
        "messages_sent": {"key": "messagesSent", "type": "int"},
        "messages_received": {"key": "messagesReceived", "type": "int"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        # All attributes start unset; the server fills them on responses.
        for _field in (
            "local_address",
            "neighbor",
            "asn",
            "state",
            "connected_duration",
            "routes_received",
            "messages_sent",
            "messages_received",
        ):
            setattr(self, _field, None)
class BgpPeerStatusListResult(_serialization.Model):
    """Result payload of the list BGP peer status API service call.

    :ivar value: List of BGP peers.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.BgpPeerStatus]
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[BgpPeerStatus]"},
    }

    def __init__(self, *, value: Optional[List["_models.BgpPeerStatus"]] = None, **kwargs):
        """
        :keyword value: List of BGP peers.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.BgpPeerStatus]
        """
        super().__init__(**kwargs)
        self.value = value
class BgpServiceCommunity(Resource):
    """A Service Community resource and its properties.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar service_name: The name of the bgp community. e.g. Skype.
    :vartype service_name: str
    :ivar bgp_communities: Get a list of bgp communities.
    :vartype bgp_communities: list[~azure.mgmt.network.v2017_03_01.models.BGPCommunity]
    """

    # name/type are assigned by Azure and cannot be set by the client.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    # Nested "properties.*" keys flatten the ARM properties envelope.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "service_name": {"key": "properties.serviceName", "type": "str"},
        "bgp_communities": {"key": "properties.bgpCommunities", "type": "[BGPCommunity]"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        service_name: Optional[str] = None,
        bgp_communities: Optional[List["_models.BGPCommunity"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword service_name: The name of the bgp community. e.g. Skype.
        :paramtype service_name: str
        :keyword bgp_communities: Get a list of bgp communities.
        :paramtype bgp_communities: list[~azure.mgmt.network.v2017_03_01.models.BGPCommunity]
        """
        # Common ARM resource fields are handled by the Resource base class.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.bgp_communities = bgp_communities
        self.service_name = service_name
class BgpServiceCommunityListResult(_serialization.Model):
    """Result payload of the ListServiceCommunity API service call.

    :ivar value: A list of service community resources.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.BgpServiceCommunity]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[BgpServiceCommunity]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.BgpServiceCommunity"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: A list of service community resources.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.BgpServiceCommunity]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        # next_link of None means this page is the last one.
        self.next_link = next_link
        self.value = value
class BgpSettings(_serialization.Model):
    """Configuration of a BGP speaker.

    :ivar asn: The BGP speaker's ASN.
    :vartype asn: int
    :ivar bgp_peering_address: The BGP peering address and BGP identifier of this BGP speaker.
    :vartype bgp_peering_address: str
    :ivar peer_weight: The weight added to routes learned from this BGP speaker.
    :vartype peer_weight: int
    """

    _attribute_map = {
        "asn": {"key": "asn", "type": "int"},
        "bgp_peering_address": {"key": "bgpPeeringAddress", "type": "str"},
        "peer_weight": {"key": "peerWeight", "type": "int"},
    }

    def __init__(
        self,
        *,
        asn: Optional[int] = None,
        bgp_peering_address: Optional[str] = None,
        peer_weight: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword asn: The BGP speaker's ASN.
        :paramtype asn: int
        :keyword bgp_peering_address: The BGP peering address and BGP identifier of this BGP
         speaker.
        :paramtype bgp_peering_address: str
        :keyword peer_weight: The weight added to routes learned from this BGP speaker.
        :paramtype peer_weight: int
        """
        super().__init__(**kwargs)
        self.peer_weight = peer_weight
        self.bgp_peering_address = bgp_peering_address
        self.asn = asn
class ConnectionResetSharedKey(_serialization.Model):
    """Request body for resetting a virtual network connection shared key.

    All required parameters must be populated in order to send to Azure.

    :ivar key_length: The virtual network connection reset shared key length, should between 1 and
     128. Required.
    :vartype key_length: int
    """

    # Server rejects values outside [1, 128]; validated client-side as well.
    _validation = {
        "key_length": {"required": True, "maximum": 128, "minimum": 1},
    }

    _attribute_map = {
        "key_length": {"key": "keyLength", "type": "int"},
    }

    def __init__(self, *, key_length: int, **kwargs):
        """
        :keyword key_length: The virtual network connection reset shared key length, should between
         1 and 128. Required.
        :paramtype key_length: int
        """
        super().__init__(**kwargs)
        self.key_length = key_length
class ConnectionSharedKey(_serialization.Model):
    """Result payload of the GetConnectionSharedKey API service call.

    All required parameters must be populated in order to send to Azure.

    :ivar value: The virtual network connection shared key value. Required.
    :vartype value: str
    """

    _validation = {
        "value": {"required": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "str"},
    }

    def __init__(self, *, value: str, **kwargs):
        """
        :keyword value: The virtual network connection shared key value. Required.
        :paramtype value: str
        """
        super().__init__(**kwargs)
        self.value = value
class ConnectivityDestination(_serialization.Model):
    """Describes the destination endpoint of a connectivity check.

    :ivar resource_id: The ID of the resource to which a connection attempt will be made.
    :vartype resource_id: str
    :ivar address: The IP address or URI the resource to which a connection attempt will be made.
    :vartype address: str
    :ivar port: Port on which check connectivity will be performed.
    :vartype port: int
    """

    _attribute_map = {
        "resource_id": {"key": "resourceId", "type": "str"},
        "address": {"key": "address", "type": "str"},
        "port": {"key": "port", "type": "int"},
    }

    def __init__(
        self, *, resource_id: Optional[str] = None, address: Optional[str] = None, port: Optional[int] = None, **kwargs
    ):
        """
        :keyword resource_id: The ID of the resource to which a connection attempt will be made.
        :paramtype resource_id: str
        :keyword address: The IP address or URI the resource to which a connection attempt will be
         made.
        :paramtype address: str
        :keyword port: Port on which check connectivity will be performed.
        :paramtype port: int
        """
        super().__init__(**kwargs)
        # The destination may be identified either by resource ID or by address.
        self.port = port
        self.address = address
        self.resource_id = resource_id
class ConnectivityHop(_serialization.Model):
    """A single hop on the path between the source and the destination.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar type: The type of the hop.
    :vartype type: str
    :ivar id: The ID of the hop.
    :vartype id: str
    :ivar address: The IP address of the hop.
    :vartype address: str
    :ivar resource_id: The ID of the resource corresponding to this hop.
    :vartype resource_id: str
    :ivar next_hop_ids: List of next hop identifiers.
    :vartype next_hop_ids: list[str]
    :ivar issues: List of issues.
    :vartype issues: list[~azure.mgmt.network.v2017_03_01.models.ConnectivityIssue]
    """

    # Every field is server-populated, hence read-only.
    _validation = {
        "type": {"readonly": True},
        "id": {"readonly": True},
        "address": {"readonly": True},
        "resource_id": {"readonly": True},
        "next_hop_ids": {"readonly": True},
        "issues": {"readonly": True},
    }

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "address": {"key": "address", "type": "str"},
        "resource_id": {"key": "resourceId", "type": "str"},
        "next_hop_ids": {"key": "nextHopIds", "type": "[str]"},
        "issues": {"key": "issues", "type": "[ConnectivityIssue]"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        # All attributes start unset; the server fills them on responses.
        for _field in ("type", "id", "address", "resource_id", "next_hop_ids", "issues"):
            setattr(self, _field, None)
class ConnectivityInformation(_serialization.Model):
    """Overall result of a connectivity check.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar hops: List of hops between the source and the destination.
    :vartype hops: list[~azure.mgmt.network.v2017_03_01.models.ConnectivityHop]
    :ivar connection_status: The connection status. Known values are: "Unknown", "Connected",
     "Disconnected", and "Degraded".
    :vartype connection_status: str or ~azure.mgmt.network.v2017_03_01.models.ConnectionStatus
    :ivar avg_latency_in_ms: Average latency in milliseconds.
    :vartype avg_latency_in_ms: int
    :ivar min_latency_in_ms: Minimum latency in milliseconds.
    :vartype min_latency_in_ms: int
    :ivar max_latency_in_ms: Maximum latency in milliseconds.
    :vartype max_latency_in_ms: int
    :ivar probes_sent: Total number of probes sent.
    :vartype probes_sent: int
    :ivar probes_failed: Number of failed probes.
    :vartype probes_failed: int
    """

    # Every field is server-populated, hence read-only.
    _validation = {
        "hops": {"readonly": True},
        "connection_status": {"readonly": True},
        "avg_latency_in_ms": {"readonly": True},
        "min_latency_in_ms": {"readonly": True},
        "max_latency_in_ms": {"readonly": True},
        "probes_sent": {"readonly": True},
        "probes_failed": {"readonly": True},
    }

    _attribute_map = {
        "hops": {"key": "hops", "type": "[ConnectivityHop]"},
        "connection_status": {"key": "connectionStatus", "type": "str"},
        "avg_latency_in_ms": {"key": "avgLatencyInMs", "type": "int"},
        "min_latency_in_ms": {"key": "minLatencyInMs", "type": "int"},
        "max_latency_in_ms": {"key": "maxLatencyInMs", "type": "int"},
        "probes_sent": {"key": "probesSent", "type": "int"},
        "probes_failed": {"key": "probesFailed", "type": "int"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        # All attributes start unset; the server fills them on responses.
        for _field in (
            "hops",
            "connection_status",
            "avg_latency_in_ms",
            "min_latency_in_ms",
            "max_latency_in_ms",
            "probes_sent",
            "probes_failed",
        ):
            setattr(self, _field, None)
class ConnectivityIssue(_serialization.Model):
    """A single issue found while performing a connectivity check.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar origin: The origin of the issue. Known values are: "Local", "Inbound", and "Outbound".
    :vartype origin: str or ~azure.mgmt.network.v2017_03_01.models.Origin
    :ivar severity: The severity of the issue. Known values are: "Error" and "Warning".
    :vartype severity: str or ~azure.mgmt.network.v2017_03_01.models.Severity
    :ivar type: The type of issue. Known values are: "Unknown", "AgentStopped", "GuestFirewall",
     "DnsResolution", "SocketBind", "NetworkSecurityRule", "UserDefinedRoute", "PortThrottled", and
     "Platform".
    :vartype type: str or ~azure.mgmt.network.v2017_03_01.models.IssueType
    :ivar context: Provides additional context on the issue.
    :vartype context: list[dict[str, str]]
    """

    # Every field is server-populated, hence read-only.
    _validation = {
        "origin": {"readonly": True},
        "severity": {"readonly": True},
        "type": {"readonly": True},
        "context": {"readonly": True},
    }

    _attribute_map = {
        "origin": {"key": "origin", "type": "str"},
        "severity": {"key": "severity", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "context": {"key": "context", "type": "[{str}]"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        # All attributes start unset; the server fills them on responses.
        for _field in ("origin", "severity", "type", "context"):
            setattr(self, _field, None)
class ConnectivityParameters(_serialization.Model):
    """Input describing how a connectivity check will be performed.

    All required parameters must be populated in order to send to Azure.

    :ivar source: Parameters that define the source of the connection. Required.
    :vartype source: ~azure.mgmt.network.v2017_03_01.models.ConnectivitySource
    :ivar destination: Parameters that define destination of connection. Required.
    :vartype destination: ~azure.mgmt.network.v2017_03_01.models.ConnectivityDestination
    """

    _validation = {
        "source": {"required": True},
        "destination": {"required": True},
    }

    _attribute_map = {
        "source": {"key": "source", "type": "ConnectivitySource"},
        "destination": {"key": "destination", "type": "ConnectivityDestination"},
    }

    def __init__(
        self, *, source: "_models.ConnectivitySource", destination: "_models.ConnectivityDestination", **kwargs
    ):
        """
        :keyword source: Parameters that define the source of the connection. Required.
        :paramtype source: ~azure.mgmt.network.v2017_03_01.models.ConnectivitySource
        :keyword destination: Parameters that define destination of connection. Required.
        :paramtype destination: ~azure.mgmt.network.v2017_03_01.models.ConnectivityDestination
        """
        super().__init__(**kwargs)
        self.destination = destination
        self.source = source
class ConnectivitySource(_serialization.Model):
    """Describes the source endpoint of a connectivity check.

    All required parameters must be populated in order to send to Azure.

    :ivar resource_id: The ID of the resource from which a connectivity check will be initiated.
     Required.
    :vartype resource_id: str
    :ivar port: The source port from which a connectivity check will be performed.
    :vartype port: int
    """

    _validation = {
        "resource_id": {"required": True},
    }

    _attribute_map = {
        "resource_id": {"key": "resourceId", "type": "str"},
        "port": {"key": "port", "type": "int"},
    }

    def __init__(self, *, resource_id: str, port: Optional[int] = None, **kwargs):
        """
        :keyword resource_id: The ID of the resource from which a connectivity check will be
         initiated. Required.
        :paramtype resource_id: str
        :keyword port: The source port from which a connectivity check will be performed.
        :paramtype port: int
        """
        super().__init__(**kwargs)
        self.port = port
        self.resource_id = resource_id
class DhcpOptions(_serialization.Model):
    """DhcpOptions contains an array of DNS servers available to VMs deployed in the virtual
    network. Standard DHCP option for a subnet overrides VNET DHCP options.

    :ivar dns_servers: The list of DNS servers IP addresses.
    :vartype dns_servers: list[str]
    """

    _attribute_map = {
        "dns_servers": {"key": "dnsServers", "type": "[str]"},
    }

    def __init__(self, *, dns_servers: Optional[List[str]] = None, **kwargs):
        """
        :keyword dns_servers: The list of DNS servers IP addresses.
        :paramtype dns_servers: list[str]
        """
        super().__init__(**kwargs)
        self.dns_servers = dns_servers
class DnsNameAvailabilityResult(_serialization.Model):
    """Result payload of the CheckDnsNameAvailability API service call.

    :ivar available: Domain availability (True/False).
    :vartype available: bool
    """

    _attribute_map = {
        "available": {"key": "available", "type": "bool"},
    }

    def __init__(self, *, available: Optional[bool] = None, **kwargs):
        """
        :keyword available: Domain availability (True/False).
        :paramtype available: bool
        """
        super().__init__(**kwargs)
        self.available = available
class EffectiveNetworkSecurityGroup(_serialization.Model):
    """An effective network security group applied to a resource.

    :ivar network_security_group: The ID of network security group that is applied.
    :vartype network_security_group: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar association: The effective network security group association.
    :vartype association:
     ~azure.mgmt.network.v2017_03_01.models.EffectiveNetworkSecurityGroupAssociation
    :ivar effective_security_rules: A collection of effective security rules.
    :vartype effective_security_rules:
     list[~azure.mgmt.network.v2017_03_01.models.EffectiveNetworkSecurityRule]
    """

    _attribute_map = {
        "network_security_group": {"key": "networkSecurityGroup", "type": "SubResource"},
        "association": {"key": "association", "type": "EffectiveNetworkSecurityGroupAssociation"},
        "effective_security_rules": {"key": "effectiveSecurityRules", "type": "[EffectiveNetworkSecurityRule]"},
    }

    def __init__(
        self,
        *,
        network_security_group: Optional["_models.SubResource"] = None,
        association: Optional["_models.EffectiveNetworkSecurityGroupAssociation"] = None,
        effective_security_rules: Optional[List["_models.EffectiveNetworkSecurityRule"]] = None,
        **kwargs
    ):
        """
        :keyword network_security_group: The ID of network security group that is applied.
        :paramtype network_security_group: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword association: The effective network security group association.
        :paramtype association:
         ~azure.mgmt.network.v2017_03_01.models.EffectiveNetworkSecurityGroupAssociation
        :keyword effective_security_rules: A collection of effective security rules.
        :paramtype effective_security_rules:
         list[~azure.mgmt.network.v2017_03_01.models.EffectiveNetworkSecurityRule]
        """
        super().__init__(**kwargs)
        self.effective_security_rules = effective_security_rules
        self.association = association
        self.network_security_group = network_security_group
class EffectiveNetworkSecurityGroupAssociation(_serialization.Model):
    """Identifies what an effective network security group is associated with.

    :ivar subnet: The ID of the subnet if assigned.
    :vartype subnet: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar network_interface: The ID of the network interface if assigned.
    :vartype network_interface: ~azure.mgmt.network.v2017_03_01.models.SubResource
    """

    _attribute_map = {
        "subnet": {"key": "subnet", "type": "SubResource"},
        "network_interface": {"key": "networkInterface", "type": "SubResource"},
    }

    def __init__(
        self,
        *,
        subnet: Optional["_models.SubResource"] = None,
        network_interface: Optional["_models.SubResource"] = None,
        **kwargs
    ):
        """
        :keyword subnet: The ID of the subnet if assigned.
        :paramtype subnet: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword network_interface: The ID of the network interface if assigned.
        :paramtype network_interface: ~azure.mgmt.network.v2017_03_01.models.SubResource
        """
        super().__init__(**kwargs)
        self.network_interface = network_interface
        self.subnet = subnet
class EffectiveNetworkSecurityGroupListResult(_serialization.Model):
    """Result payload of the list effective network security groups API service call.

    :ivar value: A list of effective network security groups.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.EffectiveNetworkSecurityGroup]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[EffectiveNetworkSecurityGroup]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.EffectiveNetworkSecurityGroup"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: A list of effective network security groups.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.EffectiveNetworkSecurityGroup]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        # next_link of None means this page is the last one.
        self.next_link = next_link
        self.value = value
class EffectiveNetworkSecurityRule(_serialization.Model):  # pylint: disable=too-many-instance-attributes
    """A single effective network security rule.

    :ivar name: The name of the security rule specified by the user (if created by the user).
    :vartype name: str
    :ivar protocol: The network protocol this rule applies to. Possible values are: 'Tcp', 'Udp',
     and '*'. Known values are: "Tcp", "Udp", and "*".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleProtocol
    :ivar source_port_range: The source port or range.
    :vartype source_port_range: str
    :ivar destination_port_range: The destination port or range.
    :vartype destination_port_range: str
    :ivar source_address_prefix: The source address prefix.
    :vartype source_address_prefix: str
    :ivar destination_address_prefix: The destination address prefix.
    :vartype destination_address_prefix: str
    :ivar expanded_source_address_prefix: The expanded source address prefix.
    :vartype expanded_source_address_prefix: list[str]
    :ivar expanded_destination_address_prefix: Expanded destination address prefix.
    :vartype expanded_destination_address_prefix: list[str]
    :ivar access: Whether network traffic is allowed or denied. Possible values are: 'Allow' and
     'Deny'. Known values are: "Allow" and "Deny".
    :vartype access: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleAccess
    :ivar priority: The priority of the rule.
    :vartype priority: int
    :ivar direction: The direction of the rule. Possible values are: 'Inbound and Outbound'. Known
     values are: "Inbound" and "Outbound".
    :vartype direction: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleDirection
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "protocol": {"key": "protocol", "type": "str"},
        "source_port_range": {"key": "sourcePortRange", "type": "str"},
        "destination_port_range": {"key": "destinationPortRange", "type": "str"},
        "source_address_prefix": {"key": "sourceAddressPrefix", "type": "str"},
        "destination_address_prefix": {"key": "destinationAddressPrefix", "type": "str"},
        "expanded_source_address_prefix": {"key": "expandedSourceAddressPrefix", "type": "[str]"},
        "expanded_destination_address_prefix": {"key": "expandedDestinationAddressPrefix", "type": "[str]"},
        "access": {"key": "access", "type": "str"},
        "priority": {"key": "priority", "type": "int"},
        "direction": {"key": "direction", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        protocol: Optional[Union[str, "_models.SecurityRuleProtocol"]] = None,
        source_port_range: Optional[str] = None,
        destination_port_range: Optional[str] = None,
        source_address_prefix: Optional[str] = None,
        destination_address_prefix: Optional[str] = None,
        expanded_source_address_prefix: Optional[List[str]] = None,
        expanded_destination_address_prefix: Optional[List[str]] = None,
        access: Optional[Union[str, "_models.SecurityRuleAccess"]] = None,
        priority: Optional[int] = None,
        direction: Optional[Union[str, "_models.SecurityRuleDirection"]] = None,
        **kwargs
    ):
        """
        :keyword name: The name of the security rule specified by the user (if created by the
         user).
        :paramtype name: str
        :keyword protocol: The network protocol this rule applies to. Possible values are: 'Tcp',
         'Udp', and '*'. Known values are: "Tcp", "Udp", and "*".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleProtocol
        :keyword source_port_range: The source port or range.
        :paramtype source_port_range: str
        :keyword destination_port_range: The destination port or range.
        :paramtype destination_port_range: str
        :keyword source_address_prefix: The source address prefix.
        :paramtype source_address_prefix: str
        :keyword destination_address_prefix: The destination address prefix.
        :paramtype destination_address_prefix: str
        :keyword expanded_source_address_prefix: The expanded source address prefix.
        :paramtype expanded_source_address_prefix: list[str]
        :keyword expanded_destination_address_prefix: Expanded destination address prefix.
        :paramtype expanded_destination_address_prefix: list[str]
        :keyword access: Whether network traffic is allowed or denied. Possible values are: 'Allow'
         and 'Deny'. Known values are: "Allow" and "Deny".
        :paramtype access: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleAccess
        :keyword priority: The priority of the rule.
        :paramtype priority: int
        :keyword direction: The direction of the rule. Possible values are: 'Inbound and
         Outbound'. Known values are: "Inbound" and "Outbound".
        :paramtype direction: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleDirection
        """
        super().__init__(**kwargs)
        # Plain attribute capture; serialization is driven by _attribute_map.
        self.direction = direction
        self.priority = priority
        self.access = access
        self.expanded_destination_address_prefix = expanded_destination_address_prefix
        self.expanded_source_address_prefix = expanded_source_address_prefix
        self.destination_address_prefix = destination_address_prefix
        self.source_address_prefix = source_address_prefix
        self.destination_port_range = destination_port_range
        self.source_port_range = source_port_range
        self.protocol = protocol
        self.name = name
class EffectiveRoute(_serialization.Model):
    """A single effective route.

    :ivar name: The name of the user defined route. This is optional.
    :vartype name: str
    :ivar source: Who created the route. Possible values are: 'Unknown', 'User',
     'VirtualNetworkGateway', and 'Default'. Known values are: "Unknown", "User",
     "VirtualNetworkGateway", and "Default".
    :vartype source: str or ~azure.mgmt.network.v2017_03_01.models.EffectiveRouteSource
    :ivar state: The value of effective route. Possible values are: 'Active' and 'Invalid'. Known
     values are: "Active" and "Invalid".
    :vartype state: str or ~azure.mgmt.network.v2017_03_01.models.EffectiveRouteState
    :ivar address_prefix: The address prefixes of the effective routes in CIDR notation.
    :vartype address_prefix: list[str]
    :ivar next_hop_ip_address: The IP address of the next hop of the effective route.
    :vartype next_hop_ip_address: list[str]
    :ivar next_hop_type: The type of Azure hop the packet should be sent to. Possible values are:
     'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'. Known values
     are: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", and "None".
    :vartype next_hop_type: str or ~azure.mgmt.network.v2017_03_01.models.RouteNextHopType
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "source": {"key": "source", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "address_prefix": {"key": "addressPrefix", "type": "[str]"},
        "next_hop_ip_address": {"key": "nextHopIpAddress", "type": "[str]"},
        "next_hop_type": {"key": "nextHopType", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        source: Optional[Union[str, "_models.EffectiveRouteSource"]] = None,
        state: Optional[Union[str, "_models.EffectiveRouteState"]] = None,
        address_prefix: Optional[List[str]] = None,
        next_hop_ip_address: Optional[List[str]] = None,
        next_hop_type: Optional[Union[str, "_models.RouteNextHopType"]] = None,
        **kwargs
    ):
        """
        :keyword name: The name of the user defined route. This is optional.
        :paramtype name: str
        :keyword source: Who created the route. Possible values are: 'Unknown', 'User',
         'VirtualNetworkGateway', and 'Default'. Known values are: "Unknown", "User",
         "VirtualNetworkGateway", and "Default".
        :paramtype source: str or ~azure.mgmt.network.v2017_03_01.models.EffectiveRouteSource
        :keyword state: The value of effective route. Possible values are: 'Active' and 'Invalid'.
         Known values are: "Active" and "Invalid".
        :paramtype state: str or ~azure.mgmt.network.v2017_03_01.models.EffectiveRouteState
        :keyword address_prefix: The address prefixes of the effective routes in CIDR notation.
        :paramtype address_prefix: list[str]
        :keyword next_hop_ip_address: The IP address of the next hop of the effective route.
        :paramtype next_hop_ip_address: list[str]
        :keyword next_hop_type: The type of Azure hop the packet should be sent to. Possible values
         are: 'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'.
         Known values are: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance",
         and "None".
        :paramtype next_hop_type: str or ~azure.mgmt.network.v2017_03_01.models.RouteNextHopType
        """
        super().__init__(**kwargs)
        self.next_hop_type = next_hop_type
        self.next_hop_ip_address = next_hop_ip_address
        self.address_prefix = address_prefix
        self.state = state
        self.source = source
        self.name = name
class EffectiveRouteListResult(_serialization.Model):
    """Result payload of the list effective route API service call.

    :ivar value: A list of effective routes.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.EffectiveRoute]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[EffectiveRoute]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.EffectiveRoute"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: A list of effective routes.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.EffectiveRoute]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        # next_link of None means this page is the last one.
        self.next_link = next_link
        self.value = value
class Error(_serialization.Model):
    """A service error body.

    :ivar code:
    :vartype code: str
    :ivar message:
    :vartype message: str
    :ivar target:
    :vartype target: str
    :ivar details:
    :vartype details: list[~azure.mgmt.network.v2017_03_01.models.ErrorDetails]
    :ivar inner_error:
    :vartype inner_error: str
    """

    _attribute_map = {
        "code": {"key": "code", "type": "str"},
        "message": {"key": "message", "type": "str"},
        "target": {"key": "target", "type": "str"},
        "details": {"key": "details", "type": "[ErrorDetails]"},
        "inner_error": {"key": "innerError", "type": "str"},
    }

    def __init__(
        self,
        *,
        code: Optional[str] = None,
        message: Optional[str] = None,
        target: Optional[str] = None,
        details: Optional[List["_models.ErrorDetails"]] = None,
        inner_error: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword message:
        :paramtype message: str
        :keyword target:
        :paramtype target: str
        :keyword details:
        :paramtype details: list[~azure.mgmt.network.v2017_03_01.models.ErrorDetails]
        :keyword inner_error:
        :paramtype inner_error: str
        """
        super().__init__(**kwargs)
        self.inner_error = inner_error
        self.details = details
        self.target = target
        self.message = message
        self.code = code
class ErrorDetails(_serialization.Model):
    """Detail entry nested inside a service error.

    :ivar code:
    :vartype code: str
    :ivar target:
    :vartype target: str
    :ivar message:
    :vartype message: str
    """

    _attribute_map = {
        "code": {"key": "code", "type": "str"},
        "target": {"key": "target", "type": "str"},
        "message": {"key": "message", "type": "str"},
    }

    def __init__(
        self, *, code: Optional[str] = None, target: Optional[str] = None, message: Optional[str] = None, **kwargs
    ):
        """
        :keyword code:
        :paramtype code: str
        :keyword target:
        :paramtype target: str
        :keyword message:
        :paramtype message: str
        """
        super().__init__(**kwargs)
        self.message = message
        self.target = target
        self.code = code
class ExpressRouteCircuit(Resource):  # pylint: disable=too-many-instance-attributes
    """ExpressRouteCircuit resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar sku: The SKU.
    :vartype sku: ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitSku
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar allow_classic_operations: Allow classic operations.
    :vartype allow_classic_operations: bool
    :ivar circuit_provisioning_state: The CircuitProvisioningState state of the resource.
    :vartype circuit_provisioning_state: str
    :ivar service_provider_provisioning_state: The ServiceProviderProvisioningState state of the
     resource. Known values are: "NotProvisioned", "Provisioning", "Provisioned", and
     "Deprovisioning".
    :vartype service_provider_provisioning_state: str or
     ~azure.mgmt.network.v2017_03_01.models.ServiceProviderProvisioningState
    :ivar authorizations: The list of authorizations.
    :vartype authorizations:
     list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitAuthorization]
    :ivar peerings: The list of peerings.
    :vartype peerings: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeering]
    :ivar service_key: The ServiceKey.
    :vartype service_key: str
    :ivar service_provider_notes: The ServiceProviderNotes.
    :vartype service_provider_notes: str
    :ivar service_provider_properties: The ServiceProviderProperties.
    :vartype service_provider_properties:
     ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitServiceProviderProperties
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    :ivar gateway_manager_etag: The GatewayManager Etag.
    :vartype gateway_manager_etag: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "etag": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "sku": {"key": "sku", "type": "ExpressRouteCircuitSku"},
        "etag": {"key": "etag", "type": "str"},
        "allow_classic_operations": {"key": "properties.allowClassicOperations", "type": "bool"},
        "circuit_provisioning_state": {"key": "properties.circuitProvisioningState", "type": "str"},
        "service_provider_provisioning_state": {"key": "properties.serviceProviderProvisioningState", "type": "str"},
        "authorizations": {"key": "properties.authorizations", "type": "[ExpressRouteCircuitAuthorization]"},
        "peerings": {"key": "properties.peerings", "type": "[ExpressRouteCircuitPeering]"},
        "service_key": {"key": "properties.serviceKey", "type": "str"},
        "service_provider_notes": {"key": "properties.serviceProviderNotes", "type": "str"},
        "service_provider_properties": {
            "key": "properties.serviceProviderProperties",
            "type": "ExpressRouteCircuitServiceProviderProperties",
        },
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
        "gateway_manager_etag": {"key": "properties.gatewayManagerEtag", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        sku: Optional["_models.ExpressRouteCircuitSku"] = None,
        allow_classic_operations: Optional[bool] = None,
        circuit_provisioning_state: Optional[str] = None,
        service_provider_provisioning_state: Optional[Union[str, "_models.ServiceProviderProvisioningState"]] = None,
        authorizations: Optional[List["_models.ExpressRouteCircuitAuthorization"]] = None,
        peerings: Optional[List["_models.ExpressRouteCircuitPeering"]] = None,
        service_key: Optional[str] = None,
        service_provider_notes: Optional[str] = None,
        service_provider_properties: Optional["_models.ExpressRouteCircuitServiceProviderProperties"] = None,
        provisioning_state: Optional[str] = None,
        gateway_manager_etag: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword sku: The SKU.
        :paramtype sku: ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitSku
        :keyword allow_classic_operations: Allow classic operations.
        :paramtype allow_classic_operations: bool
        :keyword circuit_provisioning_state: The CircuitProvisioningState state of the resource.
        :paramtype circuit_provisioning_state: str
        :keyword service_provider_provisioning_state: The ServiceProviderProvisioningState state of
         the resource. Known values are: "NotProvisioned", "Provisioning", "Provisioned", and
         "Deprovisioning".
        :paramtype service_provider_provisioning_state: str or
         ~azure.mgmt.network.v2017_03_01.models.ServiceProviderProvisioningState
        :keyword authorizations: The list of authorizations.
        :paramtype authorizations:
         list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitAuthorization]
        :keyword peerings: The list of peerings.
        :paramtype peerings: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeering]
        :keyword service_key: The ServiceKey.
        :paramtype service_key: str
        :keyword service_provider_notes: The ServiceProviderNotes.
        :paramtype service_provider_notes: str
        :keyword service_provider_properties: The ServiceProviderProperties.
        :paramtype service_provider_properties:
         ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitServiceProviderProperties
        :keyword provisioning_state: The provisioning state of the resource. Possible values are:
         'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        :keyword gateway_manager_etag: The GatewayManager Etag.
        :paramtype gateway_manager_etag: str
        """
        # Common Resource fields (id/location/tags) are handled by the base class.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.sku = sku
        # etag is server-populated (readonly) and therefore never user-supplied.
        self.etag = None
        self.gateway_manager_etag = gateway_manager_etag
        self.provisioning_state = provisioning_state
        self.service_provider_properties = service_provider_properties
        self.service_provider_notes = service_provider_notes
        self.service_key = service_key
        self.peerings = peerings
        self.authorizations = authorizations
        self.service_provider_provisioning_state = service_provider_provisioning_state
        self.circuit_provisioning_state = circuit_provisioning_state
        self.allow_classic_operations = allow_classic_operations
class ExpressRouteCircuitArpTable(_serialization.Model):
    """The ARP table associated with the ExpressRouteCircuit.

    :ivar age: Age.
    :vartype age: int
    :ivar interface: Interface.
    :vartype interface: str
    :ivar ip_address: The IP address.
    :vartype ip_address: str
    :ivar mac_address: The MAC address.
    :vartype mac_address: str
    """

    _attribute_map = {
        "age": {"key": "age", "type": "int"},
        "interface": {"key": "interface", "type": "str"},
        "ip_address": {"key": "ipAddress", "type": "str"},
        "mac_address": {"key": "macAddress", "type": "str"},
    }

    def __init__(
        self,
        *,
        age: Optional[int] = None,
        interface: Optional[str] = None,
        ip_address: Optional[str] = None,
        mac_address: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword age: Age.
        :paramtype age: int
        :keyword interface: Interface.
        :paramtype interface: str
        :keyword ip_address: The IP address.
        :paramtype ip_address: str
        :keyword mac_address: The MAC address.
        :paramtype mac_address: str
        """
        super().__init__(**kwargs)
        # Plain attribute assignments; order is not significant.
        self.mac_address = mac_address
        self.ip_address = ip_address
        self.interface = interface
        self.age = age
class ExpressRouteCircuitAuthorization(SubResource):
    """Authorization in an ExpressRouteCircuit resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar authorization_key: The authorization key.
    :vartype authorization_key: str
    :ivar authorization_use_status: AuthorizationUseStatus. Known values are: "Available" and
     "InUse".
    :vartype authorization_use_status: str or
     ~azure.mgmt.network.v2017_03_01.models.AuthorizationUseStatus
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "etag": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "authorization_key": {"key": "properties.authorizationKey", "type": "str"},
        "authorization_use_status": {"key": "properties.authorizationUseStatus", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        authorization_key: Optional[str] = None,
        authorization_use_status: Optional[Union[str, "_models.AuthorizationUseStatus"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword authorization_key: The authorization key.
        :paramtype authorization_key: str
        :keyword authorization_use_status: AuthorizationUseStatus. Known values are: "Available"
         and "InUse".
        :paramtype authorization_use_status: str or
         ~azure.mgmt.network.v2017_03_01.models.AuthorizationUseStatus
        :keyword provisioning_state: The provisioning state of the resource. Possible values are:
         'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The id is forwarded to the SubResource base class.
        super().__init__(id=id, **kwargs)
        self.name = name
        # etag is server-populated (readonly) and therefore never user-supplied.
        self.etag = None
        self.provisioning_state = provisioning_state
        self.authorization_use_status = authorization_use_status
        self.authorization_key = authorization_key
class ExpressRouteCircuitListResult(_serialization.Model):
    """Response for ListExpressRouteCircuit API service call.

    :ivar value: A list of ExpressRouteCircuits in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuit]
    :ivar next_link: URL to retrieve the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCircuit]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteCircuit"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: A list of ExpressRouteCircuits in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuit]
        :keyword next_link: URL to retrieve the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class ExpressRouteCircuitPeering(SubResource):  # pylint: disable=too-many-instance-attributes
    """Peering in an ExpressRouteCircuit resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar peering_type: The PeeringType. Known values are: "AzurePublicPeering",
     "AzurePrivatePeering", and "MicrosoftPeering".
    :vartype peering_type: str or
     ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringType
    :ivar state: The state of peering. Known values are: "Disabled" and "Enabled".
    :vartype state: str or ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringState
    :ivar azure_asn: The Azure ASN.
    :vartype azure_asn: int
    :ivar peer_asn: The peer ASN.
    :vartype peer_asn: int
    :ivar primary_peer_address_prefix: The primary address prefix.
    :vartype primary_peer_address_prefix: str
    :ivar secondary_peer_address_prefix: The secondary address prefix.
    :vartype secondary_peer_address_prefix: str
    :ivar primary_azure_port: The primary port.
    :vartype primary_azure_port: str
    :ivar secondary_azure_port: The secondary port.
    :vartype secondary_azure_port: str
    :ivar shared_key: The shared key.
    :vartype shared_key: str
    :ivar vlan_id: The VLAN ID.
    :vartype vlan_id: int
    :ivar microsoft_peering_config: The Microsoft peering configuration.
    :vartype microsoft_peering_config:
     ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringConfig
    :ivar stats: Peering stats.
    :vartype stats: ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitStats
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    :ivar gateway_manager_etag: The GatewayManager Etag.
    :vartype gateway_manager_etag: str
    :ivar last_modified_by: Whether the provider or the customer last modified the peering.
    :vartype last_modified_by: str
    :ivar route_filter: The reference of the RouteFilter resource.
    :vartype route_filter: ~azure.mgmt.network.v2017_03_01.models.RouteFilter
    :ivar ipv6_peering_config: The IPv6 peering configuration.
    :vartype ipv6_peering_config:
     ~azure.mgmt.network.v2017_03_01.models.Ipv6ExpressRouteCircuitPeeringConfig
    """

    _validation = {
        "etag": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "peering_type": {"key": "properties.peeringType", "type": "str"},
        "state": {"key": "properties.state", "type": "str"},
        "azure_asn": {"key": "properties.azureASN", "type": "int"},
        "peer_asn": {"key": "properties.peerASN", "type": "int"},
        "primary_peer_address_prefix": {"key": "properties.primaryPeerAddressPrefix", "type": "str"},
        "secondary_peer_address_prefix": {"key": "properties.secondaryPeerAddressPrefix", "type": "str"},
        "primary_azure_port": {"key": "properties.primaryAzurePort", "type": "str"},
        "secondary_azure_port": {"key": "properties.secondaryAzurePort", "type": "str"},
        "shared_key": {"key": "properties.sharedKey", "type": "str"},
        "vlan_id": {"key": "properties.vlanId", "type": "int"},
        "microsoft_peering_config": {
            "key": "properties.microsoftPeeringConfig",
            "type": "ExpressRouteCircuitPeeringConfig",
        },
        "stats": {"key": "properties.stats", "type": "ExpressRouteCircuitStats"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
        "gateway_manager_etag": {"key": "properties.gatewayManagerEtag", "type": "str"},
        "last_modified_by": {"key": "properties.lastModifiedBy", "type": "str"},
        "route_filter": {"key": "properties.routeFilter", "type": "RouteFilter"},
        "ipv6_peering_config": {"key": "properties.ipv6PeeringConfig", "type": "Ipv6ExpressRouteCircuitPeeringConfig"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        peering_type: Optional[Union[str, "_models.ExpressRouteCircuitPeeringType"]] = None,
        state: Optional[Union[str, "_models.ExpressRouteCircuitPeeringState"]] = None,
        azure_asn: Optional[int] = None,
        peer_asn: Optional[int] = None,
        primary_peer_address_prefix: Optional[str] = None,
        secondary_peer_address_prefix: Optional[str] = None,
        primary_azure_port: Optional[str] = None,
        secondary_azure_port: Optional[str] = None,
        shared_key: Optional[str] = None,
        vlan_id: Optional[int] = None,
        microsoft_peering_config: Optional["_models.ExpressRouteCircuitPeeringConfig"] = None,
        stats: Optional["_models.ExpressRouteCircuitStats"] = None,
        provisioning_state: Optional[str] = None,
        gateway_manager_etag: Optional[str] = None,
        last_modified_by: Optional[str] = None,
        route_filter: Optional["_models.RouteFilter"] = None,
        ipv6_peering_config: Optional["_models.Ipv6ExpressRouteCircuitPeeringConfig"] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword peering_type: The PeeringType. Known values are: "AzurePublicPeering",
         "AzurePrivatePeering", and "MicrosoftPeering".
        :paramtype peering_type: str or
         ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringType
        :keyword state: The state of peering. Known values are: "Disabled" and "Enabled".
        :paramtype state: str or ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringState
        :keyword azure_asn: The Azure ASN.
        :paramtype azure_asn: int
        :keyword peer_asn: The peer ASN.
        :paramtype peer_asn: int
        :keyword primary_peer_address_prefix: The primary address prefix.
        :paramtype primary_peer_address_prefix: str
        :keyword secondary_peer_address_prefix: The secondary address prefix.
        :paramtype secondary_peer_address_prefix: str
        :keyword primary_azure_port: The primary port.
        :paramtype primary_azure_port: str
        :keyword secondary_azure_port: The secondary port.
        :paramtype secondary_azure_port: str
        :keyword shared_key: The shared key.
        :paramtype shared_key: str
        :keyword vlan_id: The VLAN ID.
        :paramtype vlan_id: int
        :keyword microsoft_peering_config: The Microsoft peering configuration.
        :paramtype microsoft_peering_config:
         ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringConfig
        :keyword stats: Peering stats.
        :paramtype stats: ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitStats
        :keyword provisioning_state: The provisioning state of the resource. Possible values are:
         'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        :keyword gateway_manager_etag: The GatewayManager Etag.
        :paramtype gateway_manager_etag: str
        :keyword last_modified_by: Whether the provider or the customer last modified the peering.
        :paramtype last_modified_by: str
        :keyword route_filter: The reference of the RouteFilter resource.
        :paramtype route_filter: ~azure.mgmt.network.v2017_03_01.models.RouteFilter
        :keyword ipv6_peering_config: The IPv6 peering configuration.
        :paramtype ipv6_peering_config:
         ~azure.mgmt.network.v2017_03_01.models.Ipv6ExpressRouteCircuitPeeringConfig
        """
        # The id is forwarded to the SubResource base class.
        super().__init__(id=id, **kwargs)
        self.name = name
        # etag is server-populated (readonly) and therefore never user-supplied.
        self.etag = None
        self.ipv6_peering_config = ipv6_peering_config
        self.route_filter = route_filter
        self.last_modified_by = last_modified_by
        self.gateway_manager_etag = gateway_manager_etag
        self.provisioning_state = provisioning_state
        self.stats = stats
        self.microsoft_peering_config = microsoft_peering_config
        self.vlan_id = vlan_id
        self.shared_key = shared_key
        self.secondary_azure_port = secondary_azure_port
        self.primary_azure_port = primary_azure_port
        self.secondary_peer_address_prefix = secondary_peer_address_prefix
        self.primary_peer_address_prefix = primary_peer_address_prefix
        self.peer_asn = peer_asn
        self.azure_asn = azure_asn
        self.state = state
        self.peering_type = peering_type
class ExpressRouteCircuitPeeringConfig(_serialization.Model):
    """Specifies the peering configuration.

    :ivar advertised_public_prefixes: The reference of AdvertisedPublicPrefixes.
    :vartype advertised_public_prefixes: list[str]
    :ivar advertised_public_prefixes_state: AdvertisedPublicPrefixState of the Peering resource.
     Known values are: "NotConfigured", "Configuring", "Configured", and "ValidationNeeded".
    :vartype advertised_public_prefixes_state: str or
     ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState
    :ivar customer_asn: The CustomerASN of the peering.
    :vartype customer_asn: int
    :ivar routing_registry_name: The RoutingRegistryName of the configuration.
    :vartype routing_registry_name: str
    """

    _attribute_map = {
        "advertised_public_prefixes": {"key": "advertisedPublicPrefixes", "type": "[str]"},
        "advertised_public_prefixes_state": {"key": "advertisedPublicPrefixesState", "type": "str"},
        "customer_asn": {"key": "customerASN", "type": "int"},
        "routing_registry_name": {"key": "routingRegistryName", "type": "str"},
    }

    def __init__(
        self,
        *,
        advertised_public_prefixes: Optional[List[str]] = None,
        advertised_public_prefixes_state: Optional[
            Union[str, "_models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState"]
        ] = None,
        customer_asn: Optional[int] = None,
        routing_registry_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword advertised_public_prefixes: The reference of AdvertisedPublicPrefixes.
        :paramtype advertised_public_prefixes: list[str]
        :keyword advertised_public_prefixes_state: AdvertisedPublicPrefixState of the Peering
         resource. Known values are: "NotConfigured", "Configuring", "Configured", and
         "ValidationNeeded".
        :paramtype advertised_public_prefixes_state: str or
         ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState
        :keyword customer_asn: The CustomerASN of the peering.
        :paramtype customer_asn: int
        :keyword routing_registry_name: The RoutingRegistryName of the configuration.
        :paramtype routing_registry_name: str
        """
        super().__init__(**kwargs)
        # Plain attribute assignments; order is not significant.
        self.routing_registry_name = routing_registry_name
        self.customer_asn = customer_asn
        self.advertised_public_prefixes_state = advertised_public_prefixes_state
        self.advertised_public_prefixes = advertised_public_prefixes
class ExpressRouteCircuitPeeringListResult(_serialization.Model):
    """Response for the ListPeering API service call; retrieves all peerings that belong to an
    ExpressRouteCircuit.

    :ivar value: The peerings in an express route circuit.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeering]
    :ivar next_link: URL to retrieve the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCircuitPeering]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteCircuitPeering"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The peerings in an express route circuit.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeering]
        :keyword next_link: URL to retrieve the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class ExpressRouteCircuitRoutesTable(_serialization.Model):
    """The routes table associated with the ExpressRouteCircuit.

    :ivar network: network.
    :vartype network: str
    :ivar next_hop: nextHop.
    :vartype next_hop: str
    :ivar loc_prf: locPrf.
    :vartype loc_prf: str
    :ivar weight: weight.
    :vartype weight: int
    :ivar path: path.
    :vartype path: str
    """

    _attribute_map = {
        "network": {"key": "network", "type": "str"},
        "next_hop": {"key": "nextHop", "type": "str"},
        "loc_prf": {"key": "locPrf", "type": "str"},
        "weight": {"key": "weight", "type": "int"},
        "path": {"key": "path", "type": "str"},
    }

    def __init__(
        self,
        *,
        network: Optional[str] = None,
        next_hop: Optional[str] = None,
        loc_prf: Optional[str] = None,
        weight: Optional[int] = None,
        path: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword network: network.
        :paramtype network: str
        :keyword next_hop: nextHop.
        :paramtype next_hop: str
        :keyword loc_prf: locPrf.
        :paramtype loc_prf: str
        :keyword weight: weight.
        :paramtype weight: int
        :keyword path: path.
        :paramtype path: str
        """
        super().__init__(**kwargs)
        # Plain attribute assignments; order is not significant.
        self.path = path
        self.weight = weight
        self.loc_prf = loc_prf
        self.next_hop = next_hop
        self.network = network
class ExpressRouteCircuitRoutesTableSummary(_serialization.Model):
    """The routes table summary associated with the ExpressRouteCircuit.

    :ivar neighbor: Neighbor.
    :vartype neighbor: str
    :ivar v: BGP version number spoken to the neighbor.
    :vartype v: int
    :ivar as_property: Autonomous system number.
    :vartype as_property: int
    :ivar up_down: The length of time that the BGP session has been in the Established state, or
     the current status if not in the Established state.
    :vartype up_down: str
    :ivar state_pfx_rcd: Current state of the BGP session, and the number of prefixes that have
     been received from a neighbor or peer group.
    :vartype state_pfx_rcd: str
    """

    _attribute_map = {
        "neighbor": {"key": "neighbor", "type": "str"},
        "v": {"key": "v", "type": "int"},
        "as_property": {"key": "as", "type": "int"},
        "up_down": {"key": "upDown", "type": "str"},
        "state_pfx_rcd": {"key": "statePfxRcd", "type": "str"},
    }

    def __init__(
        self,
        *,
        neighbor: Optional[str] = None,
        v: Optional[int] = None,
        as_property: Optional[int] = None,
        up_down: Optional[str] = None,
        state_pfx_rcd: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword neighbor: Neighbor.
        :paramtype neighbor: str
        :keyword v: BGP version number spoken to the neighbor.
        :paramtype v: int
        :keyword as_property: Autonomous system number.
        :paramtype as_property: int
        :keyword up_down: The length of time that the BGP session has been in the Established
         state, or the current status if not in the Established state.
        :paramtype up_down: str
        :keyword state_pfx_rcd: Current state of the BGP session, and the number of prefixes that
         have been received from a neighbor or peer group.
        :paramtype state_pfx_rcd: str
        """
        super().__init__(**kwargs)
        # Plain attribute assignments; order is not significant.
        self.state_pfx_rcd = state_pfx_rcd
        self.up_down = up_down
        self.as_property = as_property
        self.v = v
        self.neighbor = neighbor
class ExpressRouteCircuitsArpTableListResult(_serialization.Model):
    """Response for the ListArpTable associated with the Express Route Circuits API.

    :ivar value: List of the ARP table.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitArpTable]
    :ivar next_link: URL to retrieve the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCircuitArpTable]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteCircuitArpTable"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: List of the ARP table.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitArpTable]
        :keyword next_link: URL to retrieve the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class ExpressRouteCircuitServiceProviderProperties(_serialization.Model):
    """Contains ServiceProviderProperties in an ExpressRouteCircuit.

    :ivar service_provider_name: The serviceProviderName.
    :vartype service_provider_name: str
    :ivar peering_location: The peering location.
    :vartype peering_location: str
    :ivar bandwidth_in_mbps: The BandwidthInMbps.
    :vartype bandwidth_in_mbps: int
    """

    _attribute_map = {
        "service_provider_name": {"key": "serviceProviderName", "type": "str"},
        "peering_location": {"key": "peeringLocation", "type": "str"},
        "bandwidth_in_mbps": {"key": "bandwidthInMbps", "type": "int"},
    }

    def __init__(
        self,
        *,
        service_provider_name: Optional[str] = None,
        peering_location: Optional[str] = None,
        bandwidth_in_mbps: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword service_provider_name: The serviceProviderName.
        :paramtype service_provider_name: str
        :keyword peering_location: The peering location.
        :paramtype peering_location: str
        :keyword bandwidth_in_mbps: The BandwidthInMbps.
        :paramtype bandwidth_in_mbps: int
        """
        super().__init__(**kwargs)
        # Plain attribute assignments; order is not significant.
        self.bandwidth_in_mbps = bandwidth_in_mbps
        self.peering_location = peering_location
        self.service_provider_name = service_provider_name
class ExpressRouteCircuitSku(_serialization.Model):
    """Contains SKU in an ExpressRouteCircuit.

    :ivar name: The name of the SKU.
    :vartype name: str
    :ivar tier: The tier of the SKU. Known values are: "Standard" and "Premium".
    :vartype tier: str or ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitSkuTier
    :ivar family: The family of the SKU. Known values are: "UnlimitedData" and "MeteredData".
    :vartype family: str or ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitSkuFamily
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "tier": {"key": "tier", "type": "str"},
        "family": {"key": "family", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        tier: Optional[Union[str, "_models.ExpressRouteCircuitSkuTier"]] = None,
        family: Optional[Union[str, "_models.ExpressRouteCircuitSkuFamily"]] = None,
        **kwargs
    ):
        """
        :keyword name: The name of the SKU.
        :paramtype name: str
        :keyword tier: The tier of the SKU. Known values are: "Standard" and "Premium".
        :paramtype tier: str or ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitSkuTier
        :keyword family: The family of the SKU. Known values are: "UnlimitedData" and
         "MeteredData".
        :paramtype family: str or ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitSkuFamily
        """
        super().__init__(**kwargs)
        # Plain attribute assignments; order is not significant.
        self.family = family
        self.tier = tier
        self.name = name
class ExpressRouteCircuitsRoutesTableListResult(_serialization.Model):
    """Response for the ListRoutesTable associated with the Express Route Circuits API.

    :ivar value: The list of routes table.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitRoutesTable]
    :ivar next_link: URL to retrieve the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCircuitRoutesTable]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteCircuitRoutesTable"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The list of routes table.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitRoutesTable]
        :keyword next_link: URL to retrieve the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class ExpressRouteCircuitsRoutesTableSummaryListResult(_serialization.Model):
    """Response for the ListRoutesTableSummary associated with the Express Route Circuits API.

    :ivar value: A list of the routes table.
    :vartype value:
     list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitRoutesTableSummary]
    :ivar next_link: URL to retrieve the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteCircuitRoutesTableSummary]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteCircuitRoutesTableSummary"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: A list of the routes table.
        :paramtype value:
         list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitRoutesTableSummary]
        :keyword next_link: URL to retrieve the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class ExpressRouteCircuitStats(_serialization.Model):
    """Contains stats associated with the peering.

    :ivar primarybytes_in: Gets BytesIn of the peering.
    :vartype primarybytes_in: int
    :ivar primarybytes_out: Gets BytesOut of the peering.
    :vartype primarybytes_out: int
    :ivar secondarybytes_in: Gets BytesIn of the peering.
    :vartype secondarybytes_in: int
    :ivar secondarybytes_out: Gets BytesOut of the peering.
    :vartype secondarybytes_out: int
    """

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "primarybytes_in": {"key": "primarybytesIn", "type": "int"},
        "primarybytes_out": {"key": "primarybytesOut", "type": "int"},
        "secondarybytes_in": {"key": "secondarybytesIn", "type": "int"},
        "secondarybytes_out": {"key": "secondarybytesOut", "type": "int"},
    }

    def __init__(
        self,
        *,
        primarybytes_in: Optional[int] = None,
        primarybytes_out: Optional[int] = None,
        secondarybytes_in: Optional[int] = None,
        secondarybytes_out: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword primarybytes_in: Gets BytesIn of the peering.
        :paramtype primarybytes_in: int
        :keyword primarybytes_out: Gets BytesOut of the peering.
        :paramtype primarybytes_out: int
        :keyword secondarybytes_in: Gets BytesIn of the peering.
        :paramtype secondarybytes_in: int
        :keyword secondarybytes_out: Gets BytesOut of the peering.
        :paramtype secondarybytes_out: int
        """
        super().__init__(**kwargs)
        # Secondary-link counters first, then primary; assignments are independent.
        self.secondarybytes_out = secondarybytes_out
        self.secondarybytes_in = secondarybytes_in
        self.primarybytes_out = primarybytes_out
        self.primarybytes_in = primarybytes_in
class ExpressRouteServiceProvider(Resource):
    """A ExpressRouteResourceProvider object.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar peering_locations: Get a list of peering locations.
    :vartype peering_locations: list[str]
    :ivar bandwidths_offered: Gets bandwidths offered.
    :vartype bandwidths_offered:
     list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteServiceProviderBandwidthsOffered]
    :ivar provisioning_state: Gets the provisioning state of the resource.
    :vartype provisioning_state: str
    """

    # Server-populated fields; the serializer skips these on outgoing requests.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "peering_locations": {"key": "properties.peeringLocations", "type": "[str]"},
        "bandwidths_offered": {
            "key": "properties.bandwidthsOffered",
            "type": "[ExpressRouteServiceProviderBandwidthsOffered]",
        },
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        peering_locations: Optional[List[str]] = None,
        bandwidths_offered: Optional[List["_models.ExpressRouteServiceProviderBandwidthsOffered"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword peering_locations: Get a list of peering locations.
        :paramtype peering_locations: list[str]
        :keyword bandwidths_offered: Gets bandwidths offered.
        :paramtype bandwidths_offered:
         list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteServiceProviderBandwidthsOffered]
        :keyword provisioning_state: Gets the provisioning state of the resource.
        :paramtype provisioning_state: str
        """
        # Common resource fields (id/location/tags) are handled by the base class.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.provisioning_state = provisioning_state
        self.bandwidths_offered = bandwidths_offered
        self.peering_locations = peering_locations
class ExpressRouteServiceProviderBandwidthsOffered(_serialization.Model):
    """Contains bandwidths offered in ExpressRouteServiceProvider resources.

    :ivar offer_name: The OfferName.
    :vartype offer_name: str
    :ivar value_in_mbps: The ValueInMbps.
    :vartype value_in_mbps: int
    """

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "offer_name": {"key": "offerName", "type": "str"},
        "value_in_mbps": {"key": "valueInMbps", "type": "int"},
    }

    def __init__(
        self,
        *,
        offer_name: Optional[str] = None,
        value_in_mbps: Optional[int] = None,
        **kwargs
    ):
        """
        :keyword offer_name: The OfferName.
        :paramtype offer_name: str
        :keyword value_in_mbps: The ValueInMbps.
        :paramtype value_in_mbps: int
        """
        super().__init__(**kwargs)
        self.value_in_mbps = value_in_mbps
        self.offer_name = offer_name
class ExpressRouteServiceProviderListResult(_serialization.Model):
    """Response for the ListExpressRouteServiceProvider API service call.

    :ivar value: A list of ExpressRouteResourceProvider resources.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteServiceProvider]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "value": {"key": "value", "type": "[ExpressRouteServiceProvider]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.ExpressRouteServiceProvider"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: A list of ExpressRouteResourceProvider resources.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteServiceProvider]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        # Both fields are optional; a missing page link means this is the last page.
        self.next_link = next_link
        self.value = value
class FlowLogInformation(_serialization.Model):
    """Information on the configuration of flow log.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The ID of the resource to configure for flow logging. Required.
    :vartype target_resource_id: str
    :ivar storage_id: ID of the storage account which is used to store the flow log. Required.
    :vartype storage_id: str
    :ivar enabled: Flag to enable/disable flow logging. Required.
    :vartype enabled: bool
    :ivar retention_policy: Parameters that define the retention policy for flow log.
    :vartype retention_policy: ~azure.mgmt.network.v2017_03_01.models.RetentionPolicyParameters
    """

    # Fields the serializer rejects when absent.
    _validation = {
        "target_resource_id": {"required": True},
        "storage_id": {"required": True},
        "enabled": {"required": True},
    }

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "storage_id": {"key": "properties.storageId", "type": "str"},
        "enabled": {"key": "properties.enabled", "type": "bool"},
        "retention_policy": {"key": "properties.retentionPolicy", "type": "RetentionPolicyParameters"},
    }

    def __init__(
        self,
        *,
        target_resource_id: str,
        storage_id: str,
        enabled: bool,
        retention_policy: Optional["_models.RetentionPolicyParameters"] = None,
        **kwargs
    ):
        """
        :keyword target_resource_id: The ID of the resource to configure for flow logging. Required.
        :paramtype target_resource_id: str
        :keyword storage_id: ID of the storage account which is used to store the flow log. Required.
        :paramtype storage_id: str
        :keyword enabled: Flag to enable/disable flow logging. Required.
        :paramtype enabled: bool
        :keyword retention_policy: Parameters that define the retention policy for flow log.
        :paramtype retention_policy: ~azure.mgmt.network.v2017_03_01.models.RetentionPolicyParameters
        """
        super().__init__(**kwargs)
        # Optional retention policy first, then the three required fields.
        self.retention_policy = retention_policy
        self.enabled = enabled
        self.storage_id = storage_id
        self.target_resource_id = target_resource_id
class FlowLogStatusParameters(_serialization.Model):
    """Parameters that define a resource to query flow log status.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The target resource where getting the flow logging status. Required.
    :vartype target_resource_id: str
    """

    # The single field is mandatory on the wire.
    _validation = {
        "target_resource_id": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
    }

    def __init__(self, *, target_resource_id: str, **kwargs):
        """
        :keyword target_resource_id: The target resource where getting the flow logging status.
         Required.
        :paramtype target_resource_id: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
class FrontendIPConfiguration(SubResource):  # pylint: disable=too-many-instance-attributes
    """Frontend IP address of the load balancer.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar inbound_nat_rules: Read only. Inbound rules URIs that use this frontend IP.
    :vartype inbound_nat_rules: list[~azure.mgmt.network.v2017_03_01.models.SubResource]
    :ivar inbound_nat_pools: Read only. Inbound pools URIs that use this frontend IP.
    :vartype inbound_nat_pools: list[~azure.mgmt.network.v2017_03_01.models.SubResource]
    :ivar outbound_nat_rules: Read only. Outbound rules URIs that use this frontend IP.
    :vartype outbound_nat_rules: list[~azure.mgmt.network.v2017_03_01.models.SubResource]
    :ivar load_balancing_rules: Gets load balancing rules URIs that use this frontend IP.
    :vartype load_balancing_rules: list[~azure.mgmt.network.v2017_03_01.models.SubResource]
    :ivar private_ip_address: The private IP address of the IP configuration.
    :vartype private_ip_address: str
    :ivar private_ip_allocation_method: The Private IP allocation method. Possible values are:
     'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
    :vartype private_ip_allocation_method: str or
     ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
    :ivar subnet: The reference of the subnet resource.
    :vartype subnet: ~azure.mgmt.network.v2017_03_01.models.Subnet
    :ivar public_ip_address: The reference of the Public IP resource.
    :vartype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.PublicIPAddress
    :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    # Server-populated fields; the serializer skips these on outgoing requests.
    _validation = {
        "inbound_nat_rules": {"readonly": True},
        "inbound_nat_pools": {"readonly": True},
        "outbound_nat_rules": {"readonly": True},
        "load_balancing_rules": {"readonly": True},
    }

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "inbound_nat_rules": {"key": "properties.inboundNatRules", "type": "[SubResource]"},
        "inbound_nat_pools": {"key": "properties.inboundNatPools", "type": "[SubResource]"},
        "outbound_nat_rules": {"key": "properties.outboundNatRules", "type": "[SubResource]"},
        "load_balancing_rules": {"key": "properties.loadBalancingRules", "type": "[SubResource]"},
        "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"},
        "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"},
        "subnet": {"key": "properties.subnet", "type": "Subnet"},
        "public_ip_address": {"key": "properties.publicIPAddress", "type": "PublicIPAddress"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        private_ip_address: Optional[str] = None,
        private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None,
        subnet: Optional["_models.Subnet"] = None,
        public_ip_address: Optional["_models.PublicIPAddress"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword private_ip_address: The private IP address of the IP configuration.
        :paramtype private_ip_address: str
        :keyword private_ip_allocation_method: The Private IP allocation method. Possible values are:
         'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
        :paramtype private_ip_allocation_method: str or
         ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
        :keyword subnet: The reference of the subnet resource.
        :paramtype subnet: ~azure.mgmt.network.v2017_03_01.models.Subnet
        :keyword public_ip_address: The reference of the Public IP resource.
        :paramtype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.PublicIPAddress
        :keyword provisioning_state: Gets the provisioning state of the public IP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        # Caller-settable fields.
        self.name = name
        self.etag = etag
        self.private_ip_address = private_ip_address
        self.private_ip_allocation_method = private_ip_allocation_method
        self.subnet = subnet
        self.public_ip_address = public_ip_address
        self.provisioning_state = provisioning_state
        # Read-only fields: always initialized to None locally, populated by the server.
        self.inbound_nat_rules = None
        self.inbound_nat_pools = None
        self.outbound_nat_rules = None
        self.load_balancing_rules = None
class GatewayRoute(_serialization.Model):
    """Gateway routing details.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar local_address: The gateway's local address.
    :vartype local_address: str
    :ivar network: The route's network prefix.
    :vartype network: str
    :ivar next_hop: The route's next hop.
    :vartype next_hop: str
    :ivar source_peer: The peer this route was learned from.
    :vartype source_peer: str
    :ivar origin: The source this route was learned from.
    :vartype origin: str
    :ivar as_path: The route's AS path sequence.
    :vartype as_path: str
    :ivar weight: The route's weight.
    :vartype weight: int
    """

    # Every field is server-populated; nothing here is sent on requests.
    _validation = {
        "local_address": {"readonly": True},
        "network": {"readonly": True},
        "next_hop": {"readonly": True},
        "source_peer": {"readonly": True},
        "origin": {"readonly": True},
        "as_path": {"readonly": True},
        "weight": {"readonly": True},
    }

    _attribute_map = {
        "local_address": {"key": "localAddress", "type": "str"},
        "network": {"key": "network", "type": "str"},
        "next_hop": {"key": "nextHop", "type": "str"},
        "source_peer": {"key": "sourcePeer", "type": "str"},
        "origin": {"key": "origin", "type": "str"},
        "as_path": {"key": "asPath", "type": "str"},
        "weight": {"key": "weight", "type": "int"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        # All attributes start as None and are filled in by deserialization.
        self.weight = None
        self.as_path = None
        self.origin = None
        self.source_peer = None
        self.next_hop = None
        self.network = None
        self.local_address = None
class GatewayRouteListResult(_serialization.Model):
    """List of virtual network gateway routes.

    :ivar value: List of gateway routes.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.GatewayRoute]
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[GatewayRoute]"},
    }

    def __init__(self, *, value: Optional[List["_models.GatewayRoute"]] = None, **kwargs):
        """
        :keyword value: List of gateway routes.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.GatewayRoute]
        """
        super().__init__(**kwargs)
        self.value = value
class InboundNatPool(SubResource):
    """Inbound NAT pool of the load balancer.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar frontend_ip_configuration: A reference to frontend IP addresses.
    :vartype frontend_ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar protocol: The transport protocol for the endpoint. Possible values are: 'Udp' or 'Tcp'.
     Known values are: "Udp" and "Tcp".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.TransportProtocol
    :ivar frontend_port_range_start: The first port number in the range of external ports that will
     be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range
     between 1 and 65534.
    :vartype frontend_port_range_start: int
    :ivar frontend_port_range_end: The last port number in the range of external ports that will be
     used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range
     between 1 and 65535.
    :vartype frontend_port_range_end: int
    :ivar backend_port: The port used for internal connections on the endpoint. Acceptable values
     are between 1 and 65535.
    :vartype backend_port: int
    :ivar provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "frontend_ip_configuration": {"key": "properties.frontendIPConfiguration", "type": "SubResource"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "frontend_port_range_start": {"key": "properties.frontendPortRangeStart", "type": "int"},
        "frontend_port_range_end": {"key": "properties.frontendPortRangeEnd", "type": "int"},
        "backend_port": {"key": "properties.backendPort", "type": "int"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        frontend_ip_configuration: Optional["_models.SubResource"] = None,
        protocol: Optional[Union[str, "_models.TransportProtocol"]] = None,
        frontend_port_range_start: Optional[int] = None,
        frontend_port_range_end: Optional[int] = None,
        backend_port: Optional[int] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword frontend_ip_configuration: A reference to frontend IP addresses.
        :paramtype frontend_ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword protocol: The transport protocol for the endpoint. Possible values are: 'Udp' or
         'Tcp'. Known values are: "Udp" and "Tcp".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.TransportProtocol
        :keyword frontend_port_range_start: The first port number in the range of external ports that
         will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values
         range between 1 and 65534.
        :paramtype frontend_port_range_start: int
        :keyword frontend_port_range_end: The last port number in the range of external ports that will
         be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range
         between 1 and 65535.
        :paramtype frontend_port_range_end: int
        :keyword backend_port: The port used for internal connections on the endpoint. Acceptable
         values are between 1 and 65535.
        :paramtype backend_port: int
        :keyword provisioning_state: Gets the provisioning state of the PublicIP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The id field is handled by the SubResource base class.
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.backend_port = backend_port
        self.frontend_port_range_end = frontend_port_range_end
        self.frontend_port_range_start = frontend_port_range_start
        self.protocol = protocol
        self.frontend_ip_configuration = frontend_ip_configuration
        self.etag = etag
        self.name = name
class InboundNatRule(SubResource):  # pylint: disable=too-many-instance-attributes
    """Inbound NAT rule of the load balancer.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Gets name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar frontend_ip_configuration: A reference to frontend IP addresses.
    :vartype frontend_ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar backend_ip_configuration: A reference to a private IP address defined on a network
     interface of a VM. Traffic sent to the frontend port of each of the frontend IP configurations
     is forwarded to the backed IP.
    :vartype backend_ip_configuration:
     ~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceIPConfiguration
    :ivar protocol: The transport protocol for the endpoint. Possible values are: 'Udp' or 'Tcp'.
     Known values are: "Udp" and "Tcp".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.TransportProtocol
    :ivar frontend_port: The port for the external endpoint. Port numbers for each rule must be
     unique within the Load Balancer. Acceptable values range from 1 to 65534.
    :vartype frontend_port: int
    :ivar backend_port: The port used for the internal endpoint. Acceptable values range from 1 to
     65535.
    :vartype backend_port: int
    :ivar idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set
     between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the
     protocol is set to TCP.
    :vartype idle_timeout_in_minutes: int
    :ivar enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
     capability required to configure a SQL AlwaysOn Availability Group. This setting is required
     when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed
     after you create the endpoint.
    :vartype enable_floating_ip: bool
    :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    # Server-populated field; the serializer skips it on outgoing requests.
    _validation = {
        "backend_ip_configuration": {"readonly": True},
    }

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "frontend_ip_configuration": {"key": "properties.frontendIPConfiguration", "type": "SubResource"},
        "backend_ip_configuration": {
            "key": "properties.backendIPConfiguration",
            "type": "NetworkInterfaceIPConfiguration",
        },
        "protocol": {"key": "properties.protocol", "type": "str"},
        "frontend_port": {"key": "properties.frontendPort", "type": "int"},
        "backend_port": {"key": "properties.backendPort", "type": "int"},
        "idle_timeout_in_minutes": {"key": "properties.idleTimeoutInMinutes", "type": "int"},
        "enable_floating_ip": {"key": "properties.enableFloatingIP", "type": "bool"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        frontend_ip_configuration: Optional["_models.SubResource"] = None,
        protocol: Optional[Union[str, "_models.TransportProtocol"]] = None,
        frontend_port: Optional[int] = None,
        backend_port: Optional[int] = None,
        idle_timeout_in_minutes: Optional[int] = None,
        enable_floating_ip: Optional[bool] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Gets name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword frontend_ip_configuration: A reference to frontend IP addresses.
        :paramtype frontend_ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword protocol: The transport protocol for the endpoint. Possible values are: 'Udp' or
         'Tcp'. Known values are: "Udp" and "Tcp".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.TransportProtocol
        :keyword frontend_port: The port for the external endpoint. Port numbers for each rule must be
         unique within the Load Balancer. Acceptable values range from 1 to 65534.
        :paramtype frontend_port: int
        :keyword backend_port: The port used for the internal endpoint. Acceptable values range from 1
         to 65535.
        :paramtype backend_port: int
        :keyword idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set
         between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the
         protocol is set to TCP.
        :paramtype idle_timeout_in_minutes: int
        :keyword enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
         capability required to configure a SQL AlwaysOn Availability Group. This setting is required
         when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed
         after you create the endpoint.
        :paramtype enable_floating_ip: bool
        :keyword provisioning_state: Gets the provisioning state of the public IP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The id field is handled by the SubResource base class.
        super().__init__(id=id, **kwargs)
        # Caller-settable fields.
        self.name = name
        self.etag = etag
        self.frontend_ip_configuration = frontend_ip_configuration
        self.protocol = protocol
        self.frontend_port = frontend_port
        self.backend_port = backend_port
        self.idle_timeout_in_minutes = idle_timeout_in_minutes
        self.enable_floating_ip = enable_floating_ip
        self.provisioning_state = provisioning_state
        # Read-only field: initialized to None locally, populated by the server.
        self.backend_ip_configuration = None
class IPAddressAvailabilityResult(_serialization.Model):
    """Response for CheckIPAddressAvailability API service call.

    :ivar available: Private IP address availability.
    :vartype available: bool
    :ivar available_ip_addresses: Contains other available private IP addresses if the asked for
     address is taken.
    :vartype available_ip_addresses: list[str]
    """

    _attribute_map = {
        "available": {"key": "available", "type": "bool"},
        "available_ip_addresses": {"key": "availableIPAddresses", "type": "[str]"},
    }

    def __init__(
        self,
        *,
        available: Optional[bool] = None,
        available_ip_addresses: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword available: Private IP address availability.
        :paramtype available: bool
        :keyword available_ip_addresses: Contains other available private IP addresses if the asked for
         address is taken.
        :paramtype available_ip_addresses: list[str]
        """
        super().__init__(**kwargs)
        self.available_ip_addresses = available_ip_addresses
        self.available = available
class IPConfiguration(SubResource):
    """IPConfiguration.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar private_ip_address: The private IP address of the IP configuration.
    :vartype private_ip_address: str
    :ivar private_ip_allocation_method: The private IP allocation method. Possible values are
     'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
    :vartype private_ip_allocation_method: str or
     ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
    :ivar subnet: The reference of the subnet resource.
    :vartype subnet: ~azure.mgmt.network.v2017_03_01.models.Subnet
    :ivar public_ip_address: The reference of the public IP resource.
    :vartype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.PublicIPAddress
    :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    # Wire-format mapping consumed by the serializer; order is significant.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"},
        "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"},
        "subnet": {"key": "properties.subnet", "type": "Subnet"},
        "public_ip_address": {"key": "properties.publicIPAddress", "type": "PublicIPAddress"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        private_ip_address: Optional[str] = None,
        private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None,
        subnet: Optional["_models.Subnet"] = None,
        public_ip_address: Optional["_models.PublicIPAddress"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword private_ip_address: The private IP address of the IP configuration.
        :paramtype private_ip_address: str
        :keyword private_ip_allocation_method: The private IP allocation method. Possible values are
         'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
        :paramtype private_ip_allocation_method: str or
         ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
        :keyword subnet: The reference of the subnet resource.
        :paramtype subnet: ~azure.mgmt.network.v2017_03_01.models.Subnet
        :keyword public_ip_address: The reference of the public IP resource.
        :paramtype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.PublicIPAddress
        :keyword provisioning_state: Gets the provisioning state of the public IP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # The id field is handled by the SubResource base class.
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.public_ip_address = public_ip_address
        self.subnet = subnet
        self.private_ip_allocation_method = private_ip_allocation_method
        self.private_ip_address = private_ip_address
        self.etag = etag
        self.name = name
class IpsecPolicy(_serialization.Model):
    """IPSec policy parameters for a virtual network gateway connection.

    All required parameters must be populated in order to send to Azure.

    :ivar sa_life_time_seconds: Lifetime, in seconds, of the IPSec Security Association (also
     called Quick Mode or Phase 2 SA) for a site to site VPN tunnel. Required.
    :vartype sa_life_time_seconds: int
    :ivar sa_data_size_kilobytes: Payload size, in KB, of the IPSec Security Association (also
     called Quick Mode or Phase 2 SA) for a site to site VPN tunnel. Required.
    :vartype sa_data_size_kilobytes: int
    :ivar ipsec_encryption: IPSec encryption algorithm (IKE phase 1). Required. Known values are:
     "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192", and
     "GCMAES256".
    :vartype ipsec_encryption: str or ~azure.mgmt.network.v2017_03_01.models.IpsecEncryption
    :ivar ipsec_integrity: IPSec integrity algorithm (IKE phase 1). Required. Known values are:
     "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", and "GCMAES256".
    :vartype ipsec_integrity: str or ~azure.mgmt.network.v2017_03_01.models.IpsecIntegrity
    :ivar ike_encryption: IKE encryption algorithm (IKE phase 2). Required. Known values are:
     "DES", "DES3", "AES128", "AES192", and "AES256".
    :vartype ike_encryption: str or ~azure.mgmt.network.v2017_03_01.models.IkeEncryption
    :ivar ike_integrity: IKE integrity algorithm (IKE phase 2). Required. Known values are:
     "MD5", "SHA1", "SHA256", and "SHA384".
    :vartype ike_integrity: str or ~azure.mgmt.network.v2017_03_01.models.IkeIntegrity
    :ivar dh_group: DH Group used in IKE Phase 1 for the initial SA. Required. Known values are:
     "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384", and
     "DHGroup24".
    :vartype dh_group: str or ~azure.mgmt.network.v2017_03_01.models.DhGroup
    :ivar pfs_group: DH Group used in IKE Phase 2 for the new child SA. Required. Known values
     are: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", and "PFS24".
    :vartype pfs_group: str or ~azure.mgmt.network.v2017_03_01.models.PfsGroup
    """

    # Every field of an IPSec policy must be present on the wire.
    _validation = {
        "sa_life_time_seconds": {"required": True},
        "sa_data_size_kilobytes": {"required": True},
        "ipsec_encryption": {"required": True},
        "ipsec_integrity": {"required": True},
        "ike_encryption": {"required": True},
        "ike_integrity": {"required": True},
        "dh_group": {"required": True},
        "pfs_group": {"required": True},
    }

    # Python attribute name -> wire (camelCase) key and serialized type.
    _attribute_map = {
        "sa_life_time_seconds": {"key": "saLifeTimeSeconds", "type": "int"},
        "sa_data_size_kilobytes": {"key": "saDataSizeKilobytes", "type": "int"},
        "ipsec_encryption": {"key": "ipsecEncryption", "type": "str"},
        "ipsec_integrity": {"key": "ipsecIntegrity", "type": "str"},
        "ike_encryption": {"key": "ikeEncryption", "type": "str"},
        "ike_integrity": {"key": "ikeIntegrity", "type": "str"},
        "dh_group": {"key": "dhGroup", "type": "str"},
        "pfs_group": {"key": "pfsGroup", "type": "str"},
    }

    def __init__(
        self,
        *,
        sa_life_time_seconds: int,
        sa_data_size_kilobytes: int,
        ipsec_encryption: Union[str, "_models.IpsecEncryption"],
        ipsec_integrity: Union[str, "_models.IpsecIntegrity"],
        ike_encryption: Union[str, "_models.IkeEncryption"],
        ike_integrity: Union[str, "_models.IkeIntegrity"],
        dh_group: Union[str, "_models.DhGroup"],
        pfs_group: Union[str, "_models.PfsGroup"],
        **kwargs
    ):
        """
        :keyword sa_life_time_seconds: Lifetime, in seconds, of the IPSec Security Association
         (also called Quick Mode or Phase 2 SA) for a site to site VPN tunnel. Required.
        :paramtype sa_life_time_seconds: int
        :keyword sa_data_size_kilobytes: Payload size, in KB, of the IPSec Security Association
         (also called Quick Mode or Phase 2 SA) for a site to site VPN tunnel. Required.
        :paramtype sa_data_size_kilobytes: int
        :keyword ipsec_encryption: IPSec encryption algorithm (IKE phase 1). Required. Known
         values are: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128",
         "GCMAES192", and "GCMAES256".
        :paramtype ipsec_encryption: str or ~azure.mgmt.network.v2017_03_01.models.IpsecEncryption
        :keyword ipsec_integrity: IPSec integrity algorithm (IKE phase 1). Required. Known values
         are: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", and "GCMAES256".
        :paramtype ipsec_integrity: str or ~azure.mgmt.network.v2017_03_01.models.IpsecIntegrity
        :keyword ike_encryption: IKE encryption algorithm (IKE phase 2). Required. Known values
         are: "DES", "DES3", "AES128", "AES192", and "AES256".
        :paramtype ike_encryption: str or ~azure.mgmt.network.v2017_03_01.models.IkeEncryption
        :keyword ike_integrity: IKE integrity algorithm (IKE phase 2). Required. Known values
         are: "MD5", "SHA1", "SHA256", and "SHA384".
        :paramtype ike_integrity: str or ~azure.mgmt.network.v2017_03_01.models.IkeIntegrity
        :keyword dh_group: DH Group used in IKE Phase 1 for the initial SA. Required. Known values
         are: "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384", and
         "DHGroup24".
        :paramtype dh_group: str or ~azure.mgmt.network.v2017_03_01.models.DhGroup
        :keyword pfs_group: DH Group used in IKE Phase 2 for the new child SA. Required. Known
         values are: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", and "PFS24".
        :paramtype pfs_group: str or ~azure.mgmt.network.v2017_03_01.models.PfsGroup
        """
        super().__init__(**kwargs)
        # Plain field-by-field capture of the keyword arguments.
        for _field, _value in (
            ("sa_life_time_seconds", sa_life_time_seconds),
            ("sa_data_size_kilobytes", sa_data_size_kilobytes),
            ("ipsec_encryption", ipsec_encryption),
            ("ipsec_integrity", ipsec_integrity),
            ("ike_encryption", ike_encryption),
            ("ike_integrity", ike_integrity),
            ("dh_group", dh_group),
            ("pfs_group", pfs_group),
        ):
            setattr(self, _field, _value)
class Ipv6ExpressRouteCircuitPeeringConfig(_serialization.Model):
    """IPv6 peering configuration container.

    :ivar primary_peer_address_prefix: Primary address prefix.
    :vartype primary_peer_address_prefix: str
    :ivar secondary_peer_address_prefix: Secondary address prefix.
    :vartype secondary_peer_address_prefix: str
    :ivar microsoft_peering_config: Microsoft peering configuration.
    :vartype microsoft_peering_config:
     ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringConfig
    :ivar route_filter: Reference to the RouteFilter resource.
    :vartype route_filter: ~azure.mgmt.network.v2017_03_01.models.RouteFilter
    :ivar state: Peering state; one of 'Disabled' or 'Enabled'. Known values are: "Disabled" and
     "Enabled".
    :vartype state: str or ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringState
    """

    # Python attribute name -> wire (camelCase) key and serialized type.
    _attribute_map = {
        "primary_peer_address_prefix": {"key": "primaryPeerAddressPrefix", "type": "str"},
        "secondary_peer_address_prefix": {"key": "secondaryPeerAddressPrefix", "type": "str"},
        "microsoft_peering_config": {"key": "microsoftPeeringConfig", "type": "ExpressRouteCircuitPeeringConfig"},
        "route_filter": {"key": "routeFilter", "type": "RouteFilter"},
        "state": {"key": "state", "type": "str"},
    }

    def __init__(
        self,
        *,
        primary_peer_address_prefix: Optional[str] = None,
        secondary_peer_address_prefix: Optional[str] = None,
        microsoft_peering_config: Optional["_models.ExpressRouteCircuitPeeringConfig"] = None,
        route_filter: Optional["_models.RouteFilter"] = None,
        state: Optional[Union[str, "_models.ExpressRouteCircuitPeeringState"]] = None,
        **kwargs
    ):
        """
        :keyword primary_peer_address_prefix: Primary address prefix.
        :paramtype primary_peer_address_prefix: str
        :keyword secondary_peer_address_prefix: Secondary address prefix.
        :paramtype secondary_peer_address_prefix: str
        :keyword microsoft_peering_config: Microsoft peering configuration.
        :paramtype microsoft_peering_config:
         ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringConfig
        :keyword route_filter: Reference to the RouteFilter resource.
        :paramtype route_filter: ~azure.mgmt.network.v2017_03_01.models.RouteFilter
        :keyword state: Peering state; one of 'Disabled' or 'Enabled'. Known values are:
         "Disabled" and "Enabled".
        :paramtype state: str or
         ~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeeringState
        """
        super().__init__(**kwargs)
        self.primary_peer_address_prefix = primary_peer_address_prefix
        self.secondary_peer_address_prefix = secondary_peer_address_prefix
        self.microsoft_peering_config, self.route_filter = microsoft_peering_config, route_filter
        self.state = state
class LoadBalancer(Resource):  # pylint: disable=too-many-instance-attributes
    """LoadBalancer resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Read-only opaque string that changes each time the resource is updated.
    :vartype etag: str
    :ivar frontend_ip_configurations: Frontend IPs to be used for the load balancer.
    :vartype frontend_ip_configurations:
     list[~azure.mgmt.network.v2017_03_01.models.FrontendIPConfiguration]
    :ivar backend_address_pools: Backend address pools used by the load balancer.
    :vartype backend_address_pools: list[~azure.mgmt.network.v2017_03_01.models.BackendAddressPool]
    :ivar load_balancing_rules: Object collection representing the load balancing rules Gets the
     provisioning.
    :vartype load_balancing_rules: list[~azure.mgmt.network.v2017_03_01.models.LoadBalancingRule]
    :ivar probes: Probe objects used in the load balancer.
    :vartype probes: list[~azure.mgmt.network.v2017_03_01.models.Probe]
    :ivar inbound_nat_rules: Inbound NAT rules used by the load balancer. Defining inbound NAT
     rules on a load balancer is mutually exclusive with defining an inbound NAT pool. Inbound NAT
     pools are referenced from virtual machine scale sets; NICs associated with individual virtual
     machines cannot reference an inbound NAT pool — they must reference individual inbound NAT
     rules.
    :vartype inbound_nat_rules: list[~azure.mgmt.network.v2017_03_01.models.InboundNatRule]
    :ivar inbound_nat_pools: External port ranges for inbound NAT to a single backend port on NICs
     associated with the load balancer. Inbound NAT rules are created automatically for each NIC
     associated with the load balancer using an external port from this range. Defining an inbound
     NAT pool is mutually exclusive with defining inbound NAT rules. Inbound NAT pools are
     referenced from virtual machine scale sets; NICs associated with individual virtual machines
     cannot reference an inbound NAT pool — they must reference individual inbound NAT rules.
    :vartype inbound_nat_pools: list[~azure.mgmt.network.v2017_03_01.models.InboundNatPool]
    :ivar outbound_nat_rules: The outbound NAT rules.
    :vartype outbound_nat_rules: list[~azure.mgmt.network.v2017_03_01.models.OutboundNatRule]
    :ivar resource_guid: Resource GUID of the load balancer resource.
    :vartype resource_guid: str
    :ivar provisioning_state: Provisioning state of the PublicIP resource; one of 'Updating',
     'Deleting', or 'Failed'.
    :vartype provisioning_state: str
    """

    # ``name`` and ``type`` are populated by the service only.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    # Python attribute name -> wire key (nested under "properties.") and serialized type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "frontend_ip_configurations": {
            "key": "properties.frontendIPConfigurations",
            "type": "[FrontendIPConfiguration]",
        },
        "backend_address_pools": {"key": "properties.backendAddressPools", "type": "[BackendAddressPool]"},
        "load_balancing_rules": {"key": "properties.loadBalancingRules", "type": "[LoadBalancingRule]"},
        "probes": {"key": "properties.probes", "type": "[Probe]"},
        "inbound_nat_rules": {"key": "properties.inboundNatRules", "type": "[InboundNatRule]"},
        "inbound_nat_pools": {"key": "properties.inboundNatPools", "type": "[InboundNatPool]"},
        "outbound_nat_rules": {"key": "properties.outboundNatRules", "type": "[OutboundNatRule]"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        frontend_ip_configurations: Optional[List["_models.FrontendIPConfiguration"]] = None,
        backend_address_pools: Optional[List["_models.BackendAddressPool"]] = None,
        load_balancing_rules: Optional[List["_models.LoadBalancingRule"]] = None,
        probes: Optional[List["_models.Probe"]] = None,
        inbound_nat_rules: Optional[List["_models.InboundNatRule"]] = None,
        inbound_nat_pools: Optional[List["_models.InboundNatPool"]] = None,
        outbound_nat_rules: Optional[List["_models.OutboundNatRule"]] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: Read-only opaque string that changes each time the resource is updated.
        :paramtype etag: str
        :keyword frontend_ip_configurations: Frontend IPs to be used for the load balancer.
        :paramtype frontend_ip_configurations:
         list[~azure.mgmt.network.v2017_03_01.models.FrontendIPConfiguration]
        :keyword backend_address_pools: Backend address pools used by the load balancer.
        :paramtype backend_address_pools:
         list[~azure.mgmt.network.v2017_03_01.models.BackendAddressPool]
        :keyword load_balancing_rules: Object collection representing the load balancing rules
         Gets the provisioning.
        :paramtype load_balancing_rules:
         list[~azure.mgmt.network.v2017_03_01.models.LoadBalancingRule]
        :keyword probes: Probe objects used in the load balancer.
        :paramtype probes: list[~azure.mgmt.network.v2017_03_01.models.Probe]
        :keyword inbound_nat_rules: Inbound NAT rules used by the load balancer. Defining inbound
         NAT rules on a load balancer is mutually exclusive with defining an inbound NAT pool.
         Inbound NAT pools are referenced from virtual machine scale sets; NICs associated with
         individual virtual machines cannot reference an inbound NAT pool — they must reference
         individual inbound NAT rules.
        :paramtype inbound_nat_rules: list[~azure.mgmt.network.v2017_03_01.models.InboundNatRule]
        :keyword inbound_nat_pools: External port ranges for inbound NAT to a single backend port
         on NICs associated with the load balancer. Inbound NAT rules are created automatically
         for each NIC associated with the load balancer using an external port from this range.
         Defining an inbound NAT pool is mutually exclusive with defining inbound NAT rules.
         Inbound NAT pools are referenced from virtual machine scale sets; NICs associated with
         individual virtual machines cannot reference an inbound NAT pool — they must reference
         individual inbound NAT rules.
        :paramtype inbound_nat_pools: list[~azure.mgmt.network.v2017_03_01.models.InboundNatPool]
        :keyword outbound_nat_rules: The outbound NAT rules.
        :paramtype outbound_nat_rules: list[~azure.mgmt.network.v2017_03_01.models.OutboundNatRule]
        :keyword resource_guid: Resource GUID of the load balancer resource.
        :paramtype resource_guid: str
        :keyword provisioning_state: Provisioning state of the PublicIP resource; one of
         'Updating', 'Deleting', or 'Failed'.
        :paramtype provisioning_state: str
        """
        # Common resource fields go through the base class.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        # Capture load-balancer-specific properties verbatim.
        for _field, _value in (
            ("etag", etag),
            ("frontend_ip_configurations", frontend_ip_configurations),
            ("backend_address_pools", backend_address_pools),
            ("load_balancing_rules", load_balancing_rules),
            ("probes", probes),
            ("inbound_nat_rules", inbound_nat_rules),
            ("inbound_nat_pools", inbound_nat_pools),
            ("outbound_nat_rules", outbound_nat_rules),
            ("resource_guid", resource_guid),
            ("provisioning_state", provisioning_state),
        ):
            setattr(self, _field, _value)
class LoadBalancerListResult(_serialization.Model):
    """Response body of the ListLoadBalancers API service call.

    :ivar value: Load balancers in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.LoadBalancer]
    :ivar next_link: URL to fetch the next page of results.
    :vartype next_link: str
    """

    # Python attribute name -> wire key and serialized type.
    _attribute_map = {
        "value": {"key": "value", "type": "[LoadBalancer]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.LoadBalancer"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: Load balancers in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.LoadBalancer]
        :keyword next_link: URL to fetch the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value, self.next_link = value, next_link
class LoadBalancingRule(SubResource):  # pylint: disable=too-many-instance-attributes
    """A load balancing rule for a load balancer.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource; unique within a resource group and usable to address the
     resource.
    :vartype name: str
    :ivar etag: Read-only opaque string that changes each time the resource is updated.
    :vartype etag: str
    :ivar frontend_ip_configuration: Reference to frontend IP addresses.
    :vartype frontend_ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar backend_address_pool: Reference to a pool of DIPs. Inbound traffic is randomly load
     balanced across IPs in the backend IPs.
    :vartype backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar probe: Reference to the load balancer probe used by the load balancing rule.
    :vartype probe: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar protocol: Transport protocol for the external endpoint; one of 'Udp' or 'Tcp'. Known
     values are: "Udp" and "Tcp".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.TransportProtocol
    :ivar load_distribution: Load distribution policy for this rule; one of 'Default', 'SourceIP',
     or 'SourceIPProtocol'. Known values are: "Default", "SourceIP", and "SourceIPProtocol".
    :vartype load_distribution: str or ~azure.mgmt.network.v2017_03_01.models.LoadDistribution
    :ivar frontend_port: Port for the external endpoint. Port numbers for each rule must be
     unique within the Load Balancer. Acceptable values are between 1 and 65534.
    :vartype frontend_port: int
    :ivar backend_port: Port used for internal connections on the endpoint. Acceptable values are
     between 1 and 65535.
    :vartype backend_port: int
    :ivar idle_timeout_in_minutes: Timeout for the TCP idle connection. The value can be set
     between 4 and 30 minutes; the default is 4 minutes. Only used when the protocol is TCP.
    :vartype idle_timeout_in_minutes: int
    :ivar enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
     capability required to configure a SQL AlwaysOn Availability Group. Required when using the
     SQL AlwaysOn Availability Groups in SQL server; cannot be changed after the endpoint is
     created.
    :vartype enable_floating_ip: bool
    :ivar provisioning_state: Provisioning state of the PublicIP resource; one of 'Updating',
     'Deleting', or 'Failed'.
    :vartype provisioning_state: str
    """

    # Python attribute name -> wire key (nested under "properties.") and serialized type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "frontend_ip_configuration": {"key": "properties.frontendIPConfiguration", "type": "SubResource"},
        "backend_address_pool": {"key": "properties.backendAddressPool", "type": "SubResource"},
        "probe": {"key": "properties.probe", "type": "SubResource"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "load_distribution": {"key": "properties.loadDistribution", "type": "str"},
        "frontend_port": {"key": "properties.frontendPort", "type": "int"},
        "backend_port": {"key": "properties.backendPort", "type": "int"},
        "idle_timeout_in_minutes": {"key": "properties.idleTimeoutInMinutes", "type": "int"},
        "enable_floating_ip": {"key": "properties.enableFloatingIP", "type": "bool"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        frontend_ip_configuration: Optional["_models.SubResource"] = None,
        backend_address_pool: Optional["_models.SubResource"] = None,
        probe: Optional["_models.SubResource"] = None,
        protocol: Optional[Union[str, "_models.TransportProtocol"]] = None,
        load_distribution: Optional[Union[str, "_models.LoadDistribution"]] = None,
        frontend_port: Optional[int] = None,
        backend_port: Optional[int] = None,
        idle_timeout_in_minutes: Optional[int] = None,
        enable_floating_ip: Optional[bool] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource; unique within a resource group and usable to address
         the resource.
        :paramtype name: str
        :keyword etag: Read-only opaque string that changes each time the resource is updated.
        :paramtype etag: str
        :keyword frontend_ip_configuration: Reference to frontend IP addresses.
        :paramtype frontend_ip_configuration: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword backend_address_pool: Reference to a pool of DIPs. Inbound traffic is randomly
         load balanced across IPs in the backend IPs.
        :paramtype backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword probe: Reference to the load balancer probe used by the load balancing rule.
        :paramtype probe: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword protocol: Transport protocol for the external endpoint; one of 'Udp' or 'Tcp'.
         Known values are: "Udp" and "Tcp".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.TransportProtocol
        :keyword load_distribution: Load distribution policy for this rule; one of 'Default',
         'SourceIP', or 'SourceIPProtocol'. Known values are: "Default", "SourceIP", and
         "SourceIPProtocol".
        :paramtype load_distribution: str or ~azure.mgmt.network.v2017_03_01.models.LoadDistribution
        :keyword frontend_port: Port for the external endpoint. Port numbers for each rule must be
         unique within the Load Balancer. Acceptable values are between 1 and 65534.
        :paramtype frontend_port: int
        :keyword backend_port: Port used for internal connections on the endpoint. Acceptable
         values are between 1 and 65535.
        :paramtype backend_port: int
        :keyword idle_timeout_in_minutes: Timeout for the TCP idle connection. The value can be
         set between 4 and 30 minutes; the default is 4 minutes. Only used when the protocol is
         TCP.
        :paramtype idle_timeout_in_minutes: int
        :keyword enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
         capability required to configure a SQL AlwaysOn Availability Group. Required when using
         the SQL AlwaysOn Availability Groups in SQL server; cannot be changed after the endpoint
         is created.
        :paramtype enable_floating_ip: bool
        :keyword provisioning_state: Provisioning state of the PublicIP resource; one of
         'Updating', 'Deleting', or 'Failed'.
        :paramtype provisioning_state: str
        """
        # ``id`` is handled by the SubResource base class.
        super().__init__(id=id, **kwargs)
        self.name, self.etag = name, etag
        self.frontend_ip_configuration = frontend_ip_configuration
        self.backend_address_pool, self.probe = backend_address_pool, probe
        self.protocol, self.load_distribution = protocol, load_distribution
        self.frontend_port, self.backend_port = frontend_port, backend_port
        self.idle_timeout_in_minutes = idle_timeout_in_minutes
        self.enable_floating_ip = enable_floating_ip
        self.provisioning_state = provisioning_state
class LocalNetworkGateway(Resource):  # pylint: disable=too-many-instance-attributes
    """A common class for general resource information.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Read-only opaque string that changes each time the resource is updated.
    :vartype etag: str
    :ivar local_network_address_space: Local network site address space.
    :vartype local_network_address_space: ~azure.mgmt.network.v2017_03_01.models.AddressSpace
    :ivar gateway_ip_address: IP address of the local network gateway.
    :vartype gateway_ip_address: str
    :ivar bgp_settings: BGP speaker settings of the local network gateway.
    :vartype bgp_settings: ~azure.mgmt.network.v2017_03_01.models.BgpSettings
    :ivar resource_guid: Resource GUID of the LocalNetworkGateway resource.
    :vartype resource_guid: str
    :ivar provisioning_state: Provisioning state of the LocalNetworkGateway resource; one of
     'Updating', 'Deleting', or 'Failed'.
    :vartype provisioning_state: str
    """

    # ``name``, ``type`` and ``provisioning_state`` are populated by the service only.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    # Python attribute name -> wire key (nested under "properties.") and serialized type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "local_network_address_space": {"key": "properties.localNetworkAddressSpace", "type": "AddressSpace"},
        "gateway_ip_address": {"key": "properties.gatewayIpAddress", "type": "str"},
        "bgp_settings": {"key": "properties.bgpSettings", "type": "BgpSettings"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        local_network_address_space: Optional["_models.AddressSpace"] = None,
        gateway_ip_address: Optional[str] = None,
        bgp_settings: Optional["_models.BgpSettings"] = None,
        resource_guid: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: Read-only opaque string that changes each time the resource is updated.
        :paramtype etag: str
        :keyword local_network_address_space: Local network site address space.
        :paramtype local_network_address_space: ~azure.mgmt.network.v2017_03_01.models.AddressSpace
        :keyword gateway_ip_address: IP address of the local network gateway.
        :paramtype gateway_ip_address: str
        :keyword bgp_settings: BGP speaker settings of the local network gateway.
        :paramtype bgp_settings: ~azure.mgmt.network.v2017_03_01.models.BgpSettings
        :keyword resource_guid: Resource GUID of the LocalNetworkGateway resource.
        :paramtype resource_guid: str
        """
        # Common resource fields go through the base class.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.local_network_address_space = local_network_address_space
        self.gateway_ip_address, self.bgp_settings = gateway_ip_address, bgp_settings
        self.resource_guid = resource_guid
        # Read-only: populated by the service, never accepted from the caller.
        self.provisioning_state = None
class LocalNetworkGatewayListResult(_serialization.Model):
    """Response body of the ListLocalNetworkGateways API service call.

    :ivar value: Local network gateways that exist in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.LocalNetworkGateway]
    :ivar next_link: URL to fetch the next page of results.
    :vartype next_link: str
    """

    # Python attribute name -> wire key and serialized type.
    _attribute_map = {
        "value": {"key": "value", "type": "[LocalNetworkGateway]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.LocalNetworkGateway"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: Local network gateways that exist in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.LocalNetworkGateway]
        :keyword next_link: URL to fetch the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value, self.next_link = value, next_link
class NetworkInterface(Resource): # pylint: disable=too-many-instance-attributes
"""A network interface in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar location: Resource location.
:vartype location: str
:ivar tags: Resource tags.
:vartype tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar virtual_machine: The reference of a virtual machine.
:vartype virtual_machine: ~azure.mgmt.network.v2017_03_01.models.SubResource
:ivar network_security_group: The reference of the NetworkSecurityGroup resource.
:vartype network_security_group: ~azure.mgmt.network.v2017_03_01.models.NetworkSecurityGroup
:ivar ip_configurations: A list of IPConfigurations of the network interface.
:vartype ip_configurations:
list[~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceIPConfiguration]
:ivar dns_settings: The DNS settings in network interface.
:vartype dns_settings: ~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceDnsSettings
:ivar mac_address: The MAC address of the network interface.
:vartype mac_address: str
:ivar primary: Gets whether this is a primary network interface on a virtual machine.
:vartype primary: bool
:ivar enable_accelerated_networking: If the network interface is accelerated networking
enabled.
:vartype enable_accelerated_networking: bool
:ivar enable_ip_forwarding: Indicates whether IP forwarding is enabled on this network
interface.
:vartype enable_ip_forwarding: bool
:ivar resource_guid: The resource GUID property of the network interface resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the public IP resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
    # ``name`` and ``type`` are populated by the service; they are never sent on requests.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    # Maps each Python attribute to its REST wire key and serialization type.
    # Keys under ``properties.`` are flattened out of the resource's properties envelope.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "virtual_machine": {"key": "properties.virtualMachine", "type": "SubResource"},
        "network_security_group": {"key": "properties.networkSecurityGroup", "type": "NetworkSecurityGroup"},
        "ip_configurations": {"key": "properties.ipConfigurations", "type": "[NetworkInterfaceIPConfiguration]"},
        "dns_settings": {"key": "properties.dnsSettings", "type": "NetworkInterfaceDnsSettings"},
        "mac_address": {"key": "properties.macAddress", "type": "str"},
        "primary": {"key": "properties.primary", "type": "bool"},
        "enable_accelerated_networking": {"key": "properties.enableAcceleratedNetworking", "type": "bool"},
        "enable_ip_forwarding": {"key": "properties.enableIPForwarding", "type": "bool"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        virtual_machine: Optional["_models.SubResource"] = None,
        network_security_group: Optional["_models.NetworkSecurityGroup"] = None,
        ip_configurations: Optional[List["_models.NetworkInterfaceIPConfiguration"]] = None,
        dns_settings: Optional["_models.NetworkInterfaceDnsSettings"] = None,
        mac_address: Optional[str] = None,
        primary: Optional[bool] = None,
        enable_accelerated_networking: Optional[bool] = None,
        enable_ip_forwarding: Optional[bool] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword virtual_machine: The reference of a virtual machine.
        :paramtype virtual_machine: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword network_security_group: The reference of the NetworkSecurityGroup resource.
        :paramtype network_security_group: ~azure.mgmt.network.v2017_03_01.models.NetworkSecurityGroup
        :keyword ip_configurations: A list of IPConfigurations of the network interface.
        :paramtype ip_configurations:
         list[~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceIPConfiguration]
        :keyword dns_settings: The DNS settings in network interface.
        :paramtype dns_settings: ~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceDnsSettings
        :keyword mac_address: The MAC address of the network interface.
        :paramtype mac_address: str
        :keyword primary: Gets whether this is a primary network interface on a virtual machine.
        :paramtype primary: bool
        :keyword enable_accelerated_networking: If the network interface is accelerated networking
         enabled.
        :paramtype enable_accelerated_networking: bool
        :keyword enable_ip_forwarding: Indicates whether IP forwarding is enabled on this network
         interface.
        :paramtype enable_ip_forwarding: bool
        :keyword resource_guid: The resource GUID property of the network interface resource.
        :paramtype resource_guid: str
        :keyword provisioning_state: The provisioning state of the public IP resource. Possible values
         are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # id/location/tags are common Resource fields handled by the base class.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.virtual_machine = virtual_machine
        self.network_security_group = network_security_group
        self.ip_configurations = ip_configurations
        self.dns_settings = dns_settings
        self.mac_address = mac_address
        self.primary = primary
        self.enable_accelerated_networking = enable_accelerated_networking
        self.enable_ip_forwarding = enable_ip_forwarding
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
class NetworkInterfaceAssociation(_serialization.Model):
    """A network interface together with the custom security rules applied to it.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Network interface ID.
    :vartype id: str
    :ivar security_rules: Collection of custom security rules.
    :vartype security_rules: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
    """

    _validation = {
        "id": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "security_rules": {"key": "securityRules", "type": "[SecurityRule]"},
    }

    def __init__(self, *, security_rules: Optional[List["_models.SecurityRule"]] = None, **kwargs):
        """
        :keyword security_rules: Collection of custom security rules.
        :paramtype security_rules:
         list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
        """
        super().__init__(**kwargs)
        self.security_rules = security_rules
        # ``id`` is server-populated and therefore starts out unset.
        self.id = None
class NetworkInterfaceDnsSettings(_serialization.Model):
    """DNS configuration carried by a network interface.

    :ivar dns_servers: IP addresses of DNS servers. Use 'AzureProvidedDNS' to switch to azure
     provided DNS resolution; that value cannot be combined with other IPs and must be the only
     entry in the dnsServers collection.
    :vartype dns_servers: list[str]
    :ivar applied_dns_servers: When the VM owning this NIC is part of an Availability Set, the
     union of DNS servers across all NICs in that set. This is what is configured on each VM.
    :vartype applied_dns_servers: list[str]
    :ivar internal_dns_name_label: Relative DNS name for this NIC, used for internal
     communication between VMs within the same virtual network.
    :vartype internal_dns_name_label: str
    :ivar internal_fqdn: Fully qualified DNS name supporting internal communication between VMs
     in the same virtual network.
    :vartype internal_fqdn: str
    :ivar internal_domain_name_suffix: Suffix that, concatenated with the VM name, forms the DNS
     entry always created for the primary NIC, even when internalDnsNameLabel is not specified.
    :vartype internal_domain_name_suffix: str
    """

    _attribute_map = {
        "dns_servers": {"key": "dnsServers", "type": "[str]"},
        "applied_dns_servers": {"key": "appliedDnsServers", "type": "[str]"},
        "internal_dns_name_label": {"key": "internalDnsNameLabel", "type": "str"},
        "internal_fqdn": {"key": "internalFqdn", "type": "str"},
        "internal_domain_name_suffix": {"key": "internalDomainNameSuffix", "type": "str"},
    }

    def __init__(
        self,
        *,
        dns_servers: Optional[List[str]] = None,
        applied_dns_servers: Optional[List[str]] = None,
        internal_dns_name_label: Optional[str] = None,
        internal_fqdn: Optional[str] = None,
        internal_domain_name_suffix: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword dns_servers: IP addresses of DNS servers. Use 'AzureProvidedDNS' to switch to
         azure provided DNS resolution; that value must be the only entry in the collection.
        :paramtype dns_servers: list[str]
        :keyword applied_dns_servers: Union of DNS servers of all NICs in the VM's Availability
         Set; what is configured on each of those VMs.
        :paramtype applied_dns_servers: list[str]
        :keyword internal_dns_name_label: Relative DNS name for this NIC used for internal
         communication between VMs in the same virtual network.
        :paramtype internal_dns_name_label: str
        :keyword internal_fqdn: Fully qualified DNS name supporting internal communication
         between VMs in the same virtual network.
        :paramtype internal_fqdn: str
        :keyword internal_domain_name_suffix: Suffix concatenated with the VM name to build the
         DNS entry created for the primary NIC.
        :paramtype internal_domain_name_suffix: str
        """
        super().__init__(**kwargs)
        # Plain value-holder assignments; order is not significant.
        self.internal_domain_name_suffix = internal_domain_name_suffix
        self.internal_fqdn = internal_fqdn
        self.internal_dns_name_label = internal_dns_name_label
        self.applied_dns_servers = applied_dns_servers
        self.dns_servers = dns_servers
class NetworkInterfaceIPConfiguration(SubResource):  # pylint: disable=too-many-instance-attributes
    """An IP configuration attached to a network interface.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource, unique within its resource group; usable to address the
     resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar application_gateway_backend_address_pools: The reference of
     ApplicationGatewayBackendAddressPool resource.
    :vartype application_gateway_backend_address_pools:
     list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendAddressPool]
    :ivar load_balancer_backend_address_pools: The reference of LoadBalancerBackendAddressPool
     resource.
    :vartype load_balancer_backend_address_pools:
     list[~azure.mgmt.network.v2017_03_01.models.BackendAddressPool]
    :ivar load_balancer_inbound_nat_rules: A list of references of LoadBalancerInboundNatRules.
    :vartype load_balancer_inbound_nat_rules:
     list[~azure.mgmt.network.v2017_03_01.models.InboundNatRule]
    :ivar private_ip_address:
    :vartype private_ip_address: str
    :ivar private_ip_allocation_method: How the private IP address is assigned. Possible values
     are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
    :vartype private_ip_allocation_method: str or
     ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
    :ivar private_ip_address_version: Available from Api-Version 2016-03-30 onwards; whether the
     ipconfiguration is IPv4 or IPv6. Defaults to IPv4. Possible values are: 'IPv4' and 'IPv6'.
     Known values are: "IPv4" and "IPv6".
    :vartype private_ip_address_version: str or ~azure.mgmt.network.v2017_03_01.models.IPVersion
    :ivar subnet: Subnet in a virtual network resource.
    :vartype subnet: ~azure.mgmt.network.v2017_03_01.models.Subnet
    :ivar primary: Whether this is a primary customer address on the network interface.
    :vartype primary: bool
    :ivar public_ip_address: Public IP address resource.
    :vartype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.PublicIPAddress
    :ivar provisioning_state:
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "application_gateway_backend_address_pools": {
            "key": "properties.applicationGatewayBackendAddressPools",
            "type": "[ApplicationGatewayBackendAddressPool]",
        },
        "load_balancer_backend_address_pools": {
            "key": "properties.loadBalancerBackendAddressPools",
            "type": "[BackendAddressPool]",
        },
        "load_balancer_inbound_nat_rules": {
            "key": "properties.loadBalancerInboundNatRules",
            "type": "[InboundNatRule]",
        },
        "private_ip_address": {"key": "properties.privateIPAddress", "type": "str"},
        "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"},
        "private_ip_address_version": {"key": "properties.privateIPAddressVersion", "type": "str"},
        "subnet": {"key": "properties.subnet", "type": "Subnet"},
        "primary": {"key": "properties.primary", "type": "bool"},
        "public_ip_address": {"key": "properties.publicIPAddress", "type": "PublicIPAddress"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        application_gateway_backend_address_pools: Optional[
            List["_models.ApplicationGatewayBackendAddressPool"]
        ] = None,
        load_balancer_backend_address_pools: Optional[List["_models.BackendAddressPool"]] = None,
        load_balancer_inbound_nat_rules: Optional[List["_models.InboundNatRule"]] = None,
        private_ip_address: Optional[str] = None,
        private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None,
        private_ip_address_version: Optional[Union[str, "_models.IPVersion"]] = None,
        subnet: Optional["_models.Subnet"] = None,
        primary: Optional[bool] = None,
        public_ip_address: Optional["_models.PublicIPAddress"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource, unique within its resource group; usable to address
         the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword application_gateway_backend_address_pools: The reference of
         ApplicationGatewayBackendAddressPool resource.
        :paramtype application_gateway_backend_address_pools:
         list[~azure.mgmt.network.v2017_03_01.models.ApplicationGatewayBackendAddressPool]
        :keyword load_balancer_backend_address_pools: The reference of
         LoadBalancerBackendAddressPool resource.
        :paramtype load_balancer_backend_address_pools:
         list[~azure.mgmt.network.v2017_03_01.models.BackendAddressPool]
        :keyword load_balancer_inbound_nat_rules: A list of references of
         LoadBalancerInboundNatRules.
        :paramtype load_balancer_inbound_nat_rules:
         list[~azure.mgmt.network.v2017_03_01.models.InboundNatRule]
        :keyword private_ip_address:
        :paramtype private_ip_address: str
        :keyword private_ip_allocation_method: How the private IP address is assigned. Possible
         values are: 'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
        :paramtype private_ip_allocation_method: str or
         ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
        :keyword private_ip_address_version: Available from Api-Version 2016-03-30 onwards;
         whether the ipconfiguration is IPv4 or IPv6. Defaults to IPv4. Possible values are:
         'IPv4' and 'IPv6'. Known values are: "IPv4" and "IPv6".
        :paramtype private_ip_address_version: str or
         ~azure.mgmt.network.v2017_03_01.models.IPVersion
        :keyword subnet: Subnet in a virtual network resource.
        :paramtype subnet: ~azure.mgmt.network.v2017_03_01.models.Subnet
        :keyword primary: Whether this is a primary customer address on the network interface.
        :paramtype primary: bool
        :keyword public_ip_address: Public IP address resource.
        :paramtype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.PublicIPAddress
        :keyword provisioning_state:
        :paramtype provisioning_state: str
        """
        # The base SubResource only carries the id; everything else lives here.
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.public_ip_address = public_ip_address
        self.primary = primary
        self.subnet = subnet
        self.private_ip_address_version = private_ip_address_version
        self.private_ip_allocation_method = private_ip_allocation_method
        self.private_ip_address = private_ip_address
        self.load_balancer_inbound_nat_rules = load_balancer_inbound_nat_rules
        self.load_balancer_backend_address_pools = load_balancer_backend_address_pools
        self.application_gateway_backend_address_pools = application_gateway_backend_address_pools
        self.etag = etag
        self.name = name
class NetworkInterfaceListResult(_serialization.Model):
    """Paged response for the ListNetworkInterface API service call.

    :ivar value: Network interfaces found in the resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.NetworkInterface]
    :ivar next_link: URL that retrieves the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[NetworkInterface]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.NetworkInterface"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: Network interfaces found in the resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.NetworkInterface]
        :keyword next_link: URL that retrieves the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class NetworkSecurityGroup(Resource):  # pylint: disable=too-many-instance-attributes
    """NetworkSecurityGroup resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar security_rules: Security rules belonging to this network security group.
    :vartype security_rules: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
    :ivar default_security_rules: Default security rules of the network security group.
    :vartype default_security_rules: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
    :ivar network_interfaces: References to the network interfaces using this group.
    :vartype network_interfaces: list[~azure.mgmt.network.v2017_03_01.models.NetworkInterface]
    :ivar subnets: References to the subnets using this group.
    :vartype subnets: list[~azure.mgmt.network.v2017_03_01.models.Subnet]
    :ivar resource_guid: The resource GUID property of the network security group resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the public IP resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "network_interfaces": {"readonly": True},
        "subnets": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "security_rules": {"key": "properties.securityRules", "type": "[SecurityRule]"},
        "default_security_rules": {"key": "properties.defaultSecurityRules", "type": "[SecurityRule]"},
        "network_interfaces": {"key": "properties.networkInterfaces", "type": "[NetworkInterface]"},
        "subnets": {"key": "properties.subnets", "type": "[Subnet]"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        security_rules: Optional[List["_models.SecurityRule"]] = None,
        default_security_rules: Optional[List["_models.SecurityRule"]] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword security_rules: Security rules belonging to this network security group.
        :paramtype security_rules: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
        :keyword default_security_rules: Default security rules of the network security group.
        :paramtype default_security_rules:
         list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
        :keyword resource_guid: The resource GUID property of the network security group resource.
        :paramtype resource_guid: str
        :keyword provisioning_state: The provisioning state of the public IP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.provisioning_state = provisioning_state
        self.resource_guid = resource_guid
        # network_interfaces and subnets are server-populated (readonly), hence None here.
        self.subnets = None
        self.network_interfaces = None
        self.default_security_rules = default_security_rules
        self.security_rules = security_rules
        self.etag = etag
class NetworkSecurityGroupListResult(_serialization.Model):
    """Paged response for the ListNetworkSecurityGroups API service call.

    :ivar value: NetworkSecurityGroup resources in this page.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.NetworkSecurityGroup]
    :ivar next_link: URL that retrieves the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[NetworkSecurityGroup]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self, *, value: Optional[List["_models.NetworkSecurityGroup"]] = None, next_link: Optional[str] = None, **kwargs
    ):
        """
        :keyword value: NetworkSecurityGroup resources in this page.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.NetworkSecurityGroup]
        :keyword next_link: URL that retrieves the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class NetworkWatcher(Resource):
    """Network watcher in a resource group.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar provisioning_state: The provisioning state of the resource. Known values are:
     "Succeeded", "Updating", "Deleting", and "Failed".
    :vartype provisioning_state: str or ~azure.mgmt.network.v2017_03_01.models.ProvisioningState
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        # provisioning_state is server-populated (readonly) and never accepted as input.
        self.provisioning_state = None
        self.etag = etag
class NetworkWatcherListResult(_serialization.Model):
    """Collection of network watcher resources.

    :ivar value:
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.NetworkWatcher]
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[NetworkWatcher]"},
    }

    def __init__(self, *, value: Optional[List["_models.NetworkWatcher"]] = None, **kwargs):
        """
        :keyword value:
        :paramtype value:
         list[~azure.mgmt.network.v2017_03_01.models.NetworkWatcher]
        """
        super().__init__(**kwargs)
        self.value = value
class NextHopParameters(_serialization.Model):
    """Source and destination endpoint definition for a next-hop query.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: Identifier of the target resource the action runs against.
     Required.
    :vartype target_resource_id: str
    :ivar source_ip_address: The source IP address. Required.
    :vartype source_ip_address: str
    :ivar destination_ip_address: The destination IP address. Required.
    :vartype destination_ip_address: str
    :ivar target_nic_resource_id: The NIC ID. Must be specified when the VM has multiple NICs
     and IP forwarding is enabled on any of them; otherwise optional.
    :vartype target_nic_resource_id: str
    """

    _validation = {
        "target_resource_id": {"required": True},
        "source_ip_address": {"required": True},
        "destination_ip_address": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "source_ip_address": {"key": "sourceIPAddress", "type": "str"},
        "destination_ip_address": {"key": "destinationIPAddress", "type": "str"},
        "target_nic_resource_id": {"key": "targetNicResourceId", "type": "str"},
    }

    def __init__(
        self,
        *,
        target_resource_id: str,
        source_ip_address: str,
        destination_ip_address: str,
        target_nic_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword target_resource_id: Identifier of the target resource the action runs against.
         Required.
        :paramtype target_resource_id: str
        :keyword source_ip_address: The source IP address. Required.
        :paramtype source_ip_address: str
        :keyword destination_ip_address: The destination IP address. Required.
        :paramtype destination_ip_address: str
        :keyword target_nic_resource_id: The NIC ID. Must be specified when the VM has multiple
         NICs and IP forwarding is enabled on any of them; otherwise optional.
        :paramtype target_nic_resource_id: str
        """
        super().__init__(**kwargs)
        self.target_nic_resource_id = target_nic_resource_id
        self.destination_ip_address = destination_ip_address
        self.source_ip_address = source_ip_address
        self.target_resource_id = target_resource_id
class NextHopResult(_serialization.Model):
    """Information about the next hop from the specified VM.

    :ivar next_hop_type: Next hop type. Known values are: "Internet", "VirtualAppliance",
     "VirtualNetworkGateway", "VnetLocal", "HyperNetGateway", and "None".
    :vartype next_hop_type: str or ~azure.mgmt.network.v2017_03_01.models.NextHopType
    :ivar next_hop_ip_address: Next hop IP Address.
    :vartype next_hop_ip_address: str
    :ivar route_table_id: Identifier of the route table associated with the returned route; the
     string 'System Route' when the route does not correspond to any user created route.
    :vartype route_table_id: str
    """

    _attribute_map = {
        "next_hop_type": {"key": "nextHopType", "type": "str"},
        "next_hop_ip_address": {"key": "nextHopIpAddress", "type": "str"},
        "route_table_id": {"key": "routeTableId", "type": "str"},
    }

    def __init__(
        self,
        *,
        next_hop_type: Optional[Union[str, "_models.NextHopType"]] = None,
        next_hop_ip_address: Optional[str] = None,
        route_table_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword next_hop_type: Next hop type. Known values are: "Internet", "VirtualAppliance",
         "VirtualNetworkGateway", "VnetLocal", "HyperNetGateway", and "None".
        :paramtype next_hop_type: str or ~azure.mgmt.network.v2017_03_01.models.NextHopType
        :keyword next_hop_ip_address: Next hop IP Address.
        :paramtype next_hop_ip_address: str
        :keyword route_table_id: Identifier of the route table associated with the returned
         route; the string 'System Route' when the route does not correspond to any user created
         route.
        :paramtype route_table_id: str
        """
        super().__init__(**kwargs)
        self.route_table_id = route_table_id
        self.next_hop_ip_address = next_hop_ip_address
        self.next_hop_type = next_hop_type
class OutboundNatRule(SubResource):
    """Outbound NAT pool of the load balancer.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource, unique within its resource group; usable to address the
     resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar allocated_outbound_ports: Number of outbound ports to use for NAT.
    :vartype allocated_outbound_ports: int
    :ivar frontend_ip_configurations: Frontend IP addresses of the load balancer.
    :vartype frontend_ip_configurations: list[~azure.mgmt.network.v2017_03_01.models.SubResource]
    :ivar backend_address_pool: Reference to a pool of DIPs. Outbound traffic is randomly load
     balanced across IPs in the backend IPs.
    :vartype backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar provisioning_state: Provisioning state of the PublicIP resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "allocated_outbound_ports": {"key": "properties.allocatedOutboundPorts", "type": "int"},
        "frontend_ip_configurations": {"key": "properties.frontendIPConfigurations", "type": "[SubResource]"},
        "backend_address_pool": {"key": "properties.backendAddressPool", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        allocated_outbound_ports: Optional[int] = None,
        frontend_ip_configurations: Optional[List["_models.SubResource"]] = None,
        backend_address_pool: Optional["_models.SubResource"] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource, unique within its resource group; usable to address
         the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword allocated_outbound_ports: Number of outbound ports to use for NAT.
        :paramtype allocated_outbound_ports: int
        :keyword frontend_ip_configurations: Frontend IP addresses of the load balancer.
        :paramtype frontend_ip_configurations:
         list[~azure.mgmt.network.v2017_03_01.models.SubResource]
        :keyword backend_address_pool: Reference to a pool of DIPs. Outbound traffic is randomly
         load balanced across IPs in the backend IPs.
        :paramtype backend_address_pool: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword provisioning_state: Provisioning state of the PublicIP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        self.provisioning_state = provisioning_state
        self.backend_address_pool = backend_address_pool
        self.frontend_ip_configurations = frontend_ip_configurations
        self.allocated_outbound_ports = allocated_outbound_ports
        self.etag = etag
        self.name = name
class PacketCapture(_serialization.Model):
    """Parameters defining a create-packet-capture operation.

    All required parameters must be populated in order to send to Azure.

    :ivar target: ID of the targeted resource; only VM is currently supported. Required.
    :vartype target: str
    :ivar bytes_to_capture_per_packet: Bytes captured per packet; remaining bytes are truncated.
    :vartype bytes_to_capture_per_packet: int
    :ivar total_bytes_per_session: Maximum size of the capture output.
    :vartype total_bytes_per_session: int
    :ivar time_limit_in_seconds: Maximum duration of the capture session in seconds.
    :vartype time_limit_in_seconds: int
    :ivar storage_location: Describes the storage location for a packet capture session.
     Required.
    :vartype storage_location: ~azure.mgmt.network.v2017_03_01.models.PacketCaptureStorageLocation
    :ivar filters:
    :vartype filters: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureFilter]
    """

    _validation = {
        "target": {"required": True},
        "storage_location": {"required": True},
    }

    _attribute_map = {
        "target": {"key": "properties.target", "type": "str"},
        "bytes_to_capture_per_packet": {"key": "properties.bytesToCapturePerPacket", "type": "int"},
        "total_bytes_per_session": {"key": "properties.totalBytesPerSession", "type": "int"},
        "time_limit_in_seconds": {"key": "properties.timeLimitInSeconds", "type": "int"},
        "storage_location": {"key": "properties.storageLocation", "type": "PacketCaptureStorageLocation"},
        "filters": {"key": "properties.filters", "type": "[PacketCaptureFilter]"},
    }

    def __init__(
        self,
        *,
        target: str,
        storage_location: "_models.PacketCaptureStorageLocation",
        bytes_to_capture_per_packet: int = 0,
        total_bytes_per_session: int = 1073741824,
        time_limit_in_seconds: int = 18000,
        filters: Optional[List["_models.PacketCaptureFilter"]] = None,
        **kwargs
    ):
        """
        :keyword target: ID of the targeted resource; only VM is currently supported. Required.
        :paramtype target: str
        :keyword bytes_to_capture_per_packet: Bytes captured per packet; remaining bytes are
         truncated.
        :paramtype bytes_to_capture_per_packet: int
        :keyword total_bytes_per_session: Maximum size of the capture output.
        :paramtype total_bytes_per_session: int
        :keyword time_limit_in_seconds: Maximum duration of the capture session in seconds.
        :paramtype time_limit_in_seconds: int
        :keyword storage_location: Describes the storage location for a packet capture session.
         Required.
        :paramtype storage_location:
         ~azure.mgmt.network.v2017_03_01.models.PacketCaptureStorageLocation
        :keyword filters:
        :paramtype filters: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureFilter]
        """
        super().__init__(**kwargs)
        # Defaults mirror the service's documented defaults (1 GiB output, 5 hour session).
        self.filters = filters
        self.storage_location = storage_location
        self.time_limit_in_seconds = time_limit_in_seconds
        self.total_bytes_per_session = total_bytes_per_session
        self.bytes_to_capture_per_packet = bytes_to_capture_per_packet
        self.target = target
class PacketCaptureFilter(_serialization.Model):
    """Filter that is applied to packet capture request. Multiple filters can be applied.

    :ivar protocol: Protocol to be filtered on. Known values are: "TCP", "UDP", and "Any".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.PcProtocol
    :ivar local_ip_address: Local IP Address to be filtered on. Notation: "127.0.0.1" for a single
     address entry, "127.0.0.1-127.0.0.255" for a range, "127.0.0.1;127.0.0.5" for multiple
     entries. Multiple ranges are not currently supported, nor is mixing ranges with multiple
     entries. Default = null.
    :vartype local_ip_address: str
    :ivar remote_ip_address: Remote IP Address to be filtered on. Notation: "127.0.0.1" for a
     single address entry, "127.0.0.1-127.0.0.255" for a range, "127.0.0.1;127.0.0.5;" for
     multiple entries. Multiple ranges are not currently supported, nor is mixing ranges with
     multiple entries. Default = null.
    :vartype remote_ip_address: str
    :ivar local_port: Local port to be filtered on. Notation: "80" for a single port entry,
     "80-85" for a range, "80;443;" for multiple entries. Multiple ranges are not currently
     supported, nor is mixing ranges with multiple entries. Default = null.
    :vartype local_port: str
    :ivar remote_port: Remote port to be filtered on. Notation: "80" for a single port entry,
     "80-85" for a range, "80;443;" for multiple entries. Multiple ranges are not currently
     supported, nor is mixing ranges with multiple entries. Default = null.
    :vartype remote_port: str
    """

    _attribute_map = {
        "protocol": {"key": "protocol", "type": "str"},
        "local_ip_address": {"key": "localIPAddress", "type": "str"},
        "remote_ip_address": {"key": "remoteIPAddress", "type": "str"},
        "local_port": {"key": "localPort", "type": "str"},
        "remote_port": {"key": "remotePort", "type": "str"},
    }

    def __init__(
        self,
        *,
        protocol: Union[str, "_models.PcProtocol"] = "Any",
        local_ip_address: Optional[str] = None,
        remote_ip_address: Optional[str] = None,
        local_port: Optional[str] = None,
        remote_port: Optional[str] = None,
        **kwargs
    ):
        """Initialize a capture filter; every field is optional and defaults to "match anything".

        :keyword protocol: Protocol to be filtered on. Known values are: "TCP", "UDP", and "Any".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.PcProtocol
        :keyword local_ip_address: Local IP Address to be filtered on. Notation: "127.0.0.1" for a
         single address entry, "127.0.0.1-127.0.0.255" for a range, "127.0.0.1;127.0.0.5" for
         multiple entries. Multiple ranges are not currently supported, nor is mixing ranges with
         multiple entries. Default = null.
        :paramtype local_ip_address: str
        :keyword remote_ip_address: Remote IP Address to be filtered on. Notation: "127.0.0.1" for
         a single address entry, "127.0.0.1-127.0.0.255" for a range, "127.0.0.1;127.0.0.5;" for
         multiple entries. Multiple ranges are not currently supported, nor is mixing ranges with
         multiple entries. Default = null.
        :paramtype remote_ip_address: str
        :keyword local_port: Local port to be filtered on. Notation: "80" for a single port entry,
         "80-85" for a range, "80;443;" for multiple entries. Multiple ranges are not currently
         supported, nor is mixing ranges with multiple entries. Default = null.
        :paramtype local_port: str
        :keyword remote_port: Remote port to be filtered on. Notation: "80" for a single port
         entry, "80-85" for a range, "80;443;" for multiple entries. Multiple ranges are not
         currently supported, nor is mixing ranges with multiple entries. Default = null.
        :paramtype remote_port: str
        """
        super().__init__(**kwargs)
        self.remote_port = remote_port
        self.local_port = local_port
        self.remote_ip_address = remote_ip_address
        self.local_ip_address = local_ip_address
        self.protocol = protocol
class PacketCaptureListResult(_serialization.Model):
    """List of packet capture sessions.

    :ivar value: Information about packet capture sessions.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureResult]
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[PacketCaptureResult]"},
    }

    def __init__(self, *, value: Optional[List["_models.PacketCaptureResult"]] = None, **kwargs):
        """Initialize the list wrapper.

        :keyword value: Information about packet capture sessions.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureResult]
        """
        super().__init__(**kwargs)
        self.value = value
class PacketCaptureParameters(_serialization.Model):
    """Parameters that define the create packet capture operation.

    All required parameters must be populated in order to send to Azure.

    :ivar target: The ID of the targeted resource, only VM is currently supported. Required.
    :vartype target: str
    :ivar bytes_to_capture_per_packet: Number of bytes captured per packet; the remaining bytes
     are truncated.
    :vartype bytes_to_capture_per_packet: int
    :ivar total_bytes_per_session: Maximum size of the capture output.
    :vartype total_bytes_per_session: int
    :ivar time_limit_in_seconds: Maximum duration of the capture session in seconds.
    :vartype time_limit_in_seconds: int
    :ivar storage_location: Describes the storage location for a packet capture session. Required.
    :vartype storage_location: ~azure.mgmt.network.v2017_03_01.models.PacketCaptureStorageLocation
    :ivar filters: Filters applied to the capture session.
    :vartype filters: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureFilter]
    """

    _validation = {
        "target": {"required": True},
        "storage_location": {"required": True},
    }

    _attribute_map = {
        "target": {"key": "target", "type": "str"},
        "bytes_to_capture_per_packet": {"key": "bytesToCapturePerPacket", "type": "int"},
        "total_bytes_per_session": {"key": "totalBytesPerSession", "type": "int"},
        "time_limit_in_seconds": {"key": "timeLimitInSeconds", "type": "int"},
        "storage_location": {"key": "storageLocation", "type": "PacketCaptureStorageLocation"},
        "filters": {"key": "filters", "type": "[PacketCaptureFilter]"},
    }

    def __init__(
        self,
        *,
        target: str,
        storage_location: "_models.PacketCaptureStorageLocation",
        bytes_to_capture_per_packet: int = 0,
        total_bytes_per_session: int = 1073741824,
        time_limit_in_seconds: int = 18000,
        filters: Optional[List["_models.PacketCaptureFilter"]] = None,
        **kwargs
    ):
        """Initialize create-packet-capture parameters.

        :keyword target: The ID of the targeted resource, only VM is currently supported. Required.
        :paramtype target: str
        :keyword storage_location: Describes the storage location for a packet capture session.
         Required.
        :paramtype storage_location:
         ~azure.mgmt.network.v2017_03_01.models.PacketCaptureStorageLocation
        :keyword bytes_to_capture_per_packet: Number of bytes captured per packet; the remaining
         bytes are truncated.
        :paramtype bytes_to_capture_per_packet: int
        :keyword total_bytes_per_session: Maximum size of the capture output.
        :paramtype total_bytes_per_session: int
        :keyword time_limit_in_seconds: Maximum duration of the capture session in seconds.
        :paramtype time_limit_in_seconds: int
        :keyword filters: Filters applied to the capture session.
        :paramtype filters: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureFilter]
        """
        super().__init__(**kwargs)
        self.filters = filters
        self.storage_location = storage_location
        self.time_limit_in_seconds = time_limit_in_seconds
        self.total_bytes_per_session = total_bytes_per_session
        self.bytes_to_capture_per_packet = bytes_to_capture_per_packet
        self.target = target
class PacketCaptureQueryStatusResult(_serialization.Model):
    """Status of packet capture session.

    :ivar name: The name of the packet capture resource.
    :vartype name: str
    :ivar id: The ID of the packet capture resource.
    :vartype id: str
    :ivar capture_start_time: The start time of the packet capture session.
    :vartype capture_start_time: ~datetime.datetime
    :ivar packet_capture_status: The status of the packet capture session. Known values are:
     "NotStarted", "Running", "Stopped", "Error", and "Unknown".
    :vartype packet_capture_status: str or ~azure.mgmt.network.v2017_03_01.models.PcStatus
    :ivar stop_reason: The reason the current packet capture session was stopped.
    :vartype stop_reason: str
    :ivar packet_capture_error: List of errors of packet capture session.
    :vartype packet_capture_error: list[str or ~azure.mgmt.network.v2017_03_01.models.PcError]
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "capture_start_time": {"key": "captureStartTime", "type": "iso-8601"},
        "packet_capture_status": {"key": "packetCaptureStatus", "type": "str"},
        "stop_reason": {"key": "stopReason", "type": "str"},
        "packet_capture_error": {"key": "packetCaptureError", "type": "[str]"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        capture_start_time: Optional[datetime.datetime] = None,
        packet_capture_status: Optional[Union[str, "_models.PcStatus"]] = None,
        stop_reason: Optional[str] = None,
        packet_capture_error: Optional[List[Union[str, "_models.PcError"]]] = None,
        **kwargs
    ):
        """Initialize a capture-session status result.

        :keyword name: The name of the packet capture resource.
        :paramtype name: str
        :keyword id: The ID of the packet capture resource.
        :paramtype id: str
        :keyword capture_start_time: The start time of the packet capture session.
        :paramtype capture_start_time: ~datetime.datetime
        :keyword packet_capture_status: The status of the packet capture session. Known values
         are: "NotStarted", "Running", "Stopped", "Error", and "Unknown".
        :paramtype packet_capture_status: str or ~azure.mgmt.network.v2017_03_01.models.PcStatus
        :keyword stop_reason: The reason the current packet capture session was stopped.
        :paramtype stop_reason: str
        :keyword packet_capture_error: List of errors of packet capture session.
        :paramtype packet_capture_error: list[str or
         ~azure.mgmt.network.v2017_03_01.models.PcError]
        """
        super().__init__(**kwargs)
        self.packet_capture_error = packet_capture_error
        self.stop_reason = stop_reason
        self.packet_capture_status = packet_capture_status
        self.capture_start_time = capture_start_time
        self.id = id
        self.name = name
class PacketCaptureResult(_serialization.Model):
    """Information about packet capture session.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar name: Name of the packet capture session.
    :vartype name: str
    :ivar id: ID of the packet capture operation.
    :vartype id: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar target: The ID of the targeted resource, only VM is currently supported.
    :vartype target: str
    :ivar bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining bytes are
     truncated.
    :vartype bytes_to_capture_per_packet: int
    :ivar total_bytes_per_session: Maximum size of the capture output.
    :vartype total_bytes_per_session: int
    :ivar time_limit_in_seconds: Maximum duration of the capture session in seconds.
    :vartype time_limit_in_seconds: int
    :ivar storage_location: Describes the storage location for a packet capture session.
    :vartype storage_location: ~azure.mgmt.network.v2017_03_01.models.PacketCaptureStorageLocation
    :ivar filters: Filters applied to the capture session.
    :vartype filters: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureFilter]
    :ivar provisioning_state: The provisioning state of the packet capture session. Known values
     are: "Succeeded", "Updating", "Deleting", and "Failed".
    :vartype provisioning_state: str or ~azure.mgmt.network.v2017_03_01.models.ProvisioningState
    """

    _validation = {
        "name": {"readonly": True},
        "id": {"readonly": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "target": {"key": "properties.target", "type": "str"},
        "bytes_to_capture_per_packet": {"key": "properties.bytesToCapturePerPacket", "type": "int"},
        "total_bytes_per_session": {"key": "properties.totalBytesPerSession", "type": "int"},
        "time_limit_in_seconds": {"key": "properties.timeLimitInSeconds", "type": "int"},
        "storage_location": {"key": "properties.storageLocation", "type": "PacketCaptureStorageLocation"},
        "filters": {"key": "properties.filters", "type": "[PacketCaptureFilter]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        # FIX: the generated default for ``etag`` was its *description* text
        # ("A unique read-only string that changes whenever the resource is
        # updated.") leaked from the spec into the default value, so every
        # instance built without an explicit etag carried that sentence as its
        # etag. Default to None instead; passing a str still works as before.
        etag: Optional[str] = None,
        target: Optional[str] = None,
        bytes_to_capture_per_packet: int = 0,
        total_bytes_per_session: int = 1073741824,
        time_limit_in_seconds: int = 18000,
        storage_location: Optional["_models.PacketCaptureStorageLocation"] = None,
        filters: Optional[List["_models.PacketCaptureFilter"]] = None,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        **kwargs
    ):
        """Initialize a packet capture session result.

        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword target: The ID of the targeted resource, only VM is currently supported.
        :paramtype target: str
        :keyword bytes_to_capture_per_packet: Number of bytes captured per packet, the remaining
         bytes are truncated.
        :paramtype bytes_to_capture_per_packet: int
        :keyword total_bytes_per_session: Maximum size of the capture output.
        :paramtype total_bytes_per_session: int
        :keyword time_limit_in_seconds: Maximum duration of the capture session in seconds.
        :paramtype time_limit_in_seconds: int
        :keyword storage_location: Describes the storage location for a packet capture session.
        :paramtype storage_location:
         ~azure.mgmt.network.v2017_03_01.models.PacketCaptureStorageLocation
        :keyword filters: Filters applied to the capture session.
        :paramtype filters: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureFilter]
        :keyword provisioning_state: The provisioning state of the packet capture session. Known
         values are: "Succeeded", "Updating", "Deleting", and "Failed".
        :paramtype provisioning_state: str or
         ~azure.mgmt.network.v2017_03_01.models.ProvisioningState
        """
        super().__init__(**kwargs)
        # name and id are server-populated and read-only (see _validation);
        # they are never accepted from the caller.
        self.name = None
        self.id = None
        self.etag = etag
        self.target = target
        self.bytes_to_capture_per_packet = bytes_to_capture_per_packet
        self.total_bytes_per_session = total_bytes_per_session
        self.time_limit_in_seconds = time_limit_in_seconds
        self.storage_location = storage_location
        self.filters = filters
        self.provisioning_state = provisioning_state
class PacketCaptureResultProperties(PacketCaptureParameters):
    """Describes the properties of a packet capture session.

    All required parameters must be populated in order to send to Azure.

    :ivar target: The ID of the targeted resource, only VM is currently supported. Required.
    :vartype target: str
    :ivar bytes_to_capture_per_packet: Number of bytes captured per packet; the remaining bytes
     are truncated.
    :vartype bytes_to_capture_per_packet: int
    :ivar total_bytes_per_session: Maximum size of the capture output.
    :vartype total_bytes_per_session: int
    :ivar time_limit_in_seconds: Maximum duration of the capture session in seconds.
    :vartype time_limit_in_seconds: int
    :ivar storage_location: Describes the storage location for a packet capture session. Required.
    :vartype storage_location: ~azure.mgmt.network.v2017_03_01.models.PacketCaptureStorageLocation
    :ivar filters: Filters applied to the capture session.
    :vartype filters: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureFilter]
    :ivar provisioning_state: The provisioning state of the packet capture session. Known values
     are: "Succeeded", "Updating", "Deleting", and "Failed".
    :vartype provisioning_state: str or ~azure.mgmt.network.v2017_03_01.models.ProvisioningState
    """

    _validation = {
        "target": {"required": True},
        "storage_location": {"required": True},
    }

    _attribute_map = {
        "target": {"key": "target", "type": "str"},
        "bytes_to_capture_per_packet": {"key": "bytesToCapturePerPacket", "type": "int"},
        "total_bytes_per_session": {"key": "totalBytesPerSession", "type": "int"},
        "time_limit_in_seconds": {"key": "timeLimitInSeconds", "type": "int"},
        "storage_location": {"key": "storageLocation", "type": "PacketCaptureStorageLocation"},
        "filters": {"key": "filters", "type": "[PacketCaptureFilter]"},
        "provisioning_state": {"key": "provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        target: str,
        storage_location: "_models.PacketCaptureStorageLocation",
        bytes_to_capture_per_packet: int = 0,
        total_bytes_per_session: int = 1073741824,
        time_limit_in_seconds: int = 18000,
        filters: Optional[List["_models.PacketCaptureFilter"]] = None,
        provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = None,
        **kwargs
    ):
        """Initialize capture-session properties; everything but ``provisioning_state`` is
        forwarded to :class:`PacketCaptureParameters`.

        :keyword target: The ID of the targeted resource, only VM is currently supported. Required.
        :paramtype target: str
        :keyword storage_location: Describes the storage location for a packet capture session.
         Required.
        :paramtype storage_location:
         ~azure.mgmt.network.v2017_03_01.models.PacketCaptureStorageLocation
        :keyword bytes_to_capture_per_packet: Number of bytes captured per packet; the remaining
         bytes are truncated.
        :paramtype bytes_to_capture_per_packet: int
        :keyword total_bytes_per_session: Maximum size of the capture output.
        :paramtype total_bytes_per_session: int
        :keyword time_limit_in_seconds: Maximum duration of the capture session in seconds.
        :paramtype time_limit_in_seconds: int
        :keyword filters: Filters applied to the capture session.
        :paramtype filters: list[~azure.mgmt.network.v2017_03_01.models.PacketCaptureFilter]
        :keyword provisioning_state: The provisioning state of the packet capture session. Known
         values are: "Succeeded", "Updating", "Deleting", and "Failed".
        :paramtype provisioning_state: str or
         ~azure.mgmt.network.v2017_03_01.models.ProvisioningState
        """
        super().__init__(
            target=target,
            storage_location=storage_location,
            bytes_to_capture_per_packet=bytes_to_capture_per_packet,
            total_bytes_per_session=total_bytes_per_session,
            time_limit_in_seconds=time_limit_in_seconds,
            filters=filters,
            **kwargs
        )
        self.provisioning_state = provisioning_state
class PacketCaptureStorageLocation(_serialization.Model):
    """Describes the storage location for a packet capture session.

    :ivar storage_id: The ID of the storage account to save the packet capture session. Required
     if no local file path is provided.
    :vartype storage_id: str
    :ivar storage_path: The URI of the storage path to save the packet capture. Must be a
     well-formed URI describing the location to save the packet capture.
    :vartype storage_path: str
    :ivar file_path: A valid local path on the targeting VM. Must include the name of the capture
     file (*.cap). For linux virtual machine it must start with /var/captures. Required if no
     storage ID is provided, otherwise optional.
    :vartype file_path: str
    """

    _attribute_map = {
        "storage_id": {"key": "storageId", "type": "str"},
        "storage_path": {"key": "storagePath", "type": "str"},
        "file_path": {"key": "filePath", "type": "str"},
    }

    def __init__(
        self,
        *,
        storage_id: Optional[str] = None,
        storage_path: Optional[str] = None,
        file_path: Optional[str] = None,
        **kwargs
    ):
        """Initialize a storage location; at least one of ``storage_id`` or ``file_path`` is
        required by the service.

        :keyword storage_id: The ID of the storage account to save the packet capture session.
         Required if no local file path is provided.
        :paramtype storage_id: str
        :keyword storage_path: The URI of the storage path to save the packet capture. Must be a
         well-formed URI describing the location to save the packet capture.
        :paramtype storage_path: str
        :keyword file_path: A valid local path on the targeting VM. Must include the name of the
         capture file (*.cap). For linux virtual machine it must start with /var/captures.
         Required if no storage ID is provided, otherwise optional.
        :paramtype file_path: str
        """
        super().__init__(**kwargs)
        self.file_path = file_path
        self.storage_path = storage_path
        self.storage_id = storage_id
class PatchRouteFilter(SubResource):
    """Route Filter Resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar rules: Collection of RouteFilterRules contained within a route filter.
    :vartype rules: list[~azure.mgmt.network.v2017_03_01.models.RouteFilterRule]
    :ivar peerings: A collection of references to express route circuit peerings.
    :vartype peerings: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeering]
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', 'Succeeded' and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "etag": {"readonly": True},
        "type": {"readonly": True},
        "peerings": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "rules": {"key": "properties.rules", "type": "[RouteFilterRule]"},
        "peerings": {"key": "properties.peerings", "type": "[ExpressRouteCircuitPeering]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        tags: Optional[Dict[str, str]] = None,
        rules: Optional[List["_models.RouteFilterRule"]] = None,
        **kwargs
    ):
        """Initialize a route filter patch; only ``id``, ``tags``, and ``rules`` are writable.

        :keyword id: Resource ID.
        :paramtype id: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword rules: Collection of RouteFilterRules contained within a route filter.
        :paramtype rules: list[~azure.mgmt.network.v2017_03_01.models.RouteFilterRule]
        """
        super().__init__(id=id, **kwargs)
        # Server-populated, read-only fields (see _validation).
        self.name = None
        self.etag = None
        self.type = None
        self.peerings = None
        self.provisioning_state = None
        # Caller-supplied fields.
        self.tags = tags
        self.rules = rules
class PatchRouteFilterRule(SubResource):
    """Route Filter Rule Resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar access: The access type of the rule. Valid values are: 'Allow', 'Deny'. Known values
     are: "Allow" and "Deny".
    :vartype access: str or ~azure.mgmt.network.v2017_03_01.models.Access
    :ivar route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'.
     "Community"
    :vartype route_filter_rule_type: str or
     ~azure.mgmt.network.v2017_03_01.models.RouteFilterRuleType
    :ivar communities: The collection for bgp community values to filter on. e.g.
     ['12076:5010','12076:5020'].
    :vartype communities: list[str]
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', 'Succeeded' and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "etag": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "access": {"key": "properties.access", "type": "str"},
        "route_filter_rule_type": {"key": "properties.routeFilterRuleType", "type": "str"},
        "communities": {"key": "properties.communities", "type": "[str]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        tags: Optional[Dict[str, str]] = None,
        access: Optional[Union[str, "_models.Access"]] = None,
        route_filter_rule_type: Optional[Union[str, "_models.RouteFilterRuleType"]] = None,
        communities: Optional[List[str]] = None,
        **kwargs
    ):
        """Initialize a route filter rule patch.

        :keyword id: Resource ID.
        :paramtype id: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword access: The access type of the rule. Valid values are: 'Allow', 'Deny'. Known
         values are: "Allow" and "Deny".
        :paramtype access: str or ~azure.mgmt.network.v2017_03_01.models.Access
        :keyword route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'.
         "Community"
        :paramtype route_filter_rule_type: str or
         ~azure.mgmt.network.v2017_03_01.models.RouteFilterRuleType
        :keyword communities: The collection for bgp community values to filter on. e.g.
         ['12076:5010','12076:5020'].
        :paramtype communities: list[str]
        """
        super().__init__(id=id, **kwargs)
        # Server-populated, read-only fields (see _validation).
        self.name = None
        self.etag = None
        self.provisioning_state = None
        # Caller-supplied fields.
        self.tags = tags
        self.access = access
        self.route_filter_rule_type = route_filter_rule_type
        self.communities = communities
class Probe(SubResource):
    """A load balancer probe.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Gets name of the resource that is unique within a resource group. This name can
     be used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar load_balancing_rules: The load balancer rules that use this probe.
    :vartype load_balancing_rules: list[~azure.mgmt.network.v2017_03_01.models.SubResource]
    :ivar protocol: The protocol of the end point. Possible values are: 'Http' or 'Tcp'. If 'Tcp'
     is specified, a received ACK is required for the probe to be successful. If 'Http' is
     specified, a 200 OK response from the specifies URI is required for the probe to be
     successful. Known values are: "Http" and "Tcp".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.ProbeProtocol
    :ivar port: The port for communicating the probe. Possible values range from 1 to 65535,
     inclusive.
    :vartype port: int
    :ivar interval_in_seconds: The interval, in seconds, for how frequently to probe the endpoint
     for health status. Typically, the interval is slightly less than half the allocated timeout
     period (in seconds) which allows two full probes before taking the instance out of rotation.
     The default value is 15, the minimum value is 5.
    :vartype interval_in_seconds: int
    :ivar number_of_probes: The number of probes where if no response, will result in stopping
     further traffic from being delivered to the endpoint. This values allows endpoints to be
     taken out of rotation faster or slower than the typical times used in Azure.
    :vartype number_of_probes: int
    :ivar request_path: The URI used for requesting health status from the VM. Path is required
     if a protocol is set to http. Otherwise, it is not allowed. There is no default value.
    :vartype request_path: str
    :ivar provisioning_state: Gets the provisioning state of the public IP resource. Possible
     values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "load_balancing_rules": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "load_balancing_rules": {"key": "properties.loadBalancingRules", "type": "[SubResource]"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "port": {"key": "properties.port", "type": "int"},
        "interval_in_seconds": {"key": "properties.intervalInSeconds", "type": "int"},
        "number_of_probes": {"key": "properties.numberOfProbes", "type": "int"},
        "request_path": {"key": "properties.requestPath", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        protocol: Optional[Union[str, "_models.ProbeProtocol"]] = None,
        port: Optional[int] = None,
        interval_in_seconds: Optional[int] = None,
        number_of_probes: Optional[int] = None,
        request_path: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """Initialize a load balancer probe.

        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Gets name of the resource that is unique within a resource group. This name
         can be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword protocol: The protocol of the end point. Possible values are: 'Http' or 'Tcp'. If
         'Tcp' is specified, a received ACK is required for the probe to be successful. If 'Http'
         is specified, a 200 OK response from the specifies URI is required for the probe to be
         successful. Known values are: "Http" and "Tcp".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.ProbeProtocol
        :keyword port: The port for communicating the probe. Possible values range from 1 to
         65535, inclusive.
        :paramtype port: int
        :keyword interval_in_seconds: The interval, in seconds, for how frequently to probe the
         endpoint for health status. Typically, the interval is slightly less than half the
         allocated timeout period (in seconds) which allows two full probes before taking the
         instance out of rotation. The default value is 15, the minimum value is 5.
        :paramtype interval_in_seconds: int
        :keyword number_of_probes: The number of probes where if no response, will result in
         stopping further traffic from being delivered to the endpoint. This values allows
         endpoints to be taken out of rotation faster or slower than the typical times used in
         Azure.
        :paramtype number_of_probes: int
        :keyword request_path: The URI used for requesting health status from the VM. Path is
         required if a protocol is set to http. Otherwise, it is not allowed. There is no default
         value.
        :paramtype request_path: str
        :keyword provisioning_state: Gets the provisioning state of the public IP resource.
         Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        # Server-populated, read-only field (see _validation).
        self.load_balancing_rules = None
        # Caller-supplied fields.
        self.name = name
        self.etag = etag
        self.protocol = protocol
        self.port = port
        self.interval_in_seconds = interval_in_seconds
        self.number_of_probes = number_of_probes
        self.request_path = request_path
        self.provisioning_state = provisioning_state
class PublicIPAddress(Resource): # pylint: disable=too-many-instance-attributes
"""Public IP address resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar location: Resource location.
:vartype location: str
:ivar tags: Resource tags.
:vartype tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar public_ip_allocation_method: The public IP allocation method. Possible values are:
'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
:vartype public_ip_allocation_method: str or
~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
:ivar public_ip_address_version: The public IP address version. Possible values are: 'IPv4' and
'IPv6'. Known values are: "IPv4" and "IPv6".
:vartype public_ip_address_version: str or ~azure.mgmt.network.v2017_03_01.models.IPVersion
:ivar ip_configuration: IPConfiguration.
:vartype ip_configuration: ~azure.mgmt.network.v2017_03_01.models.IPConfiguration
:ivar dns_settings: The FQDN of the DNS record associated with the public IP address.
:vartype dns_settings: ~azure.mgmt.network.v2017_03_01.models.PublicIPAddressDnsSettings
:ivar ip_address:
:vartype ip_address: str
:ivar idle_timeout_in_minutes: The idle timeout of the public IP address.
:vartype idle_timeout_in_minutes: int
:ivar resource_guid: The resource GUID property of the public IP resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the PublicIP resource. Possible values are:
'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
"name": {"readonly": True},
"type": {"readonly": True},
"ip_configuration": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"location": {"key": "location", "type": "str"},
"tags": {"key": "tags", "type": "{str}"},
"etag": {"key": "etag", "type": "str"},
"public_ip_allocation_method": {"key": "properties.publicIPAllocationMethod", "type": "str"},
"public_ip_address_version": {"key": "properties.publicIPAddressVersion", "type": "str"},
"ip_configuration": {"key": "properties.ipConfiguration", "type": "IPConfiguration"},
"dns_settings": {"key": "properties.dnsSettings", "type": "PublicIPAddressDnsSettings"},
"ip_address": {"key": "properties.ipAddress", "type": "str"},
"idle_timeout_in_minutes": {"key": "properties.idleTimeoutInMinutes", "type": "int"},
"resource_guid": {"key": "properties.resourceGuid", "type": "str"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
}
def __init__(
self,
*,
id: Optional[str] = None, # pylint: disable=redefined-builtin
location: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
etag: Optional[str] = None,
public_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None,
public_ip_address_version: Optional[Union[str, "_models.IPVersion"]] = None,
dns_settings: Optional["_models.PublicIPAddressDnsSettings"] = None,
ip_address: Optional[str] = None,
idle_timeout_in_minutes: Optional[int] = None,
resource_guid: Optional[str] = None,
provisioning_state: Optional[str] = None,
**kwargs
):
"""
:keyword id: Resource ID.
:paramtype id: str
:keyword location: Resource location.
:paramtype location: str
:keyword tags: Resource tags.
:paramtype tags: dict[str, str]
:keyword etag: A unique read-only string that changes whenever the resource is updated.
:paramtype etag: str
:keyword public_ip_allocation_method: The public IP allocation method. Possible values are:
'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
:paramtype public_ip_allocation_method: str or
~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
:keyword public_ip_address_version: The public IP address version. Possible values are: 'IPv4'
and 'IPv6'. Known values are: "IPv4" and "IPv6".
:paramtype public_ip_address_version: str or ~azure.mgmt.network.v2017_03_01.models.IPVersion
:keyword dns_settings: The FQDN of the DNS record associated with the public IP address.
:paramtype dns_settings: ~azure.mgmt.network.v2017_03_01.models.PublicIPAddressDnsSettings
:keyword ip_address:
:paramtype ip_address: str
:keyword idle_timeout_in_minutes: The idle timeout of the public IP address.
:paramtype idle_timeout_in_minutes: int
:keyword resource_guid: The resource GUID property of the public IP resource.
:paramtype resource_guid: str
:keyword provisioning_state: The provisioning state of the PublicIP resource. Possible values
are: 'Updating', 'Deleting', and 'Failed'.
:paramtype provisioning_state: str
"""
super().__init__(id=id, location=location, tags=tags, **kwargs)
self.etag = etag
self.public_ip_allocation_method = public_ip_allocation_method
self.public_ip_address_version = public_ip_address_version
self.ip_configuration = None
self.dns_settings = dns_settings
self.ip_address = ip_address
self.idle_timeout_in_minutes = idle_timeout_in_minutes
self.resource_guid = resource_guid
self.provisioning_state = provisioning_state
class PublicIPAddressDnsSettings(_serialization.Model):
    """DNS settings (FQDN) attached to a public IP address.

    :ivar domain_name_label: Gets or sets the Domain name label. Concatenated with the
     regionalized DNS zone, the label forms the fully qualified domain name of the public IP.
     When a label is supplied, an A DNS record for the public IP is created in the Microsoft
     Azure DNS system.
    :vartype domain_name_label: str
    :ivar fqdn: Gets the FQDN, the fully qualified domain name of the A DNS record associated
     with the public IP, i.e. the domainNameLabel joined with the regionalized DNS zone.
    :vartype fqdn: str
    :ivar reverse_fqdn: Gets or Sets the Reverse FQDN. A user-visible, fully qualified domain
     name resolving to this public IP. When reverseFqdn is supplied, a PTR DNS record pointing
     from the IP address in the in-addr.arpa domain to the reverse FQDN is created.
    :vartype reverse_fqdn: str
    """

    _attribute_map = {
        "domain_name_label": {"key": "domainNameLabel", "type": "str"},
        "fqdn": {"key": "fqdn", "type": "str"},
        "reverse_fqdn": {"key": "reverseFqdn", "type": "str"},
    }

    def __init__(
        self,
        *,
        domain_name_label: Optional[str] = None,
        fqdn: Optional[str] = None,
        reverse_fqdn: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword domain_name_label: Gets or sets the Domain name label. Concatenated with the
         regionalized DNS zone, the label forms the fully qualified domain name of the public IP.
         When a label is supplied, an A DNS record for the public IP is created in the Microsoft
         Azure DNS system.
        :paramtype domain_name_label: str
        :keyword fqdn: Gets the FQDN, the fully qualified domain name of the A DNS record
         associated with the public IP, i.e. the domainNameLabel joined with the regionalized DNS
         zone.
        :paramtype fqdn: str
        :keyword reverse_fqdn: Gets or Sets the Reverse FQDN. A user-visible, fully qualified
         domain name resolving to this public IP. When reverseFqdn is supplied, a PTR DNS record
         pointing from the IP address in the in-addr.arpa domain to the reverse FQDN is created.
        :paramtype reverse_fqdn: str
        """
        super().__init__(**kwargs)
        # Straight keyword-to-attribute mapping; no server-populated members here.
        self.reverse_fqdn = reverse_fqdn
        self.fqdn = fqdn
        self.domain_name_label = domain_name_label
class PublicIPAddressListResult(_serialization.Model):
    """Result page for the ListPublicIpAddresses API service call.

    :ivar value: The public IP addresses that exist in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.PublicIPAddress]
    :ivar next_link: URL used to fetch the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[PublicIPAddress]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.PublicIPAddress"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The public IP addresses that exist in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.PublicIPAddress]
        :keyword next_link: URL used to fetch the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class QueryTroubleshootingParameters(_serialization.Model):
    """Identifies the resource whose troubleshooting result should be queried.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The target resource ID to query the troubleshooting result.
     Required.
    :vartype target_resource_id: str
    """

    _validation = {
        "target_resource_id": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
    }

    def __init__(self, *, target_resource_id: str, **kwargs):
        """
        :keyword target_resource_id: The target resource ID to query the troubleshooting result.
         Required.
        :paramtype target_resource_id: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
class ResourceNavigationLink(SubResource):
    """ResourceNavigationLink resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Name of the resource, unique within a resource group. The name can be used to
     access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar linked_resource_type: Resource type of the linked resource.
    :vartype linked_resource_type: str
    :ivar link: Link to the external resource.
    :vartype link: str
    :ivar provisioning_state: Provisioning state of the ResourceNavigationLink resource.
    :vartype provisioning_state: str
    """

    _validation = {
        "etag": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "linked_resource_type": {"key": "properties.linkedResourceType", "type": "str"},
        "link": {"key": "properties.link", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        linked_resource_type: Optional[str] = None,
        link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: Name of the resource, unique within a resource group. The name can be used
         to access the resource.
        :paramtype name: str
        :keyword linked_resource_type: Resource type of the linked resource.
        :paramtype linked_resource_type: str
        :keyword link: Link to the external resource.
        :paramtype link: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.linked_resource_type = linked_resource_type
        self.link = link
        # Read-only, server-populated members start out as None.
        self.etag = None
        self.provisioning_state = None
class RetentionPolicyParameters(_serialization.Model):
    """Retention policy settings for a flow log.

    :ivar days: Number of days flow log records are kept.
    :vartype days: int
    :ivar enabled: Flag that enables or disables retention.
    :vartype enabled: bool
    """

    _attribute_map = {
        "days": {"key": "days", "type": "int"},
        "enabled": {"key": "enabled", "type": "bool"},
    }

    def __init__(self, *, days: int = 0, enabled: bool = False, **kwargs):
        """
        :keyword days: Number of days flow log records are kept.
        :paramtype days: int
        :keyword enabled: Flag that enables or disables retention.
        :paramtype enabled: bool
        """
        super().__init__(**kwargs)
        self.enabled = enabled
        self.days = days
class Route(SubResource):
    """Route resource.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource, unique within a resource group. The name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar address_prefix: The destination CIDR this route applies to.
    :vartype address_prefix: str
    :ivar next_hop_type: The type of Azure hop the packet should be sent to. Possible values are:
     'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'. Known values
     are: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", and "None".
    :vartype next_hop_type: str or ~azure.mgmt.network.v2017_03_01.models.RouteNextHopType
    :ivar next_hop_ip_address: IP address packets are forwarded to. Next hop values are only
     permitted when the next hop type is VirtualAppliance.
    :vartype next_hop_ip_address: str
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "address_prefix": {"key": "properties.addressPrefix", "type": "str"},
        "next_hop_type": {"key": "properties.nextHopType", "type": "str"},
        "next_hop_ip_address": {"key": "properties.nextHopIpAddress", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        address_prefix: Optional[str] = None,
        next_hop_type: Optional[Union[str, "_models.RouteNextHopType"]] = None,
        next_hop_ip_address: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource, unique within a resource group. The name can be
         used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword address_prefix: The destination CIDR this route applies to.
        :paramtype address_prefix: str
        :keyword next_hop_type: The type of Azure hop the packet should be sent to. Possible
         values are: 'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and
         'None'. Known values are: "VirtualNetworkGateway", "VnetLocal", "Internet",
         "VirtualAppliance", and "None".
        :paramtype next_hop_type: str or ~azure.mgmt.network.v2017_03_01.models.RouteNextHopType
        :keyword next_hop_ip_address: IP address packets are forwarded to. Next hop values are
         only permitted when the next hop type is VirtualAppliance.
        :paramtype next_hop_ip_address: str
        :keyword provisioning_state: The provisioning state of the resource. Possible values are:
         'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        # Every keyword maps one-to-one onto an instance attribute.
        self.provisioning_state = provisioning_state
        self.next_hop_ip_address = next_hop_ip_address
        self.next_hop_type = next_hop_type
        self.address_prefix = address_prefix
        self.etag = etag
        self.name = name
class RouteFilter(Resource):
    """Route Filter Resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar rules: The RouteFilterRules contained in this route filter.
    :vartype rules: list[~azure.mgmt.network.v2017_03_01.models.RouteFilterRule]
    :ivar peerings: References to express route circuit peerings.
    :vartype peerings: list[~azure.mgmt.network.v2017_03_01.models.ExpressRouteCircuitPeering]
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', 'Succeeded' and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "etag": {"readonly": True},
        "peerings": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "rules": {"key": "properties.rules", "type": "[RouteFilterRule]"},
        "peerings": {"key": "properties.peerings", "type": "[ExpressRouteCircuitPeering]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        rules: Optional[List["_models.RouteFilterRule"]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword rules: The RouteFilterRules contained in this route filter.
        :paramtype rules: list[~azure.mgmt.network.v2017_03_01.models.RouteFilterRule]
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.rules = rules
        # Server-populated, read-only members default to None.
        self.etag = None
        self.peerings = None
        self.provisioning_state = None
class RouteFilterListResult(_serialization.Model):
    """Result page for the ListRouteFilters API service call.

    :ivar value: Gets the route filters in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.RouteFilter]
    :ivar next_link: URL used to fetch the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[RouteFilter]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.RouteFilter"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: Gets the route filters in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.RouteFilter]
        :keyword next_link: URL used to fetch the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class RouteFilterRule(SubResource):
    """Route Filter Rule Resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource, unique within a resource group. The name can be used
     to access the resource.
    :vartype name: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar access: The access type of the rule. Valid values are: 'Allow', 'Deny'. Known values
     are: "Allow" and "Deny".
    :vartype access: str or ~azure.mgmt.network.v2017_03_01.models.Access
    :ivar route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'.
     "Community"
    :vartype route_filter_rule_type: str or
     ~azure.mgmt.network.v2017_03_01.models.RouteFilterRuleType
    :ivar communities: The bgp community values to filter on. e.g.
     ['12076:5010','12076:5020'].
    :vartype communities: list[str]
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', 'Succeeded' and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "etag": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "access": {"key": "properties.access", "type": "str"},
        "route_filter_rule_type": {"key": "properties.routeFilterRuleType", "type": "str"},
        "communities": {"key": "properties.communities", "type": "[str]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        access: Optional[Union[str, "_models.Access"]] = None,
        route_filter_rule_type: Optional[Union[str, "_models.RouteFilterRuleType"]] = None,
        communities: Optional[List[str]] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword access: The access type of the rule. Valid values are: 'Allow', 'Deny'. Known
         values are: "Allow" and "Deny".
        :paramtype access: str or ~azure.mgmt.network.v2017_03_01.models.Access
        :keyword route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'.
         "Community"
        :paramtype route_filter_rule_type: str or
         ~azure.mgmt.network.v2017_03_01.models.RouteFilterRuleType
        :keyword communities: The bgp community values to filter on. e.g.
         ['12076:5010','12076:5020'].
        :paramtype communities: list[str]
        """
        super().__init__(id=id, **kwargs)
        self.location = location
        self.tags = tags
        self.access = access
        self.route_filter_rule_type = route_filter_rule_type
        self.communities = communities
        # Read-only fields are populated by the service only.
        self.name = None
        self.etag = None
        self.provisioning_state = None
class RouteFilterRuleListResult(_serialization.Model):
    """Result page for the ListRouteFilterRules API service call.

    :ivar value: Gets the RouteFilterRules in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.RouteFilterRule]
    :ivar next_link: URL used to fetch the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[RouteFilterRule]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.RouteFilterRule"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: Gets the RouteFilterRules in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.RouteFilterRule]
        :keyword next_link: URL used to fetch the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class RouteListResult(_serialization.Model):
    """Result page for the ListRoute API service call.

    :ivar value: Gets the routes in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.Route]
    :ivar next_link: URL used to fetch the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Route]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.Route"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: Gets the routes in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.Route]
        :keyword next_link: URL used to fetch the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class RouteTable(Resource):
    """Route table resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar routes: The routes contained in this route table.
    :vartype routes: list[~azure.mgmt.network.v2017_03_01.models.Route]
    :ivar subnets: References to subnets.
    :vartype subnets: list[~azure.mgmt.network.v2017_03_01.models.Subnet]
    :ivar provisioning_state: The provisioning state of the resource. Possible values are:
     'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "subnets": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "routes": {"key": "properties.routes", "type": "[Route]"},
        "subnets": {"key": "properties.subnets", "type": "[Subnet]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        routes: Optional[List["_models.Route"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: Gets a unique read-only string that changes whenever the resource is
         updated.
        :paramtype etag: str
        :keyword routes: The routes contained in this route table.
        :paramtype routes: list[~azure.mgmt.network.v2017_03_01.models.Route]
        :keyword provisioning_state: The provisioning state of the resource. Possible values are:
         'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.provisioning_state = provisioning_state
        self.routes = routes
        self.etag = etag
        # subnets is read-only and comes back from the service.
        self.subnets = None
class RouteTableListResult(_serialization.Model):
    """Result page for the ListRouteTable API service call.

    :ivar value: Gets the route tables in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.RouteTable]
    :ivar next_link: URL used to fetch the next page of results.
    :vartype next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[RouteTable]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.RouteTable"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: Gets the route tables in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.RouteTable]
        :keyword next_link: URL used to fetch the next page of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class SecurityGroupNetworkInterface(_serialization.Model):
    """A network interface together with all of its associated security rules.

    :ivar id: ID of the network interface.
    :vartype id: str
    :ivar security_rule_associations: All security rules associated with the network interface.
    :vartype security_rule_associations:
     ~azure.mgmt.network.v2017_03_01.models.SecurityRuleAssociations
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "security_rule_associations": {"key": "securityRuleAssociations", "type": "SecurityRuleAssociations"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        security_rule_associations: Optional["_models.SecurityRuleAssociations"] = None,
        **kwargs
    ):
        """
        :keyword id: ID of the network interface.
        :paramtype id: str
        :keyword security_rule_associations: All security rules associated with the network
         interface.
        :paramtype security_rule_associations:
         ~azure.mgmt.network.v2017_03_01.models.SecurityRuleAssociations
        """
        super().__init__(**kwargs)
        self.security_rule_associations = security_rule_associations
        self.id = id
class SecurityGroupViewParameters(_serialization.Model):
    """Identifies the VM whose security groups should be checked.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: ID of the target VM. Required.
    :vartype target_resource_id: str
    """

    _validation = {
        "target_resource_id": {"required": True},
    }

    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
    }

    def __init__(self, *, target_resource_id: str, **kwargs):
        """
        :keyword target_resource_id: ID of the target VM. Required.
        :paramtype target_resource_id: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
class SecurityGroupViewResult(_serialization.Model):
    """Information about the security rules applied to the specified VM.

    :ivar network_interfaces: Network interfaces on the specified VM.
    :vartype network_interfaces:
     list[~azure.mgmt.network.v2017_03_01.models.SecurityGroupNetworkInterface]
    """

    _attribute_map = {
        "network_interfaces": {"key": "networkInterfaces", "type": "[SecurityGroupNetworkInterface]"},
    }

    def __init__(
        self,
        *,
        network_interfaces: Optional[List["_models.SecurityGroupNetworkInterface"]] = None,
        **kwargs
    ):
        """
        :keyword network_interfaces: Network interfaces on the specified VM.
        :paramtype network_interfaces:
         list[~azure.mgmt.network.v2017_03_01.models.SecurityGroupNetworkInterface]
        """
        super().__init__(**kwargs)
        self.network_interfaces = network_interfaces
class SecurityRule(SubResource):  # pylint: disable=too-many-instance-attributes
    """Network security rule.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource, unique within a resource group. The name can be used
     to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar description: A description for this rule. Restricted to 140 chars.
    :vartype description: str
    :ivar protocol: Network protocol this rule applies to. Possible values are 'Tcp', 'Udp', and
     '*'. Known values are: "Tcp", "Udp", and "*".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleProtocol
    :ivar source_port_range: The source port or range. Integer or range between 0 and 65535.
     Asterisk '*' can also be used to match all ports.
    :vartype source_port_range: str
    :ivar destination_port_range: The destination port or range. Integer or range between 0 and
     65535. Asterisk '*' can also be used to match all ports.
    :vartype destination_port_range: str
    :ivar source_address_prefix: The CIDR or source IP range. Asterisk '*' can also be used to
     match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and
     'Internet' can also be used. For an ingress rule, this specifies where network traffic
     originates from.
    :vartype source_address_prefix: str
    :ivar destination_address_prefix: The destination address prefix. CIDR or source IP range.
     Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork',
     'AzureLoadBalancer' and 'Internet' can also be used.
    :vartype destination_address_prefix: str
    :ivar access: Whether the network traffic is allowed or denied. Possible values are: 'Allow'
     and 'Deny'. Known values are: "Allow" and "Deny".
    :vartype access: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleAccess
    :ivar priority: The priority of the rule. The value can be between 100 and 4096 and must be
     unique for each rule in the collection. The lower the priority number, the higher the
     priority of the rule.
    :vartype priority: int
    :ivar direction: The direction of the rule, i.e. whether the rule is evaluated on incoming or
     outgoing traffic. Possible values are: 'Inbound' and 'Outbound'. Known values are: "Inbound"
     and "Outbound".
    :vartype direction: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleDirection
    :ivar provisioning_state: The provisioning state of the public IP resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "description": {"key": "properties.description", "type": "str"},
        "protocol": {"key": "properties.protocol", "type": "str"},
        "source_port_range": {"key": "properties.sourcePortRange", "type": "str"},
        "destination_port_range": {"key": "properties.destinationPortRange", "type": "str"},
        "source_address_prefix": {"key": "properties.sourceAddressPrefix", "type": "str"},
        "destination_address_prefix": {"key": "properties.destinationAddressPrefix", "type": "str"},
        "access": {"key": "properties.access", "type": "str"},
        "priority": {"key": "properties.priority", "type": "int"},
        "direction": {"key": "properties.direction", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        description: Optional[str] = None,
        protocol: Optional[Union[str, "_models.SecurityRuleProtocol"]] = None,
        source_port_range: Optional[str] = None,
        destination_port_range: Optional[str] = None,
        source_address_prefix: Optional[str] = None,
        destination_address_prefix: Optional[str] = None,
        access: Optional[Union[str, "_models.SecurityRuleAccess"]] = None,
        priority: Optional[int] = None,
        direction: Optional[Union[str, "_models.SecurityRuleDirection"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource, unique within a resource group. The name can be
         used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword description: A description for this rule. Restricted to 140 chars.
        :paramtype description: str
        :keyword protocol: Network protocol this rule applies to. Possible values are 'Tcp',
         'Udp', and '*'. Known values are: "Tcp", "Udp", and "*".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleProtocol
        :keyword source_port_range: The source port or range. Integer or range between 0 and
         65535. Asterisk '*' can also be used to match all ports.
        :paramtype source_port_range: str
        :keyword destination_port_range: The destination port or range. Integer or range between
         0 and 65535. Asterisk '*' can also be used to match all ports.
        :paramtype destination_port_range: str
        :keyword source_address_prefix: The CIDR or source IP range. Asterisk '*' can also be
         used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer'
         and 'Internet' can also be used. For an ingress rule, this specifies where network
         traffic originates from.
        :paramtype source_address_prefix: str
        :keyword destination_address_prefix: The destination address prefix. CIDR or source IP
         range. Asterisk '*' can also be used to match all source IPs. Default tags such as
         'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
        :paramtype destination_address_prefix: str
        :keyword access: Whether the network traffic is allowed or denied. Possible values are:
         'Allow' and 'Deny'. Known values are: "Allow" and "Deny".
        :paramtype access: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleAccess
        :keyword priority: The priority of the rule. The value can be between 100 and 4096 and
         must be unique for each rule in the collection. The lower the priority number, the
         higher the priority of the rule.
        :paramtype priority: int
        :keyword direction: The direction of the rule, i.e. whether the rule is evaluated on
         incoming or outgoing traffic. Possible values are: 'Inbound' and 'Outbound'. Known
         values are: "Inbound" and "Outbound".
        :paramtype direction: str or ~azure.mgmt.network.v2017_03_01.models.SecurityRuleDirection
        :keyword provisioning_state: The provisioning state of the public IP resource. Possible
         values are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        super().__init__(id=id, **kwargs)
        # Plain keyword-to-attribute copies; no server-only members on this model.
        self.direction = direction
        self.priority = priority
        self.access = access
        self.destination_address_prefix = destination_address_prefix
        self.source_address_prefix = source_address_prefix
        self.destination_port_range = destination_port_range
        self.source_port_range = source_port_range
        self.protocol = protocol
        self.description = description
        self.etag = etag
        self.name = name
        self.provisioning_state = provisioning_state
class SecurityRuleAssociations(_serialization.Model):
    """All security rules associated with the network interface.

    :ivar network_interface_association: Network interface and its custom security rules.
    :vartype network_interface_association:
     ~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceAssociation
    :ivar subnet_association: Network interface and its custom security rules.
    :vartype subnet_association: ~azure.mgmt.network.v2017_03_01.models.SubnetAssociation
    :ivar default_security_rules: Collection of default security rules of the network security
     group.
    :vartype default_security_rules: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
    :ivar effective_security_rules: Collection of effective security rules.
    :vartype effective_security_rules:
     list[~azure.mgmt.network.v2017_03_01.models.EffectiveNetworkSecurityRule]
    """

    # Maps each Python attribute to its REST wire key and serialized type.
    _attribute_map = {
        "network_interface_association": {"key": "networkInterfaceAssociation", "type": "NetworkInterfaceAssociation"},
        "subnet_association": {"key": "subnetAssociation", "type": "SubnetAssociation"},
        "default_security_rules": {"key": "defaultSecurityRules", "type": "[SecurityRule]"},
        "effective_security_rules": {"key": "effectiveSecurityRules", "type": "[EffectiveNetworkSecurityRule]"},
    }

    def __init__(
        self,
        *,
        network_interface_association: Optional["_models.NetworkInterfaceAssociation"] = None,
        subnet_association: Optional["_models.SubnetAssociation"] = None,
        default_security_rules: Optional[List["_models.SecurityRule"]] = None,
        effective_security_rules: Optional[List["_models.EffectiveNetworkSecurityRule"]] = None,
        **kwargs
    ):
        """
        :keyword network_interface_association: Network interface and its custom security rules.
        :paramtype network_interface_association:
         ~azure.mgmt.network.v2017_03_01.models.NetworkInterfaceAssociation
        :keyword subnet_association: Network interface and its custom security rules.
        :paramtype subnet_association: ~azure.mgmt.network.v2017_03_01.models.SubnetAssociation
        :keyword default_security_rules: Collection of default security rules of the network
         security group.
        :paramtype default_security_rules: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
        :keyword effective_security_rules: Collection of effective security rules.
        :paramtype effective_security_rules:
         list[~azure.mgmt.network.v2017_03_01.models.EffectiveNetworkSecurityRule]
        """
        super().__init__(**kwargs)
        # Plain pass-through of the keyword arguments onto the model instance.
        self.network_interface_association = network_interface_association
        self.subnet_association = subnet_association
        self.default_security_rules = default_security_rules
        self.effective_security_rules = effective_security_rules
class SecurityRuleListResult(_serialization.Model):
    """Response for ListSecurityRule API service call. Retrieves all security rules that belongs to
    a network security group.

    :ivar value: The security rules in a network security group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "value": {"key": "value", "type": "[SecurityRule]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.SecurityRule"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The security rules in a network security group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        # Absent/None when this page is the last page of results.
        self.next_link = next_link
class Subnet(SubResource):
    """Subnet in a virtual network resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar address_prefix: The address prefix for the subnet.
    :vartype address_prefix: str
    :ivar network_security_group: The reference of the NetworkSecurityGroup resource.
    :vartype network_security_group: ~azure.mgmt.network.v2017_03_01.models.NetworkSecurityGroup
    :ivar route_table: The reference of the RouteTable resource.
    :vartype route_table: ~azure.mgmt.network.v2017_03_01.models.RouteTable
    :ivar ip_configurations: Gets an array of references to the network interface IP configurations
     using subnet.
    :vartype ip_configurations: list[~azure.mgmt.network.v2017_03_01.models.IPConfiguration]
    :ivar resource_navigation_links: Gets an array of references to the external resources using
     subnet.
    :vartype resource_navigation_links:
     list[~azure.mgmt.network.v2017_03_01.models.ResourceNavigationLink]
    :ivar provisioning_state: The provisioning state of the resource.
    :vartype provisioning_state: str
    """

    # Server-populated fields; anything a caller sets here is ignored on requests.
    _validation = {
        "ip_configurations": {"readonly": True},
    }

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "address_prefix": {"key": "properties.addressPrefix", "type": "str"},
        "network_security_group": {"key": "properties.networkSecurityGroup", "type": "NetworkSecurityGroup"},
        "route_table": {"key": "properties.routeTable", "type": "RouteTable"},
        "ip_configurations": {"key": "properties.ipConfigurations", "type": "[IPConfiguration]"},
        "resource_navigation_links": {"key": "properties.resourceNavigationLinks", "type": "[ResourceNavigationLink]"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        address_prefix: Optional[str] = None,
        network_security_group: Optional["_models.NetworkSecurityGroup"] = None,
        route_table: Optional["_models.RouteTable"] = None,
        resource_navigation_links: Optional[List["_models.ResourceNavigationLink"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name
         can be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword address_prefix: The address prefix for the subnet.
        :paramtype address_prefix: str
        :keyword network_security_group: The reference of the NetworkSecurityGroup resource.
        :paramtype network_security_group:
         ~azure.mgmt.network.v2017_03_01.models.NetworkSecurityGroup
        :keyword route_table: The reference of the RouteTable resource.
        :paramtype route_table: ~azure.mgmt.network.v2017_03_01.models.RouteTable
        :keyword resource_navigation_links: Gets an array of references to the external resources
         using subnet.
        :paramtype resource_navigation_links:
         list[~azure.mgmt.network.v2017_03_01.models.ResourceNavigationLink]
        :keyword provisioning_state: The provisioning state of the resource.
        :paramtype provisioning_state: str
        """
        # The resource id is handled by the SubResource base class.
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.address_prefix = address_prefix
        self.network_security_group = network_security_group
        self.route_table = route_table
        # Read-only on the service side, so it always starts out unset locally.
        self.ip_configurations = None
        self.resource_navigation_links = resource_navigation_links
        self.provisioning_state = provisioning_state
class SubnetAssociation(_serialization.Model):
    """Network interface and its custom security rules.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Subnet ID.
    :vartype id: str
    :ivar security_rules: Collection of custom security rules.
    :vartype security_rules: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
    """

    # The subnet id is assigned by the service and cannot be set by callers.
    _validation = {
        "id": {"readonly": True},
    }

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "security_rules": {"key": "securityRules", "type": "[SecurityRule]"},
    }

    def __init__(
        self,
        *,
        security_rules: Optional[List["_models.SecurityRule"]] = None,
        **kwargs
    ):
        """
        :keyword security_rules: Collection of custom security rules.
        :paramtype security_rules: list[~azure.mgmt.network.v2017_03_01.models.SecurityRule]
        """
        super().__init__(**kwargs)
        # Read-only on the service side, so it always starts out unset locally.
        self.id = None
        self.security_rules = security_rules
class SubnetListResult(_serialization.Model):
    """Response for ListSubnets API service callRetrieves all subnet that belongs to a virtual
    network.

    :ivar value: The subnets in a virtual network.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.Subnet]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "value": {"key": "value", "type": "[Subnet]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.Subnet"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The subnets in a virtual network.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.Subnet]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        # Absent/None when this page is the last page of results.
        self.next_link = next_link
class Topology(_serialization.Model):
    """Topology of the specified resource group.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: GUID representing the operation id.
    :vartype id: str
    :ivar created_date_time: The datetime when the topology was initially created for the resource
     group.
    :vartype created_date_time: ~datetime.datetime
    :ivar last_modified: The datetime when the topology was last modified.
    :vartype last_modified: ~datetime.datetime
    :ivar resources:
    :vartype resources: list[~azure.mgmt.network.v2017_03_01.models.TopologyResource]
    """

    # Everything except ``resources`` is assigned by the service.
    _validation = {
        "id": {"readonly": True},
        "created_date_time": {"readonly": True},
        "last_modified": {"readonly": True},
    }

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "last_modified": {"key": "lastModified", "type": "iso-8601"},
        "resources": {"key": "resources", "type": "[TopologyResource]"},
    }

    def __init__(
        self,
        *,
        resources: Optional[List["_models.TopologyResource"]] = None,
        **kwargs
    ):
        """
        :keyword resources:
        :paramtype resources: list[~azure.mgmt.network.v2017_03_01.models.TopologyResource]
        """
        super().__init__(**kwargs)
        # Read-only fields start out unset locally; the server fills them in.
        self.id = None
        self.created_date_time = None
        self.last_modified = None
        self.resources = resources
class TopologyAssociation(_serialization.Model):
    """Resources that have an association with the parent resource.

    :ivar name: The name of the resource that is associated with the parent resource.
    :vartype name: str
    :ivar resource_id: The ID of the resource that is associated with the parent resource.
    :vartype resource_id: str
    :ivar association_type: The association type of the child resource to the parent resource.
     Known values are: "Associated" and "Contains".
    :vartype association_type: str or ~azure.mgmt.network.v2017_03_01.models.AssociationType
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "resource_id": {"key": "resourceId", "type": "str"},
        "association_type": {"key": "associationType", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        resource_id: Optional[str] = None,
        association_type: Optional[Union[str, "_models.AssociationType"]] = None,
        **kwargs
    ):
        """
        :keyword name: The name of the resource that is associated with the parent resource.
        :paramtype name: str
        :keyword resource_id: The ID of the resource that is associated with the parent resource.
        :paramtype resource_id: str
        :keyword association_type: The association type of the child resource to the parent
         resource. Known values are: "Associated" and "Contains".
        :paramtype association_type: str or ~azure.mgmt.network.v2017_03_01.models.AssociationType
        """
        super().__init__(**kwargs)
        self.name = name
        self.resource_id = resource_id
        # Accepts either a plain string or the AssociationType enum value.
        self.association_type = association_type
class TopologyParameters(_serialization.Model):
    """Parameters that define the representation of topology.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_group_name: The name of the target resource group to perform topology on.
     Required.
    :vartype target_resource_group_name: str
    """

    # The target resource group must always be supplied.
    _validation = {
        "target_resource_group_name": {"required": True},
    }

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "target_resource_group_name": {"key": "targetResourceGroupName", "type": "str"},
    }

    def __init__(self, *, target_resource_group_name: str, **kwargs):
        """
        :keyword target_resource_group_name: The name of the target resource group to perform
         topology on. Required.
        :paramtype target_resource_group_name: str
        """
        super().__init__(**kwargs)
        self.target_resource_group_name = target_resource_group_name
class TopologyResource(_serialization.Model):
    """The network resource topology information for the given resource group.

    :ivar name: Name of the resource.
    :vartype name: str
    :ivar id: ID of the resource.
    :vartype id: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar associations: Holds the associations the resource has with other resources in the
     resource group.
    :vartype associations: list[~azure.mgmt.network.v2017_03_01.models.TopologyAssociation]
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "associations": {"key": "associations", "type": "[TopologyAssociation]"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        associations: Optional[List["_models.TopologyAssociation"]] = None,
        **kwargs
    ):
        """
        :keyword name: Name of the resource.
        :paramtype name: str
        :keyword id: ID of the resource.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword associations: Holds the associations the resource has with other resources in the
         resource group.
        :paramtype associations: list[~azure.mgmt.network.v2017_03_01.models.TopologyAssociation]
        """
        super().__init__(**kwargs)
        # Plain pass-through of the keyword arguments onto the model instance.
        self.name = name
        self.id = id
        self.location = location
        self.associations = associations
class TroubleshootingDetails(_serialization.Model):
    """Information gained from troubleshooting of specified resource.

    :ivar id: The id of the get troubleshoot operation.
    :vartype id: str
    :ivar reason_type: Reason type of failure.
    :vartype reason_type: str
    :ivar summary: A summary of troubleshooting.
    :vartype summary: str
    :ivar detail: Details on troubleshooting results.
    :vartype detail: str
    :ivar recommended_actions: List of recommended actions.
    :vartype recommended_actions:
     list[~azure.mgmt.network.v2017_03_01.models.TroubleshootingRecommendedActions]
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "reason_type": {"key": "reasonType", "type": "str"},
        "summary": {"key": "summary", "type": "str"},
        "detail": {"key": "detail", "type": "str"},
        "recommended_actions": {"key": "recommendedActions", "type": "[TroubleshootingRecommendedActions]"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        reason_type: Optional[str] = None,
        summary: Optional[str] = None,
        detail: Optional[str] = None,
        recommended_actions: Optional[List["_models.TroubleshootingRecommendedActions"]] = None,
        **kwargs
    ):
        """
        :keyword id: The id of the get troubleshoot operation.
        :paramtype id: str
        :keyword reason_type: Reason type of failure.
        :paramtype reason_type: str
        :keyword summary: A summary of troubleshooting.
        :paramtype summary: str
        :keyword detail: Details on troubleshooting results.
        :paramtype detail: str
        :keyword recommended_actions: List of recommended actions.
        :paramtype recommended_actions:
         list[~azure.mgmt.network.v2017_03_01.models.TroubleshootingRecommendedActions]
        """
        super().__init__(**kwargs)
        # Plain pass-through of the keyword arguments onto the model instance.
        self.id = id
        self.reason_type = reason_type
        self.summary = summary
        self.detail = detail
        self.recommended_actions = recommended_actions
class TroubleshootingParameters(_serialization.Model):
    """Parameters that define the resource to troubleshoot.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The target resource to troubleshoot. Required.
    :vartype target_resource_id: str
    :ivar storage_id: The ID for the storage account to save the troubleshoot result. Required.
    :vartype storage_id: str
    :ivar storage_path: The path to the blob to save the troubleshoot result in. Required.
    :vartype storage_path: str
    """

    # All three parameters are mandatory on the wire.
    _validation = {
        "target_resource_id": {"required": True},
        "storage_id": {"required": True},
        "storage_path": {"required": True},
    }

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "storage_id": {"key": "properties.storageId", "type": "str"},
        "storage_path": {"key": "properties.storagePath", "type": "str"},
    }

    def __init__(
        self,
        *,
        target_resource_id: str,
        storage_id: str,
        storage_path: str,
        **kwargs
    ):
        """
        :keyword target_resource_id: The target resource to troubleshoot. Required.
        :paramtype target_resource_id: str
        :keyword storage_id: The ID for the storage account to save the troubleshoot result.
         Required.
        :paramtype storage_id: str
        :keyword storage_path: The path to the blob to save the troubleshoot result in. Required.
        :paramtype storage_path: str
        """
        super().__init__(**kwargs)
        self.target_resource_id = target_resource_id
        self.storage_id = storage_id
        self.storage_path = storage_path
class TroubleshootingRecommendedActions(_serialization.Model):
    """Recommended actions based on discovered issues.

    :ivar action_id: ID of the recommended action.
    :vartype action_id: str
    :ivar action_text: Description of recommended actions.
    :vartype action_text: str
    :ivar action_uri: The uri linking to a documentation for the recommended troubleshooting
     actions.
    :vartype action_uri: str
    :ivar action_uri_text: The information from the URI for the recommended troubleshooting
     actions.
    :vartype action_uri_text: str
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "action_id": {"key": "actionId", "type": "str"},
        "action_text": {"key": "actionText", "type": "str"},
        "action_uri": {"key": "actionUri", "type": "str"},
        "action_uri_text": {"key": "actionUriText", "type": "str"},
    }

    def __init__(
        self,
        *,
        action_id: Optional[str] = None,
        action_text: Optional[str] = None,
        action_uri: Optional[str] = None,
        action_uri_text: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword action_id: ID of the recommended action.
        :paramtype action_id: str
        :keyword action_text: Description of recommended actions.
        :paramtype action_text: str
        :keyword action_uri: The uri linking to a documentation for the recommended troubleshooting
         actions.
        :paramtype action_uri: str
        :keyword action_uri_text: The information from the URI for the recommended troubleshooting
         actions.
        :paramtype action_uri_text: str
        """
        super().__init__(**kwargs)
        # Plain pass-through of the keyword arguments onto the model instance.
        self.action_id = action_id
        self.action_text = action_text
        self.action_uri = action_uri
        self.action_uri_text = action_uri_text
class TroubleshootingResult(_serialization.Model):
    """Troubleshooting information gained from specified resource.

    :ivar start_time: The start time of the troubleshooting.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: The end time of the troubleshooting.
    :vartype end_time: ~datetime.datetime
    :ivar code: The result code of the troubleshooting.
    :vartype code: str
    :ivar results: Information from troubleshooting.
    :vartype results: list[~azure.mgmt.network.v2017_03_01.models.TroubleshootingDetails]
    """

    # Serializer metadata: timestamps travel as ISO-8601 strings on the wire.
    _attribute_map = {
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "end_time": {"key": "endTime", "type": "iso-8601"},
        "code": {"key": "code", "type": "str"},
        "results": {"key": "results", "type": "[TroubleshootingDetails]"},
    }

    def __init__(
        self,
        *,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        code: Optional[str] = None,
        results: Optional[List["_models.TroubleshootingDetails"]] = None,
        **kwargs
    ):
        """
        :keyword start_time: The start time of the troubleshooting.
        :paramtype start_time: ~datetime.datetime
        :keyword end_time: The end time of the troubleshooting.
        :paramtype end_time: ~datetime.datetime
        :keyword code: The result code of the troubleshooting.
        :paramtype code: str
        :keyword results: Information from troubleshooting.
        :paramtype results: list[~azure.mgmt.network.v2017_03_01.models.TroubleshootingDetails]
        """
        super().__init__(**kwargs)
        # Plain pass-through of the keyword arguments onto the model instance.
        self.start_time = start_time
        self.end_time = end_time
        self.code = code
        self.results = results
class TunnelConnectionHealth(_serialization.Model):
    """VirtualNetworkGatewayConnection properties.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar tunnel: Tunnel name.
    :vartype tunnel: str
    :ivar connection_status: Virtual network Gateway connection status. Known values are:
     "Unknown", "Connecting", "Connected", and "NotConnected".
    :vartype connection_status: str or
     ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayConnectionStatus
    :ivar ingress_bytes_transferred: The Ingress Bytes Transferred in this connection.
    :vartype ingress_bytes_transferred: int
    :ivar egress_bytes_transferred: The Egress Bytes Transferred in this connection.
    :vartype egress_bytes_transferred: int
    :ivar last_connection_established_utc_time: The time at which connection was established in Utc
     format.
    :vartype last_connection_established_utc_time: str
    """

    # Every field is server-populated; the model carries no client-settable data.
    _validation = {
        "tunnel": {"readonly": True},
        "connection_status": {"readonly": True},
        "ingress_bytes_transferred": {"readonly": True},
        "egress_bytes_transferred": {"readonly": True},
        "last_connection_established_utc_time": {"readonly": True},
    }

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "tunnel": {"key": "tunnel", "type": "str"},
        "connection_status": {"key": "connectionStatus", "type": "str"},
        "ingress_bytes_transferred": {"key": "ingressBytesTransferred", "type": "int"},
        "egress_bytes_transferred": {"key": "egressBytesTransferred", "type": "int"},
        "last_connection_established_utc_time": {"key": "lastConnectionEstablishedUtcTime", "type": "str"},
    }

    def __init__(self, **kwargs):
        """ """
        super().__init__(**kwargs)
        # All attributes start out unset locally; the server fills them in.
        self.tunnel = None
        self.connection_status = None
        self.ingress_bytes_transferred = None
        self.egress_bytes_transferred = None
        self.last_connection_established_utc_time = None
class Usage(_serialization.Model):
    """Describes network resource usage.

    All required parameters must be populated in order to send to Azure.

    :ivar unit: An enum describing the unit of measurement. Required. "Count"
    :vartype unit: str or ~azure.mgmt.network.v2017_03_01.models.UsageUnit
    :ivar current_value: The current value of the usage. Required.
    :vartype current_value: int
    :ivar limit: The limit of usage. Required.
    :vartype limit: int
    :ivar name: The name of the type of usage. Required.
    :vartype name: ~azure.mgmt.network.v2017_03_01.models.UsageName
    """

    # Every field is mandatory on the wire.
    _validation = {
        "unit": {"required": True},
        "current_value": {"required": True},
        "limit": {"required": True},
        "name": {"required": True},
    }

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "unit": {"key": "unit", "type": "str"},
        "current_value": {"key": "currentValue", "type": "int"},
        "limit": {"key": "limit", "type": "int"},
        "name": {"key": "name", "type": "UsageName"},
    }

    def __init__(
        self,
        *,
        unit: Union[str, "_models.UsageUnit"],
        current_value: int,
        limit: int,
        name: "_models.UsageName",
        **kwargs
    ):
        """
        :keyword unit: An enum describing the unit of measurement. Required. "Count"
        :paramtype unit: str or ~azure.mgmt.network.v2017_03_01.models.UsageUnit
        :keyword current_value: The current value of the usage. Required.
        :paramtype current_value: int
        :keyword limit: The limit of usage. Required.
        :paramtype limit: int
        :keyword name: The name of the type of usage. Required.
        :paramtype name: ~azure.mgmt.network.v2017_03_01.models.UsageName
        """
        super().__init__(**kwargs)
        # Plain pass-through of the keyword arguments onto the model instance.
        self.unit = unit
        self.current_value = current_value
        self.limit = limit
        self.name = name
class UsageName(_serialization.Model):
    """The usage names.

    :ivar value: A string describing the resource name.
    :vartype value: str
    :ivar localized_value: A localized string describing the resource name.
    :vartype localized_value: str
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "value": {"key": "value", "type": "str"},
        "localized_value": {"key": "localizedValue", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[str] = None,
        localized_value: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: A string describing the resource name.
        :paramtype value: str
        :keyword localized_value: A localized string describing the resource name.
        :paramtype localized_value: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.localized_value = localized_value
class UsagesListResult(_serialization.Model):
    """The list usages operation response.

    :ivar value: The list network resource usages.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.Usage]
    :ivar next_link: URL to get the next set of results.
    :vartype next_link: str
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "value": {"key": "value", "type": "[Usage]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.Usage"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The list network resource usages.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.Usage]
        :keyword next_link: URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        # Absent/None when this page is the last page of results.
        self.next_link = next_link
class VerificationIPFlowParameters(_serialization.Model):
    """Parameters that define the IP flow to be verified.

    All required parameters must be populated in order to send to Azure.

    :ivar target_resource_id: The ID of the target resource to perform next-hop on. Required.
    :vartype target_resource_id: str
    :ivar direction: The direction of the packet represented as a 5-tuple. Required. Known values
     are: "Inbound" and "Outbound".
    :vartype direction: str or ~azure.mgmt.network.v2017_03_01.models.Direction
    :ivar protocol: Protocol to be verified on. Required. Known values are: "TCP" and "UDP".
    :vartype protocol: str or ~azure.mgmt.network.v2017_03_01.models.Protocol
    :ivar local_port: The local port. Acceptable values are a single integer in the range
     (0-65535). Support for * for the source port, which depends on the direction. Required.
    :vartype local_port: str
    :ivar remote_port: The remote port. Acceptable values are a single integer in the range
     (0-65535). Support for * for the source port, which depends on the direction. Required.
    :vartype remote_port: str
    :ivar local_ip_address: The local IP address. Acceptable values are valid IPv4 addresses.
     Required.
    :vartype local_ip_address: str
    :ivar remote_ip_address: The remote IP address. Acceptable values are valid IPv4 addresses.
     Required.
    :vartype remote_ip_address: str
    :ivar target_nic_resource_id: The NIC ID. (If VM has multiple NICs and IP forwarding is enabled
     on any of them, then this parameter must be specified. Otherwise optional).
    :vartype target_nic_resource_id: str
    """

    # Everything except the NIC id is mandatory on the wire.
    _validation = {
        "target_resource_id": {"required": True},
        "direction": {"required": True},
        "protocol": {"required": True},
        "local_port": {"required": True},
        "remote_port": {"required": True},
        "local_ip_address": {"required": True},
        "remote_ip_address": {"required": True},
    }

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "target_resource_id": {"key": "targetResourceId", "type": "str"},
        "direction": {"key": "direction", "type": "str"},
        "protocol": {"key": "protocol", "type": "str"},
        "local_port": {"key": "localPort", "type": "str"},
        "remote_port": {"key": "remotePort", "type": "str"},
        "local_ip_address": {"key": "localIPAddress", "type": "str"},
        "remote_ip_address": {"key": "remoteIPAddress", "type": "str"},
        "target_nic_resource_id": {"key": "targetNicResourceId", "type": "str"},
    }

    def __init__(
        self,
        *,
        target_resource_id: str,
        direction: Union[str, "_models.Direction"],
        protocol: Union[str, "_models.Protocol"],
        local_port: str,
        remote_port: str,
        local_ip_address: str,
        remote_ip_address: str,
        target_nic_resource_id: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword target_resource_id: The ID of the target resource to perform next-hop on.
         Required.
        :paramtype target_resource_id: str
        :keyword direction: The direction of the packet represented as a 5-tuple. Required. Known
         values are: "Inbound" and "Outbound".
        :paramtype direction: str or ~azure.mgmt.network.v2017_03_01.models.Direction
        :keyword protocol: Protocol to be verified on. Required. Known values are: "TCP" and "UDP".
        :paramtype protocol: str or ~azure.mgmt.network.v2017_03_01.models.Protocol
        :keyword local_port: The local port. Acceptable values are a single integer in the range
         (0-65535). Support for * for the source port, which depends on the direction. Required.
        :paramtype local_port: str
        :keyword remote_port: The remote port. Acceptable values are a single integer in the range
         (0-65535). Support for * for the source port, which depends on the direction. Required.
        :paramtype remote_port: str
        :keyword local_ip_address: The local IP address. Acceptable values are valid IPv4
         addresses. Required.
        :paramtype local_ip_address: str
        :keyword remote_ip_address: The remote IP address. Acceptable values are valid IPv4
         addresses. Required.
        :paramtype remote_ip_address: str
        :keyword target_nic_resource_id: The NIC ID. (If VM has multiple NICs and IP forwarding is
         enabled on any of them, then this parameter must be specified. Otherwise optional).
        :paramtype target_nic_resource_id: str
        """
        super().__init__(**kwargs)
        # Plain pass-through of the keyword arguments onto the model instance.
        self.target_resource_id = target_resource_id
        self.direction = direction
        self.protocol = protocol
        self.local_port = local_port
        self.remote_port = remote_port
        self.local_ip_address = local_ip_address
        self.remote_ip_address = remote_ip_address
        self.target_nic_resource_id = target_nic_resource_id
class VerificationIPFlowResult(_serialization.Model):
    """Results of IP flow verification on the target resource.

    :ivar access: Indicates whether the traffic is allowed or denied. Known values are: "Allow" and
     "Deny".
    :vartype access: str or ~azure.mgmt.network.v2017_03_01.models.Access
    :ivar rule_name: Name of the rule. If input is not matched against any security rule, it is not
     displayed.
    :vartype rule_name: str
    """

    # Serializer metadata: attribute name -> wire key / wire type.
    _attribute_map = {
        "access": {"key": "access", "type": "str"},
        "rule_name": {"key": "ruleName", "type": "str"},
    }

    def __init__(
        self,
        *,
        access: Optional[Union[str, "_models.Access"]] = None,
        rule_name: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword access: Indicates whether the traffic is allowed or denied. Known values are:
         "Allow" and "Deny".
        :paramtype access: str or ~azure.mgmt.network.v2017_03_01.models.Access
        :keyword rule_name: Name of the rule. If input is not matched against any security rule, it
         is not displayed.
        :paramtype rule_name: str
        """
        super().__init__(**kwargs)
        # Accepts either a plain string or the Access enum value.
        self.access = access
        self.rule_name = rule_name
class VirtualNetwork(Resource): # pylint: disable=too-many-instance-attributes
"""Virtual Network resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar location: Resource location.
:vartype location: str
:ivar tags: Resource tags.
:vartype tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar address_space: The AddressSpace that contains an array of IP address ranges that can be
used by subnets.
:vartype address_space: ~azure.mgmt.network.v2017_03_01.models.AddressSpace
:ivar dhcp_options: The dhcpOptions that contains an array of DNS servers available to VMs
deployed in the virtual network.
:vartype dhcp_options: ~azure.mgmt.network.v2017_03_01.models.DhcpOptions
:ivar subnets: A list of subnets in a Virtual Network.
:vartype subnets: list[~azure.mgmt.network.v2017_03_01.models.Subnet]
:ivar virtual_network_peerings: A list of peerings in a Virtual Network.
:vartype virtual_network_peerings:
list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkPeering]
:ivar resource_guid: The resourceGuid property of the Virtual Network resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the PublicIP resource. Possible values are:
'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
"""
_validation = {
"name": {"readonly": True},
"type": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"location": {"key": "location", "type": "str"},
"tags": {"key": "tags", "type": "{str}"},
"etag": {"key": "etag", "type": "str"},
"address_space": {"key": "properties.addressSpace", "type": "AddressSpace"},
"dhcp_options": {"key": "properties.dhcpOptions", "type": "DhcpOptions"},
"subnets": {"key": "properties.subnets", "type": "[Subnet]"},
"virtual_network_peerings": {"key": "properties.virtualNetworkPeerings", "type": "[VirtualNetworkPeering]"},
"resource_guid": {"key": "properties.resourceGuid", "type": "str"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
}
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        address_space: Optional["_models.AddressSpace"] = None,
        dhcp_options: Optional["_models.DhcpOptions"] = None,
        subnets: Optional[List["_models.Subnet"]] = None,
        virtual_network_peerings: Optional[List["_models.VirtualNetworkPeering"]] = None,
        resource_guid: Optional[str] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ) -> None:
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: Gets a unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword address_space: The AddressSpace that contains an array of IP address ranges that can
         be used by subnets.
        :paramtype address_space: ~azure.mgmt.network.v2017_03_01.models.AddressSpace
        :keyword dhcp_options: The dhcpOptions that contains an array of DNS servers available to VMs
         deployed in the virtual network.
        :paramtype dhcp_options: ~azure.mgmt.network.v2017_03_01.models.DhcpOptions
        :keyword subnets: A list of subnets in a Virtual Network.
        :paramtype subnets: list[~azure.mgmt.network.v2017_03_01.models.Subnet]
        :keyword virtual_network_peerings: A list of peerings in a Virtual Network.
        :paramtype virtual_network_peerings:
         list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkPeering]
        :keyword resource_guid: The resourceGuid property of the Virtual Network resource.
        :paramtype resource_guid: str
        :keyword provisioning_state: The provisioning state of the PublicIP resource. Possible values
         are: 'Updating', 'Deleting', and 'Failed'.
        :paramtype provisioning_state: str
        """
        # id/location/tags are delegated to the base class; the remaining
        # keyword arguments are virtual-network "properties" stored as-is.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.address_space = address_space
        self.dhcp_options = dhcp_options
        self.subnets = subnets
        self.virtual_network_peerings = virtual_network_peerings
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
class VirtualNetworkGateway(Resource):  # pylint: disable=too-many-instance-attributes
    """A common class for general resource information.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar ip_configurations: IP configurations for virtual network gateway.
    :vartype ip_configurations:
     list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayIPConfiguration]
    :ivar gateway_type: The type of this virtual network gateway. Possible values are: 'Vpn' and
     'ExpressRoute'. Known values are: "Vpn" and "ExpressRoute".
    :vartype gateway_type: str or ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayType
    :ivar vpn_type: The type of this virtual network gateway. Possible values are: 'PolicyBased'
     and 'RouteBased'. Known values are: "PolicyBased" and "RouteBased".
    :vartype vpn_type: str or ~azure.mgmt.network.v2017_03_01.models.VpnType
    :ivar enable_bgp: Whether BGP is enabled for this virtual network gateway or not.
    :vartype enable_bgp: bool
    :ivar active: ActiveActive flag.
    :vartype active: bool
    :ivar gateway_default_site: The reference of the LocalNetworkGateway resource which represents
     local network site having default routes. Assign Null value in case of removing existing
     default site setting.
    :vartype gateway_default_site: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU
     selected for Virtual network gateway.
    :vartype sku: ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewaySku
    :ivar vpn_client_configuration: The reference of the VpnClientConfiguration resource which
     represents the P2S VpnClient configurations.
    :vartype vpn_client_configuration:
     ~azure.mgmt.network.v2017_03_01.models.VpnClientConfiguration
    :ivar bgp_settings: Virtual network gateway's BGP speaker settings.
    :vartype bgp_settings: ~azure.mgmt.network.v2017_03_01.models.BgpSettings
    :ivar resource_guid: The resource GUID property of the VirtualNetworkGateway resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the VirtualNetworkGateway resource.
     Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """
    # Server-populated fields; values sent for these keys are ignored on write.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }
    # Attribute -> JSON key path / serialization-type mapping for the serializer;
    # "properties.*" keys nest under the ARM "properties" envelope.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "ip_configurations": {"key": "properties.ipConfigurations", "type": "[VirtualNetworkGatewayIPConfiguration]"},
        "gateway_type": {"key": "properties.gatewayType", "type": "str"},
        "vpn_type": {"key": "properties.vpnType", "type": "str"},
        "enable_bgp": {"key": "properties.enableBgp", "type": "bool"},
        "active": {"key": "properties.activeActive", "type": "bool"},
        "gateway_default_site": {"key": "properties.gatewayDefaultSite", "type": "SubResource"},
        "sku": {"key": "properties.sku", "type": "VirtualNetworkGatewaySku"},
        "vpn_client_configuration": {"key": "properties.vpnClientConfiguration", "type": "VpnClientConfiguration"},
        "bgp_settings": {"key": "properties.bgpSettings", "type": "BgpSettings"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        ip_configurations: Optional[List["_models.VirtualNetworkGatewayIPConfiguration"]] = None,
        gateway_type: Optional[Union[str, "_models.VirtualNetworkGatewayType"]] = None,
        vpn_type: Optional[Union[str, "_models.VpnType"]] = None,
        enable_bgp: Optional[bool] = None,
        active: Optional[bool] = None,
        gateway_default_site: Optional["_models.SubResource"] = None,
        sku: Optional["_models.VirtualNetworkGatewaySku"] = None,
        vpn_client_configuration: Optional["_models.VpnClientConfiguration"] = None,
        bgp_settings: Optional["_models.BgpSettings"] = None,
        resource_guid: Optional[str] = None,
        **kwargs
    ) -> None:
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: Gets a unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword ip_configurations: IP configurations for virtual network gateway.
        :paramtype ip_configurations:
         list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayIPConfiguration]
        :keyword gateway_type: The type of this virtual network gateway. Possible values are: 'Vpn' and
         'ExpressRoute'. Known values are: "Vpn" and "ExpressRoute".
        :paramtype gateway_type: str or
         ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayType
        :keyword vpn_type: The type of this virtual network gateway. Possible values are: 'PolicyBased'
         and 'RouteBased'. Known values are: "PolicyBased" and "RouteBased".
        :paramtype vpn_type: str or ~azure.mgmt.network.v2017_03_01.models.VpnType
        :keyword enable_bgp: Whether BGP is enabled for this virtual network gateway or not.
        :paramtype enable_bgp: bool
        :keyword active: ActiveActive flag.
        :paramtype active: bool
        :keyword gateway_default_site: The reference of the LocalNetworkGateway resource which
         represents local network site having default routes. Assign Null value in case of removing
         existing default site setting.
        :paramtype gateway_default_site: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU
         selected for Virtual network gateway.
        :paramtype sku: ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewaySku
        :keyword vpn_client_configuration: The reference of the VpnClientConfiguration resource which
         represents the P2S VpnClient configurations.
        :paramtype vpn_client_configuration:
         ~azure.mgmt.network.v2017_03_01.models.VpnClientConfiguration
        :keyword bgp_settings: Virtual network gateway's BGP speaker settings.
        :paramtype bgp_settings: ~azure.mgmt.network.v2017_03_01.models.BgpSettings
        :keyword resource_guid: The resource GUID property of the VirtualNetworkGateway resource.
        :paramtype resource_guid: str
        """
        # id/location/tags are delegated to the Resource base class.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.ip_configurations = ip_configurations
        self.gateway_type = gateway_type
        self.vpn_type = vpn_type
        self.enable_bgp = enable_bgp
        self.active = active
        self.gateway_default_site = gateway_default_site
        self.sku = sku
        self.vpn_client_configuration = vpn_client_configuration
        self.bgp_settings = bgp_settings
        self.resource_guid = resource_guid
        # Read-only (see _validation); populated by the service, never by callers.
        self.provisioning_state = None
class VirtualNetworkGatewayConnection(Resource):  # pylint: disable=too-many-instance-attributes
    """A common class for general resource information.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar authorization_key: The authorizationKey.
    :vartype authorization_key: str
    :ivar virtual_network_gateway1: A common class for general resource information. Required.
    :vartype virtual_network_gateway1: ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGateway
    :ivar virtual_network_gateway2: A common class for general resource information.
    :vartype virtual_network_gateway2: ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGateway
    :ivar local_network_gateway2: A common class for general resource information.
    :vartype local_network_gateway2: ~azure.mgmt.network.v2017_03_01.models.LocalNetworkGateway
    :ivar connection_type: Gateway connection type. Possible values are:
     'IPsec','Vnet2Vnet','ExpressRoute', and 'VPNClient'. Required. Known values are: "IPsec",
     "Vnet2Vnet", "ExpressRoute", and "VPNClient".
    :vartype connection_type: str or
     ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayConnectionType
    :ivar routing_weight: The routing weight.
    :vartype routing_weight: int
    :ivar shared_key: The IPSec shared key.
    :vartype shared_key: str
    :ivar connection_status: Virtual network Gateway connection status. Possible values are
     'Unknown', 'Connecting', 'Connected' and 'NotConnected'. Known values are: "Unknown",
     "Connecting", "Connected", and "NotConnected".
    :vartype connection_status: str or
     ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayConnectionStatus
    :ivar tunnel_connection_status: Collection of all tunnels' connection health status.
    :vartype tunnel_connection_status:
     list[~azure.mgmt.network.v2017_03_01.models.TunnelConnectionHealth]
    :ivar egress_bytes_transferred: The egress bytes transferred in this connection.
    :vartype egress_bytes_transferred: int
    :ivar ingress_bytes_transferred: The ingress bytes transferred in this connection.
    :vartype ingress_bytes_transferred: int
    :ivar peer: The reference to peerings resource.
    :vartype peer: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar enable_bgp: EnableBgp flag.
    :vartype enable_bgp: bool
    :ivar use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
    :vartype use_policy_based_traffic_selectors: bool
    :ivar ipsec_policies: The IPSec Policies to be considered by this connection.
    :vartype ipsec_policies: list[~azure.mgmt.network.v2017_03_01.models.IpsecPolicy]
    :ivar resource_guid: The resource GUID property of the VirtualNetworkGatewayConnection
     resource.
    :vartype resource_guid: str
    :ivar provisioning_state: The provisioning state of the VirtualNetworkGatewayConnection
     resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """
    # readonly fields are server-populated; required fields must be supplied
    # before the model can be sent to the service.
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
        "virtual_network_gateway1": {"required": True},
        "connection_type": {"required": True},
        "connection_status": {"readonly": True},
        "tunnel_connection_status": {"readonly": True},
        "egress_bytes_transferred": {"readonly": True},
        "ingress_bytes_transferred": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }
    # Attribute -> JSON key path / serialization-type mapping for the serializer.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "authorization_key": {"key": "properties.authorizationKey", "type": "str"},
        "virtual_network_gateway1": {"key": "properties.virtualNetworkGateway1", "type": "VirtualNetworkGateway"},
        "virtual_network_gateway2": {"key": "properties.virtualNetworkGateway2", "type": "VirtualNetworkGateway"},
        "local_network_gateway2": {"key": "properties.localNetworkGateway2", "type": "LocalNetworkGateway"},
        "connection_type": {"key": "properties.connectionType", "type": "str"},
        "routing_weight": {"key": "properties.routingWeight", "type": "int"},
        "shared_key": {"key": "properties.sharedKey", "type": "str"},
        "connection_status": {"key": "properties.connectionStatus", "type": "str"},
        "tunnel_connection_status": {"key": "properties.tunnelConnectionStatus", "type": "[TunnelConnectionHealth]"},
        "egress_bytes_transferred": {"key": "properties.egressBytesTransferred", "type": "int"},
        "ingress_bytes_transferred": {"key": "properties.ingressBytesTransferred", "type": "int"},
        "peer": {"key": "properties.peer", "type": "SubResource"},
        "enable_bgp": {"key": "properties.enableBgp", "type": "bool"},
        "use_policy_based_traffic_selectors": {"key": "properties.usePolicyBasedTrafficSelectors", "type": "bool"},
        "ipsec_policies": {"key": "properties.ipsecPolicies", "type": "[IpsecPolicy]"},
        "resource_guid": {"key": "properties.resourceGuid", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }
    def __init__(
        self,
        *,
        virtual_network_gateway1: "_models.VirtualNetworkGateway",
        connection_type: Union[str, "_models.VirtualNetworkGatewayConnectionType"],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        authorization_key: Optional[str] = None,
        virtual_network_gateway2: Optional["_models.VirtualNetworkGateway"] = None,
        local_network_gateway2: Optional["_models.LocalNetworkGateway"] = None,
        routing_weight: Optional[int] = None,
        shared_key: Optional[str] = None,
        peer: Optional["_models.SubResource"] = None,
        enable_bgp: Optional[bool] = None,
        use_policy_based_traffic_selectors: Optional[bool] = None,
        ipsec_policies: Optional[List["_models.IpsecPolicy"]] = None,
        resource_guid: Optional[str] = None,
        **kwargs
    ) -> None:
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword location: Resource location.
        :paramtype location: str
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: Gets a unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword authorization_key: The authorizationKey.
        :paramtype authorization_key: str
        :keyword virtual_network_gateway1: A common class for general resource information. Required.
        :paramtype virtual_network_gateway1:
         ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGateway
        :keyword virtual_network_gateway2: A common class for general resource information.
        :paramtype virtual_network_gateway2:
         ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGateway
        :keyword local_network_gateway2: A common class for general resource information.
        :paramtype local_network_gateway2: ~azure.mgmt.network.v2017_03_01.models.LocalNetworkGateway
        :keyword connection_type: Gateway connection type. Possible values are:
         'IPsec','Vnet2Vnet','ExpressRoute', and 'VPNClient'. Required. Known values are: "IPsec",
         "Vnet2Vnet", "ExpressRoute", and "VPNClient".
        :paramtype connection_type: str or
         ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayConnectionType
        :keyword routing_weight: The routing weight.
        :paramtype routing_weight: int
        :keyword shared_key: The IPSec shared key.
        :paramtype shared_key: str
        :keyword peer: The reference to peerings resource.
        :paramtype peer: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword enable_bgp: EnableBgp flag.
        :paramtype enable_bgp: bool
        :keyword use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
        :paramtype use_policy_based_traffic_selectors: bool
        :keyword ipsec_policies: The IPSec Policies to be considered by this connection.
        :paramtype ipsec_policies: list[~azure.mgmt.network.v2017_03_01.models.IpsecPolicy]
        :keyword resource_guid: The resource GUID property of the VirtualNetworkGatewayConnection
         resource.
        :paramtype resource_guid: str
        """
        # id/location/tags are delegated to the Resource base class.
        super().__init__(id=id, location=location, tags=tags, **kwargs)
        self.etag = etag
        self.authorization_key = authorization_key
        self.virtual_network_gateway1 = virtual_network_gateway1
        self.virtual_network_gateway2 = virtual_network_gateway2
        self.local_network_gateway2 = local_network_gateway2
        self.connection_type = connection_type
        self.routing_weight = routing_weight
        self.shared_key = shared_key
        # The following four attributes are read-only (see _validation):
        # populated by the service, never accepted from callers.
        self.connection_status = None
        self.tunnel_connection_status = None
        self.egress_bytes_transferred = None
        self.ingress_bytes_transferred = None
        self.peer = peer
        self.enable_bgp = enable_bgp
        self.use_policy_based_traffic_selectors = use_policy_based_traffic_selectors
        self.ipsec_policies = ipsec_policies
        self.resource_guid = resource_guid
        # Read-only; populated by the service.
        self.provisioning_state = None
class VirtualNetworkGatewayConnectionListResult(_serialization.Model):
    """One page of results for the ListVirtualNetworkGatewayConnections API call.

    :ivar value: Gets a list of VirtualNetworkGatewayConnection resources that exists in a resource
     group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayConnection]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    # Attribute -> wire key / serialization-type mapping.
    _attribute_map = {
        "value": {"key": "value", "type": "[VirtualNetworkGatewayConnection]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value=None, next_link=None, **kwargs):
        # type: (Optional[List["_models.VirtualNetworkGatewayConnection"]], Optional[str], ...) -> None
        """
        :keyword value: Gets a list of VirtualNetworkGatewayConnection resources that exists in a
         resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewayConnection]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class VirtualNetworkGatewayIPConfiguration(SubResource):
    """IP configuration for virtual network gateway.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar private_ip_allocation_method: The private IP allocation method. Possible values are:
     'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
    :vartype private_ip_allocation_method: str or
     ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
    :ivar subnet: The reference of the subnet resource.
    :vartype subnet: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar public_ip_address: The reference of the public IP resource.
    :vartype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar provisioning_state: The provisioning state of the public IP resource. Possible values
     are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """

    # Server-populated field; ignored if sent on a request.
    _validation = {
        "provisioning_state": {"readonly": True},
    }

    # Attribute -> wire key / serialization-type mapping; "properties.*" keys
    # nest under the ARM "properties" envelope.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "private_ip_allocation_method": {"key": "properties.privateIPAllocationMethod", "type": "str"},
        "subnet": {"key": "properties.subnet", "type": "SubResource"},
        "public_ip_address": {"key": "properties.publicIPAddress", "type": "SubResource"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        private_ip_allocation_method: Optional[Union[str, "_models.IPAllocationMethod"]] = None,
        subnet: Optional["_models.SubResource"] = None,
        public_ip_address: Optional["_models.SubResource"] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword private_ip_allocation_method: The private IP allocation method. Possible values are:
         'Static' and 'Dynamic'. Known values are: "Static" and "Dynamic".
        :paramtype private_ip_allocation_method: str or
         ~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
        :keyword subnet: The reference of the subnet resource.
        :paramtype subnet: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword public_ip_address: The reference of the public IP resource.
        :paramtype public_ip_address: ~azure.mgmt.network.v2017_03_01.models.SubResource
        """
        # The base class owns the "id" attribute.
        super().__init__(id=id, **kwargs)
        self.etag = etag
        self.name = name
        self.subnet = subnet
        self.public_ip_address = public_ip_address
        self.private_ip_allocation_method = private_ip_allocation_method
        # Read-only; populated by the service.
        self.provisioning_state = None
class VirtualNetworkGatewayListResult(_serialization.Model):
    """One page of results for the ListVirtualNetworkGateways API call.

    :ivar value: Gets a list of VirtualNetworkGateway resources that exists in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGateway]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    # Attribute -> wire key / serialization-type mapping.
    _attribute_map = {
        "value": {"key": "value", "type": "[VirtualNetworkGateway]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.VirtualNetworkGateway"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: Gets a list of VirtualNetworkGateway resources that exists in a resource
         group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGateway]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class VirtualNetworkGatewaySku(_serialization.Model):
    """VirtualNetworkGatewaySku details.

    :ivar name: Gateway SKU name. Known values are: "Basic", "HighPerformance", "Standard",
     "UltraPerformance", "VpnGw1", "VpnGw2", and "VpnGw3".
    :vartype name: str or ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewaySkuName
    :ivar tier: Gateway SKU tier. Known values are: "Basic", "HighPerformance", "Standard",
     "UltraPerformance", "VpnGw1", "VpnGw2", and "VpnGw3".
    :vartype tier: str or ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewaySkuTier
    :ivar capacity: The capacity.
    :vartype capacity: int
    """

    # Attribute -> wire key / serialization-type mapping.
    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "tier": {"key": "tier", "type": "str"},
        "capacity": {"key": "capacity", "type": "int"},
    }

    def __init__(self, *, name=None, tier=None, capacity: Optional[int] = None, **kwargs):
        """
        :keyword name: Gateway SKU name. Known values are: "Basic", "HighPerformance", "Standard",
         "UltraPerformance", "VpnGw1", "VpnGw2", and "VpnGw3".
        :paramtype name: str or ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewaySkuName
        :keyword tier: Gateway SKU tier. Known values are: "Basic", "HighPerformance", "Standard",
         "UltraPerformance", "VpnGw1", "VpnGw2", and "VpnGw3".
        :paramtype tier: str or ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkGatewaySkuTier
        :keyword capacity: The capacity.
        :paramtype capacity: int
        """
        super().__init__(**kwargs)
        self.capacity = capacity
        self.tier = tier
        self.name = name
class VirtualNetworkListResult(_serialization.Model):
    """One page of results for the ListVirtualNetworks API call.

    :ivar value: Gets a list of VirtualNetwork resources in a resource group.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetwork]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    # Attribute -> wire key / serialization-type mapping.
    _attribute_map = {
        "value": {"key": "value", "type": "[VirtualNetwork]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["_models.VirtualNetwork"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: Gets a list of VirtualNetwork resources in a resource group.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetwork]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        self.value = value
class VirtualNetworkListUsageResult(_serialization.Model):
    """Response for the virtual networks GetUsage API service call.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: VirtualNetwork usage stats.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkUsage]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """

    # "value" is filled in by the service, so the constructor does not accept it.
    _validation = {
        "value": {"readonly": True},
    }

    # Attribute -> wire key / serialization-type mapping.
    _attribute_map = {
        "value": {"key": "value", "type": "[VirtualNetworkUsage]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.next_link = next_link
        # Read-only; always starts out unset on the client side.
        self.value = None
class VirtualNetworkPeering(SubResource):
    """Peerings in a virtual network resource.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar allow_virtual_network_access: Whether the VMs in the linked virtual network space would
     be able to access all the VMs in local Virtual network space.
    :vartype allow_virtual_network_access: bool
    :ivar allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the remote virtual
     network will be allowed/disallowed.
    :vartype allow_forwarded_traffic: bool
    :ivar allow_gateway_transit: If gateway links can be used in remote virtual networking to link
     to this virtual network.
    :vartype allow_gateway_transit: bool
    :ivar use_remote_gateways: If remote gateways can be used on this virtual network. If the flag
     is set to true, and allowGatewayTransit on remote peering is also true, virtual network will
     use gateways of remote virtual network for transit. Only one peering can have this flag set to
     true. This flag cannot be set if virtual network already has a gateway.
    :vartype use_remote_gateways: bool
    :ivar remote_virtual_network: The reference of the remote virtual network.
    :vartype remote_virtual_network: ~azure.mgmt.network.v2017_03_01.models.SubResource
    :ivar peering_state: The status of the virtual network peering. Possible values are
     'Initiated', 'Connected', and 'Disconnected'. Known values are: "Initiated", "Connected", and
     "Disconnected".
    :vartype peering_state: str or
     ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkPeeringState
    :ivar provisioning_state: The provisioning state of the resource.
    :vartype provisioning_state: str
    """
    # Attribute -> JSON key path / serialization-type mapping for the serializer;
    # "properties.*" keys nest under the ARM "properties" envelope. No
    # _validation dict here: every field of this model is writable.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "allow_virtual_network_access": {"key": "properties.allowVirtualNetworkAccess", "type": "bool"},
        "allow_forwarded_traffic": {"key": "properties.allowForwardedTraffic", "type": "bool"},
        "allow_gateway_transit": {"key": "properties.allowGatewayTransit", "type": "bool"},
        "use_remote_gateways": {"key": "properties.useRemoteGateways", "type": "bool"},
        "remote_virtual_network": {"key": "properties.remoteVirtualNetwork", "type": "SubResource"},
        "peering_state": {"key": "properties.peeringState", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        allow_virtual_network_access: Optional[bool] = None,
        allow_forwarded_traffic: Optional[bool] = None,
        allow_gateway_transit: Optional[bool] = None,
        use_remote_gateways: Optional[bool] = None,
        remote_virtual_network: Optional["_models.SubResource"] = None,
        peering_state: Optional[Union[str, "_models.VirtualNetworkPeeringState"]] = None,
        provisioning_state: Optional[str] = None,
        **kwargs
    ) -> None:
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name can
         be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword allow_virtual_network_access: Whether the VMs in the linked virtual network space
         would be able to access all the VMs in local Virtual network space.
        :paramtype allow_virtual_network_access: bool
        :keyword allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the remote
         virtual network will be allowed/disallowed.
        :paramtype allow_forwarded_traffic: bool
        :keyword allow_gateway_transit: If gateway links can be used in remote virtual networking to
         link to this virtual network.
        :paramtype allow_gateway_transit: bool
        :keyword use_remote_gateways: If remote gateways can be used on this virtual network. If the
         flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network
         will use gateways of remote virtual network for transit. Only one peering can have this flag
         set to true. This flag cannot be set if virtual network already has a gateway.
        :paramtype use_remote_gateways: bool
        :keyword remote_virtual_network: The reference of the remote virtual network.
        :paramtype remote_virtual_network: ~azure.mgmt.network.v2017_03_01.models.SubResource
        :keyword peering_state: The status of the virtual network peering. Possible values are
         'Initiated', 'Connected', and 'Disconnected'. Known values are: "Initiated", "Connected", and
         "Disconnected".
        :paramtype peering_state: str or
         ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkPeeringState
        :keyword provisioning_state: The provisioning state of the resource.
        :paramtype provisioning_state: str
        """
        # The base class owns the "id" attribute.
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.allow_virtual_network_access = allow_virtual_network_access
        self.allow_forwarded_traffic = allow_forwarded_traffic
        self.allow_gateway_transit = allow_gateway_transit
        self.use_remote_gateways = use_remote_gateways
        self.remote_virtual_network = remote_virtual_network
        self.peering_state = peering_state
        self.provisioning_state = provisioning_state
class VirtualNetworkPeeringListResult(_serialization.Model):
    """Response for the ListVirtualNetworkPeerings API service call. Retrieves all peerings that
    belong to a virtual network.

    (NOTE(review): the generated summary said "ListSubnets ... subnets", but per the
    ``value`` attribute below this result carries virtual network peerings.)

    :ivar value: The peerings in a virtual network.
    :vartype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkPeering]
    :ivar next_link: The URL to get the next set of results.
    :vartype next_link: str
    """
    # Maps each Python attribute to its JSON wire key and (de)serialization type.
    _attribute_map = {
        "value": {"key": "value", "type": "[VirtualNetworkPeering]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }
    def __init__(
        self,
        *,
        value: Optional[List["_models.VirtualNetworkPeering"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword value: The peerings in a virtual network.
        :paramtype value: list[~azure.mgmt.network.v2017_03_01.models.VirtualNetworkPeering]
        :keyword next_link: The URL to get the next set of results.
        :paramtype next_link: str
        """
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link
class VirtualNetworkUsage(_serialization.Model):
    """Usage details for subnet.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar current_value: Indicates number of IPs used from the Subnet.
    :vartype current_value: float
    :ivar id: Subnet identifier.
    :vartype id: str
    :ivar limit: Indicates the size of the subnet.
    :vartype limit: float
    :ivar name: The name containing common and localized value for usage.
    :vartype name: ~azure.mgmt.network.v2017_03_01.models.VirtualNetworkUsageName
    :ivar unit: Usage units. Returns 'Count'.
    :vartype unit: str
    """
    # Every field is server-populated; client-supplied values are ignored.
    _validation = {
        "current_value": {"readonly": True},
        "id": {"readonly": True},
        "limit": {"readonly": True},
        "name": {"readonly": True},
        "unit": {"readonly": True},
    }
    # Maps each Python attribute to its JSON wire key and (de)serialization type.
    _attribute_map = {
        "current_value": {"key": "currentValue", "type": "float"},
        "id": {"key": "id", "type": "str"},
        "limit": {"key": "limit", "type": "float"},
        "name": {"key": "name", "type": "VirtualNetworkUsageName"},
        "unit": {"key": "unit", "type": "str"},
    }
    def __init__(self, **kwargs):
        """All attributes are read-only and filled in by the service."""
        super().__init__(**kwargs)
        self.current_value = None
        self.id = None
        self.limit = None
        self.name = None
        self.unit = None
class VirtualNetworkUsageName(_serialization.Model):
    """Usage strings container.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar localized_value: Localized subnet size and usage string.
    :vartype localized_value: str
    :ivar value: Subnet size and usage string.
    :vartype value: str
    """
    # Both strings come back from the service; clients cannot set them.
    _validation = {
        "localized_value": {"readonly": True},
        "value": {"readonly": True},
    }
    # Maps each Python attribute to its JSON wire key and (de)serialization type.
    _attribute_map = {
        "localized_value": {"key": "localizedValue", "type": "str"},
        "value": {"key": "value", "type": "str"},
    }
    def __init__(self, **kwargs):
        """Both attributes are read-only and populated by the service."""
        super().__init__(**kwargs)
        self.localized_value = None
        self.value = None
class VpnClientConfiguration(_serialization.Model):
    """VpnClientConfiguration for P2S (point-to-site) client.

    :ivar vpn_client_address_pool: The reference of the address space resource which represents
     Address space for P2S VpnClient.
    :vartype vpn_client_address_pool: ~azure.mgmt.network.v2017_03_01.models.AddressSpace
    :ivar vpn_client_root_certificates: VpnClientRootCertificate for virtual network gateway.
    :vartype vpn_client_root_certificates:
     list[~azure.mgmt.network.v2017_03_01.models.VpnClientRootCertificate]
    :ivar vpn_client_revoked_certificates: VpnClientRevokedCertificate for Virtual network gateway.
    :vartype vpn_client_revoked_certificates:
     list[~azure.mgmt.network.v2017_03_01.models.VpnClientRevokedCertificate]
    """
    # Maps each Python attribute to its JSON wire key and (de)serialization type.
    _attribute_map = {
        "vpn_client_address_pool": {"key": "vpnClientAddressPool", "type": "AddressSpace"},
        "vpn_client_root_certificates": {"key": "vpnClientRootCertificates", "type": "[VpnClientRootCertificate]"},
        "vpn_client_revoked_certificates": {
            "key": "vpnClientRevokedCertificates",
            "type": "[VpnClientRevokedCertificate]",
        },
    }
    def __init__(
        self,
        *,
        vpn_client_address_pool: Optional["_models.AddressSpace"] = None,
        vpn_client_root_certificates: Optional[List["_models.VpnClientRootCertificate"]] = None,
        vpn_client_revoked_certificates: Optional[List["_models.VpnClientRevokedCertificate"]] = None,
        **kwargs
    ):
        """
        :keyword vpn_client_address_pool: The reference of the address space resource which
         represents Address space for P2S VpnClient.
        :paramtype vpn_client_address_pool: ~azure.mgmt.network.v2017_03_01.models.AddressSpace
        :keyword vpn_client_root_certificates: VpnClientRootCertificate for virtual network gateway.
        :paramtype vpn_client_root_certificates:
         list[~azure.mgmt.network.v2017_03_01.models.VpnClientRootCertificate]
        :keyword vpn_client_revoked_certificates: VpnClientRevokedCertificate for Virtual network
         gateway.
        :paramtype vpn_client_revoked_certificates:
         list[~azure.mgmt.network.v2017_03_01.models.VpnClientRevokedCertificate]
        """
        super().__init__(**kwargs)
        self.vpn_client_address_pool = vpn_client_address_pool
        self.vpn_client_root_certificates = vpn_client_root_certificates
        self.vpn_client_revoked_certificates = vpn_client_revoked_certificates
class VpnClientParameters(_serialization.Model):
    """Vpn Client Parameters for package generation.

    All required parameters must be populated in order to send to Azure.

    :ivar processor_architecture: VPN client Processor Architecture. Possible values are: 'AMD64'
     and 'X86'. Required. Known values are: "Amd64" and "X86".
    :vartype processor_architecture: str or
     ~azure.mgmt.network.v2017_03_01.models.ProcessorArchitecture
    """
    # The service rejects a request without a processor architecture.
    _validation = {
        "processor_architecture": {"required": True},
    }
    # Maps the Python attribute to its JSON wire key and (de)serialization type.
    _attribute_map = {
        "processor_architecture": {"key": "processorArchitecture", "type": "str"},
    }
    def __init__(self, *, processor_architecture: Union[str, "_models.ProcessorArchitecture"], **kwargs):
        """
        :keyword processor_architecture: VPN client Processor Architecture. Possible values are:
         'AMD64' and 'X86'. Required. Known values are: "Amd64" and "X86".
        :paramtype processor_architecture: str or
         ~azure.mgmt.network.v2017_03_01.models.ProcessorArchitecture
        """
        super().__init__(**kwargs)
        self.processor_architecture = processor_architecture
class VpnClientRevokedCertificate(SubResource):
    """VPN client revoked certificate of virtual network gateway.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar thumbprint: The revoked VPN client certificate thumbprint.
    :vartype thumbprint: str
    :ivar provisioning_state: The provisioning state of the VPN client revoked certificate
     resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """
    # provisioning_state is server-populated and cannot be set by the client.
    _validation = {
        "provisioning_state": {"readonly": True},
    }
    # "properties.*" keys denote fields nested under the resource's
    # "properties" object in the JSON payload.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "thumbprint": {"key": "properties.thumbprint", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        thumbprint: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name
         can be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword thumbprint: The revoked VPN client certificate thumbprint.
        :paramtype thumbprint: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.thumbprint = thumbprint
        # Read-only; populated by the service on responses.
        self.provisioning_state = None
class VpnClientRootCertificate(SubResource):
    """VPN client root certificate of virtual network gateway.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: The name of the resource that is unique within a resource group. This name can be
     used to access the resource.
    :vartype name: str
    :ivar etag: A unique read-only string that changes whenever the resource is updated.
    :vartype etag: str
    :ivar public_cert_data: The certificate public data. Required.
    :vartype public_cert_data: str
    :ivar provisioning_state: The provisioning state of the VPN client root certificate resource.
     Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :vartype provisioning_state: str
    """
    # public_cert_data is mandatory on requests; provisioning_state is
    # server-populated and cannot be set by the client.
    _validation = {
        "public_cert_data": {"required": True},
        "provisioning_state": {"readonly": True},
    }
    # "properties.*" keys denote fields nested under the resource's
    # "properties" object in the JSON payload.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "public_cert_data": {"key": "properties.publicCertData", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }
    def __init__(
        self,
        *,
        public_cert_data: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        name: Optional[str] = None,
        etag: Optional[str] = None,
        **kwargs
    ):
        """
        :keyword id: Resource ID.
        :paramtype id: str
        :keyword name: The name of the resource that is unique within a resource group. This name
         can be used to access the resource.
        :paramtype name: str
        :keyword etag: A unique read-only string that changes whenever the resource is updated.
        :paramtype etag: str
        :keyword public_cert_data: The certificate public data. Required.
        :paramtype public_cert_data: str
        """
        super().__init__(id=id, **kwargs)
        self.name = name
        self.etag = etag
        self.public_cert_data = public_cert_data
        # Read-only; populated by the service on responses.
        self.provisioning_state = None
|
{
"content_hash": "3fd01c4f22df66b1b29d387d04f7ede2",
"timestamp": "",
"source": "github",
"line_count": 9650,
"max_line_length": 540,
"avg_line_length": 45.208082901554405,
"alnum_prop": 0.6532350123092299,
"repo_name": "Azure/azure-sdk-for-python",
"id": "5d6318e3ecf679f4e2d142d8c3528771a8eff912",
"size": "436759",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_03_01/models/_models_py3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
from collections import defaultdict
from functools import partial
from typing import (
Any,
Collection,
DefaultDict,
Dict,
List,
Mapping,
Optional,
Tuple,
Union,
cast,
)
from ..language import (
DirectiveDefinitionNode,
DirectiveLocation,
DocumentNode,
EnumTypeDefinitionNode,
EnumTypeExtensionNode,
EnumValueDefinitionNode,
FieldDefinitionNode,
InputObjectTypeDefinitionNode,
InputObjectTypeExtensionNode,
InputValueDefinitionNode,
InterfaceTypeDefinitionNode,
InterfaceTypeExtensionNode,
ListTypeNode,
NamedTypeNode,
NonNullTypeNode,
ObjectTypeDefinitionNode,
ObjectTypeExtensionNode,
OperationType,
ScalarTypeDefinitionNode,
ScalarTypeExtensionNode,
SchemaDefinitionNode,
SchemaExtensionNode,
TypeDefinitionNode,
TypeExtensionNode,
TypeNode,
UnionTypeDefinitionNode,
UnionTypeExtensionNode,
)
from ..pyutils import inspect, merge_kwargs
from ..type import (
GraphQLArgument,
GraphQLArgumentMap,
GraphQLDeprecatedDirective,
GraphQLDirective,
GraphQLEnumType,
GraphQLEnumValue,
GraphQLEnumValueMap,
GraphQLField,
GraphQLFieldMap,
GraphQLInputField,
GraphQLInputFieldMap,
GraphQLInputObjectType,
GraphQLInputType,
GraphQLInterfaceType,
GraphQLList,
GraphQLNamedType,
GraphQLNonNull,
GraphQLNullableType,
GraphQLObjectType,
GraphQLOutputType,
GraphQLScalarType,
GraphQLSchema,
GraphQLSchemaKwargs,
GraphQLSpecifiedByDirective,
GraphQLType,
GraphQLUnionType,
assert_schema,
introspection_types,
is_enum_type,
is_input_object_type,
is_interface_type,
is_introspection_type,
is_list_type,
is_non_null_type,
is_object_type,
is_scalar_type,
is_specified_directive,
is_specified_scalar_type,
is_union_type,
specified_scalar_types,
)
from .value_from_ast import value_from_ast
# Public API of this module.
__all__ = [
    "extend_schema",
    "ExtendSchemaImpl",
]
def extend_schema(
    schema: GraphQLSchema,
    document_ast: DocumentNode,
    assume_valid: bool = False,
    assume_valid_sdl: bool = False,
) -> GraphQLSchema:
    """Extend the schema with extensions from a given document.

    Given an existing schema and a document that may contain GraphQL type
    extensions and definitions, produce a new schema; the original schema is
    never mutated. Since a schema is a graph of references, extending it means
    copying it wholesale while applying the extensions along the way.

    Set ``assume_valid`` to ``True`` to skip validating the produced schema,
    and ``assume_valid_sdl`` to ``True`` to skip validating the SDL document
    itself — both are only safe for extensions already known to be valid.
    """
    assert_schema(schema)
    if not isinstance(document_ast, DocumentNode):
        raise TypeError("Must provide valid Document AST.")
    if not assume_valid and not assume_valid_sdl:
        # Imported lazily to avoid a circular import with the validation package.
        from ..validation.validate import assert_valid_sdl_extension

        assert_valid_sdl_extension(document_ast, schema)
    original_kwargs = schema.to_kwargs()
    extended_kwargs = ExtendSchemaImpl.extend_schema_args(
        original_kwargs, document_ast, assume_valid
    )
    if extended_kwargs is original_kwargs:
        # The document changed nothing, so the very same schema can be reused.
        return schema
    return GraphQLSchema(**extended_kwargs)
class ExtendSchemaImpl:
"""Helper class implementing the methods to extend a schema.
Note: We use a class instead of an implementation with local functions
and lambda functions so that the extended schema can be pickled.
For internal use only.
"""
type_map: Dict[str, GraphQLNamedType]
type_extensions_map: Dict[str, Any]
def __init__(self, type_extensions_map: Dict[str, Any]):
self.type_map = {}
self.type_extensions_map = type_extensions_map
@classmethod
def extend_schema_args(
cls,
schema_kwargs: GraphQLSchemaKwargs,
document_ast: DocumentNode,
assume_valid: bool = False,
) -> GraphQLSchemaKwargs:
"""Extend the given schema arguments with extensions from a given document.
For internal use only.
"""
# Note: schema_kwargs should become a TypedDict once we require Python 3.8
# Collect the type definitions and extensions found in the document.
type_defs: List[TypeDefinitionNode] = []
type_extensions_map: DefaultDict[str, Any] = defaultdict(list)
# New directives and types are separate because a directives and types can have
# the same name. For example, a type named "skip".
directive_defs: List[DirectiveDefinitionNode] = []
schema_def: Optional[SchemaDefinitionNode] = None
# Schema extensions are collected which may add additional operation types.
schema_extensions: List[SchemaExtensionNode] = []
for def_ in document_ast.definitions:
if isinstance(def_, SchemaDefinitionNode):
schema_def = def_
elif isinstance(def_, SchemaExtensionNode):
schema_extensions.append(def_)
elif isinstance(def_, TypeDefinitionNode):
type_defs.append(def_)
elif isinstance(def_, TypeExtensionNode):
extended_type_name = def_.name.value
type_extensions_map[extended_type_name].append(def_)
elif isinstance(def_, DirectiveDefinitionNode):
directive_defs.append(def_)
# If this document contains no new types, extensions, or directives then return
# the same unmodified GraphQLSchema instance.
if (
not type_extensions_map
and not type_defs
and not directive_defs
and not schema_extensions
and not schema_def
):
return schema_kwargs
self = cls(type_extensions_map)
for existing_type in schema_kwargs["types"] or ():
self.type_map[existing_type.name] = self.extend_named_type(existing_type)
for type_node in type_defs:
name = type_node.name.value
self.type_map[name] = std_type_map.get(name) or self.build_type(type_node)
# Get the extended root operation types.
operation_types: Dict[OperationType, GraphQLNamedType] = {}
for operation_type in OperationType:
original_type = schema_kwargs[operation_type.value]
if original_type:
operation_types[operation_type] = self.replace_named_type(original_type)
# Then, incorporate schema definition and all schema extensions.
if schema_def:
operation_types.update(self.get_operation_types([schema_def]))
if schema_extensions:
operation_types.update(self.get_operation_types(schema_extensions))
# Then produce and return the kwargs for a Schema with these types.
get_operation = operation_types.get
return GraphQLSchemaKwargs(
query=get_operation(OperationType.QUERY), # type: ignore
mutation=get_operation(OperationType.MUTATION), # type: ignore
subscription=get_operation(OperationType.SUBSCRIPTION), # type: ignore
types=tuple(self.type_map.values()),
directives=tuple(
self.replace_directive(directive)
for directive in schema_kwargs["directives"]
)
+ tuple(self.build_directive(directive) for directive in directive_defs),
description=schema_def.description.value
if schema_def and schema_def.description
else None,
extensions={},
ast_node=schema_def or schema_kwargs["ast_node"],
extension_ast_nodes=schema_kwargs["extension_ast_nodes"]
+ tuple(schema_extensions),
assume_valid=assume_valid,
)
# noinspection PyTypeChecker,PyUnresolvedReferences
def replace_type(self, type_: GraphQLType) -> GraphQLType:
if is_list_type(type_):
return GraphQLList(self.replace_type(type_.of_type))
if is_non_null_type(type_):
return GraphQLNonNull(self.replace_type(type_.of_type)) # type: ignore
return self.replace_named_type(type_) # type: ignore
def replace_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType:
# Note: While this could make early assertions to get the correctly
# typed values below, that would throw immediately while type system
# validation with validate_schema() will produce more actionable results.
return self.type_map[type_.name]
# noinspection PyShadowingNames
def replace_directive(self, directive: GraphQLDirective) -> GraphQLDirective:
if is_specified_directive(directive):
# Builtin directives are not extended.
return directive
kwargs = directive.to_kwargs()
return GraphQLDirective(
**merge_kwargs(
kwargs,
args={
name: self.extend_arg(arg) for name, arg in kwargs["args"].items()
},
)
)
def extend_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType:
if is_introspection_type(type_) or is_specified_scalar_type(type_):
# Builtin types are not extended.
return type_
if is_scalar_type(type_):
return self.extend_scalar_type(type_)
if is_object_type(type_):
return self.extend_object_type(type_)
if is_interface_type(type_):
return self.extend_interface_type(type_)
if is_union_type(type_):
return self.extend_union_type(type_)
if is_enum_type(type_):
return self.extend_enum_type(type_)
if is_input_object_type(type_):
return self.extend_input_object_type(type_)
# Not reachable. All possible types have been considered.
raise TypeError(f"Unexpected type: {inspect(type_)}.") # pragma: no cover
def extend_input_object_type_fields(
self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...]
) -> GraphQLInputFieldMap:
return {
**{
name: GraphQLInputField(
**merge_kwargs(
field.to_kwargs(),
type_=self.replace_type(field.type),
)
)
for name, field in kwargs["fields"].items()
},
**self.build_input_field_map(extensions),
}
# noinspection PyShadowingNames
def extend_input_object_type(
self,
type_: GraphQLInputObjectType,
) -> GraphQLInputObjectType:
kwargs = type_.to_kwargs()
extensions = tuple(self.type_extensions_map[kwargs["name"]])
return GraphQLInputObjectType(
**merge_kwargs(
kwargs,
fields=partial(
self.extend_input_object_type_fields, kwargs, extensions
),
extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions,
)
)
def extend_enum_type(self, type_: GraphQLEnumType) -> GraphQLEnumType:
kwargs = type_.to_kwargs()
extensions = tuple(self.type_extensions_map[kwargs["name"]])
return GraphQLEnumType(
**merge_kwargs(
kwargs,
values={**kwargs["values"], **self.build_enum_value_map(extensions)},
extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions,
)
)
def extend_scalar_type(self, type_: GraphQLScalarType) -> GraphQLScalarType:
kwargs = type_.to_kwargs()
extensions = tuple(self.type_extensions_map[kwargs["name"]])
specified_by_url = kwargs["specified_by_url"]
for extension_node in extensions:
specified_by_url = get_specified_by_url(extension_node) or specified_by_url
return GraphQLScalarType(
**merge_kwargs(
kwargs,
specified_by_url=specified_by_url,
extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions,
)
)
def extend_object_type_interfaces(
self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...]
) -> List[GraphQLInterfaceType]:
return [
cast(GraphQLInterfaceType, self.replace_named_type(interface))
for interface in kwargs["interfaces"]
] + self.build_interfaces(extensions)
def extend_object_type_fields(
self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...]
) -> GraphQLFieldMap:
return {
**{
name: self.extend_field(field)
for name, field in kwargs["fields"].items()
},
**self.build_field_map(extensions),
}
# noinspection PyShadowingNames
def extend_object_type(self, type_: GraphQLObjectType) -> GraphQLObjectType:
kwargs = type_.to_kwargs()
extensions = tuple(self.type_extensions_map[kwargs["name"]])
return GraphQLObjectType(
**merge_kwargs(
kwargs,
interfaces=partial(
self.extend_object_type_interfaces, kwargs, extensions
),
fields=partial(self.extend_object_type_fields, kwargs, extensions),
extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions,
)
)
def extend_interface_type_interfaces(
self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...]
) -> List[GraphQLInterfaceType]:
return [
cast(GraphQLInterfaceType, self.replace_named_type(interface))
for interface in kwargs["interfaces"]
] + self.build_interfaces(extensions)
def extend_interface_type_fields(
self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...]
) -> GraphQLFieldMap:
return {
**{
name: self.extend_field(field)
for name, field in kwargs["fields"].items()
},
**self.build_field_map(extensions),
}
# noinspection PyShadowingNames
def extend_interface_type(
self, type_: GraphQLInterfaceType
) -> GraphQLInterfaceType:
kwargs = type_.to_kwargs()
extensions = tuple(self.type_extensions_map[kwargs["name"]])
return GraphQLInterfaceType(
**merge_kwargs(
kwargs,
interfaces=partial(
self.extend_interface_type_interfaces, kwargs, extensions
),
fields=partial(self.extend_interface_type_fields, kwargs, extensions),
extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions,
)
)
def extend_union_type_types(
self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...]
) -> List[GraphQLObjectType]:
return [
cast(GraphQLObjectType, self.replace_named_type(member_type))
for member_type in kwargs["types"]
] + self.build_union_types(extensions)
def extend_union_type(self, type_: GraphQLUnionType) -> GraphQLUnionType:
kwargs = type_.to_kwargs()
extensions = tuple(self.type_extensions_map[kwargs["name"]])
return GraphQLUnionType(
**merge_kwargs(
kwargs,
types=partial(self.extend_union_type_types, kwargs, extensions),
extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions,
),
)
# noinspection PyShadowingNames
def extend_field(self, field: GraphQLField) -> GraphQLField:
return GraphQLField(
**merge_kwargs(
field.to_kwargs(),
type_=self.replace_type(field.type),
args={name: self.extend_arg(arg) for name, arg in field.args.items()},
)
)
def extend_arg(self, arg: GraphQLArgument) -> GraphQLArgument:
return GraphQLArgument(
**merge_kwargs(
arg.to_kwargs(),
type_=self.replace_type(arg.type),
)
)
# noinspection PyShadowingNames
def get_operation_types(
self, nodes: Collection[Union[SchemaDefinitionNode, SchemaExtensionNode]]
) -> Dict[OperationType, GraphQLNamedType]:
# Note: While this could make early assertions to get the correctly
# typed values below, that would throw immediately while type system
# validation with validate_schema() will produce more actionable results.
return {
operation_type.operation: self.get_named_type(operation_type.type)
for node in nodes
for operation_type in node.operation_types or []
}
# noinspection PyShadowingNames
def get_named_type(self, node: NamedTypeNode) -> GraphQLNamedType:
name = node.name.value
type_ = std_type_map.get(name) or self.type_map.get(name)
if not type_:
raise TypeError(f"Unknown type: '{name}'.")
return type_
def get_wrapped_type(self, node: TypeNode) -> GraphQLType:
if isinstance(node, ListTypeNode):
return GraphQLList(self.get_wrapped_type(node.type))
if isinstance(node, NonNullTypeNode):
return GraphQLNonNull(
cast(GraphQLNullableType, self.get_wrapped_type(node.type))
)
return self.get_named_type(cast(NamedTypeNode, node))
def build_directive(self, node: DirectiveDefinitionNode) -> GraphQLDirective:
locations = [DirectiveLocation[node.value] for node in node.locations]
return GraphQLDirective(
name=node.name.value,
description=node.description.value if node.description else None,
locations=locations,
is_repeatable=node.repeatable,
args=self.build_argument_map(node.arguments),
ast_node=node,
)
def build_field_map(
self,
nodes: Collection[
Union[
InterfaceTypeDefinitionNode,
InterfaceTypeExtensionNode,
ObjectTypeDefinitionNode,
ObjectTypeExtensionNode,
]
],
) -> GraphQLFieldMap:
field_map: GraphQLFieldMap = {}
for node in nodes:
for field in node.fields or []:
# Note: While this could make assertions to get the correctly typed
# value, that would throw immediately while type system validation
# with validate_schema() will produce more actionable results.
field_map[field.name.value] = GraphQLField(
type_=cast(GraphQLOutputType, self.get_wrapped_type(field.type)),
description=field.description.value if field.description else None,
args=self.build_argument_map(field.arguments),
deprecation_reason=get_deprecation_reason(field),
ast_node=field,
)
return field_map
def build_argument_map(
self,
args: Optional[Collection[InputValueDefinitionNode]],
) -> GraphQLArgumentMap:
arg_map: GraphQLArgumentMap = {}
for arg in args or []:
# Note: While this could make assertions to get the correctly typed
# value, that would throw immediately while type system validation
# with validate_schema() will produce more actionable results.
type_ = cast(GraphQLInputType, self.get_wrapped_type(arg.type))
arg_map[arg.name.value] = GraphQLArgument(
type_=type_,
description=arg.description.value if arg.description else None,
default_value=value_from_ast(arg.default_value, type_),
deprecation_reason=get_deprecation_reason(arg),
ast_node=arg,
)
return arg_map
def build_input_field_map(
self,
nodes: Collection[
Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode]
],
) -> GraphQLInputFieldMap:
input_field_map: GraphQLInputFieldMap = {}
for node in nodes:
for field in node.fields or []:
# Note: While this could make assertions to get the correctly typed
# value, that would throw immediately while type system validation
# with validate_schema() will produce more actionable results.
type_ = cast(GraphQLInputType, self.get_wrapped_type(field.type))
input_field_map[field.name.value] = GraphQLInputField(
type_=type_,
description=field.description.value if field.description else None,
default_value=value_from_ast(field.default_value, type_),
deprecation_reason=get_deprecation_reason(field),
ast_node=field,
)
return input_field_map
@staticmethod
def build_enum_value_map(
nodes: Collection[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]]
) -> GraphQLEnumValueMap:
enum_value_map: GraphQLEnumValueMap = {}
for node in nodes:
for value in node.values or []:
# Note: While this could make assertions to get the correctly typed
# value, that would throw immediately while type system validation
# with validate_schema() will produce more actionable results.
value_name = value.name.value
enum_value_map[value_name] = GraphQLEnumValue(
value=value_name,
description=value.description.value if value.description else None,
deprecation_reason=get_deprecation_reason(value),
ast_node=value,
)
return enum_value_map
def build_interfaces(
self,
nodes: Collection[
Union[
InterfaceTypeDefinitionNode,
InterfaceTypeExtensionNode,
ObjectTypeDefinitionNode,
ObjectTypeExtensionNode,
]
],
) -> List[GraphQLInterfaceType]:
# Note: While this could make assertions to get the correctly typed
# value, that would throw immediately while type system validation
# with validate_schema() will produce more actionable results.
return [
cast(GraphQLInterfaceType, self.get_named_type(type_))
for node in nodes
for type_ in node.interfaces or []
]
def build_union_types(
self,
nodes: Collection[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]],
) -> List[GraphQLObjectType]:
# Note: While this could make assertions to get the correctly typed
# value, that would throw immediately while type system validation
# with validate_schema() will produce more actionable results.
return [
cast(GraphQLObjectType, self.get_named_type(type_))
for node in nodes
for type_ in node.types or []
]
def build_object_type(
self, ast_node: ObjectTypeDefinitionNode
) -> GraphQLObjectType:
extension_nodes = self.type_extensions_map[ast_node.name.value]
all_nodes: List[Union[ObjectTypeDefinitionNode, ObjectTypeExtensionNode]] = [
ast_node,
*extension_nodes,
]
return GraphQLObjectType(
name=ast_node.name.value,
description=ast_node.description.value if ast_node.description else None,
interfaces=partial(self.build_interfaces, all_nodes),
fields=partial(self.build_field_map, all_nodes),
ast_node=ast_node,
extension_ast_nodes=extension_nodes,
)
def build_interface_type(
self,
ast_node: InterfaceTypeDefinitionNode,
) -> GraphQLInterfaceType:
extension_nodes = self.type_extensions_map[ast_node.name.value]
all_nodes: List[
Union[InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode]
] = [ast_node, *extension_nodes]
return GraphQLInterfaceType(
name=ast_node.name.value,
description=ast_node.description.value if ast_node.description else None,
interfaces=partial(self.build_interfaces, all_nodes),
fields=partial(self.build_field_map, all_nodes),
ast_node=ast_node,
extension_ast_nodes=extension_nodes,
)
def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType:
extension_nodes = self.type_extensions_map[ast_node.name.value]
all_nodes: List[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] = [
ast_node,
*extension_nodes,
]
return GraphQLEnumType(
name=ast_node.name.value,
description=ast_node.description.value if ast_node.description else None,
values=self.build_enum_value_map(all_nodes),
ast_node=ast_node,
extension_ast_nodes=extension_nodes,
)
def build_union_type(self, ast_node: UnionTypeDefinitionNode) -> GraphQLUnionType:
extension_nodes = self.type_extensions_map[ast_node.name.value]
all_nodes: List[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]] = [
ast_node,
*extension_nodes,
]
return GraphQLUnionType(
name=ast_node.name.value,
description=ast_node.description.value if ast_node.description else None,
types=partial(self.build_union_types, all_nodes),
ast_node=ast_node,
extension_ast_nodes=extension_nodes,
)
def build_scalar_type(
self, ast_node: ScalarTypeDefinitionNode
) -> GraphQLScalarType:
extension_nodes = self.type_extensions_map[ast_node.name.value]
return GraphQLScalarType(
name=ast_node.name.value,
description=ast_node.description.value if ast_node.description else None,
specified_by_url=get_specified_by_url(ast_node),
ast_node=ast_node,
extension_ast_nodes=extension_nodes,
)
def build_input_object_type(
self,
ast_node: InputObjectTypeDefinitionNode,
) -> GraphQLInputObjectType:
extension_nodes = self.type_extensions_map[ast_node.name.value]
all_nodes: List[
Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode]
] = [ast_node, *extension_nodes]
return GraphQLInputObjectType(
name=ast_node.name.value,
description=ast_node.description.value if ast_node.description else None,
fields=partial(self.build_input_field_map, all_nodes),
ast_node=ast_node,
extension_ast_nodes=extension_nodes,
)
def build_type(self, ast_node: TypeDefinitionNode) -> GraphQLNamedType:
    """Dispatch an AST type definition node to the matching ``build_*`` method.

    The node's ``kind`` (e.g. ``"enum_type_definition"``) is stripped of its
    ``"_definition"`` suffix to derive the builder method name.
    """
    suffix = "_definition"
    kind = ast_node.kind
    if kind.endswith(suffix):
        kind = kind[: -len(suffix)]
    build = getattr(self, f"build_{kind}", None)
    if build is None:  # pragma: no cover
        # Not reachable. All possible type definition nodes have been considered.
        raise TypeError(f"Unexpected type definition node: {inspect(ast_node)}.")
    return build(ast_node)
# Predefined standard types (specified scalars plus introspection types),
# keyed by name; these take precedence over AST-defined types of the same name.
std_type_map: Mapping[str, Union[GraphQLNamedType, GraphQLObjectType]] = {
    **specified_scalar_types,
    **introspection_types,
}
def get_deprecation_reason(
    node: Union[EnumValueDefinitionNode, FieldDefinitionNode, InputValueDefinitionNode]
) -> Optional[str]:
    """Given a field or enum value node, get deprecation reason as string."""
    # Imported here to avoid a circular import at module load time.
    from ..execution import get_directive_values
    directive_args = get_directive_values(GraphQLDeprecatedDirective, node)
    if not directive_args:
        return None
    return directive_args["reason"]
def get_specified_by_url(
    node: Union[ScalarTypeDefinitionNode, ScalarTypeExtensionNode]
) -> Optional[str]:
    """Given a scalar node, return the string value for the specifiedByURL."""
    # Imported here to avoid a circular import at module load time.
    from ..execution import get_directive_values
    directive_args = get_directive_values(GraphQLSpecifiedByDirective, node)
    if not directive_args:
        return None
    return directive_args["url"]
|
{
"content_hash": "7f7cfd8c7ee26c30e45f610d76ef7a5a",
"timestamp": "",
"source": "github",
"line_count": 759,
"max_line_length": 88,
"avg_line_length": 38.179183135704875,
"alnum_prop": 0.6214024432327973,
"repo_name": "graphql-python/graphql-core",
"id": "445f0c030d4c2415aa51dca08b5ee28168d1afcd",
"size": "28978",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/graphql/utilities/extend_schema.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2235538"
}
],
"symlink_target": ""
}
|
import logging
import uuid
from datetime import date, datetime, timedelta
from django.conf import settings
from django.db import transaction
from django.template.loader import render_to_string
from django.utils.translation import gettext
from celery import chord
from corehq.apps.users.role_utils import initialize_domain_with_default_roles
from corehq.util.soft_assert import soft_assert
from dimagi.utils.couch import CriticalSection
from dimagi.utils.couch.database import get_safe_write_kwargs
from dimagi.utils.name_to_url import name_to_url
from dimagi.utils.web import get_ip, get_url_base, get_static_url_prefix
from corehq.apps.accounting.models import (
DEFAULT_ACCOUNT_FORMAT,
BillingAccount,
BillingAccountType,
BillingContactInfo,
Currency,
DefaultProductPlan,
PreOrPostPay,
SoftwarePlanEdition,
Subscription,
SubscriptionAdjustmentMethod,
SubscriptionType,
)
from corehq.apps.accounting.utils.subscription import ensure_community_or_paused_subscription
from corehq.apps.analytics.tasks import (
HUBSPOT_CREATED_NEW_PROJECT_SPACE_FORM_ID,
send_hubspot_form,
)
from corehq.apps.domain.models import Domain
from corehq.apps.hqwebapp.tasks import send_html_email_async, send_mail_async
from corehq.apps.registration.models import RegistrationRequest
from corehq.apps.registration.tasks import send_domain_registration_email
from corehq.apps.users.models import CouchUser, WebUser
from corehq.util.view_utils import absolute_reverse
# Slugs of the Appcues template apps that are preloaded into a newly created
# domain (in parallel Celery tasks) on SaaS environments; see request_new_domain.
APPCUES_APP_SLUGS = ['health', 'agriculture', 'wash']
# Soft assert that emails registration anomalies to the listed recipients
# instead of raising; exponential_backoff=False so every occurrence is reported.
_soft_assert_registration_issues = soft_assert(
    to=[
        '{}@{}'.format(name, 'dimagi.com')
        for name in ['biyeun']
    ],
    exponential_backoff=False,
)
def activate_new_user_via_reg_form(form, created_by, created_via, is_domain_admin=False, domain=None, ip=None):
    """Create and activate a WebUser from a validated registration form.

    Thin wrapper around ``activate_new_user`` that pulls the account details
    out of ``form.cleaned_data``.  Returns the newly created WebUser.
    """
    data = form.cleaned_data
    # full_name holds (first, last) — index rather than unpack to preserve
    # the original behavior for any other shape.
    first_name = data['full_name'][0]
    last_name = data['full_name'][1]
    return activate_new_user(
        username=data['email'],
        password=data['password'],
        created_by=created_by,
        created_via=created_via,
        first_name=first_name,
        last_name=last_name,
        is_domain_admin=is_domain_admin,
        domain=domain,
        ip=ip,
        atypical_user=data.get('atypical_user', False),
    )
def activate_new_user(
    username, password, created_by, created_via, first_name=None, last_name=None,
    is_domain_admin=False, domain=None, ip=None, atypical_user=False
):
    """Create an active WebUser, record EULA acceptance and audit timestamps.

    Returns the saved WebUser.  The username doubles as the contact email.
    """
    now = datetime.utcnow()
    web_user = WebUser.create(
        domain,
        username,
        password,
        created_by,
        created_via,
        is_admin=is_domain_admin,
        by_domain_required_for_log=bool(domain),
    )
    # Profile / contact details.
    web_user.first_name = first_name
    web_user.last_name = last_name
    web_user.email = username
    web_user.subscribed_to_commcare_users = False
    # EULA acceptance is recorded as part of sign-up.
    web_user.eula.signed = True
    web_user.eula.date = now
    web_user.eula.type = 'End User License Agreement'
    if ip:
        web_user.eula.user_ip = ip
    # Account flags: active project user, never a Django admin/superuser.
    web_user.is_staff = False  # Can't log in to admin site
    web_user.is_active = True
    web_user.is_superuser = False
    # Audit timestamps all share the same creation instant.
    web_user.last_login = now
    web_user.date_joined = now
    web_user.last_password_set = now
    web_user.atypical_user = atypical_user
    web_user.save()
    return web_user
def request_new_domain(request, project_name, is_new_user=True, is_new_sso_user=False):
    """Create a new Domain for ``request.user`` and wire up registration state.

    Generates a unique domain name from *project_name*, saves the Domain,
    sets up the default subscription and roles, grants the requesting user
    admin membership, and sends registration/notification emails.
    Returns the final (possibly regenerated) domain name.
    """
    now = datetime.utcnow()
    current_user = CouchUser.from_django_user(request.user, strict=True)
    dom_req = RegistrationRequest()
    if is_new_user:
        dom_req.request_time = now
        dom_req.request_ip = get_ip(request)
        dom_req.activation_guid = uuid.uuid1().hex
    name = name_to_url(project_name, "project")
    # Serialize name generation so two concurrent requests cannot claim
    # the same domain name.
    with CriticalSection(['request_domain_name_{}'.format(name)]):
        name = Domain.generate_name(name)
        new_domain = Domain(
            name=name,
            hr_name=project_name,
            is_active=False,
            date_created=datetime.utcnow(),
            creating_user=current_user.username,
            secure_submissions=True,
            first_domain_for_user=is_new_user
        )
        # Avoid projects created by dimagi.com staff members as self started
        new_domain.internal.self_started = not current_user.is_dimagi
        if not is_new_user or is_new_sso_user:
            new_domain.is_active = True
        # ensure no duplicate domain documents get created on cloudant
        new_domain.save(**get_safe_write_kwargs())
    if not new_domain.name:
        new_domain.name = new_domain._id
        new_domain.save()  # we need to get the name from the _id
    dom_req.domain = new_domain.name
    if not settings.ENTERPRISE_MODE:
        _setup_subscription(new_domain.name, current_user)
    initialize_domain_with_default_roles(new_domain.name)
    if request.user.is_authenticated:
        # Make the requesting user an admin of the new domain.
        if not current_user:
            current_user = WebUser()
            current_user.sync_from_django_user(request.user)
            current_user.save()
        current_user.add_domain_membership(new_domain.name, is_admin=True)
        current_user.save()
        dom_req.requesting_user_username = request.user.username
        dom_req.new_user_username = request.user.username
    elif is_new_user:
        _soft_assert_registration_issues(
            f"A new user {request.user.username} was not added to their domain "
            f"{new_domain.name} during registration"
        )
    # Non-SSO new users must still confirm their account: persist the
    # registration request (it holds the activation guid) and email them.
    if is_new_user and not is_new_sso_user:
        dom_req.save()
        if settings.IS_SAAS_ENVIRONMENT:
            # Load template apps to the user's new domain in parallel
            from corehq.apps.app_manager.tasks import load_appcues_template_app
            header = [
                load_appcues_template_app.si(new_domain.name, current_user.username, slug)
                for slug in APPCUES_APP_SLUGS
            ]
            callback = send_domain_registration_email.si(
                request.user.email,
                dom_req.domain,
                dom_req.activation_guid,
                request.user.get_full_name(),
                request.user.first_name
            )
            chord(header)(callback)
        else:
            send_domain_registration_email(request.user.email,
                                           dom_req.domain,
                                           dom_req.activation_guid,
                                           request.user.get_full_name(),
                                           request.user.first_name)
    send_new_request_update_email(
        request.user,
        get_ip(request),
        new_domain.name,
        is_new_user=is_new_user,
        is_new_sso_user=is_new_sso_user
    )
    send_hubspot_form(HUBSPOT_CREATED_NEW_PROJECT_SPACE_FORM_ID, request)
    return new_domain.name
def _setup_subscription(domain_name, user):
    """Atomically ensure a community/paused subscription for a new domain and
    record the creating user's email as the billing contact."""
    with transaction.atomic():
        ensure_community_or_paused_subscription(
            domain_name, date.today(), SubscriptionAdjustmentMethod.USER,
            web_user=user.username,
        )
        # add user's email as contact email for billing account for the domain
        account = BillingAccount.get_account_by_domain(domain_name)
        contact_info, _created = BillingContactInfo.objects.get_or_create(account=account)
        contact_info.email_list = [user.email]
        contact_info.save()
def send_new_request_update_email(user, requesting_ip, entity_name, entity_type="domain",
                                  is_new_user=False, is_confirming=False, is_new_sso_user=False):
    """Notify the configured recipients that a domain/org was requested.

    Builds a short human-readable summary (who, from which IP, and a link)
    and sends it asynchronously; failures are logged, never raised.
    """
    entity_texts = {"domain": ["project space", "Project"],
                    "org": ["organization", "Organization"]}[entity_type]
    if is_new_sso_user:
        message = f"A new SSO user just requested a {entity_texts[0]} called {entity_name}."
    elif is_confirming:
        message = "A (basically) brand new user just confirmed his/her account. The %s requested was %s." % (entity_texts[0], entity_name)
    elif is_new_user:
        message = "A brand new user just requested a %s called %s." % (entity_texts[0], entity_name)
    else:
        message = "An existing user just created a new %s called %s." % (entity_texts[0], entity_name)
    message = """%s
Details include...
Username: %s
IP Address: %s
You can view the %s here: %s""" % (
        message,
        user.username,
        requesting_ip,
        entity_texts[0],
        get_url_base() + "/%s/%s/" % ("o" if entity_type == "org" else "a", entity_name))
    try:
        recipients = settings.NEW_DOMAIN_RECIPIENTS
        send_mail_async.delay(
            "New %s: %s" % (entity_texts[0], entity_name),
            message, settings.SERVER_EMAIL, recipients
        )
    except Exception:
        # Best-effort notification: log the body instead of failing the request.
        logging.warning("Can't send email, but the message was:\n%s" % message)
def send_mobile_experience_reminder(recipient, full_name):
    """Send the 'visit CommCareHQ on your computer' reminder to *recipient*."""
    context = {
        "full_name": full_name,
        "url": absolute_reverse("login"),
        'url_prefix': get_static_url_prefix(),
    }
    text_body = render_to_string(
        'registration/email/mobile_signup_reminder.txt', context)
    html_body = render_to_string(
        'registration/email/mobile_signup_reminder.html', context)
    subject = gettext('Visit CommCareHQ on your computer!')
    try:
        send_html_email_async.delay(subject, recipient, html_body,
                                    text_content=text_body,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
    except Exception:
        # Log the body for debugging, then let the caller see the failure.
        logging.warning(
            "Can't send email, but the message was:\n%s" % text_body)
        raise
|
{
"content_hash": "939f88589c892f209a91ade53a98721f",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 138,
"avg_line_length": 35.73529411764706,
"alnum_prop": 0.6385802469135803,
"repo_name": "dimagi/commcare-hq",
"id": "d368e7f8e830e6c27e10c48239496f3172670cd1",
"size": "9720",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/registration/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
}
|
from .setup import TestPygletGUI
from pyglet_gui.core import Viewer
from pyglet_gui.manager import Manager
from pyglet_gui.containers import HorizontalContainer
class TestHorizontalContainer(TestPygletGUI):
    """
    Exercises the basic behaviour of a horizontal container holding two
    50x50 viewers: sizing, window centering, and reaction to content changes.
    """
    def setUp(self):
        super(TestHorizontalContainer, self).setUp()
        viewers = [Viewer(width=50, height=50),
                   Viewer(width=50, height=50)]
        self.container = HorizontalContainer(viewers)
        self.manager = Manager(self.container, window=self.window, batch=self.batch, theme=self.theme)
    def _test_content_position(self):
        """
        The first widget sits at the container's left edge; the second is
        offset by the first widget's width plus the container padding.
        """
        first = self.container.content[0]
        second = self.container.content[1]
        self.assertEqual(first.x, self.container.x)
        expected_second_x = self.container.x + first.width + self.container.padding
        self.assertEqual(second.x, expected_second_x)
    def test_top_down_draw(self):
        """
        The manager adopts the size of its content and centers it.
        """
        # two 50-wide viewers plus one padding gap
        self.assertEqual(self.manager.width, 100 + self.container.padding)
        self.assertEqual(self.manager.height, 50)
        # container is centered within the window
        self.assertEqual(self.container.x, self.window.width//2 - self.container.width//2)
        self.assertEqual(self.container.y, self.window.height//2 - self.container.height//2)
        self._test_content_position()
    def test_bottom_up_draw(self):
        """
        Resizing a child propagates back up to the manager and re-centers.
        """
        first = self.container.content[0]
        first.width = 60
        first.height = 60
        first.parent.reset_size()
        # manager widens by the extra 10 pixels
        self.assertEqual(self.manager.width, 110 + self.container.padding)
        # container height follows the tallest child
        self.assertEqual(self.container.height, 60)
        # everything is re-centered in the window
        self.assertEqual(self.container.x, self.window.width//2 - self.container.width//2)
        self.assertEqual(self.manager.y, self.window.height//2 - self.manager.height//2)
        self._test_content_position()
    def test_add_widget(self):
        """Adding a third viewer widens the manager by 50 plus one padding."""
        self.container.add(Viewer(width=50, height=50))
        self.assertEqual(self.manager.width, 150 + 2*self.container.padding)
        self.assertEqual(self.manager.height, 50)
        self._test_content_position()
    def test_remove_widget(self):
        """Removing a viewer shrinks the manager back accordingly."""
        self.container.remove(self.container.content[0])
        self.assertEqual(self.manager.width, 50 + self.container.padding)
        self.assertEqual(self.manager.height, 50)
    def tearDown(self):
        self.manager.delete()
        super(TestHorizontalContainer, self).tearDown()
# Allow running this test module directly, outside of a test runner.
if __name__ == "__main__":
    import unittest
    unittest.main()
|
{
"content_hash": "48ba7b9be1f79fffad0fa6eff2ece47e",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 102,
"avg_line_length": 35.74444444444445,
"alnum_prop": 0.642524090767796,
"repo_name": "jorgecarleitao/pyglet-gui",
"id": "ac9a217de315550dadc9bf781189e4c76fd16f7d",
"size": "3217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_horizontal_container.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "134819"
}
],
"symlink_target": ""
}
|
import math
import tensorflow as tf
import numpy as np
from utils import tf_mean_std_normalize
from tensorflow.python.framework.tensor_shape import TensorShape
class HolographicMemory:
    """Holographic Reduced Representation (HRR) style memory on TensorFlow.

    Values are stored by circular convolution of (sparsely permuted) keys
    with the data, and retrieved by convolving the memory traces with an
    involuted key (``conj=True`` path in the conv functions).

    NOTE(review): this code targets Python 2 and a TF 0.x API surface
    (print statements, ``tf.mul``, ``tf.concat(axis, values)``, integer
    division) -- do not modernize individual calls piecemeal.
    """
    def __init__(self, sess, input_size, num_models, seed=None,
                 complex_normalize=True, l2_normalize=False, use_fft_method=True):
        self.sess = sess
        self.input_size = input_size
        self.num_models = num_models
        self.complex_normalize = complex_normalize
        self.l2_normalize = l2_normalize
        # FFT-based and direct conv1d implementations are interchangeable.
        self.conv_func = HolographicMemory.fft_circ_conv1d if use_fft_method \
                         else HolographicMemory.circ_conv1d
        # Perm dimensions are: num_models * [num_features x num_features]
        # Variables are used to store the results of the random values
        # as they need to be the same during recovery
        # self.perms = tf.pack([tf.Variable(self.create_permutation_matrix(input_size, seed+i if seed else None),
        #                                   trainable=False, name="perm_%d" % i)
        #                       for i in range(num_models)])
        self.perms = [self.create_permutation_matrix(input_size, seed+i if seed else None)
                      for i in range(num_models)]
        # Gather ND method
        # np.random.seed(seed if seed else None)
        # self.perms = [np.random.permutation(input_size) for _ in range(num_models)]
        # print 'perms = ', len(self.perms)
        # Random_Shuffle method
        # np.random.seed(seed if seed else None)
        # self.perms = [np.random.randint(9999999) for _ in range(num_models)]
        # print 'perms = ', len(self.perms)
    # NOTE(review): appears to support the (commented-out) gather_nd
    # permutation path above; unused by the active sparse-matmul path --
    # confirm before removing.
    @staticmethod
    def _get_batch_perms(batch_size, perms):
        num_models = len(perms)
        input_size = perms[0].shape[0]
        perms_expanded = np.array([np.tile(p, batch_size) for p in perms]).flatten()
        print 'perms_expanded = ', perms_expanded, '| len = ', len(perms_expanded)
        x_inds = np.array(([[i]*input_size for i in range(batch_size)]*num_models)).flatten()
        return [[x, y] for x,y in zip(x_inds, perms_expanded)]
    '''
    Helper to decay the memories
    '''
    def update_hebb_weights(self, A, x, gamma=0.9):
        # Exponentially decay old traces, then add the Hebbian outer product.
        return gamma*A + tf.matmul(tf.transpose(x), x)
    '''
    Get complex mod of a real vector
    '''
    @staticmethod
    def complex_mod_of_real(x):
        xshp = x.get_shape().as_list()
        assert xshp[1] % 2 == 0
        xcplx = tf.complex(x[:, 0:xshp[1]/2], x[:, xshp[1]/2:])
        return tf.complex_abs(xcplx)
    '''
    Helper to validate that all the keys have complex mod of 1.0
    '''
    def verify_key_mod(self, keys, print_l2=False):
        ops = []
        for k in [HolographicMemory.complex_mod_of_real(k) for k in keys]:
            ops.append(tf.nn.l2_loss(k - tf.ones_like(k)))
        keys_l2 = self.sess.run(ops)
        for l2, k in zip(keys_l2, keys):
            assert l2 < 1e-9, "key [%s] is not normalized, l2 = %f \n%s" \
                % (k, l2, str(self.sess.run(k)))
            if print_l2:
                print 'l2 = ', l2
        print '|keys| ~= 1.0: verified'
    '''
    Normalizes real valued keys to have complex abs of 1.0
    keys: f32/f64 list of keys of [1, input_size]
    Returns: list of [1, input_size] f32/f64
    '''
    @staticmethod
    def normalize_real_by_complex_abs(keys):
        input_size = keys.get_shape().as_list()[1]
        assert input_size % 2 == 0, "input_size [%d] not divisible by 2" % input_size
        # Magnitude per complex pair, clamped below by 1.0 so division
        # only ever shrinks values.
        keys_mag = tf.maximum(tf.sqrt(tf.square(keys[:, 0:input_size/2])
                                      + tf.square(keys[:, input_size/2:])),
                              1.0)
        return keys / tf.concat(1, [keys_mag, keys_mag])
    # Involution of the key in the split-real layout: keep element 0 and
    # reverse the rest; used via conj=True when decoding.
    @staticmethod
    def conj_real_by_complex(keys):
        reversed = tf.reverse(keys, [False, True])
        return tf.concat(1, [tf.expand_dims(keys[:, 0], 1),
                             reversed[:, 0:-1]])
    '''
    Accepts the already permuted keys and the data and encodes them
    keys: [num_models, num_features]
    X: [batch_size, num_features]
    Returns: [num_models, num_features]
    '''
    @staticmethod
    def circ_conv1d(X, keys, batch_size, num_copies, num_keys=None, conj=False):
        if conj:
            keys = HolographicMemory.conj_real_by_complex(keys)
        # Get our original shapes
        xshp = X.get_shape().as_list()
        xshp[0] = batch_size if xshp[0] is None else xshp[0]
        kshp = keys.get_shape().as_list()
        kshp[0] = num_keys if num_keys is not None else kshp[0]
        kshp[1] = xshp[1] if kshp[1] is None else kshp[1]
        print 'X : ', xshp, ' | keys : ', kshp, ' | batch_size = ', batch_size
        # Concatenate X & keys
        num_dupes = kshp[0] / batch_size
        print 'num dupes = ', num_dupes
        xconcat = tf.tile(X, [num_dupes, 1]) \
            if num_dupes > 1 else X
        xspshp = xconcat.get_shape().as_list()
        xspshp[0] = num_keys if xspshp[0] is None else xspshp[0]
        print 'xconcatinated : ', xspshp
        # The following computes all of the values individually, i.e
        # [P0k0 * x0, P0k1 * x1 + ...]
        # Input : [batch, in_width, in_channels]
        # Filter : [filter_width, in_channels, out_channels]
        # Result : [batch, out_width, out_channels]
        conv = [tf.expand_dims(tf.squeeze(tf.nn.conv1d(tf.reshape(xconcat[i], [1, xspshp[1], 1]),
                                                       tf.reshape(keys[i], [kshp[1], 1, 1]),
                                                       stride=1,
                                                       padding='SAME')), 0) for i in range(kshp[0])]
        conv = tf.concat(0, conv)
        print 'conv = ', conv.get_shape().as_list()
        # We now aggregate them as follows:
        # c0 = P0k0 * x0 + P0k1 * x1 + ... P0k_batch * x_batch
        # and do that for all the c's and store separately
        #batch_size = xshp[0]
        batch_iter = min(batch_size, xshp[0]) if xshp[0] is not None else batch_size
        conv_concat = [tf.expand_dims(tf.reduce_mean(conv[begin:end], 0), 0)
                       for begin, end in zip(range(0, kshp[0], batch_iter),
                                             range(batch_iter, kshp[0]+1, batch_iter))]
        print 'conv concat = ', len(conv_concat), ' x ', conv_concat[0].get_shape().as_list()
        # return a single concatenated tensor:
        # C = [c0; c1; ...]
        # C = tf.concat(0, conv_concat)
        # return (1.0 / C.get_shape().as_list()[0]) * C
        return tf.concat(0, conv_concat)
    # Clamp the per-element complex magnitude of x to at most 1.0.
    @staticmethod
    def bound(x):
        bound = tf.maximum(tf.sqrt(tf.mul(tf.real(x), tf.real(x)) \
                                   + tf.mul(tf.imag(x), tf.imag(x))),
                           1.0)
        return tf.complex(tf.real(x) / bound, tf.imag(x) / bound)
    '''
    Does the entire operation within the frequency domain using
    ffts and element-wise matrix multiplies followed by reductions
    '''
    @staticmethod
    def fft_circ_conv1d(X, keys, batch_size, num_copies, num_keys=None, conj=False):
        if conj:
            keys = HolographicMemory.conj_real_by_complex(keys)
        # Get our original shapes
        xshp = X.get_shape().as_list()
        kshp = keys.get_shape().as_list()
        kshp[0] = num_keys if num_keys is not None else kshp[0]
        kshp[1] = xshp[1] if kshp[1] is None else kshp[1]
        print 'X : ', xshp, ' | keys : ', kshp, ' | batch_size = ', batch_size
        # duplicate out input data by the ratio: number_keys / batch_size
        # eg: |input| = [2, 784] ; |keys| = 3*[2, 784] ; (3 is the num_copies)
        #     |new_input| = 6/2 |input| = [input; input; input]
        #
        # At test: |memories| = [3, 784] ; |keys| = 3*[n, 784] ;
        #          |new_input| = 3n / 3 = n [where n is the number of desired parallel retrievals]
        num_dupes = kshp[0] / batch_size
        print 'num dupes = ', num_dupes
        xcplx = HolographicMemory.split_to_complex(tf.tile(X, [num_dupes, 1]) \
                                                   if num_dupes > 1 else X)
        xshp = xcplx.get_shape().as_list()
        kcplx = HolographicMemory.split_to_complex(keys, kshp)
        # Convolve & re-cast to a real valued function
        unsplit_func = HolographicMemory.unsplit_from_complex_ri if not conj \
                       else HolographicMemory.unsplit_from_complex_ir
        #fft_mul = HolographicMemory.bound(tf.mul(tf.fft(xcplx), tf.fft(kcplx)))
        fft_mul = tf.mul(tf.fft(xcplx), tf.fft(kcplx))
        conv = unsplit_func(tf.ifft(fft_mul))
        print 'full conv = ', conv.get_shape().as_list()
        batch_iter = min(batch_size, xshp[0]) if xshp[0] is not None else batch_size
        print 'batch = ', batch_size, ' | num_copies = ', num_copies, '| num_keys = ', num_keys, \
            '| xshp[0] = ', xshp[0], ' | len(keys) = ', kshp[0], ' | batch iter = ', batch_iter
        conv_concat = [tf.expand_dims(tf.reduce_mean(conv[begin:end], 0), 0)
                       for begin, end in zip(range(0, kshp[0], batch_iter),
                                             range(batch_iter, kshp[0]+1, batch_iter))]
        print 'conv concat = ', len(conv_concat), ' x ', conv_concat[0].get_shape().as_list()
        # return a single concatenated tensor:
        # C = [c0; c1; ...]
        C = tf.concat(0, conv_concat)
        return C
        #C = tf_mean_std_normalize(C)
        #return C / tf.maximum(tf.reduce_max(C), 1e-20)
        #return tf.nn.sigmoid(C)
        #return tf_mean_std_normalize(C)
    '''
    Helper to return the product of the permutation matrices and the keys
    K: [num_models, num_features]
    P: [num_models, feature_size, feature_size]
    '''
    @staticmethod
    # def perm_keys(K, P):
    #     # utilizes the random_shuffle method
    #     return tf.concat(0, [tf.transpose(tf.random_shuffle(tf.transpose(K), seed=s)) for s in P])
    def perm_keys(K, P, num_keys=None):
        # utilizes the sparse matmul method
        return tf.concat(0, [tf.transpose(tf.sparse_tensor_dense_matmul(P_i, K, adjoint_b=True))
                             for P_i in P])
    # def perm_keys(K, P):
    #     # utilizes the gather_nd method to permute
    #     kshp = K.get_shape().as_list()[1]
    #     print 'gathered = ', tf.gather_nd(K, P).get_shape().as_list()
    #     return tf.reshape(tf.gather_nd(K, P), [-1, kshp]) #tf.concat(0, [tf.reshape(tf.gather(K, p), kshp) for p in P])
    '''
    pads [batch, feature_size] --> [batch, feature_size + num_pad]
    '''
    @staticmethod
    def zero_pad(x, num_pad, index_to_pad=1):
        # Handle base case
        if num_pad == 0:
            return x
        xshp = x.get_shape().as_list()
        zeros = tf.zeros([xshp[0], num_pad]) if len(xshp) == 2 \
                else tf.zeros([num_pad])
        return tf.concat(index_to_pad, [x, zeros])
    # Apply the configured key normalizations (complex-abs and/or l2).
    def _normalize(self, keys):
        # Normalize our keys to mod 1 if specified
        if self.complex_normalize:
            print 'normalizing via complex abs..'
            keys = HolographicMemory.normalize_real_by_complex_abs(keys)
        # Normalize our keys using the l2 norm
        if self.l2_normalize:
            print 'normalizing via l2..'
            keys = tf.nn.l2_normalize(keys, 1)
        return keys
    '''
    Encoders some keys and values together
    values: [batch_size, feature_size]
    keys:   [num_models, feature_size]
    perms:  [num_models, feature_size, feature_size]
    returns: [num_models, features]
    '''
    def encode(self, v, keys, batch_size=None):
        keys = self._normalize(keys)
        batch_size = v.get_shape().as_list()[0] if batch_size is None else batch_size
        permed_keys = self.perm_keys(keys, self.perms, num_keys=batch_size)
        print 'enc_perms = ', permed_keys.get_shape().as_list(), ' | batch_size = ', batch_size
        return self.conv_func(v, permed_keys,
                              batch_size,
                              self.num_models,
                              num_keys=batch_size*self.num_models)
    '''
    Decoders values out of memories
    memories: [num_models, feature_size]
    keys:     [num_models, feature_size]
    perms:    [num_models, feature_size, feature_size]
    returns:  [num_models, features]
    '''
    def decode(self, memories, keys, num_keys=None):
        keys = self._normalize(keys)
        num_memories = memories.get_shape().as_list()
        num_memories[0] = self.num_models if num_memories[0] is None else num_memories[0]
        num_keys = keys.get_shape().as_list()[0] if num_keys is None else num_keys
        print 'decode: numkeys = ', num_keys, ' | num_memories = ', num_memories
        # re-gather keys to avoid mixing between different keys.
        perms = self.perm_keys(keys, self.perms, num_keys=num_keys)
        pshp = perms.get_shape().as_list()
        pshp[0] = num_keys*self.num_models if pshp[0] is None else pshp[0]
        pshp[1] = num_memories[1] if pshp[1] is None else pshp[1]
        permed_keys = tf.concat(0, [tf.strided_slice(perms, [i, 0], pshp, [num_keys, 1])
                                    for i in range(num_keys)])
        print 'memories = ', num_memories, \
            '| dec_perms =', permed_keys.get_shape().as_list()
        return self.conv_func(memories, permed_keys,
                              num_memories[0],
                              self.num_models,
                              num_keys=num_keys*self.num_models,
                              conj=True)
    '''
    Helper to create an [input_size, input_size] random permutation matrix
    '''
    @staticmethod
    def create_permutation_matrix(input_size, seed=None):
        #return tf.random_shuffle(tf.eye(input_size), seed=seed)
        ind = np.arange(0, input_size)
        ind_shuffled = np.copy(ind)
        np.random.seed(seed)
        np.random.shuffle(ind)
        # Build a sparse 0/1 matrix with one entry per (row, col) pair.
        indices = np.asarray([[x,y] for x,y in zip(ind, ind_shuffled)], dtype=np.int32)
        values = np.ones([len(indices)], dtype=np.float32)
        # SparseTensor requires indices sorted in row-major order.
        indices = indices[indices[:, 0].argsort()]
        return tf.SparseTensor(indices, values, shape=[input_size, input_size])
    '''
    Simple takes x and splits it in half --> Re{x[0:mid]} + Im{x[mid:end]}
    Works for batches in addition to single vectors
    '''
    @staticmethod
    def split_to_complex(x, xshp=None):
        xshp = x.get_shape().as_list() if xshp is None else xshp
        if len(xshp) == 2:
            assert xshp[1] % 2 == 0, \
                "Vector is not evenly divisible into complex: %d" % xshp[1]
            mid = xshp[1] / 2
            return tf.complex(x[:, 0:mid], x[:, mid:])
        else:
            assert xshp[0] % 2 == 0, \
                "Vector is not evenly divisible into complex: %d" % xshp[0]
            mid = xshp[0] / 2
            return tf.complex(x[0:mid], x[mid:])
    '''
    Helper to un-concat (real, imag) --> single vector
    '''
    @staticmethod
    def unsplit_from_complex_ri(x):
        return tf.concat(1, [tf.real(x), tf.imag(x)])
    '''
    Helper to un-concat (imag, real) --> single vector
    '''
    @staticmethod
    def unsplit_from_complex_ir(x):
        #return tf.concat(1, [tf.imag(x), tf.abs(tf.real(x))])
        return tf.abs(tf.concat(1, [tf.imag(x), tf.real(x)]))
        #mag = tf.maximum(1.0, tf.complex_abs(x))
        #x = tf.complex(tf.real(x) / (mag + 1e-10), tf.imag(x) / (mag + 1e-10))
        # real = tf.concat(1, [tf.imag(x), tf.real(x)])
        # return tf.abs(HolographicMemory.normalize_real_by_complex_abs([real])[0])
|
{
"content_hash": "63ad3a180a9d9a605687357de86a4afa",
"timestamp": "",
"source": "github",
"line_count": 376,
"max_line_length": 121,
"avg_line_length": 41.773936170212764,
"alnum_prop": 0.5540841662952823,
"repo_name": "jramapuram/holographic_memory",
"id": "909cf9ebddd60d869a2a64c25e04fffff1a367b2",
"size": "15707",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45275"
}
],
"symlink_target": ""
}
|
"""
A helper class to get the files generated from thrift IDL files.
"""
import os
import blade
import configparse
import console
import build_rules
import java_jar_target
import py_targets
from blade_util import var_to_list
from cc_targets import CcTarget
from thrift_helper import ThriftHelper
class ThriftLibrary(CcTarget):
"""A scons thrift library target subclass.
This class is derived from CcTarget.
"""
def __init__(self,
name,
srcs,
deps,
optimize,
deprecated,
blade,
kwargs):
"""Init method.
Init the thrift target.
"""
srcs = var_to_list(srcs)
self._check_thrift_srcs_name(srcs)
CcTarget.__init__(self,
name,
'thrift_library',
srcs,
deps,
'',
[], [], [], optimize, [], [],
blade,
kwargs)
self.data['python_vars'] = []
self.data['python_sources'] = []
thrift_config = configparse.blade_config.get_config('thrift_config')
thrift_lib = var_to_list(thrift_config['thrift_libs'])
thrift_bin = thrift_config['thrift']
if thrift_bin.startswith("//"):
dkey = self._convert_string_to_target_helper(thrift_bin)
if dkey not in self.expanded_deps:
self.expanded_deps.append(dkey)
if dkey not in self.deps:
self.deps.append(dkey)
# Hardcode deps rule to thrift libraries.
self._add_hardcode_library(thrift_lib)
# Link all the symbols by default
self.data['link_all_symbols'] = True
self.data['deprecated'] = deprecated
self.data['java_sources_explict_dependency'] = []
# For each thrift file initialize a ThriftHelper, which will be used
# to get the source files generated from thrift file.
self.thrift_helpers = {}
for src in srcs:
self.thrift_helpers[src] = ThriftHelper(
os.path.join(self.path, src))
def _check_thrift_srcs_name(self, srcs):
"""_check_thrift_srcs_name.
Checks whether the thrift file's name ends with 'thrift'.
"""
error = 0
for src in srcs:
base_name = os.path.basename(src)
pos = base_name.rfind('.')
if pos == -1:
console.error('invalid thrift file name %s' % src)
error += 1
file_suffix = base_name[pos + 1:]
if file_suffix != 'thrift':
console.error('invalid thrift file name %s' % src)
error += 1
if error > 0:
console.error_exit('invalid thrift file names found.')
def _generate_header_files(self):
"""Whether this target generates header files during building."""
return True
def _thrift_gen_cpp_files(self, path, src):
"""_thrift_gen_cpp_files.
Get the c++ files generated from thrift file.
"""
return [self._target_file_path(path, f)
for f in self.thrift_helpers[src].get_generated_cpp_files()]
def _thrift_gen_py_files(self, path, src):
"""_thrift_gen_py_files.
Get the python files generated from thrift file.
"""
return [self._target_file_path(path, f)
for f in self.thrift_helpers[src].get_generated_py_files()]
def _thrift_gen_java_files(self, path, src):
"""_thrift_gen_java_files.
Get the java files generated from thrift file.
"""
return [self._target_file_path(path, f)
for f in self.thrift_helpers[src].get_generated_java_files()]
def _thrift_java_rules(self):
"""_thrift_java_rules.
Generate scons rules for the java files from thrift file.
"""
for src in self.srcs:
src_path = os.path.join(self.path, src)
thrift_java_src_files = self._thrift_gen_java_files(self.path,
src)
self._write_rule('%s.ThriftJava(%s, "%s")' % (
self._env_name(),
str(thrift_java_src_files),
src_path))
self.data['java_sources'] = (
os.path.dirname(thrift_java_src_files[0]),
os.path.join(self.build_path, self.path),
self.name)
self.data['java_sources_explict_dependency'] += thrift_java_src_files
def _thrift_python_rules(self):
"""_thrift_python_rules.
Generate python files.
"""
for src in self.srcs:
src_path = os.path.join(self.path, src)
thrift_py_src_files = self._thrift_gen_py_files(self.path, src)
py_cmd_var = '%s_python' % self._generate_variable_name(
self.path, self.name)
self._write_rule('%s = %s.ThriftPython(%s, "%s")' % (
py_cmd_var,
self._env_name(),
str(thrift_py_src_files),
src_path))
self.data['python_vars'].append(py_cmd_var)
self.data['python_sources'] += thrift_py_src_files
def scons_rules(self):
    """scons_rules.

    It outputs the scons rules according to user options.
    """
    self._prepare_to_generate_rule()

    # Build java source according to its option
    env_name = self._env_name()
    self.options = self.blade.get_options()
    self.direct_targets = self.blade.get_direct_targets()

    # Java/python code generation runs when requested globally, per-target,
    # or when this target was named directly on the command line.
    if (getattr(self.options, 'generate_java', False) or
            self.data.get('generate_java') or
            self.key in self.direct_targets):
        self._thrift_java_rules()

    if (getattr(self.options, 'generate_python', False) or
            self.data.get('generate_python') or
            self.key in self.direct_targets):
        self._thrift_python_rules()

    self._setup_cc_flags()

    sources = []
    obj_names = []
    for src in self.srcs:
        thrift_cpp_files = self._thrift_gen_cpp_files(self.path, src)
        # Only the generated .cpp files are compiled; headers are skipped.
        thrift_cpp_src_files = [f for f in thrift_cpp_files if f.endswith('.cpp')]

        # Rule that runs the thrift compiler for C++ output.
        self._write_rule('%s.Thrift(%s, "%s")' % (
            env_name,
            str(thrift_cpp_files),
            os.path.join(self.path, src)))

        # One shared-object rule per generated .cpp file.
        for thrift_cpp_src in thrift_cpp_src_files:
            obj_name = '%s_object' % self._generate_variable_name(
                self.path, thrift_cpp_src)
            obj_names.append(obj_name)
            self._write_rule(
                '%s = %s.SharedObject(target="%s" + top_env["OBJSUFFIX"], '
                'source="%s")' % (obj_name,
                                  env_name,
                                  thrift_cpp_src,
                                  thrift_cpp_src))
            sources.append(thrift_cpp_src)

    self._write_rule('%s = [%s]' % (self._objs_name(), ','.join(obj_names)))
    self._write_rule('%s.Depends(%s, %s)' % (
        env_name, self._objs_name(), sources))

    self._cc_library()
    options = self.blade.get_options()
    if (getattr(options, 'generate_dynamic', False) or
            self.data.get('build_dynamic')):
        self._dynamic_cc_library()
def thrift_library(name,
                   srcs=None,
                   deps=None,
                   optimize=None,
                   deprecated=False,
                   **kwargs):
    """thrift_library target.

    Registers a thrift library build target with blade.

    Args:
        name: Name of the target.
        srcs: List of .thrift source files (defaults to empty).
        deps: List of dependency targets (defaults to empty).
        optimize: List of optimize flags (defaults to empty).
        deprecated: Whether the target is deprecated.
        **kwargs: Extra keyword arguments forwarded to ThriftLibrary.
    """
    # Bug fix: the previous signature used mutable list defaults
    # (srcs=[], ...); a single shared list could be mutated across calls.
    srcs = [] if srcs is None else srcs
    deps = [] if deps is None else deps
    optimize = [] if optimize is None else optimize
    thrift_library_target = ThriftLibrary(name,
                                          srcs,
                                          deps,
                                          optimize,
                                          deprecated,
                                          blade.blade,
                                          kwargs)
    blade.blade.register_target(thrift_library_target)


build_rules.register_function(thrift_library)
|
{
"content_hash": "fd74db9589ce3a435bd1f300db3dc41e",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 86,
"avg_line_length": 32.6,
"alnum_prop": 0.5009022013713461,
"repo_name": "Lunewcome/typhoon-blade",
"id": "16188fd3ca1e2933ce60ddbba2c219a53bb334d9",
"size": "8519",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/blade/thrift_library.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "293"
},
{
"name": "C++",
"bytes": "2267"
},
{
"name": "Cuda",
"bytes": "5412"
},
{
"name": "Objective-C",
"bytes": "83"
},
{
"name": "Protocol Buffer",
"bytes": "351"
},
{
"name": "Python",
"bytes": "463043"
},
{
"name": "Shell",
"bytes": "16669"
},
{
"name": "Thrift",
"bytes": "6217"
},
{
"name": "VimL",
"bytes": "7375"
}
],
"symlink_target": ""
}
|
from inferbeddings.nli.evaluation.base import accuracy, stats
from inferbeddings.nli.evaluation.util import evaluate

# Public API of the evaluation package.
__all__ = [
    'accuracy',
    'stats',
    'evaluate'
]
|
{
"content_hash": "f4e7136e4f4188583415bbce1ef81127",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 61,
"avg_line_length": 22,
"alnum_prop": 0.7102272727272727,
"repo_name": "uclmr/inferbeddings",
"id": "3567bef82764321811c9bf3ed634f4ebc22f0bcd",
"size": "201",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "inferbeddings/nli/evaluation/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "102860"
},
{
"name": "Prolog",
"bytes": "569340"
},
{
"name": "Python",
"bytes": "1319760"
},
{
"name": "R",
"bytes": "769"
},
{
"name": "Shell",
"bytes": "22689"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
# Importing py2exe registers the "py2exe" distutils command, enabling a
# Windows executable to be built from the script below.
import py2exe

# Package the BitTornado complete-dir GUI script.
setup(name='completedir', scripts=['btcompletedirgui.py'])
|
{
"content_hash": "9090e9a6bb7bc825ad09265a375ab7cd",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 58,
"avg_line_length": 26.75,
"alnum_prop": 0.794392523364486,
"repo_name": "jakesyl/BitTornado",
"id": "62be63be386f32c9a8457b68c5a338f2ea1f7aa3",
"size": "198",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wincompletedirsetup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "634"
},
{
"name": "Python",
"bytes": "510409"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
# Register your models here.
from .models import Job, Server, LoadMeasurement, JobSchedulingEvent
# Expose the scheduler models in the Django admin site.
for scheduler_model in (Job, Server, LoadMeasurement, JobSchedulingEvent):
    admin.site.register(scheduler_model)
|
{
"content_hash": "09d37de7f242270cc3616f5a1e246da8",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 68,
"avg_line_length": 26.4,
"alnum_prop": 0.8257575757575758,
"repo_name": "ewerkema/job-scheduler",
"id": "c0f31fe2b731ad0bd4998f87614e44d0a622ad49",
"size": "264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "job_scheduler_web/scheduler_web/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4954"
},
{
"name": "HTML",
"bytes": "12288"
},
{
"name": "Java",
"bytes": "92845"
},
{
"name": "Python",
"bytes": "27724"
}
],
"symlink_target": ""
}
|
"""Move all collectors and sources from one category to another.

Usage: mv-cat.py <access_id> <access_key> <from_category> <to_category>
"""
import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
fromCat, toCat = args[3], args[4]

cs = sumo.collectors()
for c in cs:
    # Collectors without a category set are skipped.
    if 'category' in c and c['category'] == fromCat:
        cv, etag = sumo.collector(c['id'])
        cv['collector']['category'] = toCat
        print(sumo.update_collector(cv, etag).text)
    ss = sumo.sources(c['id'])
    for s in ss:
        # Bug fix: sources may lack a 'category' key entirely; the old
        # s['category'] lookup raised KeyError in that case.
        if s.get('category') == fromCat:
            sv, etag = sumo.source(c['id'], s['id'])
            sv['source']['category'] = toCat
            print(sumo.update_source(c['id'], sv, etag).text)
|
{
"content_hash": "d383ed31f4f20bdc6aed067395cd396e",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 52,
"avg_line_length": 27.2,
"alnum_prop": 0.6397058823529411,
"repo_name": "SumoLogic/sumologic-python-sdk",
"id": "366a9dc295fcf0bbcdef328c203b79117e7b8ed8",
"size": "740",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/mv-cat.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19598"
}
],
"symlink_target": ""
}
|
"""
Error Bars showing Confidence Interval
======================================
This example shows how to show error bars using covidence intervals.
The confidence intervals are computed internally in vega by
a non-parametric [bootstrap of the mean](https://github.com/vega/vega-statistics/blob/master/src/bootstrapCI.js).
"""
# category: bar charts
import altair as alt
from vega_datasets import data
barley = data.barley()
points = alt.Chart(barley).mark_point(filled=True).encode(
alt.X(
'mean(yield)',
scale=alt.Scale(zero=False),
axis=alt.Axis(title='Barley Yield')
),
y='variety',
color=alt.value('black')
)
error_bars = alt.Chart(barley).mark_rule().encode(
x='ci0(yield)',
x2='ci1(yield)',
y='variety'
)
points + error_bars
|
{
"content_hash": "e5e366ee4dc35c486825de1a17f375e8",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 113,
"avg_line_length": 26.333333333333332,
"alnum_prop": 0.6582278481012658,
"repo_name": "ellisonbg/altair",
"id": "41aaaca0be0b1c8cf09889c44c971bfc3c5069af",
"size": "790",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "altair/vegalite/v2/examples/error_bars_with_ci.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "136763"
},
{
"name": "Makefile",
"bytes": "312"
},
{
"name": "Python",
"bytes": "1150719"
}
],
"symlink_target": ""
}
|
from neutronclient._i18n import _
from neutronclient.common import exceptions
from neutronclient.common import utils
from neutronclient.common import validators
from neutronclient.neutron import v2_0 as neutronv20
def get_bgp_peer_id(client, id_or_name):
    """Resolve a BGP peer name or ID to its resource ID."""
    return neutronv20.find_resourceid_by_name_or_id(
        client, 'bgp_peer', id_or_name)
def validate_peer_attributes(parsed_args):
    """Validate the remote AS number and the auth-type/password pairing."""
    # Validate AS number against the range allowed for BGP speakers.
    validators.validate_int_range(parsed_args, 'remote_as',
                                  neutronv20.bgp.speaker.MIN_AS_NUM,
                                  neutronv20.bgp.speaker.MAX_AS_NUM)
    # An authentication algorithm requires a password, and a password is
    # meaningless without an authentication algorithm.
    has_password = parsed_args.password is not None
    if parsed_args.auth_type != 'none' and not has_password:
        raise exceptions.CommandError(_('Must provide password if auth-type '
                                        'is specified.'))
    if parsed_args.auth_type == 'none' and parsed_args.password:
        raise exceptions.CommandError(_('Must provide auth-type if password '
                                        'is specified.'))
class ListPeers(neutronv20.ListCommand):
    """List BGP peers."""

    resource = 'bgp_peer'
    # Columns shown in the tabular output.
    list_columns = ['id', 'name', 'peer_ip', 'remote_as']
    pagination_support = True
    sorting_support = True
class ShowPeer(neutronv20.ShowCommand):
    """Show information of a given BGP peer."""

    resource = 'bgp_peer'
class CreatePeer(neutronv20.CreateCommand):
    """Create a BGP Peer."""

    resource = 'bgp_peer'

    def add_known_arguments(self, parser):
        # Positional argument: display name of the new peer.
        parser.add_argument(
            'name',
            metavar='NAME',
            help=_('Name of the BGP peer to create.'))
        parser.add_argument(
            '--peer-ip',
            metavar='PEER_IP_ADDRESS',
            required=True,
            help=_('Peer IP address.'))
        parser.add_argument(
            '--remote-as',
            required=True,
            metavar='PEER_REMOTE_AS',
            help=_('Peer AS number. (Integer in [%(min_val)s, %(max_val)s] '
                   'is allowed.)') %
                 {'min_val': neutronv20.bgp.speaker.MIN_AS_NUM,
                  'max_val': neutronv20.bgp.speaker.MAX_AS_NUM})
        parser.add_argument(
            '--auth-type',
            metavar='PEER_AUTH_TYPE',
            choices=['none', 'md5'],
            default='none',
            type=utils.convert_to_lowercase,
            help=_('Authentication algorithm. Supported algorithms: '
                   'none(default), md5'))
        parser.add_argument(
            '--password',
            metavar='AUTH_PASSWORD',
            help=_('Authentication password.'))

    def args2body(self, parsed_args):
        body = {}
        # Raises CommandError on a bad AS number or a mismatched
        # auth-type/password combination.
        validate_peer_attributes(parsed_args)
        neutronv20.update_dict(parsed_args, body,
                               ['name', 'peer_ip',
                                'remote_as', 'auth_type', 'password'])
        return {self.resource: body}
class UpdatePeer(neutronv20.UpdateCommand):
    """Update BGP Peer's information."""

    resource = 'bgp_peer'

    def add_known_arguments(self, parser):
        # Only the name and the authentication password can be updated.
        parser.add_argument(
            '--name',
            help=_('Updated name of the BGP peer.'))
        parser.add_argument(
            '--password',
            metavar='AUTH_PASSWORD',
            help=_('Updated authentication password.'))

    def args2body(self, parsed_args):
        body = {}
        neutronv20.update_dict(parsed_args, body, ['name', 'password'])
        return {self.resource: body}
class DeletePeer(neutronv20.DeleteCommand):
    """Delete a BGP peer."""

    resource = 'bgp_peer'
|
{
"content_hash": "73d0bb46fcbcd8d7111aa225d9f7c17e",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 77,
"avg_line_length": 33.450450450450454,
"alnum_prop": 0.5545381093455427,
"repo_name": "eayunstack/python-neutronclient",
"id": "8fefb660a68834f80c06cdc71cadd7ca4d0bb595",
"size": "4366",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "neutronclient/neutron/v2_0/bgp/peer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1091670"
},
{
"name": "Shell",
"bytes": "9783"
}
],
"symlink_target": ""
}
|
"""Trains an nltk language model."""
import random
import pickle
from typing import List, Tuple
from nltk.lm.preprocessing import padded_everygram_pipeline
from nltk.lm import Laplace
from absl import app
from absl import flags
from tqdm import tqdm
# Command-line flags for the language-model training pipeline.
FLAGS = flags.FLAGS
flags.DEFINE_string('string_to_normalize', None, 'the string to normalize')
flags.DEFINE_string('language', None, 'the language to normalize')
flags.DEFINE_spaceseplist('data_source', None, 'data source to preprocess')
flags.DEFINE_string('pass_valid', "token", 'pass only valid tokens or sentences')
flags.DEFINE_string('experiment', None, 'the normalization experiment to run')
def main(argv):
    """Trains an nltk language model.

    Loads in files of normalized text, partitions them into a train partition
    (first 4/5 of data) and a test partition (last 1/5 of data, matching
    partition_data). Uses Laplace smoothing for unseen ngrams.
    """
    if len(argv) > 1:
        raise app.UsageError("Too many command-line arguments.")
    normalized_data = load_normalized_data(FLAGS.language,
                                           FLAGS.data_source,
                                           FLAGS.pass_valid,
                                           FLAGS.experiment)
    train_partition, test_partition = partition_data(normalized_data)
    # Bigram (order-2) pipelines: pad each sentence and stream everygrams.
    train_ngrams, vocab = padded_everygram_pipeline(2, train_partition)
    test_ngrams, _ = padded_everygram_pipeline(2, test_partition)
    language_model = Laplace(2)
    language_model.fit(train_ngrams, vocab)
    avg_perp, count = compute_avg_perplexity(test_ngrams, language_model)
    print("\n----------------------------\n"
          "Language Model Parameters:\n"
          f"\tLanguage={FLAGS.language}\n"
          f"\tData Sources={FLAGS.data_source}\n"
          f"\tPass Valid={FLAGS.pass_valid}\n"
          f"\tExperiment={FLAGS.experiment}\n"
          "----------------------------\n")
    print(f"Average perplexity across {count} ngrams:\t{avg_perp}")
def load_normalized_data(language: str,
                         data_source: str,
                         pass_valid: str,
                         experiment: str
                         ) -> List[List[str]]:
    """Loads one or more files of normalized data.

    Args:
        language: The language of the data.
        data_source: A list of sources of data to load in.
        pass_valid: Whether the whole sentence or just tokens was filtered.
        experiment: The name of the specific experiment being run.

    Returns:
        The kept (non-rejected), deterministically shuffled normalized data
        as a list of lists of strings.
    """
    normalized_data: List[List[str]] = []
    for source in data_source:
        condition: str = ("language=" + language + "_" +
                          "datasource=" + source + "_" +
                          "passvalid=" + pass_valid)
        filename: str = ("output/" + experiment + "/" +
                         condition + "_normalized.p")
        try:
            # Bug fix: close the file handle deterministically (the old code
            # leaked the handle returned by a bare open()).
            with open(filename, "rb") as pickle_file:
                normalized_data = normalized_data + pickle.load(pickle_file)
        except (OSError, pickle.UnpicklingError):
            # Narrowed from a blanket `except Exception` so programming
            # errors are no longer silently swallowed.
            print(f"No normalized data for LANGUAGE={language} "
                  f"from SOURCE={source} for EXPERIMENT={experiment}.")
    returned = []
    for line in normalized_data:
        # Sentences rejected during normalization carry a sentinel marker.
        if line[0] != "<SENTENCE_REJECTED>":
            returned.append(line)
    # Fixed seed keeps the shuffle reproducible across runs.
    random.seed(42)
    random.shuffle(returned)
    rejected = len(normalized_data) - len(returned)
    print(f"Loaded {len(normalized_data)} sentences!")
    print(f"Kept {len(returned)} sentences!")
    if normalized_data:
        # Bug fix: guard against ZeroDivisionError when nothing was loaded.
        print(f"Rejected {rejected} sentences "
              f"({100*(rejected/len(normalized_data))} %)!")
    return returned
def partition_data(data: List[List[str]]
                   ) -> Tuple[List[List[str]], List[List[str]]]:
    """Partitions data into train and test partitions.

    The train partition holds the first 80% of the data and the test
    partition the remaining 20%.

    Args:
        data: The normalized data as a list of lists of strings.

    Returns:
        train_split: The training partition.
        test_split: The testing partition.
    """
    fifth = round(len(data) / 5)
    cutoff = fifth * 4
    train_split = data[:cutoff]
    test_split = data[cutoff:]
    return train_split, test_split
def compute_avg_perplexity(test_ngrams, language_model) -> Tuple[float, int]:
    """Computes the average perplexity of all bigrams using Laplace smoothing.

    Args:
        test_ngrams: The ngrams from the testing partition, an iterable of
            sentences where each sentence is an iterable of ngrams.
        language_model: The fitted nltk language model used for scoring.

    Returns:
        avg_perp: The average perplexity across all ngrams.
        count: The number of ngrams.

    Note: raises ZeroDivisionError when test_ngrams contains no ngrams;
    callers are expected to supply a non-empty test partition.
    """
    # Doc fix: the previous docstring listed the return values in the wrong
    # order (count before avg_perp) and omitted language_model.
    count = 0
    total_perp = 0
    print("Computing perplexity of ngrams...")
    for sent in tqdm(test_ngrams):
        for ngram in sent:
            perp = language_model.perplexity([ngram])
            count += 1
            total_perp += perp
    avg_perp = total_perp / count
    return avg_perp, count
if __name__ == "__main__":
app.run(main)
|
{
"content_hash": "7652b81be007afabd9193e62812c92c6",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 86,
"avg_line_length": 36.518248175182485,
"alnum_prop": 0.6092344593244053,
"repo_name": "googleinterns/text-norm-for-low-resource-languages",
"id": "d10ce02ed75052797a7594e00166ca4bb91d3ae5",
"size": "5022",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "language_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "71000"
},
{
"name": "Shell",
"bytes": "309"
},
{
"name": "Starlark",
"bytes": "2139"
}
],
"symlink_target": ""
}
|
from unittest import TestCase
from django.template import Engine
from .utils import TEMPLATE_DIR
class OriginTestCase(TestCase):
def setUp(self):
self.engine = Engine(dirs=[TEMPLATE_DIR])
def test_origin_compares_equal(self):
a = self.engine.get_template('index.html')
b = self.engine.get_template('index.html')
self.assertEqual(a.origin, b.origin)
self.assertTrue(a.origin == b.origin)
self.assertFalse(a.origin != b.origin)
def test_origin_compares_not_equal(self):
a = self.engine.get_template('first/test.html')
b = self.engine.get_template('second/test.html')
self.assertNotEqual(a.origin, b.origin)
self.assertFalse(a.origin == b.origin)
self.assertTrue(a.origin != b.origin)
|
{
"content_hash": "1adbf225f45820265e414b58af1c2941",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 56,
"avg_line_length": 33.833333333333336,
"alnum_prop": 0.6440886699507389,
"repo_name": "yephper/django",
"id": "12afa35ec5d541869e424e23ed234e4bf3356d56",
"size": "812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/template_tests/test_origin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "1538"
},
{
"name": "CSS",
"bytes": "1697381"
},
{
"name": "HTML",
"bytes": "390772"
},
{
"name": "Java",
"bytes": "588"
},
{
"name": "JavaScript",
"bytes": "3172126"
},
{
"name": "Makefile",
"bytes": "134"
},
{
"name": "PHP",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "13365273"
},
{
"name": "Shell",
"bytes": "837"
},
{
"name": "Smarty",
"bytes": "133"
}
],
"symlink_target": ""
}
|
'''
Copyright (c) 2014, K. Kumar (me@kartikkumar.com)
All rights reserved.
'''
###################################################################################################
# Set up input deck
###################################################################################################

# Set path to TLE catalog file.
# NOTE(review): this is empty and must be filled in before running.
tleCatalogFilePath = ""

# Set number of lines per entry in TLE catalog (2 or 3).
tleEntryNumberOfLines = 3

# Set path to output directory.
outputPath = "."

# Set figure DPI.
figureDPI = 300

# Set font size for axes labels.
fontSize = 24

###################################################################################################

'''
DO NOT EDIT PARAMETERS BEYOND THIS POINT!!!
'''

###################################################################################################
# Set up modules and packages
###################################################################################################
from sgp4.earth_gravity import wgs72
from sgp4.io import twoline2rv
from sgp4.propagation import getgravconst
from matplotlib import rcParams
import matplotlib.pyplot as plt
import numpy as np
from twoBodyMethods import convertMeanMotionToSemiMajorAxis
###################################################################################################
###################################################################################################
# Read and store TLE catalog
###################################################################################################
# Read in catalog and store lines in list.
# Use a context manager so the file handle is always closed (the old code
# opened and closed the file by hand).
with open(tleCatalogFilePath) as fileHandle:
    catalogLines = fileHandle.readlines()

# Strip newline and return carriage characters.
for i in xrange(len(catalogLines)):
    catalogLines[i] = catalogLines[i].strip('\r\n')

# Parse TLE entries and store debris objects.
debrisObjects = []
for tleEntry in xrange(0, len(catalogLines), tleEntryNumberOfLines):
    debrisObjects.append(twoline2rv(catalogLines[tleEntry + 1],
                                    catalogLines[tleEntry + 2], wgs72))

# Sort list of debris objects based on inclination.
inclinationSortedObjects = sorted(debrisObjects, key=lambda x: x.inclo, reverse=False)

# Gather orbital elements of every sorted object (angles in radians, as
# stored by sgp4).
# Bug fix: the loop bound was len(...) - 1, which silently dropped the
# object with the highest inclination from figures 2 and 4.
inclinations = []
raan = []
ecc = []
aop = []
for i in xrange(len(inclinationSortedObjects)):
    inclinations.append(inclinationSortedObjects[i].inclo)
    raan.append(inclinationSortedObjects[i].nodeo)
    ecc.append(inclinationSortedObjects[i].ecco)
    aop.append(inclinationSortedObjects[i].argpo)

###################################################################################################

###################################################################################################
# Generate plots
###################################################################################################

# Set font size for plot labels.
rcParams.update({'font.size': fontSize})

# Plot distribution of eccentricity [-] against semi-major axis [km].
figure = plt.figure()
axis = figure.add_subplot(111)
plt.xlabel("Semi-major axis [km]")
plt.ylabel("Eccentricity [-]")
plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
plt.plot([convertMeanMotionToSemiMajorAxis(debrisObject.no / 60.0, getgravconst('wgs72')[1])
          for debrisObject in debrisObjects],
         [debrisObject.ecco for debrisObject in debrisObjects],
         marker='o', markersize=5, linestyle='none')
axis.set_xlim(xmax=5.0e4)
figure.set_tight_layout(True)
plt.savefig(outputPath + "/figure1_debrisPopulation_eccentricityVsSemiMajorAxis.pdf",
            dpi=figureDPI)
plt.close()

# Plot components of eccentricity vector [-].
# Raw strings keep the TeX backslashes (\cos, \sin) intact.
figure = plt.figure()
axis = figure.add_subplot(111)
plt.xlabel(r"$e \cos{\omega}$ [-]")
plt.ylabel(r"$e \sin{\omega}$ [-]")
plt.plot(ecc * np.cos(aop), ecc * np.sin(aop), marker='o', markersize=5, linestyle='none')
plt.axis('equal')
axis.set_xlim(xmin=-1.0, xmax=1.0)
axis.set_ylim(ymin=-1.0, ymax=1.0)
figure.set_tight_layout(True)
# Bug fix: this figure was previously saved twice back-to-back.
plt.savefig(outputPath + "/figure2_debrisPopulation_eccentricityVector.pdf", dpi=figureDPI)
plt.close()

# Plot distribution of inclination [deg] against semi-major axis [km].
figure = plt.figure()
axis = figure.add_subplot(111)
plt.xlabel("Semi-major axis [km]")
plt.ylabel("Inclination [deg]")
plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
plt.plot([convertMeanMotionToSemiMajorAxis(debrisObject.no / 60.0, getgravconst('wgs72')[1])
          for debrisObject in debrisObjects],
         [np.rad2deg(debrisObject.inclo) for debrisObject in debrisObjects],
         marker='o', markersize=5, linestyle='none')
axis.set_xlim(xmax=5.0e4)
figure.set_tight_layout(True)
plt.savefig(outputPath + "/figure3_debrisPopulation_inclinationVsSemiMajorAxis.pdf",
            dpi=figureDPI)
plt.close()

# Plot components of inclination vector.
# Bug fix: the plotted values are converted to degrees (np.rad2deg), so the
# axis labels now read [deg] instead of the incorrect [rad].
figure = plt.figure()
axis = figure.add_subplot(111)
plt.xlabel(r"$i \cos{\Omega}$ [deg]")
plt.ylabel(r"$i \sin{\Omega}$ [deg]")
plt.plot(np.rad2deg(inclinations) * np.cos(raan), np.rad2deg(inclinations) * np.sin(raan),
         marker='o', markersize=5, linestyle='none')
plt.axis('equal')
axis.set_xlim(xmin=-180.0, xmax=180.0)
axis.set_ylim(ymin=-180.0, ymax=180.0)
figure.set_tight_layout(True)
plt.savefig(outputPath + "/figure4_debrisPopulation_inclinationVector.pdf", dpi=figureDPI)
plt.close()
###################################################################################################
|
{
"content_hash": "bf5359ae942a677641db0f21082ddc6e",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 99,
"avg_line_length": 36.79865771812081,
"alnum_prop": 0.575414918840051,
"repo_name": "kartikkumar/sda",
"id": "d815d0711244f94c3f9efc3db44190c351a61ac3",
"size": "5483",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "populationPlots.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "11263"
}
],
"symlink_target": ""
}
|
"""
WSGI config for bucketlist project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
# Point Django at the project settings before building the WSGI callable.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bucketlist.settings")
# Cling wraps the WSGI application so static files are served directly.
application = Cling(get_wsgi_application())
|
{
"content_hash": "48408460c419762b4f09a920ef159beb",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 78,
"avg_line_length": 25.41176470588235,
"alnum_prop": 0.7754629629629629,
"repo_name": "andela-ggikera/djangular-bucketlist-app",
"id": "6bb3c5da938c0615fe5a2bf19b5f75cae6f3a791",
"size": "432",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bucketlist/bucketlist/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2841989"
},
{
"name": "HTML",
"bytes": "40971"
},
{
"name": "JavaScript",
"bytes": "4695481"
},
{
"name": "Python",
"bytes": "30905"
}
],
"symlink_target": ""
}
|
import os
import hashlib
import random
import logging
import shutil
import glob
from twisterlib.testsuite import TestCase
from twisterlib.error import BuildError
from twisterlib.size_calc import SizeCalculator
from twisterlib.handlers import Handler, SimulationHandler, BinaryHandler, QEMUHandler, DeviceHandler, SUPPORTED_SIMS
# Module-level logger shared by the twister components in this file.
logger = logging.getLogger('twister')
logger.setLevel(logging.DEBUG)
class TestInstance:
    """Class representing the execution of a particular TestSuite on a platform

    @param test The TestSuite object we want to build/execute
    @param platform Platform object that we want to build and run against
    @param base_outdir Base directory for all test results. The actual
        out directory used is <outdir>/<platform>/<test case name>
    """

    def __init__(self, testsuite, platform, outdir):
        self.testsuite = testsuite
        self.platform = platform

        # Execution state, updated as the instance is filtered/built/run.
        self.status = None
        self.reason = "Unknown"
        self.metrics = dict()
        self.handler = None
        self.outdir = outdir
        self.execution_time = 0
        self.retries = 0

        # Unique identity and the per-instance build output directory.
        self.name = os.path.join(platform.name, testsuite.name)
        self.run_id = self._get_run_id()
        self.build_dir = os.path.join(outdir, platform.name, testsuite.name)

        self.run = False
        self.testcases = []
        self.init_cases()
        self.filters = []
        self.filter_type = None

    def add_filter(self, reason, filter_type):
        # Record the filter hit and mark the whole instance as filtered.
        self.filters.append({'type': filter_type, 'reason': reason})
        self.status = "filtered"
        self.reason = reason
        self.filter_type = filter_type

    # Fix an issue with copying objects from testsuite, need better solution.
    def init_cases(self):
        # Mirror the suite's testcases onto this instance so per-instance
        # status can be tracked independently of the suite.
        for c in self.testsuite.testcases:
            self.add_testcase(c.name, freeform=c.freeform)

    def _get_run_id(self):
        """ generate run id from instance unique identifier and a random
        number"""
        hash_object = hashlib.md5(self.name.encode())
        # Salt with 64 random bits so each run gets a fresh id.
        random_str = f"{random.getrandbits(64)}".encode()
        hash_object.update(random_str)
        return hash_object.hexdigest()

    def add_missing_case_status(self, status, reason=None):
        # Apply status (and reason) to every testcase that has none yet.
        for case in self.testcases:
            if not case.status:
                case.status = status
                if reason:
                    case.reason = reason
                else:
                    case.reason = self.reason

    def __getstate__(self):
        # Plain dict copy is sufficient for pickling across processes.
        d = self.__dict__.copy()
        return d

    def __setstate__(self, d):
        self.__dict__.update(d)

    def __lt__(self, other):
        # Order instances by their <platform>/<suite> name.
        return self.name < other.name

    def set_case_status_by_name(self, name, status, reason=None):
        # Sets (creating the case if needed) the status of testcase `name`.
        tc = self.get_case_or_create(name)
        tc.status = status
        if reason:
            tc.reason = reason
        return tc

    def add_testcase(self, name, freeform=False):
        # Append a new TestCase named `name` and return it.
        tc = TestCase(name=name)
        tc.freeform = freeform
        self.testcases.append(tc)
        return tc

    def get_case_by_name(self, name):
        # Return the testcase named `name`, or None when absent.
        for c in self.testcases:
            if c.name == name:
                return c
        return None

    def get_case_or_create(self, name):
        # Return the testcase named `name`, creating it when missing.
        for c in self.testcases:
            if c.name == name:
                return c

        logger.debug(f"Could not find a matching testcase for {name}")
        tc = TestCase(name=name)
        self.testcases.append(tc)
        return tc

    @staticmethod
    def testsuite_runnable(testsuite, fixtures):
        # Whether the suite's harness allows actual execution.
        can_run = False
        # console harness allows us to run the test and capture data.
        if testsuite.harness in ['console', 'ztest', 'pytest', 'test']:
            can_run = True
            # if we have a fixture that is also being supplied on the
            # command-line, then we need to run the test, not just build it.
            fixture = testsuite.harness_config.get('fixture')
            if fixture:
                can_run = (fixture in fixtures)
        return can_run

    def setup_handler(self, env):
        """Select and configure the Handler used to execute this instance."""
        if self.handler:
            return

        options = env.options
        handler = Handler(self, "")
        if self.platform.simulation:
            if self.platform.simulation == "qemu":
                handler = QEMUHandler(self, "qemu")
                handler.args.append(f"QEMU_PIPE={handler.get_fifo()}")
                handler.ready = True
            else:
                handler = SimulationHandler(self, self.platform.simulation)

            # A simulator handler is only usable when its executable exists.
            if self.platform.simulation_exec and shutil.which(self.platform.simulation_exec):
                handler.ready = True
        elif self.testsuite.type == "unit":
            handler = BinaryHandler(self, "unit")
            handler.binary = os.path.join(self.build_dir, "testbinary")
            if options.enable_coverage:
                handler.args.append("COVERAGE=1")
            handler.call_make_run = False
            handler.ready = True
        elif options.device_testing:
            handler = DeviceHandler(self, "device")
            handler.call_make_run = False
            handler.ready = True

        if handler:
            handler.options = options
            handler.generator_cmd = env.generator_cmd
            handler.generator = env.generator
            handler.suite_name_check = not options.disable_suite_name_check
        self.handler = handler

    # Global testsuite parameters
    def check_runnable(self, enable_slow=False, filter='buildable', fixtures=[]):
        """Whether this instance can actually be executed here.

        NOTE(review): `filter` shadows the builtin and `fixtures=[]` is a
        mutable default; both are only read here, but worth cleaning up.
        """
        # running on simulators is currently not supported on Windows
        if os.name == 'nt' and self.platform.simulation != 'na':
            return False

        # we asked for build-only on the command line
        if self.testsuite.build_only:
            return False

        # Do not run slow tests:
        skip_slow = self.testsuite.slow and not enable_slow
        if skip_slow:
            return False

        target_ready = bool(self.testsuite.type == "unit" or \
            self.platform.type == "native" or \
            self.platform.simulation in SUPPORTED_SIMS or \
            filter == 'runnable')

        # Simulators needing an external executable are runnable only when
        # that executable is found on PATH.
        for sim in ['nsim', 'mdb-nsim', 'renode', 'tsim', 'native']:
            if self.platform.simulation == sim and self.platform.simulation_exec:
                if not shutil.which(self.platform.simulation_exec):
                    target_ready = False
                break
            else:
                # NOTE(review): this overwrites target_ready on every
                # non-matching iteration — confirm this is intended.
                target_ready = True

        testsuite_runnable = self.testsuite_runnable(self.testsuite, fixtures)

        return testsuite_runnable and target_ready

    def create_overlay(self, platform, enable_asan=False, enable_ubsan=False, enable_coverage=False, coverage_platform=[]):
        """Write extra Kconfig options to twister/testsuite_extra.conf.

        Returns the overlay content string (possibly empty).
        """
        # Create this in a "twister/" subdirectory otherwise this
        # will pass this overlay to kconfig.py *twice* and kconfig.cmake
        # will silently give that second time precedence over any
        # --extra-args=CONFIG_*
        subdir = os.path.join(self.build_dir, "twister")
        content = ""

        if self.testsuite.extra_configs:
            content = "\n".join(self.testsuite.extra_configs)

        if enable_coverage:
            if platform.name in coverage_platform:
                content = content + "\nCONFIG_COVERAGE=y"
                content = content + "\nCONFIG_COVERAGE_DUMP=y"

        if enable_asan:
            # Sanitizers only apply to native builds.
            if platform.type == "native":
                content = content + "\nCONFIG_ASAN=y"

        if enable_ubsan:
            if platform.type == "native":
                content = content + "\nCONFIG_UBSAN=y"

        if content:
            os.makedirs(subdir, exist_ok=True)
            file = os.path.join(subdir, "testsuite_extra.conf")
            with open(file, "w") as f:
                f.write(content)

        return content

    def calculate_sizes(self):
        """Get the RAM/ROM sizes of a test case.

        This can only be run after the instance has been executed by
        MakeGenerator, otherwise there won't be any binaries to measure.

        @return A SizeCalculator object
        """
        fns = glob.glob(os.path.join(self.build_dir, "zephyr", "*.elf"))
        fns.extend(glob.glob(os.path.join(self.build_dir, "zephyr", "*.exe")))
        # '_pre' artifacts are intermediate link products, not the final image.
        fns = [x for x in fns if '_pre' not in x]
        if len(fns) != 1:
            raise BuildError("Missing/multiple output ELF binary")

        return SizeCalculator(fns[0], self.testsuite.extra_sections)

    def __repr__(self):
        return "<TestSuite %s on %s>" % (self.testsuite.name, self.platform.name)
|
{
"content_hash": "f380318f5b1d2a2b916aaa911211b61e",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 123,
"avg_line_length": 34.608,
"alnum_prop": 0.5943134535367545,
"repo_name": "zephyrproject-rtos/zephyr",
"id": "1c7a702e0380a67f83e14105afbbb54bde1d15d2",
"size": "8790",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "scripts/pylib/twister/twisterlib/testinstance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "444860"
},
{
"name": "Batchfile",
"bytes": "110"
},
{
"name": "C",
"bytes": "45371144"
},
{
"name": "C++",
"bytes": "29398"
},
{
"name": "CMake",
"bytes": "1408561"
},
{
"name": "Cadence",
"bytes": "1501"
},
{
"name": "EmberScript",
"bytes": "997"
},
{
"name": "Forth",
"bytes": "1648"
},
{
"name": "GDB",
"bytes": "1285"
},
{
"name": "Haskell",
"bytes": "753"
},
{
"name": "JetBrains MPS",
"bytes": "3312"
},
{
"name": "PLSQL",
"bytes": "281"
},
{
"name": "Perl",
"bytes": "215578"
},
{
"name": "Python",
"bytes": "2273122"
},
{
"name": "Shell",
"bytes": "173841"
},
{
"name": "SmPL",
"bytes": "36840"
},
{
"name": "Smalltalk",
"bytes": "1885"
},
{
"name": "SourcePawn",
"bytes": "14890"
},
{
"name": "Tcl",
"bytes": "7034"
},
{
"name": "VBA",
"bytes": "294"
},
{
"name": "Verilog",
"bytes": "6394"
}
],
"symlink_target": ""
}
|
from social_core.backends.khanacademy import BrowserBasedOAuth1, \
KhanAcademyOAuth1
|
{
"content_hash": "f924a2f6826a28440bb4b9a5f3c65f81",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 66,
"avg_line_length": 44.5,
"alnum_prop": 0.8314606741573034,
"repo_name": "cjltsod/python-social-auth",
"id": "7ae755869a37577e33ea3c9be7ca405117bec112",
"size": "89",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "social/backends/khanacademy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "618"
},
{
"name": "Python",
"bytes": "275325"
},
{
"name": "Shell",
"bytes": "479"
}
],
"symlink_target": ""
}
|
class Node:
    """A node of the game-search tree.

    Bundles a board position, the algebraic notation of the move that
    produced it, and the node's evaluated value.  Every field defaults to
    None so empty placeholder nodes can be created.
    """

    def __init__(self, board_state=None, algebraic_move=None, value=None):
        self.board_state = board_state
        self.algebraic_move = algebraic_move
        self.value = value
|
{
"content_hash": "a256a1f22b168b7f6b738ceeeb88d761",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 74,
"avg_line_length": 40,
"alnum_prop": 0.64,
"repo_name": "lamesjim/Chess-AI",
"id": "7609f9570e0e060ed92f5905cebda3a0a33c2ce6",
"size": "200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "node.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1344"
},
{
"name": "HTML",
"bytes": "581"
},
{
"name": "JavaScript",
"bytes": "46663"
},
{
"name": "Python",
"bytes": "66877"
}
],
"symlink_target": ""
}
|
import argparse
import json
import os
import random
import numpy as np
from ray import tune
from ray.tune import Trainable, run
class TestLogger(tune.logger.Logger):
    """Minimal Tune logger that simply prints each reported result."""

    def on_result(self, result):
        # Invoked by Tune with the result dict of every training iteration.
        print("TestLogger", result)
def trial_str_creator(trial):
    """Build a readable trial name of the form <trainable>_<trial id>_123."""
    return "%s_%s_123" % (trial.trainable_name, trial.trial_id)
class MyTrainableClass(Trainable):
    """Example agent whose learning curve is a random sigmoid.

    The dummy hyperparameters "width" and "height" determine the slope and
    maximum reward value reached.
    """

    def _setup(self, config):
        # Start every trial from iteration zero.
        self.timestep = 0

    def _train(self):
        self.timestep += 1
        # Sigmoid-shaped curve: height scales the amplitude, width the slope.
        reward = self.config.get("height", 1) * np.tanh(
            float(self.timestep) / self.config.get("width", 1)
        )
        # Here we use `episode_reward_mean`, but you can also report other
        # objectives such as loss or accuracy.
        return {"episode_reward_mean": reward}

    def _save(self, checkpoint_dir):
        # Persist only the iteration counter; that is the entire state.
        checkpoint_path = os.path.join(checkpoint_dir, "checkpoint")
        with open(checkpoint_path, "w") as out:
            out.write(json.dumps({"timestep": self.timestep}))
        return checkpoint_path

    def _restore(self, checkpoint_path):
        with open(checkpoint_path) as saved:
            self.timestep = json.loads(saved.read())["timestep"]
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--smoke-test", action="store_true", help="Finish quickly for testing")
    args, _ = parser.parse_known_args()
    # Launch five trials of the dummy trainable; each trial draws its own
    # "width" and "height" hyperparameters at random via tune.sample_from.
    trials = run(
        MyTrainableClass,
        name="hyperband_test",
        num_samples=5,
        trial_name_creator=trial_str_creator,
        loggers=[TestLogger],
        # Smoke tests stop after one iteration; otherwise run essentially
        # unbounded (99999 iterations).
        stop={"training_iteration": 1 if args.smoke_test else 99999},
        config={
            "width": tune.sample_from(
                lambda spec: 10 + int(90 * random.random())),
            "height": tune.sample_from(lambda spec: int(100 * random.random()))
        })
|
{
"content_hash": "19e708b3dcd430710b56d9c4b5f8e9b4",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 79,
"avg_line_length": 28.91176470588235,
"alnum_prop": 0.6185147507629705,
"repo_name": "stephanie-wang/ray",
"id": "9643034e70390f6b5d65b5c201cff058c9ecfe80",
"size": "1989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/ray/tune/examples/logging_example.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "29882"
},
{
"name": "C++",
"bytes": "2149909"
},
{
"name": "CSS",
"bytes": "8025"
},
{
"name": "Dockerfile",
"bytes": "5499"
},
{
"name": "Go",
"bytes": "28481"
},
{
"name": "HTML",
"bytes": "30435"
},
{
"name": "Java",
"bytes": "738348"
},
{
"name": "JavaScript",
"bytes": "444"
},
{
"name": "Jupyter Notebook",
"bytes": "1615"
},
{
"name": "Makefile",
"bytes": "1965"
},
{
"name": "Python",
"bytes": "4058862"
},
{
"name": "Shell",
"bytes": "88736"
},
{
"name": "Starlark",
"bytes": "121207"
},
{
"name": "TypeScript",
"bytes": "64161"
}
],
"symlink_target": ""
}
|
from sys import argv
# Unpack the three expected command-line values: the script's own path plus
# two user-supplied arguments (raises ValueError if a different count is given).
script, first, second = argv
print "The script is called", script
print "First argument is", first
print "Second argument is", second
# Python 2: raw_input() reads one line from stdin and returns it as a str.
missing= raw_input("What you forgot: ")
print "Entered: ", missing
|
{
"content_hash": "5ef1437585ce00e6493f3c641b2ffe5d",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 39,
"avg_line_length": 20.545454545454547,
"alnum_prop": 0.7300884955752213,
"repo_name": "aurelo/lphw",
"id": "49e3a8d423848bb1fce809e0a967a4f81f054306",
"size": "226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/ex13.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "36708"
}
],
"symlink_target": ""
}
|
# The slug of the panel to be added. Required.
PANEL = 'cluster_templates'
# The slug of the panel group the PANEL is associated with.
PANEL_GROUP = 'container_infra'
# The slug of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'
# Python panel class of the PANEL to be added.
ADD_PANEL = 'magnum_ui.content.cluster_templates.panel.ClusterTemplates'
|
{
"content_hash": "d265b6e0f673af1a2a80ce33b55a1e90",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 72,
"avg_line_length": 41.75,
"alnum_prop": 0.7724550898203593,
"repo_name": "openstack/magnum-ui",
"id": "7f61d3baa4280c1c4addabdedeee9fefa3701845",
"size": "1014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "magnum_ui/enabled/_1372_project_container_infra_cluster_templates_panel.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "18914"
},
{
"name": "JavaScript",
"bytes": "276068"
},
{
"name": "Python",
"bytes": "70448"
},
{
"name": "SCSS",
"bytes": "786"
},
{
"name": "Shell",
"bytes": "2202"
}
],
"symlink_target": ""
}
|
"""
Read data from the "current" BATSE catalog (dubbed 5Bp here, with "p" for
"preliminary," since an official 5B successor to the 4B catalog has not yet
been released). Provide access to catalog data and other GRB data via
a GRBCollection instance providing access to its individual GRB elements
in three ways:
* as an OrderedDict indexed by BATSE trigger number
* via attributes of the form .t#, with # = BATSE trigger number
* via attributes of the form .b#, with # = YYMMDD burst designation;
this returns a list of triggers matching the designation (there will
be >1 if BATSE detected multiple bursts on the specified date)
Only one function in this module is intended for users: load().
This module was created to access the data as released in Jul-Sep 2000.
Created 2012-05-06 by Tom Loredo
"""
from os.path import abspath, exists, join, split
from os import mkdir
import cPickle
from collections import OrderedDict
from numpy import array
from grb import GRB, GRBCollection
from locations import *
from utils import retrieve_gzip
__all__ = ['load_catalog']
# TODO: get_grb_classes is presently unused; is there a use case? May
# only be useful if the pickled files are unpickled outside this module.
def get_grb_classes(modname, classname):
    """
    Return class objects from the "grb" module that have the specified
    `classname`.
    This function is for identifying classes encountered when unpickling
    BATSE 5Bp data; it satisfies the cPickle "find_global" interface.
    """
    # print 'Module:', modname, ' -- Class:', classname
    if classname == 'GRB':
        return GRB
    elif classname == 'GRBCollection':
        return GRBCollection
    else:
        # Only the two grb classes are ever pickled; anything else means a
        # corrupt or foreign pickle.  (Python 2 raise syntax.)
        raise RuntimeError, 'Unrecognized class in pickled data: %s, %s' % (modname, classname)
def read_summaries():
    """
    Read GRB summary information from a pre-existing pickled data file.

    Returns the unpickled GRBCollection.  Raises IOError when the summary
    pickle does not exist yet; the caller then falls back to fetching the
    data from the SSC (see load_catalog).
    """
    try:
        # `file` is the Python 2 alias for open(); `root`/`summaries` come
        # from the module-level locations import.
        sfile = file(join(root, summaries), 'rb')
    except:
        raise IOError('Summary data file does not exist!')
    # Define an unpickler that will recognize grb classes even when unpickling
    # is done elsewhere (in which case grb classes may not be on the
    # top level, thwarting normal unpickling).
    loader = cPickle.Unpickler(sfile)
    # loader.find_global = get_grb_classes
    GRBs = loader.load()
    sfile.close()
    print 'Loaded summary data for', len(GRBs), 'GRBs comprising the 5Bp catalog.'
    return GRBs
def get_grb_bright(bfile):
    """
    Read the brightness data for a single GRB from the brightness data file
    `bfile`; return the trigger number and a list of data entries (strings).

    Each GRB occupies five consecutive lines:
      1: trigger, ch1 fluence & err, ch2 fluence & err
      2: ch3 fluence & err, ch4 fluence & err
      3-5: 64ms / 256ms / 1024ms peak flux, err, time

    Returns (None, None) at end of file.
    """
    first = bfile.readline()
    if first == '':
        return None, None
    fields = first.strip().split()
    # The leading field of the first line is the trigger number; all
    # remaining fields across the five lines are kept as strings.
    trigger = int(fields[0])
    entries = list(fields[1:])
    for _ in range(4):
        entries.extend(bfile.readline().strip().split())
    return trigger, entries
def fetch_summaries():
"""
Fetch GRB summary information from the CGRO SSC; return it in a
GRBCollection instance.
"""
# Get access to the raw data files, either cached or fetched from the SSC.
cache = join(root, raw_cache)
basic = retrieve_gzip(basic_url, cache)
bright4 = retrieve_gzip(bright_url4, cache)
bright5 = retrieve_gzip(bright_url5, cache)
durn = retrieve_gzip(durn_url, cache)
comments = retrieve_gzip(comments_url4, cache)
# Read basic data, defining the GRB objects. Add the trigger data path.
GRBs = GRBCollection()
ncomp = 0 # count complete GRBs (not overwritten by subsequent GRB)
for line in basic:
if not line: # in case of empty lines at end
break
grb = GRB(line)
if grb.trigger in GRBs:
raise ValueError, 'Duplicate entries for trigger %i !' % grb.trigger
GRBs.add(grb)
if not grb.incomplete:
ncomp += 1
basic.close()
print 'Read data for', len(GRBs), 'triggers from basic table,', ncomp,\
'complete...'
print
# Add brightness (flux, fluence) data.
nf = 0
extra = [] # collect triggers in flux table but not basic table
while True:
trigger, data = get_grb_bright(bright4)
if trigger is None:
break
if trigger in GRBs:
GRBs[trigger].set_bright(trigger, data)
nf += 1
else:
extra.append(trigger)
bright4.close()
while True:
trigger, data = get_grb_bright(bright5)
if trigger is None:
break
if trigger in GRBs:
GRBs[trigger].set_bright(trigger, data)
nf += 1
else:
extra.append(trigger)
bright5.close()
print 'Read flux data for', nf, 'basic table triggers.'
print 'Extraneous flux data for:', extra
if extra:
print '***** Data for these GRBs was ignored!!! *****'
print
# Add duration data.
ndur = 0
extra = []
for line in durn:
if not line:
break
data = line.strip().split()
trigger = int(data[0])
if trigger in GRBs:
GRBs[trigger].set_durn(trigger, data[1:])
ndur += 1
else: #
extra.append(trigger)
durn.close()
print 'Read duration data for', ndur, 'basic table triggers.'
print 'Extraneous data for:', extra
if extra:
print '***** Data for these GRBs was ignored!!! *****'
print
# Add comments.
ncom = 0
extra = []
for line in comments:
if not line:
break
if line[0] == '#': # header
continue
trigger = int(line[:6].strip())
flag = line[11]
com = line[14:].strip()
if trigger in GRBs:
GRBs[trigger].comments.append((flag, com))
ncom += 1
else: #
extra.append(trigger)
durn.close()
print 'Read comment data for', ncom, 'basic table triggers.'
print 'Extraneous data for:', extra
if extra:
print '***** Data for these GRBs was ignored!!! *****'
return GRBs
def load_catalog(root_dir=root):
    """
    Establish access to GRB data from the BATSE '5B' catalog, stored in the
    `root_dir` directory. Return a GRBCollection providing burst-by-burst
    access keyed by trigger number and via trigger and YYMMDD (date)
    attributes.
    If no catalog has yet been established, the directory is created and
    summary data for all GRBs are fetched from the CGRO SSC and stored
    locally for future use.
    Detailed data for specific bursts is fetched, parsed, and cached
    lazily as requested.
    """
    # TODO: Probably a better way to handle this than with a global....
    global root
    # Make sure root directory exists.
    root = abspath(root_dir) # assigns full path throughout package
    rc_dir = join(root, raw_cache)
    if not exists(root):
        mkdir(root)
    if not exists(rc_dir):
        mkdir(rc_dir)
    try:
        # Fast path: a summary pickle already exists from an earlier run.
        GRBs = read_summaries()
    except IOError:
        # No cached pickle yet: fetch from the SSC and cache for next time.
        GRBs = fetch_summaries()
        sfile = file(join(root,summaries), 'wb')
        cPickle.dump(GRBs, sfile, 2) # protocol 2 for efficiency
        sfile.close()
    return GRBs
|
{
"content_hash": "0af734f761c1c542f8406db11bc4ce47",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 95,
"avg_line_length": 32.30801687763713,
"alnum_prop": 0.628966958338775,
"repo_name": "tloredo/batse5bp",
"id": "b2af14876b67de4d929b217500ea90da34235824",
"size": "7657",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "batse5bp/catalog.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "300"
},
{
"name": "Python",
"bytes": "143533"
}
],
"symlink_target": ""
}
|
"""Integration tests for the pr_curves plugin."""
import collections.abc
import functools
import os.path
import numpy as np
import tensorflow as tf
from tensorboard import context
from tensorboard.backend.event_processing import (
plugin_event_multiplexer as event_multiplexer,
)
from tensorboard.backend.event_processing import data_provider
from tensorboard.plugins import base_plugin
from tensorboard.plugins.pr_curve import pr_curve_demo
from tensorboard.plugins.pr_curve import pr_curves_plugin
# An absolute tolerance is used instead of a relative one because the expected
# values here are small; numpy's default rtol of 1e-7 produces many spurious
# failures on them.
assert_allclose = functools.partial(
    np.testing.assert_allclose, atol=1e-7, rtol=0
)
class PrCurvesPluginTest(tf.test.TestCase):
    def setUp(self):
        super(PrCurvesPluginTest, self).setUp()
        logdir = os.path.join(self.get_temp_dir(), "logdir")
        # Generate data.
        pr_curve_demo.run_all(
            logdir=logdir, steps=3, thresholds=5, verbose=False
        )
        # Create a multiplexer for reading the data we just wrote.
        multiplexer = event_multiplexer.EventMultiplexer()
        multiplexer.AddRunsFromDirectory(logdir)
        multiplexer.Reload()
        provider = data_provider.MultiplexerDataProvider(multiplexer, logdir)
        # Named `tb_context` (not `context`) to avoid shadowing the
        # module-level `tensorboard.context` import used by the tests below.
        tb_context = base_plugin.TBContext(
            logdir=logdir, data_provider=provider
        )
        self.plugin = pr_curves_plugin.PrCurvesPlugin(tb_context)

    def validatePrCurveEntry(
        self,
        expected_step,
        expected_precision,
        expected_recall,
        expected_true_positives,
        expected_false_positives,
        expected_true_negatives,
        expected_false_negatives,
        expected_thresholds,
        pr_curve_entry,
    ):
        """Checks that the values stored within a tensor are correct.
        Args:
          expected_step: The expected step.
          expected_precision: A list of float values.
          expected_recall: A list of float values.
          expected_true_positives: A list of int values.
          expected_false_positives: A list of int values.
          expected_true_negatives: A list of int values.
          expected_false_negatives: A list of int values.
          expected_thresholds: A list of floats ranging from 0 to 1.
          pr_curve_entry: The PR curve entry to evaluate.
        """
        self.assertEqual(expected_step, pr_curve_entry["step"])
        assert_allclose(expected_precision, pr_curve_entry["precision"])
        assert_allclose(expected_recall, pr_curve_entry["recall"])
        self.assertListEqual(
            expected_true_positives, pr_curve_entry["true_positives"]
        )
        self.assertListEqual(
            expected_false_positives, pr_curve_entry["false_positives"]
        )
        self.assertListEqual(
            expected_true_negatives, pr_curve_entry["true_negatives"]
        )
        self.assertListEqual(
            expected_false_negatives, pr_curve_entry["false_negatives"]
        )
        assert_allclose(expected_thresholds, pr_curve_entry["thresholds"])

    def computeCorrectDescription(self, standard_deviation):
        """Generates a correct description.
        Arguments:
          standard_deviation: An integer standard deviation value.
        Returns:
          The correct description given a standard deviation value.
        """
        description = (
            "<p>The probabilities used to create this PR curve are "
            "generated from a normal distribution. Its standard "
            "deviation is initially %d and decreases"
            " over time.</p>"
        ) % standard_deviation
        return description

    def testRoutesProvided(self):
        """Tests that the plugin offers the correct routes."""
        routes = self.plugin.get_plugin_apps()
        self.assertIsInstance(routes["/tags"], collections.abc.Callable)
        self.assertIsInstance(routes["/pr_curves"], collections.abc.Callable)

    def testTagsProvided(self):
        """Tests that tags are provided."""
        tags_response = self.plugin.tags_impl(context.RequestContext(), "123")
        # Assert that the runs are right.
        self.assertItemsEqual(
            ["colors", "mask_every_other_prediction"],
            list(tags_response.keys()),
        )
        # Assert that the tags for each run are correct.
        self.assertItemsEqual(
            ["red/pr_curves", "green/pr_curves", "blue/pr_curves"],
            list(tags_response["colors"].keys()),
        )
        self.assertItemsEqual(
            ["red/pr_curves", "green/pr_curves", "blue/pr_curves"],
            list(tags_response["mask_every_other_prediction"].keys()),
        )
        # Verify the data for each run-tag combination.
        self.assertDictEqual(
            {
                "displayName": "classifying red",
                "description": self.computeCorrectDescription(168),
            },
            tags_response["colors"]["red/pr_curves"],
        )
        self.assertDictEqual(
            {
                "displayName": "classifying green",
                "description": self.computeCorrectDescription(210),
            },
            tags_response["colors"]["green/pr_curves"],
        )
        self.assertDictEqual(
            {
                "displayName": "classifying blue",
                "description": self.computeCorrectDescription(252),
            },
            tags_response["colors"]["blue/pr_curves"],
        )
        self.assertDictEqual(
            {
                "displayName": "classifying red",
                "description": self.computeCorrectDescription(168),
            },
            tags_response["mask_every_other_prediction"]["red/pr_curves"],
        )
        self.assertDictEqual(
            {
                "displayName": "classifying green",
                "description": self.computeCorrectDescription(210),
            },
            tags_response["mask_every_other_prediction"]["green/pr_curves"],
        )
        self.assertDictEqual(
            {
                "displayName": "classifying blue",
                "description": self.computeCorrectDescription(252),
            },
            tags_response["mask_every_other_prediction"]["blue/pr_curves"],
        )

    def testPrCurvesDataCorrect(self):
        """Tests that responses for PR curves for run-tag combos are
        correct."""
        pr_curves_response = self.plugin.pr_curves_impl(
            context.RequestContext(),
            "123",
            ["colors", "mask_every_other_prediction"],
            "blue/pr_curves",
        )
        # Assert that the runs are correct.
        self.assertItemsEqual(
            ["colors", "mask_every_other_prediction"],
            list(pr_curves_response.keys()),
        )
        # Assert that PR curve data is correct for the colors run.
        entries = pr_curves_response["colors"]
        self.assertEqual(3, len(entries))
        self.validatePrCurveEntry(
            expected_step=0,
            expected_precision=[0.3333333, 0.3853211, 0.5421687, 0.75],
            expected_recall=[1.0, 0.84, 0.3, 0.04],
            expected_true_positives=[150, 126, 45, 6],
            expected_false_positives=[300, 201, 38, 2],
            expected_true_negatives=[0, 99, 262, 298],
            expected_false_negatives=[0, 24, 105, 144],
            expected_thresholds=[0.0, 0.25, 0.5, 0.75],
            pr_curve_entry=entries[0],
        )
        self.validatePrCurveEntry(
            expected_step=1,
            expected_precision=[0.3333333, 0.3855422, 0.5357143, 0.4],
            expected_recall=[1.0, 0.8533334, 0.3, 0.0266667],
            expected_true_positives=[150, 128, 45, 4],
            expected_false_positives=[300, 204, 39, 6],
            expected_true_negatives=[0, 96, 261, 294],
            expected_false_negatives=[0, 22, 105, 146],
            expected_thresholds=[0.0, 0.25, 0.5, 0.75],
            pr_curve_entry=entries[1],
        )
        self.validatePrCurveEntry(
            expected_step=2,
            expected_precision=[0.3333333, 0.3934426, 0.5064935, 0.6666667],
            expected_recall=[1.0, 0.8, 0.26, 0.0266667],
            expected_true_positives=[150, 120, 39, 4],
            expected_false_positives=[300, 185, 38, 2],
            expected_true_negatives=[0, 115, 262, 298],
            expected_false_negatives=[0, 30, 111, 146],
            expected_thresholds=[0.0, 0.25, 0.5, 0.75],
            pr_curve_entry=entries[2],
        )
        # Assert that PR curve data is correct for the mask_every_other_prediction
        # run.
        entries = pr_curves_response["mask_every_other_prediction"]
        self.assertEqual(3, len(entries))
        self.validatePrCurveEntry(
            expected_step=0,
            expected_precision=[0.3333333, 0.3786982, 0.5384616, 1.0],
            expected_recall=[1.0, 0.8533334, 0.28, 0.0666667],
            expected_true_positives=[75, 64, 21, 5],
            expected_false_positives=[150, 105, 18, 0],
            expected_true_negatives=[0, 45, 132, 150],
            expected_false_negatives=[0, 11, 54, 70],
            expected_thresholds=[0.0, 0.25, 0.5, 0.75],
            pr_curve_entry=entries[0],
        )
        self.validatePrCurveEntry(
            expected_step=1,
            expected_precision=[0.3333333, 0.3850932, 0.5, 0.25],
            expected_recall=[1.0, 0.8266667, 0.28, 0.0133333],
            expected_true_positives=[75, 62, 21, 1],
            expected_false_positives=[150, 99, 21, 3],
            expected_true_negatives=[0, 51, 129, 147],
            expected_false_negatives=[0, 13, 54, 74],
            expected_thresholds=[0.0, 0.25, 0.5, 0.75],
            pr_curve_entry=entries[1],
        )
        self.validatePrCurveEntry(
            expected_step=2,
            expected_precision=[0.3333333, 0.3986928, 0.4444444, 0.6666667],
            expected_recall=[1.0, 0.8133333, 0.2133333, 0.0266667],
            expected_true_positives=[75, 61, 16, 2],
            expected_false_positives=[150, 92, 20, 1],
            expected_true_negatives=[0, 58, 130, 149],
            expected_false_negatives=[0, 14, 59, 73],
            expected_thresholds=[0.0, 0.25, 0.5, 0.75],
            pr_curve_entry=entries[2],
        )

    def testPrCurvesRaisesValueErrorWhenNoData(self):
        """Tests that the method for obtaining PR curve data raises a
        ValueError.
        The handler should raise a ValueError when no PR curve data can
        be found for a certain run-tag combination.
        """
        with self.assertRaisesRegex(ValueError, r"No PR curves could be found"):
            self.plugin.pr_curves_impl(
                context.RequestContext(), "123", ["colors"], "non_existent_tag"
            )
        with self.assertRaisesRegex(ValueError, r"No PR curves could be found"):
            self.plugin.pr_curves_impl(
                context.RequestContext(),
                "123",
                ["non_existent_run"],
                "blue/pr_curves",
            )

    def testPluginIsNotActive(self):
        """Tests that the plugin is inactive when no relevant data exists."""
        empty_logdir = os.path.join(self.get_temp_dir(), "empty_logdir")
        multiplexer = event_multiplexer.EventMultiplexer()
        multiplexer.AddRunsFromDirectory(empty_logdir)
        multiplexer.Reload()
        # Named `tb_context` (not `context`) to avoid shadowing the
        # module-level `tensorboard.context` import.
        tb_context = base_plugin.TBContext(
            logdir=empty_logdir, multiplexer=multiplexer
        )
        plugin = pr_curves_plugin.PrCurvesPlugin(tb_context)
        self.assertFalse(plugin.is_active())
if __name__ == "__main__":
    # Discover and run every test method in this module under TF's runner.
    tf.test.main()
|
{
"content_hash": "485afb458f843698afd894001b07769e",
"timestamp": "",
"source": "github",
"line_count": 301,
"max_line_length": 82,
"avg_line_length": 38.96345514950166,
"alnum_prop": 0.593281036834925,
"repo_name": "tensorflow/tensorboard",
"id": "bb5eb41ce15a1235006b409fb366e6aaff20b54f",
"size": "12417",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorboard/plugins/pr_curve/pr_curves_plugin_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16222"
},
{
"name": "Dockerfile",
"bytes": "1226"
},
{
"name": "HTML",
"bytes": "154824"
},
{
"name": "Java",
"bytes": "20643"
},
{
"name": "JavaScript",
"bytes": "11869"
},
{
"name": "Jupyter Notebook",
"bytes": "7697"
},
{
"name": "Python",
"bytes": "2922179"
},
{
"name": "Rust",
"bytes": "311041"
},
{
"name": "SCSS",
"bytes": "136834"
},
{
"name": "Shell",
"bytes": "36731"
},
{
"name": "Starlark",
"bytes": "541743"
},
{
"name": "TypeScript",
"bytes": "5930550"
}
],
"symlink_target": ""
}
|
from django.db import models
from server.models import *
class Catalog(models.Model):
    # Owning machine group; deleting the group cascades to its catalogs.
    machine_group = models.ForeignKey(MachineGroup, on_delete=models.CASCADE)
    # Raw catalog body stored as text -- format not evident here; confirm
    # against the code that populates it.
    content = models.TextField()
    name = models.CharField(max_length=253)
    # 64 chars fits a SHA-256 hex digest; presumably a digest of `content`
    # used for change detection -- verify against callers.
    sha256hash = models.CharField(max_length=64)

    class Meta:
        ordering = ['name', 'machine_group']
|
{
"content_hash": "089a9aa0aed83db2b8986ef790df30d1",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 77,
"avg_line_length": 29.5,
"alnum_prop": 0.711864406779661,
"repo_name": "salopensource/sal",
"id": "3495fef7cebbfc1dab2c25d60cb2b984ed008638",
"size": "354",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "catalog/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "119817"
},
{
"name": "Dockerfile",
"bytes": "2228"
},
{
"name": "HTML",
"bytes": "152173"
},
{
"name": "JavaScript",
"bytes": "279963"
},
{
"name": "Less",
"bytes": "67048"
},
{
"name": "Makefile",
"bytes": "2208"
},
{
"name": "Procfile",
"bytes": "23"
},
{
"name": "Python",
"bytes": "613680"
},
{
"name": "SCSS",
"bytes": "51035"
},
{
"name": "Shell",
"bytes": "4535"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: relaxes several address/company/document
    CharFields to ``blank=True, default=""`` and makes the ``created_by``
    foreign keys nullable with ``SET_NULL`` deletion behavior."""

    dependencies = [
        ("common", "0026_auto_20210325_1923"),
    ]

    operations = [
        migrations.AlterField(
            model_name="address",
            name="address_line",
            field=models.CharField(
                blank=True, default="", max_length=255, verbose_name="Address"
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="city",
            field=models.CharField(
                blank=True, default="", max_length=255, verbose_name="City"
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="postcode",
            field=models.CharField(
                blank=True, default="", max_length=64, verbose_name="Post/Zip-code"
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="state",
            field=models.CharField(
                blank=True, default="", max_length=255, verbose_name="State"
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="street",
            field=models.CharField(
                blank=True, default="", max_length=55, verbose_name="Street"
            ),
        ),
        migrations.AlterField(
            model_name="apisettings",
            name="created_by",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="settings_created_by",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="apisettings",
            name="title",
            field=models.TextField(),
        ),
        migrations.AlterField(
            model_name="attachments",
            name="created_by",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="attachment_created_by",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="company",
            name="address",
            field=models.TextField(blank=True, default=""),
        ),
        migrations.AlterField(
            model_name="company",
            name="name",
            field=models.CharField(blank=True, default="", max_length=100),
        ),
        migrations.AlterField(
            model_name="document",
            name="created_by",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="document_uploaded",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="document",
            name="title",
            field=models.TextField(blank=True, default=""),
        ),
        migrations.AlterField(
            model_name="google",
            name="google_url",
            field=models.TextField(default=""),
        ),
    ]
|
{
"content_hash": "25f77269205b174ec0cdad2e0bd9a66d",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 83,
"avg_line_length": 31.57547169811321,
"alnum_prop": 0.4980579623543472,
"repo_name": "MicroPyramid/Django-CRM",
"id": "8e3c08bae7c3e498151eb56362c67de14ee8c1cb",
"size": "3394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/migrations/0027_auto_20210418_1112.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "819"
},
{
"name": "HTML",
"bytes": "299393"
},
{
"name": "Python",
"bytes": "888791"
},
{
"name": "Shell",
"bytes": "1035"
}
],
"symlink_target": ""
}
|
"""Replacement for ``django.template.loader`` that uses Jinja 2.
The module provides a generic way to load templates from an arbitrary
backend storage (e.g. filesystem, database).
"""
from coffin.template import Template as CoffinTemplate
from jinja2 import TemplateNotFound
def find_template_source(name, dirs=None):
    """Unsupported low-level template lookup hook.

    This mirrors Django's most basic loading function, through which all
    template retrievals go.  Jinja 2 does not publish an equivalent, and it
    is mostly internal anyway -- use ``get_template()`` or
    ``get_template_from_string()`` instead.
    """
    raise NotImplementedError()
def get_template(template_name):
    """Load and return the Jinja 2 template named ``template_name``."""
    # Jinja will handle this for us, and env also initializes
    # the loader backends the first time it is called.
    from coffin.common import env
    return env.get_template(template_name)
def get_template_from_string(source):
    """
    Compile ``source`` into a Jinja 2 template and return it.

    Does not support the ``name`` and ``origin`` parameters from
    the Django version.
    """
    # Imported lazily so the environment is only built on first use.
    from coffin.common import env
    return env.from_string(source)
def render_to_string(template_name, dictionary=None, context_instance=None):
    """Load ``template_name`` and render it with ``dictionary`` as context.

    ``template_name`` may be a single name (loaded via ``get_template``) or
    a list/tuple of names (the first loadable one is used, via
    ``select_template``).  ``dictionary`` may also be a Django ``Context``
    object.  Returns a string.
    """
    context_data = dictionary or {}
    loader = (
        select_template
        if isinstance(template_name, (list, tuple))
        else get_template
    )
    template = loader(template_name)
    if context_instance:
        context_instance.update(context_data)
    else:
        context_instance = context_data
    return template.render(context_instance)
def select_template(template_name_list):
    """Return the first template in ``template_name_list`` that loads."""
    for candidate in template_name_list:
        try:
            return get_template(candidate)
        except TemplateNotFound:
            pass
    # None of the names resolved to a loadable template.
    raise TemplateNotFound(', '.join(template_name_list))
|
{
"content_hash": "db6a2d587d762dba695c59746cfbc74e",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 76,
"avg_line_length": 34.93939393939394,
"alnum_prop": 0.7003469210754554,
"repo_name": "havard024/prego",
"id": "9fa80496f45d85aa4abcea79be5418f2c74fafc4",
"size": "2306",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/coffin/template/loader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2978"
},
{
"name": "CSS",
"bytes": "620190"
},
{
"name": "JavaScript",
"bytes": "2456120"
},
{
"name": "PHP",
"bytes": "25856"
},
{
"name": "Python",
"bytes": "34948766"
},
{
"name": "Shell",
"bytes": "12359"
},
{
"name": "TeX",
"bytes": "113674"
}
],
"symlink_target": ""
}
|
"""
Copyright Philip Castiglione 2018
Licensed under the MIT Licence. Refer to LICENSE.txt.
"""
from game import Game
if __name__ == '__main__':
    # Entry point: delegate directly to the Game class (imported from game).
    Game.start()
|
{
"content_hash": "0e937f1dc8d3a71a46aec0e1d15fe6d5",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 53,
"avg_line_length": 16.5,
"alnum_prop": 0.6666666666666666,
"repo_name": "PhilipCastiglione/learning-machines",
"id": "34c107a5e7cc4f62c38a0be814073093bcf0bda0",
"size": "185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "miscellany/riddle-competitions/golad/python3/lib/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1099"
},
{
"name": "C++",
"bytes": "32358"
},
{
"name": "Matlab",
"bytes": "189456"
},
{
"name": "Python",
"bytes": "100140"
},
{
"name": "Shell",
"bytes": "40"
}
],
"symlink_target": ""
}
|
import webob.exc
from nova.api.openstack import extensions
from nova import db
from nova import exception
from nova.openstack.common.gettextutils import _
# Policy-check callable for this extension; every handler calls it with the
# request context before touching the database.
authorize = extensions.extension_authorizer('compute', 'fixed_ips')
class FixedIPController(object):
    """Controller exposing fixed-IP lookup and reserve/unreserve actions."""

    def show(self, req, id):
        """Return data about the given fixed ip."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            fixed_ip = db.fixed_ip_get_by_address_detailed(context, id)
        except (exception.FixedIpNotFoundForAddress,
                exception.FixedIpInvalid) as ex:
            raise webob.exc.HTTPNotFound(explanation=ex.format_message())
        fixed_ip_info = {"fixed_ip": {}}
        # fixed_ip indexes 0/1/2 look like (address row, network row,
        # instance row) -- confirm against the db API.  A missing second
        # element is treated as a deleted address.
        if fixed_ip[1] is None:
            msg = _("Fixed IP %s has been deleted") % id
            raise webob.exc.HTTPNotFound(explanation=msg)
        fixed_ip_info['fixed_ip']['cidr'] = fixed_ip[1]['cidr']
        fixed_ip_info['fixed_ip']['address'] = fixed_ip[0]['address']
        if fixed_ip[2]:
            fixed_ip_info['fixed_ip']['hostname'] = fixed_ip[2]['hostname']
            fixed_ip_info['fixed_ip']['host'] = fixed_ip[2]['host']
        else:
            fixed_ip_info['fixed_ip']['hostname'] = None
            fixed_ip_info['fixed_ip']['host'] = None
        return fixed_ip_info

    def action(self, req, id, body):
        """Dispatch a POST member action: 'reserve' or 'unreserve'."""
        context = req.environ['nova.context']
        authorize(context)
        if 'reserve' in body:
            return self._set_reserved(context, id, True)
        elif 'unreserve' in body:
            return self._set_reserved(context, id, False)
        else:
            # CONSISTENCY FIX: wrap the message in _() like every other
            # user-facing message in this class.
            raise webob.exc.HTTPBadRequest(
                explanation=_("No valid action specified"))

    def _set_reserved(self, context, address, reserved):
        # Flip the 'reserved' flag on the fixed-IP row; 404 on bad address.
        try:
            fixed_ip = db.fixed_ip_get_by_address(context, address)
            db.fixed_ip_update(context, fixed_ip['address'],
                               {'reserved': reserved})
        except (exception.FixedIpNotFoundForAddress, exception.FixedIpInvalid):
            msg = _("Fixed IP %s not found") % address
            raise webob.exc.HTTPNotFound(explanation=msg)
        return webob.exc.HTTPAccepted()
class Fixed_ips(extensions.ExtensionDescriptor):
    """Fixed IPs support."""
    name = "FixedIPs"
    alias = "os-fixed-ips"
    namespace = "http://docs.openstack.org/compute/ext/fixed_ips/api/v2"
    updated = "2012-10-18T19:25:27Z"
    def get_resources(self):
        """Expose the os-fixed-ips resource with its POST 'action' member."""
        resource = extensions.ResourceExtension(
            'os-fixed-ips',
            FixedIPController(),
            member_actions={'action': 'POST'})
        return [resource]
|
{
"content_hash": "de3595db646b6d9a46272fad175cd445",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 79,
"avg_line_length": 35.78481012658228,
"alnum_prop": 0.5886098337460205,
"repo_name": "tanglei528/nova",
"id": "23647f51b3adee8beb1fc9253ea15e7163e76f15",
"size": "3429",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/api/openstack/compute/contrib/fixed_ips.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13998720"
},
{
"name": "Shell",
"bytes": "17451"
}
],
"symlink_target": ""
}
|
# Tell Django which AppConfig to use for this app (pre-3.2 convention).
default_app_config = 'course.apps.CourseConfig'
|
{
"content_hash": "a81d294a89014e50783f0768a1f72fde",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 47,
"avg_line_length": 48,
"alnum_prop": 0.7916666666666666,
"repo_name": "OpenCourseProject/OpenCourse",
"id": "10308e50109453da49fbc2072499f878365a0e17",
"size": "48",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "course/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "68920"
},
{
"name": "HTML",
"bytes": "458887"
},
{
"name": "JavaScript",
"bytes": "137559"
},
{
"name": "Python",
"bytes": "198465"
}
],
"symlink_target": ""
}
|
from modules.setup.app import app
from modules.views.login import facebook, google, github
from flask import redirect, url_for, flash, session as login_session
@app.route('/logout/')
def logout():
    """Log the user out of whichever OAuth provider they signed in with."""
    if 'provider' not in login_session:
        # Guard clause: nothing to tear down.
        flash('!E!You weren\'t logged in to begin with')
        return redirect(url_for('front'))
    provider = login_session['provider']
    # Provider-specific disconnect plus provider-specific session keys.
    if provider == 'facebook':
        facebook.fbdisconnect()
        del login_session['facebook_id']
    elif provider == 'google':
        google.gdisconnect()
        del login_session['gplus_id']
        del login_session['credentials']
    elif provider == 'github':
        github.ghdisconnect()
    # Keys common to every provider.
    for key in ('provider', 'username', 'email', 'picture', 'user_id'):
        del login_session[key]
    flash('You have successfully been logged out')
    return redirect(url_for('front'))
|
{
"content_hash": "b53c3b593912156656a6021d829bcd60",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 68,
"avg_line_length": 37.19230769230769,
"alnum_prop": 0.6204756980351603,
"repo_name": "stonescar/item-catalog",
"id": "fffcba4ab36d87a0a67e5425676ba715bc3636a0",
"size": "967",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/views/login/logout.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "484"
},
{
"name": "HTML",
"bytes": "24756"
},
{
"name": "JavaScript",
"bytes": "1293"
},
{
"name": "Python",
"bytes": "29252"
},
{
"name": "Shell",
"bytes": "358"
}
],
"symlink_target": ""
}
|
import os
from settings import *
# Register the test-only app on top of the base settings.
INSTALLED_APPS += [
    'simple_tests',
]
# Use the no-op "simple" backend so tests need no external search service.
HAYSTACK_SEARCH_ENGINE = 'simple'
|
{
"content_hash": "a00d789ef2f85dcd94851ad6bbdac080",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 33,
"avg_line_length": 13.875,
"alnum_prop": 0.6846846846846847,
"repo_name": "calvinchengx/django-haystack",
"id": "b21dcac55b05ca87bfb3ac691b656439b120ec5c",
"size": "111",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/simple_settings.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "480175"
},
{
"name": "Shell",
"bytes": "539"
}
],
"symlink_target": ""
}
|
import errno
import imp
import marshal
import os
import py_compile
import random
import stat
import struct
import sys
import unittest
import textwrap
import shutil
from test.test_support import (unlink, TESTFN, unload, run_unittest, rmtree,
is_jython, check_warnings, EnvironmentVarGuard,
impl_detail, check_impl_detail)
from test import symlink_support
from test import script_helper
def _files(name):
return (name + os.extsep + "py",
name + os.extsep + "pyc",
name + os.extsep + "pyo",
name + os.extsep + "pyw",
name + "$py.class")
def chmod_files(name):
    """Make all of *name*'s source/bytecode artifacts owner read/write.

    Missing files are ignored (ENOENT); any other chmod failure propagates.
    """
    for f in _files(name):
        try:
            # 0o600: same value as the old py2-only literal 0600, but this
            # spelling is valid on Python 2.6+ AND Python 3 (PEP 3127).
            os.chmod(f, 0o600)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise
def remove_files(name):
    """Delete every import artifact associated with *name* (best effort)."""
    for artifact in _files(name):
        unlink(artifact)
class ImportTests(unittest.TestCase):
    """Core import-system regression tests (Python 2): bytecode creation,
    reload() semantics, file permissions, case sensitivity and more."""
    def tearDown(self):
        unload(TESTFN)
    # A fresh test needs exactly the same cleanup a finished one does.
    setUp = tearDown
    def test_case_sensitivity(self):
        # Brief digression to test that import is case-sensitive: if we got
        # this far, we know for sure that "random" exists.
        try:
            import RAnDoM
        except ImportError:
            pass
        else:
            self.fail("import of RAnDoM should have failed (case mismatch)")
    def test_double_const(self):
        # Another brief digression to test the accuracy of manifest float
        # constants.
        from test import double_const # don't blink -- that *was* the test
    def test_import(self):
        def test_with_extension(ext):
            # The extension is normally ".py", perhaps ".pyw".
            source = TESTFN + ext
            pyo = TESTFN + os.extsep + "pyo"
            if is_jython:
                pyc = TESTFN + "$py.class"
            else:
                pyc = TESTFN + os.extsep + "pyc"
            # Write a module with two random attributes so we can verify
            # the loaded module's contents, not just that import succeeded.
            with open(source, "w") as f:
                print >> f, ("# This tests Python's ability to import a", ext,
                             "file.")
                a = random.randrange(1000)
                b = random.randrange(1000)
                print >> f, "a =", a
                print >> f, "b =", b
            try:
                mod = __import__(TESTFN)
            except ImportError, err:
                self.fail("import from %s failed: %s" % (ext, err))
            else:
                self.assertEqual(mod.a, a,
                    "module loaded (%s) but contents invalid" % mod)
                self.assertEqual(mod.b, b,
                    "module loaded (%s) but contents invalid" % mod)
            finally:
                if check_impl_detail(pypy=False):
                    unlink(source)
            # Reload must now come from the compiled file (source is gone
            # on CPython).
            try:
                if not sys.dont_write_bytecode:
                    imp.reload(mod)
            except ImportError, err:
                self.fail("import from .pyc/.pyo failed: %s" % err)
            finally:
                unlink(pyc)
                unlink(pyo)
                unload(TESTFN)
        sys.path.insert(0, os.curdir)
        try:
            test_with_extension(os.extsep + "py")
            if sys.platform.startswith("win"):
                for ext in [".PY", ".Py", ".pY", ".pyw", ".PYW", ".pYw"]:
                    test_with_extension(ext)
        finally:
            del sys.path[0]
    @unittest.skipUnless(os.name == 'posix',
                         "test meaningful only on posix systems")
    @unittest.skipIf(sys.dont_write_bytecode,
                     "test meaningful only when writing bytecode")
    def test_execute_bit_not_copied(self):
        # Issue 6070: under posix .pyc files got their execute bit set if
        # the .py file had the execute bit set, but they aren't executable.
        oldmask = os.umask(022)
        sys.path.insert(0, os.curdir)
        try:
            fname = TESTFN + os.extsep + "py"
            f = open(fname, 'w').close()
            os.chmod(fname, (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
                             stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
            __import__(TESTFN)
            fn = fname + 'c'
            if not os.path.exists(fn):
                fn = fname + 'o'
                if not os.path.exists(fn):
                    self.fail("__import__ did not result in creation of "
                              "either a .pyc or .pyo file")
            s = os.stat(fn)
            # The compiled file must be read-only for all, never executable.
            self.assertEqual(stat.S_IMODE(s.st_mode),
                             stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
        finally:
            os.umask(oldmask)
            remove_files(TESTFN)
            unload(TESTFN)
            del sys.path[0]
    @unittest.skipIf(sys.dont_write_bytecode,
                     "test meaningful only when writing bytecode")
    def test_rewrite_pyc_with_read_only_source(self):
        # Issue 6074: a long time ago on posix, and more recently on Windows,
        # a read only source file resulted in a read only pyc file, which
        # led to problems with updating it later
        sys.path.insert(0, os.curdir)
        fname = TESTFN + os.extsep + "py"
        try:
            # Write a Python file, make it read-only and import it
            with open(fname, 'w') as f:
                f.write("x = 'original'\n")
            # Tweak the mtime of the source to ensure pyc gets updated later
            s = os.stat(fname)
            os.utime(fname, (s.st_atime, s.st_mtime-100000000))
            os.chmod(fname, 0400)
            m1 = __import__(TESTFN)
            self.assertEqual(m1.x, 'original')
            # Change the file and then reimport it
            os.chmod(fname, 0600)
            with open(fname, 'w') as f:
                f.write("x = 'rewritten'\n")
            unload(TESTFN)
            m2 = __import__(TESTFN)
            self.assertEqual(m2.x, 'rewritten')
            # Now delete the source file and check the pyc was rewritten
            if check_impl_detail(pypy=False):
                unlink(fname)
                unload(TESTFN)
                m3 = __import__(TESTFN)
                self.assertEqual(m3.x, 'rewritten')
        finally:
            chmod_files(TESTFN)
            remove_files(TESTFN)
            unload(TESTFN)
            del sys.path[0]
    def test_imp_module(self):
        # Verify that the imp module can correctly load and find .py files
        # XXX (ncoghlan): It would be nice to use test_support.CleanImport
        # here, but that breaks because the os module registers some
        # handlers in copy_reg on import. Since CleanImport doesn't
        # revert that registration, the module is left in a broken
        # state after reversion. Reinitialising the module contents
        # and just reverting os.environ to its previous state is an OK
        # workaround
        orig_path = os.path
        orig_getenv = os.getenv
        with EnvironmentVarGuard():
            x = imp.find_module("os")
            new_os = imp.load_module("os", *x)
            self.assertIs(os, new_os)
            self.assertIs(orig_path, new_os.path)
            self.assertIsNot(orig_getenv, new_os.getenv)
    def test_module_with_large_stack(self, module='longlist'):
        # Regression test for http://bugs.python.org/issue561858.
        filename = module + os.extsep + 'py'
        # Create a file with a list of 65000 elements.
        with open(filename, 'w+') as f:
            f.write('d = [\n')
            for i in range(65000):
                f.write('"",\n')
            f.write(']')
        # Compile & remove .py file, we only need .pyc (or .pyo).
        with open(filename, 'r') as f:
            py_compile.compile(filename)
        if check_impl_detail(pypy=False):
            # pypy refuses to import a .pyc if the .py does not exist
            unlink(filename)
        # Need to be able to load from current dir.
        sys.path.append('')
        # This used to crash.
        exec 'import ' + module
        reload(longlist)
        # Cleanup.
        del sys.path[-1]
        unlink(filename + 'c')
        unlink(filename + 'o')
    def test_failing_import_sticks(self):
        source = TESTFN + os.extsep + "py"
        with open(source, "w") as f:
            print >> f, "a = 1 // 0"
        # New in 2.4, we shouldn't be able to import that no matter how often
        # we try.
        sys.path.insert(0, os.curdir)
        try:
            for i in [1, 2, 3]:
                self.assertRaises(ZeroDivisionError, __import__, TESTFN)
                self.assertNotIn(TESTFN, sys.modules,
                                 "damaged module in sys.modules on %i try" % i)
        finally:
            del sys.path[0]
            remove_files(TESTFN)
    def test_failing_reload(self):
        # A failing reload should leave the module object in sys.modules.
        source = TESTFN + os.extsep + "py"
        with open(source, "w") as f:
            print >> f, "a = 1"
            print >> f, "b = 2"
        sys.path.insert(0, os.curdir)
        try:
            mod = __import__(TESTFN)
            self.assertIn(TESTFN, sys.modules)
            self.assertEqual(mod.a, 1, "module has wrong attribute values")
            self.assertEqual(mod.b, 2, "module has wrong attribute values")
            # On WinXP, just replacing the .py file wasn't enough to
            # convince reload() to reparse it. Maybe the timestamp didn't
            # move enough. We force it to get reparsed by removing the
            # compiled file too.
            remove_files(TESTFN)
            # Now damage the module.
            with open(source, "w") as f:
                print >> f, "a = 10"
                print >> f, "b = 20//0"
            self.assertRaises(ZeroDivisionError, imp.reload, mod)
            # But we still expect the module to be in sys.modules.
            mod = sys.modules.get(TESTFN)
            self.assertIsNot(mod, None, "expected module to be in sys.modules")
            # We should have replaced a w/ 10, but the old b value should
            # stick.
            self.assertEqual(mod.a, 10, "module has wrong attribute values")
            self.assertEqual(mod.b, 2, "module has wrong attribute values")
        finally:
            del sys.path[0]
            remove_files(TESTFN)
            unload(TESTFN)
    def test_infinite_reload(self):
        # http://bugs.python.org/issue742342 reports that Python segfaults
        # (infinite recursion in C) when faced with self-recursive reload()ing.
        sys.path.insert(0, os.path.dirname(__file__))
        try:
            import infinite_reload
        finally:
            del sys.path[0]
    def test_import_name_binding(self):
        # import x.y.z binds x in the current namespace.
        import test as x
        import test.test_support
        self.assertIs(x, test, x.__name__)
        self.assertTrue(hasattr(test.test_support, "__file__"))
        # import x.y.z as w binds z as w.
        import test.test_support as y
        self.assertIs(y, test.test_support, y.__name__)
    def test_import_initless_directory_warning(self):
        with check_warnings(('', ImportWarning)):
            # Just a random non-package directory we always expect to be
            # somewhere in sys.path...
            self.assertRaises(ImportError, __import__, "site-packages")
    def test_import_by_filename(self):
        path = os.path.abspath(TESTFN)
        with self.assertRaises(ImportError) as c:
            __import__(path)
        self.assertEqual("Import by filename is not supported.",
                         c.exception.args[0])
    def test_import_in_del_does_not_crash(self):
        # Issue 4236
        testfn = script_helper.make_script('', TESTFN, textwrap.dedent("""\
            import sys
            class C:
                def __del__(self):
                    import imp
            sys.argv.insert(0, C())
            """))
        try:
            script_helper.assert_python_ok(testfn)
        finally:
            unlink(testfn)
    def test_bug7732(self):
        # A directory named like a module must not crash find_module.
        source = TESTFN + '.py'
        os.mkdir(source)
        try:
            self.assertRaises((ImportError, IOError),
                              imp.find_module, TESTFN, ["."])
        finally:
            os.rmdir(source)
    def test_timestamp_overflow(self):
        # A modification timestamp larger than 2**32 should not be a problem
        # when importing a module (issue #11235).
        sys.path.insert(0, os.curdir)
        try:
            source = TESTFN + ".py"
            compiled = source + ('c' if __debug__ else 'o')
            with open(source, 'w') as f:
                pass
            try:
                os.utime(source, (2 ** 33 - 5, 2 ** 33 - 5))
            except OverflowError:
                self.skipTest("cannot set modification time to large integer")
            except OSError as e:
                if e.errno != getattr(errno, 'EOVERFLOW', None):
                    raise
                self.skipTest("cannot set modification time to large integer ({})".format(e))
            __import__(TESTFN)
            # The pyc file was created.
            os.stat(compiled)
        finally:
            del sys.path[0]
            remove_files(TESTFN)
    def test_pyc_mtime(self):
        # Test for issue #13863: .pyc timestamp sometimes incorrect on Windows.
        sys.path.insert(0, os.curdir)
        try:
            # Jan 1, 2012; Jul 1, 2012.
            mtimes = 1325376000, 1341100800
            # Different names to avoid running into import caching.
            tails = "spam", "eggs"
            for mtime, tail in zip(mtimes, tails):
                module = TESTFN + tail
                source = module + ".py"
                compiled = source + ('c' if __debug__ else 'o')
                # Create a new Python file with the given mtime.
                with open(source, 'w') as f:
                    f.write("# Just testing\nx=1, 2, 3\n")
                os.utime(source, (mtime, mtime))
                # Generate the .pyc/o file; if it couldn't be created
                # for some reason, skip the test.
                m = __import__(module)
                if not os.path.exists(compiled):
                    unlink(source)
                    self.skipTest("Couldn't create .pyc/.pyo file.")
                # Actual modification time of .py file.
                mtime1 = int(os.stat(source).st_mtime) & 0xffffffff
                # mtime that was encoded in the .pyc file.
                with open(compiled, 'rb') as f:
                    mtime2 = struct.unpack('<L', f.read(8)[4:])[0]
                unlink(compiled)
                unlink(source)
                self.assertEqual(mtime1, mtime2)
        finally:
            sys.path.pop(0)
class PycRewritingTests(unittest.TestCase):
    # Test that the `co_filename` attribute on code objects always points
    # to the right file, even when various things happen (e.g. both the .py
    # and the .pyc file are renamed).
    module_name = "unlikely_module_name"
    # Source written to disk for each test; captures the filename as seen
    # by the frame, by __file__, and by a function's code object.
    module_source = """
import sys
code_filename = sys._getframe().f_code.co_filename
module_filename = __file__
constant = 1
def func():
    pass
func_filename = func.func_code.co_filename
"""
    dir_name = os.path.abspath(TESTFN)
    file_name = os.path.join(dir_name, module_name) + os.extsep + "py"
    compiled_name = file_name + ("c" if __debug__ else "o")
    def setUp(self):
        # Snapshot sys.path and any pre-existing module of the same name.
        self.sys_path = sys.path[:]
        self.orig_module = sys.modules.pop(self.module_name, None)
        os.mkdir(self.dir_name)
        with open(self.file_name, "w") as f:
            f.write(self.module_source)
        sys.path.insert(0, self.dir_name)
    def tearDown(self):
        sys.path[:] = self.sys_path
        if self.orig_module is not None:
            sys.modules[self.module_name] = self.orig_module
        else:
            unload(self.module_name)
        unlink(self.file_name)
        unlink(self.compiled_name)
        rmtree(self.dir_name)
    def import_module(self):
        # Import the freshly written module and return the module object.
        ns = globals()
        __import__(self.module_name, ns, ns)
        return sys.modules[self.module_name]
    def test_basics(self):
        mod = self.import_module()
        self.assertEqual(mod.module_filename, self.file_name)
        self.assertEqual(mod.code_filename, self.file_name)
        self.assertEqual(mod.func_filename, self.file_name)
        del sys.modules[self.module_name]
        mod = self.import_module()
        if not sys.dont_write_bytecode:
            self.assertEqual(mod.module_filename, self.compiled_name)
        self.assertEqual(mod.code_filename, self.file_name)
        self.assertEqual(mod.func_filename, self.file_name)
    def test_incorrect_code_name(self):
        py_compile.compile(self.file_name, dfile="another_module.py")
        mod = self.import_module()
        self.assertEqual(mod.module_filename, self.compiled_name)
        self.assertEqual(mod.code_filename, self.file_name)
        self.assertEqual(mod.func_filename, self.file_name)
    @impl_detail("pypy refuses to import without a .py source", pypy=False)
    def test_module_without_source(self):
        target = "another_module.py"
        py_compile.compile(self.file_name, dfile=target)
        os.remove(self.file_name)
        mod = self.import_module()
        self.assertEqual(mod.module_filename, self.compiled_name)
        self.assertEqual(mod.code_filename, target)
        self.assertEqual(mod.func_filename, target)
    def test_foreign_code(self):
        # Splice a code object from a different file into the compiled
        # module and verify its co_filename survives the import.
        py_compile.compile(self.file_name)
        with open(self.compiled_name, "rb") as f:
            header = f.read(8)
            code = marshal.load(f)
        constants = list(code.co_consts)
        foreign_code = test_main.func_code
        pos = constants.index(1)
        constants[pos] = foreign_code
        code = type(code)(code.co_argcount, code.co_nlocals, code.co_stacksize,
                          code.co_flags, code.co_code, tuple(constants),
                          code.co_names, code.co_varnames, code.co_filename,
                          code.co_name, code.co_firstlineno, code.co_lnotab,
                          code.co_freevars, code.co_cellvars)
        with open(self.compiled_name, "wb") as f:
            f.write(header)
            marshal.dump(code, f)
        mod = self.import_module()
        self.assertEqual(mod.constant.co_filename, foreign_code.co_filename)
class PathsTests(unittest.TestCase):
    """Tests for importing from unusual sys.path entries."""
    path = TESTFN
    def setUp(self):
        os.mkdir(self.path)
        self.syspath = sys.path[:]
    def tearDown(self):
        rmtree(self.path)
        sys.path[:] = self.syspath
    # Regression test for http://bugs.python.org/issue1293.
    def test_trailing_slash(self):
        with open(os.path.join(self.path, 'test_trailing_slash.py'), 'w') as f:
            f.write("testdata = 'test_trailing_slash'")
        sys.path.append(self.path+'/')
        mod = __import__("test_trailing_slash")
        self.assertEqual(mod.testdata, 'test_trailing_slash')
        unload("test_trailing_slash")
    # Regression test for http://bugs.python.org/issue3677.
    def _test_UNC_path(self):
        with open(os.path.join(self.path, 'test_trailing_slash.py'), 'w') as f:
            f.write("testdata = 'test_trailing_slash'")
        # Create the UNC path, like \\myhost\c$\foo\bar.
        path = os.path.abspath(self.path)
        import socket
        hn = socket.gethostname()
        drive = path[0]
        unc = "\\\\%s\\%s$"%(hn, drive)
        unc += path[2:]
        try:
            os.listdir(unc)
        except OSError as e:
            if e.errno in (errno.EPERM, errno.EACCES):
                # See issue #15338
                self.skipTest("cannot access administrative share %r" % (unc,))
            raise
        sys.path.append(path)
        mod = __import__("test_trailing_slash")
        self.assertEqual(mod.testdata, 'test_trailing_slash')
        unload("test_trailing_slash")
    # The UNC test only makes sense on Windows, so attach it conditionally.
    if sys.platform == "win32":
        test_UNC_path = _test_UNC_path
class RelativeImportTests(unittest.TestCase):
    """Tests for explicit relative imports and __package__ handling."""
    def tearDown(self):
        unload("test.relimport")
    # Same cleanup is needed before each test as after.
    setUp = tearDown
    def test_relimport_star(self):
        # This will import * from .test_import.
        from . import relimport
        self.assertTrue(hasattr(relimport, "RelativeImportTests"))
    def test_issue3221(self):
        # Regression test for http://bugs.python.org/issue3221.
        def check_absolute():
            exec "from os import path" in ns
        def check_relative():
            exec "from . import relimport" in ns
        # Check both OK with __package__ and __name__ correct
        ns = dict(__package__='test', __name__='test.notarealmodule')
        check_absolute()
        check_relative()
        # Check both OK with only __name__ wrong
        ns = dict(__package__='test', __name__='notarealpkg.notarealmodule')
        check_absolute()
        check_relative()
        # Check relative fails with only __package__ wrong
        ns = dict(__package__='foo', __name__='test.notarealmodule')
        with check_warnings(('.+foo', RuntimeWarning)):
            check_absolute()
        self.assertRaises(SystemError, check_relative)
        # Check relative fails with __package__ and __name__ wrong
        ns = dict(__package__='foo', __name__='notarealpkg.notarealmodule')
        with check_warnings(('.+foo', RuntimeWarning)):
            check_absolute()
        self.assertRaises(SystemError, check_relative)
        # Check both fail with package set to a non-string
        ns = dict(__package__=object())
        self.assertRaises(ValueError, check_absolute)
        self.assertRaises(ValueError, check_relative)
    def test_absolute_import_without_future(self):
        # If explicit relative import syntax is used, then do not try
        # to perform an absolute import in the face of failure.
        # Issue #7902.
        with self.assertRaises(ImportError):
            from .os import sep
            self.fail("explicit relative import triggered an "
                      "implicit absolute import")
class TestSymbolicallyLinkedPackage(unittest.TestCase):
    """Importing a package through a symlinked directory (issue 6727)."""
    package_name = 'sample'
    def setUp(self):
        # Clean up leftovers from a previous aborted run first.
        if os.path.exists(self.tagged):
            shutil.rmtree(self.tagged)
        if os.path.exists(self.package_name):
            symlink_support.remove_symlink(self.package_name)
        self.orig_sys_path = sys.path[:]
        # create a sample package; imagine you have a package with a tag and
        # you want to symbolically link it from its untagged name.
        os.mkdir(self.tagged)
        init_file = os.path.join(self.tagged, '__init__.py')
        open(init_file, 'w').close()
        assert os.path.exists(init_file)
        # now create a symlink to the tagged package
        # sample -> sample-tagged
        symlink_support.symlink(self.tagged, self.package_name)
        assert os.path.isdir(self.package_name)
        assert os.path.isfile(os.path.join(self.package_name, '__init__.py'))
    @property
    def tagged(self):
        # Directory the symlink points at, e.g. "sample-tagged".
        return self.package_name + '-tagged'
    # regression test for issue6727
    @unittest.skipUnless(
        not hasattr(sys, 'getwindowsversion')
        or sys.getwindowsversion() >= (6, 0),
        "Windows Vista or later required")
    @symlink_support.skip_unless_symlink
    def test_symlinked_dir_importable(self):
        # make sure sample can only be imported from the current directory.
        sys.path[:] = ['.']
        # and try to import the package
        __import__(self.package_name)
    def tearDown(self):
        # now cleanup
        if os.path.exists(self.package_name):
            symlink_support.remove_symlink(self.package_name)
        if os.path.exists(self.tagged):
            shutil.rmtree(self.tagged)
        sys.path[:] = self.orig_sys_path
def test_main(verbose=None):
    """Run every test class defined in this module."""
    cases = (ImportTests, PycRewritingTests, PathsTests,
             RelativeImportTests, TestSymbolicallyLinkedPackage)
    run_unittest(*cases)
if __name__ == '__main__':
    # Test needs to be a package, so we can do relative imports.
    # Re-import test_main through the package so __package__ is set.
    from test.test_import import test_main
    test_main()
|
{
"content_hash": "42178d39df1cf1faa239c9a55602ae03",
"timestamp": "",
"source": "github",
"line_count": 657,
"max_line_length": 93,
"avg_line_length": 36.81430745814308,
"alnum_prop": 0.5629056931409435,
"repo_name": "jedie/pypyjs-standalone",
"id": "4da6a3121726703f21facf89df94317d1359d677",
"size": "24187",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "website/js/pypy.js-0.3.0/lib/modules/test/test_import.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4486"
},
{
"name": "HTML",
"bytes": "5517"
},
{
"name": "JavaScript",
"bytes": "34356"
},
{
"name": "Python",
"bytes": "14806609"
},
{
"name": "Shell",
"bytes": "50"
}
],
"symlink_target": ""
}
|
class Printer(object):
    """Behaviour that prints the contents of any particle it collides with."""
    def __init__(self, owning_particle):
        super(Printer, self).__init__()
        # Back-reference to the particle this behaviour is attached to.
        self.owning_particle = owning_particle
    def collide(self, another_particle):
        # Python 2 print statement; assumes the colliding particle exposes
        # a `contents` attribute -- TODO confirm against the Particle class.
        print str(another_particle.contents)
    def __str__(self):
        return 'Printer'
class Reader(object):
    """Generic reader behaviour stub; currently just records its argument."""
    def __init__(self, arg):
        super(Reader, self).__init__()
        # Placeholder state until real read logic is implemented.
        self.arg = arg
class FileReader(object):
    """File-reading behaviour stub; currently just records its argument."""
    def __init__(self, arg):
        super(FileReader, self).__init__()
        # Placeholder state until real file-read logic is implemented.
        self.arg = arg
class FileWriter(object):
    """File-writing behaviour stub; currently just records its argument.

    Bug fix: the super() call previously referenced the misspelled name
    ``FileWritter``, which raised NameError on every instantiation.
    """
    def __init__(self, arg):
        super(FileWriter, self).__init__()
        self.arg = arg
class SocketReader(object):
    """Socket-reading behaviour stub; currently just records its argument."""
    def __init__(self, arg):
        super(SocketReader, self).__init__()
        # Placeholder state until real socket-read logic is implemented.
        self.arg = arg
class SocketWriter(object):
    """Socket-writing behaviour stub; currently just records its argument."""
    def __init__(self, arg):
        super(SocketWriter, self).__init__()
        # Placeholder state until real socket-write logic is implemented.
        self.arg = arg
|
{
"content_hash": "d7d4a6ee284d7976ebfacea18ce39f1f",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 46,
"avg_line_length": 25.61904761904762,
"alnum_prop": 0.5855018587360595,
"repo_name": "autowitch/moop",
"id": "0de3b480b9cdaa044cf8e761d47529e97a4e5fda",
"size": "1077",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/behaviours/io.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "33273"
}
],
"symlink_target": ""
}
|
import spirit.spiritlib as spiritlib
import ctypes
### Load Library
_spirit = spiritlib.LoadSpiritLibrary()
# The Bohr Magneton [meV / T]
_mu_B = _spirit.Constants_mu_B
_mu_B.argtypes = None
# NOTE(review): the original set `restype = scalar`, but `scalar` was never
# imported/defined here, so this module raised NameError on import. Spirit's
# scalar type is a C float by default -- if the core library is built with
# double precision this must be ctypes.c_double instead; confirm the build.
_mu_B.restype = ctypes.c_float
def mu_B():
    """Return the Bohr magneton in meV/T as reported by the core library."""
    return _mu_B()
# The Boltzmann constant [meV / K]
_k_B = _spirit.Constants_k_B
_k_B.argtypes = None
_k_B.restype = ctypes.c_float
def k_B():
    """Return the Boltzmann constant in meV/K as reported by the core library."""
    return _k_B()
|
{
"content_hash": "a74cd8d70e2e853da560411a7c5a92d9",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 42,
"avg_line_length": 21.1,
"alnum_prop": 0.6113744075829384,
"repo_name": "Disselkamp/spirit",
"id": "2234bc04e8bf3d51009c2e1c53f86236718965d3",
"size": "422",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "core/python/spirit/constants.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "16865"
},
{
"name": "C++",
"bytes": "1263385"
},
{
"name": "CMake",
"bytes": "83656"
},
{
"name": "CSS",
"bytes": "2638"
},
{
"name": "Cuda",
"bytes": "54310"
},
{
"name": "HTML",
"bytes": "37357"
},
{
"name": "JavaScript",
"bytes": "97435"
},
{
"name": "Julia",
"bytes": "990"
},
{
"name": "Python",
"bytes": "51142"
},
{
"name": "Shell",
"bytes": "846"
}
],
"symlink_target": ""
}
|
import six
from rally.common.i18n import _
class RallyException(Exception):
    """Root of the Rally exception hierarchy.

    Subclasses override ``msg_fmt``, a %-style format string that is
    interpolated with the keyword arguments given to the constructor.
    """
    msg_fmt = _("%(message)s")
    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs
        # Only inject `message` when the format actually consumes it.
        if "%(message)s" in self.msg_fmt:
            kwargs["message"] = message
        super(RallyException, self).__init__(self.msg_fmt % kwargs)
    def format_message(self):
        """Return the formatted message as unicode text."""
        return six.text_type(self)
# --- configuration / argument validation errors ---
class ImmutableException(RallyException):
    msg_fmt = _("This object is immutable.")
class InvalidArgumentsException(RallyException):
    msg_fmt = _("Invalid arguments: '%(message)s'")
class InvalidConfigException(RallyException):
    msg_fmt = _("This config has invalid schema: `%(message)s`")
class InvalidRunnerResult(RallyException):
    msg_fmt = _("Type of result of `%(name)s` runner should be"
                " `base.ScenarioRunnerResult`. Got: `%(results_type)s`")
class InvalidTaskException(InvalidConfigException):
    msg_fmt = _("Task config is invalid: `%(message)s`")
class NotFoundScenarios(InvalidTaskException):
    msg_fmt = _("There are no benchmark scenarios with names: `%(names)s`.")
class InvalidTaskConfig(InvalidTaskException):
    """Raised when a subtask's configuration fails validation."""
    # Bug fix: the third fragment previously started with "\S" (a typo for
    # "\n"), which rendered a literal backslash-S in the error message.
    msg_fmt = _("Input task is invalid!\n\n"
                "Subtask %(name)s[%(pos)s] has wrong configuration"
                "\nSubtask configuration:\n%(config)s\n"
                "\nReason:\n %(reason)s")
# --- generic lookup / runtime errors ---
class NotFoundException(RallyException):
    msg_fmt = _("The resource can not be found: %(message)s")
class ThreadTimeoutException(RallyException):
    msg_fmt = _("Iteration interrupted due to timeout.")
class PluginNotFound(NotFoundException):
    msg_fmt = _("There is no plugin with name: `%(name)s` in "
                "%(namespace)s namespace.")
class PluginWithSuchNameExists(RallyException):
    msg_fmt = _("Plugin with such name: %(name)s already exists in "
                "%(namespace)s namespace. It's module allocates at "
                "%(existing_path)s. You are trying to add plugin whose module "
                "allocates at %(new_path)s.")
class NoSuchConfigField(NotFoundException):
    msg_fmt = _("There is no field in the task config with name `%(name)s`.")
class NoSuchRole(NotFoundException):
    msg_fmt = _("There is no role with name `%(role)s`.")
class TaskNotFound(NotFoundException):
    msg_fmt = _("Task with uuid=%(uuid)s not found.")
# --- deployment lifecycle errors ---
class DeploymentNotFound(NotFoundException):
    msg_fmt = _("Deployment %(deployment)s not found.")
class DeploymentNameExists(RallyException):
    msg_fmt = _("Deployment name '%(deployment)s' already registered.")
class DeploymentIsBusy(RallyException):
    msg_fmt = _("There are allocated resources for the deployment with "
                "uuid=%(uuid)s.")
class RallyAssertionError(RallyException):
    msg_fmt = _("Assertion error: %(message)s")
class ResourceNotFound(NotFoundException):
    msg_fmt = _("Resource with id=%(id)s not found.")
class TimeoutException(RallyException):
    msg_fmt = _("Rally tired waiting for %(resource_type)s %(resource_name)s:"
                "%(resource_id)s to become %(desired_status)s current "
                "status %(resource_status)s")
class GetResourceFailure(RallyException):
    msg_fmt = _("Failed to get the resource %(resource)s: %(err)s")
class GetResourceNotFound(GetResourceFailure):
    msg_fmt = _("Resource %(resource)s is not found.")
class GetResourceErrorStatus(GetResourceFailure):
    msg_fmt = _("Resource %(resource)s has %(status)s status.\n"
                "Fault: %(fault)s")
class ScriptError(RallyException):
    msg_fmt = _("Script execution failed: %(message)s")
class TaskInvalidStatus(RallyException):
    msg_fmt = _("Task `%(uuid)s` in `%(actual)s` status but `%(require)s` is "
                "required.")
class ChecksumMismatch(RallyException):
    msg_fmt = _("Checksum mismatch for image: %(url)s")
# --- credential / connectivity errors ---
class InvalidAdminException(InvalidArgumentsException):
    msg_fmt = _("user '%(username)s' doesn't have 'admin' role")
class InvalidEndpointsException(InvalidArgumentsException):
    msg_fmt = _("wrong keystone credentials specified in your endpoint"
                " properties. (HTTP 401)")
class HostUnreachableException(InvalidArgumentsException):
    msg_fmt = _("unable to establish connection to the remote host: %(url)s")
class InvalidScenarioArgument(RallyException):
    msg_fmt = _("Invalid scenario argument: '%(message)s'")
class BenchmarkSetupFailure(RallyException):
    msg_fmt = _("Unable to setup benchmark: '%(message)s'")
class ContextSetupFailure(RallyException):
    msg_fmt = _("Unable to setup context '%(ctx_name)s': '%(msg)s'")
class ValidationError(RallyException):
    msg_fmt = _("Validation error: %(message)s")
class NoNodesFound(RallyException):
    msg_fmt = _("There is no nodes matching filters: %(filters)r")
class UnknownRelease(RallyException):
    msg_fmt = _("Unknown release '%(release)s'")
class CleanUpException(RallyException):
    msg_fmt = _("Cleanup failed.")
class ImageCleanUpException(CleanUpException):
    msg_fmt = _("Image Deletion Failed")
class IncompatiblePythonVersion(RallyException):
    msg_fmt = _("Incompatible python version found '%(version)s', "
                "required '%(required_version)s'")
class WorkerNotFound(NotFoundException):
    msg_fmt = _("Worker %(worker)s could not be found")
class WorkerAlreadyRegistered(RallyException):
    msg_fmt = _("Worker %(worker)s already registered")
class SaharaClusterFailure(RallyException):
    msg_fmt = _("Sahara cluster %(name)s has failed to %(action)s. "
                "Reason: '%(reason)s'")
class LiveMigrateException(RallyException):
    msg_fmt = _("Live Migration failed: %(message)s")
class MigrateException(RallyException):
    msg_fmt = _("Migration failed: %(message)s")
class InvalidHostException(RallyException):
    msg_fmt = _("Live Migration failed: %(message)s")
class MultipleMatchesFound(RallyException):
msg_fmt = _("Found multiple %(needle)s: %(haystack)s")
def __init__(self, **kwargs):
if "hint" in kwargs:
self.msg_fmt += ". Hint: %(hint)s"
super(MultipleMatchesFound, self).__init__(**kwargs)
class TempestConfigCreationFailure(RallyException):
msg_fmt = _("Unable to create Tempest config file: %(message)s")
class SSHTimeout(RallyException):
pass
class SSHError(RallyException):
pass
class InvalidConnectionString(RallyException):
msg_fmt = _("The connection string is not valid: %(message)s. Please "
"check your connection string.")
class DowngradeNotSupported(RallyException):
msg_fmt = _("Database schema downgrade is not supported.")
|
{
"content_hash": "a1396db25003da65e9d049c005346d3d",
"timestamp": "",
"source": "github",
"line_count": 247,
"max_line_length": 79,
"avg_line_length": 28.040485829959515,
"alnum_prop": 0.674126479930696,
"repo_name": "varuntiwari27/rally",
"id": "df739466ed2fcc8ef1ced3353d3d73dcc6818d31",
"size": "7556",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rally/exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "452687"
},
{
"name": "C++",
"bytes": "26115"
},
{
"name": "CSS",
"bytes": "6231"
},
{
"name": "HTML",
"bytes": "51546"
},
{
"name": "JavaScript",
"bytes": "14187"
},
{
"name": "Makefile",
"bytes": "68380"
},
{
"name": "Mako",
"bytes": "17949"
},
{
"name": "Python",
"bytes": "8478187"
},
{
"name": "Shell",
"bytes": "61579"
}
],
"symlink_target": ""
}
|
from pkg_resources import resource_filename
import pytest
import watershed
def test(*args):
    """Run the watershed package's test suite via pytest.

    Any positional arguments are forwarded to pytest as extra command-line
    options; returns pytest's exit code.
    """
    opts = [resource_filename('watershed', '')]
    opts += list(args)
    return pytest.main(opts)
|
{
"content_hash": "1ecea82141351069f4a13dfc0c645fa8",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 50,
"avg_line_length": 19.09090909090909,
"alnum_prop": 0.7238095238095238,
"repo_name": "phobson/watershed",
"id": "ecfb8ee20c2df31e5244ed457a0c64c8f39f3e06",
"size": "210",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "watershed/tests/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "28898"
}
],
"symlink_target": ""
}
|
"""Tests for losses.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from language.labs.consistent_zero_shot_nmt.models import losses
import tensorflow.compat.v1 as tf
class LossesTest(tf.test.TestCase):
  """Tests for losses."""

  def test_l2_distance(self):
    """Tests l2 distance."""
    with tf.Graph().as_default():
      vec_a = [1.0, 2.0]
      vec_b = [3.0, 4.0]
      plain = losses.l2_distance(x=vec_a, y=vec_b)
      normalized = losses.l2_distance(x=vec_a, y=vec_b, normalize=True)
      with tf.Session("") as sess:
        got_plain, got_normalized = sess.run([plain, normalized])
      self.assertAllClose([got_plain, got_normalized], [8.0, 0.0322602])
# Allow running this test module directly.
if __name__ == "__main__":
  tf.test.main()
|
{
"content_hash": "e923f4ac7ebde585a382176f780e9ae1",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 75,
"avg_line_length": 28.666666666666668,
"alnum_prop": 0.6382428940568475,
"repo_name": "google-research/language",
"id": "9dd280f4439353994ab0a216758663066e1fc49f",
"size": "1389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "language/labs/consistent_zero_shot_nmt/models/losses_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "9834"
},
{
"name": "CSS",
"bytes": "602"
},
{
"name": "HTML",
"bytes": "25162"
},
{
"name": "JavaScript",
"bytes": "8857"
},
{
"name": "Jupyter Notebook",
"bytes": "1505066"
},
{
"name": "Python",
"bytes": "7139472"
},
{
"name": "Shell",
"bytes": "183709"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
    """Build the tangible dewback pet-deed template object."""
    deed = Tangible()
    deed.template = "object/tangible/deed/pet_deed/shared_dewback_deed.iff"
    deed.attribute_template_id = 2
    deed.stfName("pet_deed", "dewback")

    #### BEGIN MODIFICATIONS ####
    # Table-driven form of the original three setStringAttribute calls;
    # same keys, same values, same order.
    string_attrs = (
        ("radial_filename", "radials/deed_datapad.py"),
        ("deed_pcd", "object/intangible/pet/shared_dewback_hue.iff"),
        ("deed_mobile", "object/mobile/shared_dewback_hue.iff"),
    )
    for attr_key, attr_value in string_attrs:
        deed.setStringAttribute(attr_key, attr_value)
    #### END MODIFICATIONS ####

    return deed
|
{
"content_hash": "707eed8a4d98436a26bb4a6c62175c94",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 86,
"avg_line_length": 33.875,
"alnum_prop": 0.7380073800738007,
"repo_name": "anhstudios/swganh",
"id": "6b35b52c0a6aefb560b63bf04bd0178f0d3a618f",
"size": "687",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/deed/pet_deed/shared_dewback_deed.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
from .resource import Resource
class VirtualMachineExtensionImage(Resource):
    """Describes a Virtual Machine Extension Image.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id
    :vartype id: str
    :ivar name: Resource name
    :vartype name: str
    :ivar type: Resource type
    :vartype type: str
    :param location: Required. Resource location
    :type location: str
    :param tags: Resource tags
    :type tags: dict[str, str]
    :param operating_system: Required. The operating system this extension
     supports.
    :type operating_system: str
    :param compute_role: Required. The type of role (IaaS or PaaS) this
     extension supports.
    :type compute_role: str
    :param handler_schema: Required. The schema defined by publisher, where
     extension consumers should provide settings in a matching schema.
    :type handler_schema: str
    :param vm_scale_set_enabled: Whether the extension can be used on xRP
     VMScaleSets. By default existing extensions are usable on scalesets, but
     there might be cases where a publisher wants to explicitly indicate the
     extension is only enabled for CRP VMs but not VMSS.
    :type vm_scale_set_enabled: bool
    :param supports_multiple_extensions: Whether the handler can support
     multiple extensions.
    :type supports_multiple_extensions: bool
    """

    # msrest validation rules: read-only fields come from the server;
    # required fields must be set before sending a request.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'operating_system': {'required': True},
        'compute_role': {'required': True},
        'handler_schema': {'required': True},
    }

    # Maps Python attribute names to their wire-format JSON paths and types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'operating_system': {'key': 'properties.operatingSystem', 'type': 'str'},
        'compute_role': {'key': 'properties.computeRole', 'type': 'str'},
        'handler_schema': {'key': 'properties.handlerSchema', 'type': 'str'},
        'vm_scale_set_enabled': {'key': 'properties.vmScaleSetEnabled', 'type': 'bool'},
        'supports_multiple_extensions': {'key': 'properties.supportsMultipleExtensions', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Initialize the model; unspecified properties default to None."""
        super(VirtualMachineExtensionImage, self).__init__(**kwargs)
        self.operating_system = kwargs.get('operating_system', None)
        self.compute_role = kwargs.get('compute_role', None)
        self.handler_schema = kwargs.get('handler_schema', None)
        self.vm_scale_set_enabled = kwargs.get('vm_scale_set_enabled', None)
        self.supports_multiple_extensions = kwargs.get('supports_multiple_extensions', None)
|
{
"content_hash": "25ea8c32b041ea7e2ad7af7c99a4a49e",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 105,
"avg_line_length": 42.5,
"alnum_prop": 0.6440336134453781,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "7273e2b847542395497089873cf0fd2c5e3bd757",
"size": "3449",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/virtual_machine_extension_image.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
}
|
"""
Ax_Metrics - Query component for ghost comparisons specification
------------------------------------------------------------------------------
Author: Dan Kamins <dos at axonchisel dot net>
Copyright (c) 2014 Dan Kamins, AxonChisel.net
"""
# ----------------------------------------------------------------------------
from axonchisel.metrics.foundation.ax.obj import AxObj
from axonchisel.metrics.foundation.chrono.ghost import Ghost
# ----------------------------------------------------------------------------
class QGhosts(AxObj):
    """
    Query component for ghosts comparison specification.
    Contains list of Ghost references (relative time specs).
    """

    def __init__(self):
        self._ghosts = list()

    #
    # Public Methods
    #

    def add_ghost(self, ghost):
        """Add a Ghost to the list.

        Raises TypeError if the argument is not a Ghost.
        """
        if not isinstance(ghost, Ghost):
            # Fix: the error message previously said "QData" (copied from a
            # sibling class); this class is QGhosts.
            raise TypeError("QGhosts expected Ghost, got: {t}".
                format(t=type(ghost)))
        self._ghosts.append(ghost)

    def count_ghosts(self):
        """Return number of Ghosts included."""
        return len(self._ghosts)

    def get_ghosts(self):
        """Get (shallow copy of) list of Ghosts."""
        return list(self._ghosts)

    #
    # Internal Methods
    #

    def __getitem__(self, key):
        """Allow indexing like a list itself."""
        return self._ghosts[key]

    def __unicode__(self):
        # Python 2 unicode repr listing the contained Ghosts.
        return (u"QGhosts({ghosts})"
            ).format(self=self,
                ghosts=u", ".join(map(unicode, self._ghosts))
            )
|
{
"content_hash": "2f2c0e8c7f3ad573e8e698c66e8ab6d4",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 78,
"avg_line_length": 23.772727272727273,
"alnum_prop": 0.5079668578712556,
"repo_name": "axonchisel/ax_metrics",
"id": "18d09a52770b9e5fac4fb87680c609ffe53be405",
"size": "1569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py/axonchisel/metrics/foundation/query/qghosts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "341761"
},
{
"name": "Shell",
"bytes": "1068"
}
],
"symlink_target": ""
}
|
"""
Base functionality useful to various parts of Twisted Names.
"""
import socket
from twisted.names import dns
from twisted.names.error import DNSFormatError, DNSServerError, DNSNameError
from twisted.names.error import DNSNotImplementedError, DNSQueryRefusedError
from twisted.names.error import DNSUnknownError
from twisted.internet import defer, error
from twisted.python import failure
EMPTY_RESULT = (), (), ()
class ResolverBase:
    """
    L{ResolverBase} is a base class for L{IResolver} implementations which
    deals with a lot of the boilerplate of implementing all of the lookup
    methods.

    @cvar _errormap: A C{dict} mapping DNS protocol failure response codes
        to exception classes which will be used to represent those failures.
    """
    _errormap = {
        dns.EFORMAT: DNSFormatError,
        dns.ESERVER: DNSServerError,
        dns.ENAME: DNSNameError,
        dns.ENOTIMP: DNSNotImplementedError,
        dns.EREFUSED: DNSQueryRefusedError}

    # Replaced per-instance in __init__ with a dict binding record-type
    # constants to bound lookup methods (from the module-level typeToMethod
    # mapping defined at the bottom of this module).
    typeToMethod = None

    def __init__(self):
        self.typeToMethod = {}
        # Bind each record-type constant to this instance's lookup method.
        for (k, v) in typeToMethod.items():
            self.typeToMethod[k] = getattr(self, v)

    def exceptionForCode(self, responseCode):
        """
        Convert a response code (one of the possible values of
        L{dns.Message.rCode} to an exception instance representing it.

        @since: 10.0
        """
        return self._errormap.get(responseCode, DNSUnknownError)

    def query(self, query, timeout = None):
        # Dispatch to the lookup method registered for this query's record
        # type.  Unknown record types yield a failed Deferred rather than
        # raising synchronously.  (Python 2 except syntax; `e` is unused.)
        try:
            return self.typeToMethod[query.type](str(query.name), timeout)
        except KeyError, e:
            return defer.fail(failure.Failure(NotImplementedError(str(self.__class__) + " " + str(query.type))))

    def _lookup(self, name, cls, type, timeout):
        # Subclasses override this to perform the actual DNS lookup; the
        # base implementation always fails.
        return defer.fail(NotImplementedError("ResolverBase._lookup"))

    def lookupAddress(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupAddress
        """
        return self._lookup(name, dns.IN, dns.A, timeout)

    def lookupIPV6Address(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupIPV6Address
        """
        return self._lookup(name, dns.IN, dns.AAAA, timeout)

    def lookupAddress6(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupAddress6
        """
        return self._lookup(name, dns.IN, dns.A6, timeout)

    def lookupMailExchange(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupMailExchange
        """
        return self._lookup(name, dns.IN, dns.MX, timeout)

    def lookupNameservers(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupNameservers
        """
        return self._lookup(name, dns.IN, dns.NS, timeout)

    def lookupCanonicalName(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupCanonicalName
        """
        return self._lookup(name, dns.IN, dns.CNAME, timeout)

    def lookupMailBox(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupMailBox
        """
        return self._lookup(name, dns.IN, dns.MB, timeout)

    def lookupMailGroup(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupMailGroup
        """
        return self._lookup(name, dns.IN, dns.MG, timeout)

    def lookupMailRename(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupMailRename
        """
        return self._lookup(name, dns.IN, dns.MR, timeout)

    def lookupPointer(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupPointer
        """
        return self._lookup(name, dns.IN, dns.PTR, timeout)

    def lookupAuthority(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupAuthority
        """
        return self._lookup(name, dns.IN, dns.SOA, timeout)

    def lookupNull(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupNull
        """
        return self._lookup(name, dns.IN, dns.NULL, timeout)

    def lookupWellKnownServices(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupWellKnownServices
        """
        return self._lookup(name, dns.IN, dns.WKS, timeout)

    def lookupService(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupService
        """
        return self._lookup(name, dns.IN, dns.SRV, timeout)

    def lookupHostInfo(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupHostInfo
        """
        return self._lookup(name, dns.IN, dns.HINFO, timeout)

    def lookupMailboxInfo(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupMailboxInfo
        """
        return self._lookup(name, dns.IN, dns.MINFO, timeout)

    def lookupText(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupText
        """
        return self._lookup(name, dns.IN, dns.TXT, timeout)

    def lookupResponsibility(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupResponsibility
        """
        return self._lookup(name, dns.IN, dns.RP, timeout)

    def lookupAFSDatabase(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupAFSDatabase
        """
        return self._lookup(name, dns.IN, dns.AFSDB, timeout)

    def lookupZone(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupZone
        """
        return self._lookup(name, dns.IN, dns.AXFR, timeout)

    def lookupNamingAuthorityPointer(self, name, timeout=None):
        """
        @see: twisted.names.client.lookupNamingAuthorityPointer
        """
        return self._lookup(name, dns.IN, dns.NAPTR, timeout)

    def lookupAllRecords(self, name, timeout = None):
        """
        @see: twisted.names.client.lookupAllRecords
        """
        return self._lookup(name, dns.IN, dns.ALL_RECORDS, timeout)

    def getHostByName(self, name, timeout = None, effort = 10):
        """
        @see: twisted.names.client.getHostByName
        """
        # XXX - respect timeout
        return self.lookupAllRecords(name, timeout
            ).addCallback(self._cbRecords, name, effort
            )

    def _cbRecords(self, (ans, auth, add), name, effort):
        # Python 2 tuple-unpacking parameter: receives the
        # (answers, authority, additional) triple from lookupAllRecords and
        # extracts an address from the combined record list, or raises
        # DNSLookupError when nothing usable was found.
        result = extractRecord(self, dns.Name(name), ans + auth + add, effort)
        if not result:
            raise error.DNSLookupError(name)
        return result
def extractRecord(resolver, name, answers, level = 10):
    """
    Pick an address for C{name} out of C{answers}, trying A6, then AAAA,
    then A records, and following CNAME chains.  C{level} bounds the
    recursion depth; when it reaches zero, None is returned.
    """
    if not level:
        return None
    # NOTE(review): all three address branches below call socket.inet_ntop,
    # so they are grouped under the hasattr guard; upstream Twisted handles
    # A records with inet_ntoa outside this guard — confirm against the
    # original vendored layout.
    if hasattr(socket, 'inet_ntop'):
        for r in answers:
            if r.name == name and r.type == dns.A6:
                return socket.inet_ntop(socket.AF_INET6, r.payload.address)
        for r in answers:
            if r.name == name and r.type == dns.AAAA:
                return socket.inet_ntop(socket.AF_INET6, r.payload.address)
        for r in answers:
            if r.name == name and r.type == dns.A:
                return socket.inet_ntop(socket.AF_INET, r.payload.address)
    # Follow CNAME chains, decrementing the recursion budget each hop.
    for r in answers:
        if r.name == name and r.type == dns.CNAME:
            result = extractRecord(resolver, r.payload.name, answers, level - 1)
            if not result:
                return resolver.getHostByName(str(r.payload.name), effort=level-1)
            return result
    # No answers, but maybe there's a hint at who we should be asking about this
    for r in answers:
        if r.type == dns.NS:
            from twisted.names import client
            r = client.Resolver(servers=[(str(r.payload.name), dns.PORT)])
            # Query the hinted nameserver, then shut its transport down
            # while passing the result (or failure) through.
            return r.lookupAddress(str(name)
                ).addCallback(lambda (ans, auth, add): extractRecord(r, name, ans + auth + add, level - 1)
                ).addBoth(lambda passthrough: (r.protocol.transport.stopListening(), passthrough)[1])
# Maps DNS record-type constants to the names of the corresponding
# ResolverBase lookup methods; ResolverBase.__init__ binds these names to
# bound methods on each instance.
typeToMethod = {
    dns.A: 'lookupAddress',
    dns.AAAA: 'lookupIPV6Address',
    dns.A6: 'lookupAddress6',
    dns.NS: 'lookupNameservers',
    dns.CNAME: 'lookupCanonicalName',
    dns.SOA: 'lookupAuthority',
    dns.MB: 'lookupMailBox',
    dns.MG: 'lookupMailGroup',
    dns.MR: 'lookupMailRename',
    dns.NULL: 'lookupNull',
    dns.WKS: 'lookupWellKnownServices',
    dns.PTR: 'lookupPointer',
    dns.HINFO: 'lookupHostInfo',
    dns.MINFO: 'lookupMailboxInfo',
    dns.MX: 'lookupMailExchange',
    dns.TXT: 'lookupText',
    dns.RP: 'lookupResponsibility',
    dns.AFSDB: 'lookupAFSDatabase',
    dns.SRV: 'lookupService',
    dns.NAPTR: 'lookupNamingAuthorityPointer',
    dns.AXFR: 'lookupZone',
    dns.ALL_RECORDS: 'lookupAllRecords',
    }
|
{
"content_hash": "38a3bbf7239ad17e75f01159d8f6d7a8",
"timestamp": "",
"source": "github",
"line_count": 261,
"max_line_length": 112,
"avg_line_length": 33.55555555555556,
"alnum_prop": 0.6141813199360585,
"repo_name": "sorenh/cc",
"id": "c76b2adf7607a98301259b62937821528aa62f7b",
"size": "8886",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "vendor/Twisted-10.0.0/twisted/names/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "707"
},
{
"name": "Python",
"bytes": "398663"
},
{
"name": "Shell",
"bytes": "12374"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class TickvalssrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``tickvalssrc`` property of
    ``scattergeo.marker.colorbar``."""

    def __init__(
        self,
        plotly_name="tickvalssrc",
        parent_name="scattergeo.marker.colorbar",
        **kwargs
    ):
        # Pull overridable defaults out of kwargs before forwarding the rest.
        edit_type = kwargs.pop("edit_type", "none")
        role = kwargs.pop("role", "info")
        super(TickvalssrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
|
{
"content_hash": "6687cac706e57278105154d836a3437b",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 70,
"avg_line_length": 29.823529411764707,
"alnum_prop": 0.5798816568047337,
"repo_name": "plotly/python-api",
"id": "4d4469e6521d096386b643c0d60e0bdfadcf7c62",
"size": "507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/scattergeo/marker/colorbar/_tickvalssrc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
import string
import copy
import sys
import re
import pyDDDParameters
import pprint
# ===============================================================
# ==========================================
# Find the type of a parametere, that we understand.
# Inputs:
# name - the type name
# tag - the pass mechanism
# resval - 1 for function return value, 0 for parameters
# The return value is a dict with the following entries:
# tag - the input parameter type. the following are supported
# simple - simple pass by value: (TYPE x)
# const - const pass by value: (const TYPE x)
# ptr - pass by pointer: (TYPE* x)
# cptr - pass by const pointer: (const TYPE* x)
# pptr - pass by pointer2: (TYPE** x)
# cpptr - pass by const pointer2: (const TYPE** x)
# ref - pass by reference: (TYPE& x)
# cref - const pass by reference (const TYPE& x)
# ptrref - ponter reference (TYPE*& x)
# cptrref - const ponter reference (const TYPE*& x)
# sig - the signature of the parameter= tag-name
# iscls - 1 for a class type, 0 otherwise
# isdb - 1 for a db class, 0 otherwise
def FindTypeName(self,name,tag,resval):
    """Classify type `name` passed with mechanism `tag` (resval=1 for a
    function return value).  Returns a dict with keys tag/utag/sig/cls/
    iscls/isdb as described in the comment block above this function.
    (Python 2 code: uses dict.has_key.)"""
    dct={
        'tag':tag,
        'utag':tag,
        'cls':None,
        'iscls':0,
        'isdb':0,
    }
    dct['sig']='%s-%s'%(tag,name)
    if (self.mClasses.has_key(name)):
        db=self.mClasses[name]['db']
        dct['isdb']=db
        #print "=== ",name,br
        if (db):
            # db classes: collapse one level of indirection in the usable tag.
            if (tag=='ptr'):
                dct['utag']='simple'
            elif (tag=='cptr'):
                if (resval):
                    dct['utag']='simpleconst'
                else:
                    dct['utag']='simple'
            elif (tag=='ptrref'):
                dct['utag']='ref'
            elif (tag=='pptr'):
                dct['utag']='ptr'
            elif (tag=='cpptr'):
                if (resval): return dct # NA
                dct['utag']='cptr'
            elif (tag=='cptrref'):
                if (resval): return dct # NA
                dct['utag']='cref'
            else:
                # Unsupported pass mechanism for a db class: cls stays None.
                return dct
        else:
            # Non-db classes: pointer-reference forms are not supported.
            #if (tag=='ptrref' and not resval): return dct
            if (tag=='ptrref'): return dct
            if (tag=='cptrref'): return dct
            #if (tag=='cptr' and resval and name!='char'):
            #    return dct
        dct['cls']=self.mClasses[name]
        dct['iscls']=1
        return dct
    if (self.mEnums.has_key(name)):
        # Enums are only usable by value (plain or const).
        if (tag!='simple' and tag!='const'): return dct
        dct['cls']=self.mEnums[name]
        return dct
    return dct
# ==========================================
# Find the a type reference
# decode the parameter description into its tag, type, and name
def FindTypeRef(self,name,resval=0):
    """Decode a C++ parameter/return type string into its pass-mechanism
    tag and base type, delegating to FindTypeName.  `resval`=1 marks a
    function return value (enables the special 'void' case)."""
    # Pad '*' and '&' with spaces so tokenizing by whitespace separates them.
    name=string.replace(name,'*',' * ')
    name=string.replace(name,'&',' & ')
    parts=string.split(name)
    # Fallback result when the string cannot be decoded.
    dct={
        'tag':'simple',
        'utag':'simple',
        'sig':'simple-'+name,
        'cls':None,
        'iscls':0,
        'isdb':0,
    }
    if (len(parts)==0):
        print "** Bad Name",name
        return dct
    if (parts[0]=='virtual'):
        # Strip a leading 'virtual ' (8 characters) from the raw string.
        parts=parts[1:]
        name=name[8:]
    # This allows 'char *' as builtin
    #result=FindTypeName(self,name,'simple',resval)
    #if (result['cls']!=None): return result
    # Check by parts
    if (len(parts)==1):
        if (parts[0]=='void' and resval):
            dct={
                'tag':'void',
                'utag':'void',
                'sig':'void-void',
                'cls':None,
                'iscls':0,
                'isdb':0,
            }
            return dct
        return FindTypeName(self,parts[0],'simple',resval)
    if (len(parts)==2):
        if (parts[1]=='*'):
            return FindTypeName(self,parts[0],'ptr',resval)
        if (parts[1]=='&'):
            return FindTypeName(self,parts[0],'ref',resval)
        if (parts[0]=='const'):
            return FindTypeName(self,parts[1],'simple',resval)
        return dct
    if (len(parts)==3):
        if (parts[0]=='const' and parts[2]=='&'):
            return FindTypeName(self,parts[1],'cref',resval)
        if (parts[0]=='const' and parts[2]=='*'):
            return FindTypeName(self,parts[1],'cptr',resval)
        if (parts[1]=='*' and parts[2]=='*'):
            return FindTypeName(self,parts[0],'pptr',resval)
        if (parts[1]=='*' and parts[2]=='&'):
            return FindTypeName(self,parts[0],'ptrref',resval)
        return dct
    if (len(parts)==4):
        if (parts[0]=='const' and parts[2]=='*' and parts[3]=='&'):
            return FindTypeName(self,parts[1],'cptrref',resval)
        if (parts[0]=='const' and parts[2]=='*' and parts[3]=='*'):
            return FindTypeName(self,parts[1],'cpptr',resval)
        return dct
# ===============================================================
# ==========================================
# Find the return type and parameter type of a global function
def LinkGlobalFunction(data,fun):
    """Resolve the return and parameter types of a global function `fun`,
    record its signature and link status on the dict, and return 1 if the
    function could be fully linked (0 otherwise)."""
    linked=1
    linkerror=[]
    funname=fun['name']
    if (fun.has_key('template')):
        linked=0
        linkerror.append("Templated functions not supported")
    #-------------------------------------------
    # Find result type
    rettype=fun['type']
    rvalue=FindTypeRef(data,rettype,1)
    sig=funname+'|'
    sig+=rvalue['sig']+'|'
    fun['type_link']=rvalue
    if (rvalue['tag']!='void' and rvalue['cls']==None):
        linked=0
        linkerror.append("Return value: %s"%rvalue['tag'])
    #-------------------------------------------
    # Find parameter type
    index=1
    for param in fun['param']:
        paramtype=param['type']
        value=FindTypeRef(data,paramtype)
        sig+=(value['sig']+',')
        param['type_link']=value
        #if (value['cls']==None and not param.has_key('defval')):
        if (value['cls']==None):
            linked=0
            linkerror.append("Parameter "+str(index))
        index+=1
    #-------------------------------------------
    # Set Manual
    if (linked and data.vIsManualGlobalFunction(sig)):
        fun['manual']=1
        linked=1
    skip=data.vIsSkipGlobalFunction(sig)
    if (skip): linked=0
    if (not linked and not skip):
        print "-- Function Link Error: %s, sig: %s %s"%(funname,sig,linkerror)
    fun['linked']=linked
    fun['sig']=sig
    return linked
# ==========================================
# Find the return type and parameter type of a class function
def LinkClassFunction(data,fun,cls):
    """Resolve the return and parameter types of class method `fun` on `cls`,
    record its signature, link status and virtualness, and return 1 when the
    method could be fully linked (0 otherwise)."""
    linked=1
    linkerror=[]
    clsname=cls['name']
    desname='~'+clsname
    funname=fun['name']
    funnames=string.split(funname)
    rettype=fun['type']
    sig=funname+'|'
    if (fun.has_key('template')):
        linked=0
        linkerror.append("Templated functions not supported")
    #-------------------------------------------
    # Find result type
    if (funname==clsname or funname==desname): # Const/Dest
        # Constructors/destructors have no return type in the signature.
        sig+='|'
    else:
        if (len(funnames)>=2 and funnames[0]=='operator' and rettype==''): # TypeCast
            # Conversion operators carry their type in the name itself.
            rettype=string.join(funnames[1:])
        rvalue=FindTypeRef(data,rettype,1)
        if (rvalue==None):
            print "** No return type for function:",funname
        sig+=rvalue['sig']+'|'
        fun['type_link']=rvalue
        if (rvalue['tag']!='void' and rvalue['cls']==None):
            linked=0
            linkerror.append("Return value: %s"%rettype)
    #-------------------------------------------
    # Find parameter type
    index=1
    for param in fun['param']:
        paramtype=param['type']
        value=FindTypeRef(data,paramtype,0)
        sig+=(value['sig']+',')
        param['type_link']=value
        #if (value['cls']==None and not param.has_key('defval')):
        if (value['cls']==None):
            linked=0
            linkerror.append("Parameter "+str(index))
        index+=1
    #-------------------------------------------
    # Set Manual
    if (linked and data.vIsManualClassFunction(clsname,sig)):
        fun['manual']=1
        linked=1
    #-------------------------------------------
    # Record data
    skip=data.vIsSkipClassFunction(clsname,sig)
    if (skip): linked=0
    if (not linked and not skip):
        print "-- Function Link Error: %s::%s, sig: %s %s"%(clsname,funname,sig,linkerror)
    if (data.vAllowVirtualClass(clsname)):
        clsvirt=fun.get('virt','')
        if (clsvirt=='virtual' or clsvirt=='pure-virtual'):
            cls['virt']=1
        if (clsvirt=='pure-virtual'):
            cls['pure-virt']=1
        # Demote virtuals that the configuration does not allow.
        if (fun['virt']=='virtual' and not data.vAllowVirtualFunction(clsname,sig)):
            fun['virt']='non-virtual'
    fun['linked']=linked
    fun['sig']=sig
    return linked
# ==========================================
# Find the return type and parameter type of all functions
def LinkAllFunctions(data):
    """Link every global and class function in `data`, dropping functions
    that fail to link, de-duplicating operator[], detecting missing
    copy/any constructors, and propagating 'virt' flags from base classes."""
    print "Linking Global Functions"
    sys.stderr.write("Linking Global Functions:\n")
    newfuns=[]
    for fun in data.mFunctions:
        if (LinkGlobalFunction(data,fun)>0):
            newfuns.append(fun)
    data.mFunctions=newfuns
    sys.stderr.write("Linking Class Functions:\n")
    clsnames=data.mClasses.keys()
    clsnames.sort()
    for clsname in clsnames:
        cls=data.mClasses[clsname]
        clsname=cls['name']
        if (cls['category']=='manual'): continue
        print "Linking Class Functions:",clsname
        #---------------------------------
        # Remove hidden functions
        for md in ['functions','sfunctions']:
            newlst=[]
            for fun in cls[md]:
                #print " Check Protect Class Function:",fun['name']
                if (fun['protect']=='public'):
                    newlst.append(fun)
            cls[md]=newlst
        #---------------------------------
        # Link the functions
        for md in ['functions','sfunctions']:
            newfuns=[]
            for fun in cls[md]:
                #print " Link Class Function:",fun['name']
                if (LinkClassFunction(data,fun,cls)>0):
                    newfuns.append(fun)
            cls[md]=newfuns
        #---------------------------------
        # Remove Duplicate operator[]
        # When both a ref-returning and a const/value-returning operator[]
        # exist, keep only the ref-returning one.
        rop=None
        cop=None
        for fun in cls['functions']:
            if (fun['name']=='operator[]'):
                rtag=fun['type_link']['utag']
                if (rtag=='ref'): rop=fun
                elif (rtag=='cref' or rtag=='simple'): cop=fun
        if (rop!=None and cop!=None):
            newfuns=[]
            for fun in cls['functions']:
                if (fun['name']=='operator[]'):
                    rtag=fun['type_link']['utag']
                    if (rtag=='cref' or rtag=='simple'): continue
                newfuns.append(fun)
            cls['functions']=newfuns
        #-------------------------------------------
        # Find missing copy constructors
        if (not cls.get('db',0)):
            needed=0
            # need a constructor if the class is not db and
            # it contains pointer member variables.
            for var in cls['vars'].values():
                rvalue=FindTypeRef(data,var['type'],0)
                if (rvalue['tag']!='void' and rvalue['cls']==None):
                    needed=1
                    break
                # NOTE(review): if rvalue['cls'] is None with tag 'void' the
                # next line would raise TypeError — confirm member variables
                # can never be declared 'void' before relying on this path.
                if (rvalue['cls']['name']==clsname): continue # skips ptrs to self in callbacks
                if (rvalue['cls'].get('db',0)): continue # ptrs to db classes are ok
                if (rvalue['tag']=='ptr' or rvalue['tag']=='cptr'):
                    needed=1
                    break
            # Look for constructors
            has_copy=0
            has_eq=0
            has_constr=0
            for fun in cls['functions']:
                #if (clsname=='oaComplexArray_oaDouble'):
                #    print "FUN:",
                #    pprint.pprint(fun)
                if (fun['name']=='operator='): has_eq=1
                if (fun['name']!=clsname): continue
                has_constr=1
                params=fun['param']
                if (len(params)!=1): continue
                type=params[0]['type_link']
                # Copy constructor = single (const) reference to own class.
                if (type['cls']!=None and
                    type['cls']['name']==clsname and
                    (type['tag']=='ref' or type['tag']=='cref')):
                    has_copy=1
            if (has_copy):
                cls['has_copy']=1
            skip=data.vIsIgnoreClassConstructor(clsname)
            if (needed):
                if (not has_copy and has_constr):
                    if (not skip):
                        print "-- Missing CopyConstructor: %s"%(clsname)
                    cls['no_copy']=1
                # The missing_eq check is not working
                #if (missing_eq):
                #    print "** Missing operator=: %s"%(clsname)
                #    cls['no_copy']=1
                if (not has_constr):
                    if (not skip):
                        print "-- Missing any constructor: %s"%(clsname)
                    cls['no_constr']=1
                    cls['no_copy']=1
            no_assign=data.vIsNoAssignClass(clsname)
            if (no_assign):
                cls['no_assign']=1
                print "-- Skipping assign: %s"%(clsname)
    #-------------------------------------------
    # Update Virtuals from base classes
    for clsname in clsnames:
        cls=data.mClasses[clsname]
        clsname=cls['name']
        if (cls['category']=='manual'): continue
        if (data.vAllowVirtualClass(clsname)):
            # Walk up the base-class chain; inherit 'virt' from any ancestor.
            base=cls
            while base!=None:
                if (base.has_key('virt')):
                    cls['virt']=1
                    break
                base=base['base']
|
{
"content_hash": "932eaf94390e13cd95ef00600bbaf241",
"timestamp": "",
"source": "github",
"line_count": 398,
"max_line_length": 88,
"avg_line_length": 32.04271356783919,
"alnum_prop": 0.518779894926684,
"repo_name": "henjo/pyoa",
"id": "abe966f7499329128997c96ee09ff5731d54a2c7",
"size": "13739",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Scripts/pyDDDFunction.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "68965810"
},
{
"name": "Python",
"bytes": "538924"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from djangobmf.dashboards import Warehouse
from djangobmf.sites import Category
class LocationCategory(Category):
    """Category registering "Locations" (slug ``locations``) under the
    Warehouse dashboard."""
    name = _('Locations')
    slug = "locations"
    dashboard = Warehouse
|
{
"content_hash": "16a4e2440a139268b14e6c9d7904802d",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 55,
"avg_line_length": 24.083333333333332,
"alnum_prop": 0.7612456747404844,
"repo_name": "django-bmf/django-bmf",
"id": "59b3f0821c61ec0cde55442f81b303ac49c504a3",
"size": "337",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "djangobmf/contrib/location/categories.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "11420"
},
{
"name": "CoffeeScript",
"bytes": "3197"
},
{
"name": "HTML",
"bytes": "117091"
},
{
"name": "JavaScript",
"bytes": "80435"
},
{
"name": "Python",
"bytes": "774167"
},
{
"name": "Shell",
"bytes": "736"
}
],
"symlink_target": ""
}
|
from sklearn2sql_heroku.tests.regression import generic as reg_gen
# Generated test entry point: GradientBoostingRegressor on the "freidman2"
# dataset against the "hive" backend.
reg_gen.test_model("GradientBoostingRegressor" , "freidman2" , "hive")
|
{
"content_hash": "7a506da5450e342e113bca48ab497a3a",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 70,
"avg_line_length": 35,
"alnum_prop": 0.7857142857142857,
"repo_name": "antoinecarme/sklearn2sql_heroku",
"id": "73cb79af6d3756b12cdd90dfd92665fb7d8018b0",
"size": "140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/regression/freidman2/ws_freidman2_GradientBoostingRegressor_hive_code_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "507043"
},
{
"name": "Procfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "1021137"
},
{
"name": "R",
"bytes": "2521"
}
],
"symlink_target": ""
}
|
import yaml
class Enemy:
    """An enemy read from the shared ``data/enemies.yml`` database.

    NOTE(review): all stats are *class* attributes and ``load_enemy`` also
    assigns onto the class, so every Enemy instance shares one set of loaded
    stats and only one enemy can be "loaded" at a time. Kept as-is because
    callers appear to read these values from the class itself.
    """
    # Shared enemy database, parsed once at class-definition time.
    # open() replaces the Python-2-only file() builtin; safe_load avoids
    # constructing arbitrary Python objects from the YAML document.
    enemy_file = open('data/enemies.yml')
    enemy_dat = yaml.safe_load(enemy_file)

    # Player Profile
    Name = ''
    Is_Person = ''
    Level = 0
    # Player Stats
    hp = 0
    str = 0
    agi = 0
    acc = 0
    end = 0
    spd = 0
    armor = 0
    # Equipped Melee
    weapon_name = ''
    weapon_alt_name = ''
    weapon_damage = 0
    weapon_range = 1
    damage_type = ''
    # Equipped Ranged
    ranged_name = ''
    ranged_alt_name = ''
    ranged_damage = 0
    ranged_ranged = 8
    max_ammo = 0
    ammo_count = 0
    reload_time = 0
    reload_count = 0
    # Inventory and Statuses
    Inventory = {}
    Condition = {}
    # Combat Status
    location = 7
    defended = False
    dodged = False

    def __init__(self, enemy_id):
        """Look up the raw record for *enemy_id* in the shared database."""
        self.id = enemy_id
        self.enemy = Enemy.enemy_dat[self.id]

    def load_enemy(self):
        """Copy this enemy's record onto the shared class-level stats."""
        Enemy.Name = self.enemy['Name']
        Enemy.Level = self.enemy['Level']
        Enemy.hp = self.enemy['stats']['hp']
        Enemy.str = self.enemy['stats']['str']
        Enemy.agi = self.enemy['stats']['agi']
        # NOTE(review): accuracy is copied from the 'agi' stat — looks like a
        # typo for ['acc']; confirm against data/enemies.yml before changing.
        Enemy.acc = self.enemy['stats']['agi']
        Enemy.end = self.enemy['stats']['end']
        Enemy.spd = self.enemy['stats']['spd']
        # Fix: armor was previously assigned twice with the same value.
        Enemy.armor = self.enemy['stats']['armor']
        # Enemy.Inventory = self.enemy['Inventory']
        # Enemy.condition = self.enemy['Condition']
        # Attack Stats
        Enemy.weapon_name = self.enemy['attack']['name']
        Enemy.weapon_alt_name = self.enemy['attack']['alt name']
        Enemy.weapon_damage = self.enemy['attack']['damage']
        Enemy.weapon_range = self.enemy['attack']['range']
        Enemy.damage_type = self.enemy['attack']['type']
|
{
"content_hash": "574420de9e80706baf636c28de75ea17",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 64,
"avg_line_length": 27.625,
"alnum_prop": 0.5571266968325792,
"repo_name": "DinkWerks/Kivy-Game",
"id": "2f2977b10d03b5b8cff8cde119147a7039cf9eb7",
"size": "1768",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/enemy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18700"
}
],
"symlink_target": ""
}
|
from html5lib import html5parser, sanitizer
import markdown
def parse(text):
    """Render Markdown *text* to HTML and return the sanitized result."""
    # Render the Markdown first (with the "extra" extension set enabled).
    rendered = markdown.markdown(text, extensions=["extra"], safe_mode=False)
    # Then strip disallowed markup by re-parsing the fragment through
    # html5lib's sanitizing tokenizer and re-serializing each child node.
    sanitizing_parser = html5parser.HTMLParser(tokenizer=sanitizer.HTMLSanitizer)
    fragment = sanitizing_parser.parseFragment(rendered)
    return "".join(node.toxml() for node in fragment.childNodes)
|
{
"content_hash": "3604da66356e715dab6c588eebdcb0cf",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 73,
"avg_line_length": 26.9375,
"alnum_prop": 0.7146171693735499,
"repo_name": "toulibre/symposion",
"id": "178708f655f9824bf2c6cfca7be4f0d82a3d9748",
"size": "431",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "symposion/markdown_parser.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "13131"
},
{
"name": "JavaScript",
"bytes": "79954"
},
{
"name": "Python",
"bytes": "167052"
}
],
"symlink_target": ""
}
|
from atom.api import Bool, Unicode, Enum, Typed, ForwardTyped, observe
from enaml.core.declarative import d_
from .container import Container, ProxyContainer
class ProxyGroupBox(ProxyContainer):
    """ The abstract definition of a proxy GroupBox object.

    Toolkit backends implement the ``set_*`` methods to push declaration
    state changes down to the native widget.
    """
    #: A reference to the GroupBox declaration.
    declaration = ForwardTyped(lambda: GroupBox)
    def set_title(self, title):
        """Update the widget's title text. Must be implemented by backends."""
        raise NotImplementedError
    def set_flat(self, flat):
        """Update the widget's flat (borderless) state. Backend-implemented."""
        raise NotImplementedError
    def set_title_align(self, align):
        """Update the alignment of the widget's title. Backend-implemented."""
        raise NotImplementedError
class GroupBox(Container):
    """ The GroupBox container, which introduces a group of widgets with
    a title and usually has a border.
    """
    #: The title displayed at the top of the box.
    title = d_(Unicode())
    #: The flat parameter determines if the GroupBox is displayed with
    #: just the title and a header line (True) or with a full border
    #: (False, the default).
    flat = d_(Bool(False))
    #: The alignment of the title text. Defaults to 'left'.
    title_align = d_(Enum('left', 'right', 'center'))
    #: A reference to the ProxyGroupBox object.
    proxy = Typed(ProxyGroupBox)
    #--------------------------------------------------------------------------
    # Observers
    #--------------------------------------------------------------------------
    @observe(('title', 'flat', 'title_align'))
    def _update_proxy(self, change):
        """ An observer which sends state change to the proxy.
        """
        # The superclass handler implementation is sufficient.
        super(GroupBox, self)._update_proxy(change)
|
{
"content_hash": "2030eedab482b589667a394e787fdca3",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 79,
"avg_line_length": 30.58490566037736,
"alnum_prop": 0.6051819864281308,
"repo_name": "ContinuumIO/ashiba",
"id": "e67343e7b063343f7f83ad84d9a4f9e6fd579bdc",
"size": "1971",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enaml/enaml/widgets/group_box.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4560"
},
{
"name": "C",
"bytes": "738"
},
{
"name": "C++",
"bytes": "77464"
},
{
"name": "CSS",
"bytes": "2286"
},
{
"name": "Emacs Lisp",
"bytes": "1210"
},
{
"name": "HTML",
"bytes": "4891"
},
{
"name": "JavaScript",
"bytes": "17243"
},
{
"name": "Makefile",
"bytes": "4590"
},
{
"name": "Python",
"bytes": "3241535"
},
{
"name": "Shell",
"bytes": "119"
},
{
"name": "VimL",
"bytes": "1821"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.contrib.auth import models as auth_models
from django.utils.timezone import now
class SuggestionManager(models.Manager):
    """Manager whose default queryset contains only top-level comments.

    A "suggestion" is a Comment with no parent (``in_reply_to`` is None).
    """
    def get_query_set(self):
        base_queryset = super(SuggestionManager, self).get_query_set()
        return base_queryset.filter(in_reply_to=None)
class Comment(models.Model):
    """A comment in a threaded discussion.

    Top-level comments (``in_reply_to`` is None) are "suggestions".
    Replies point at their parent via ``in_reply_to``; every node keeps a
    pointer to its thread root in ``top_of_tree`` (the root points at
    itself), so the root's ``related`` reverse set holds the whole tree.
    """
    author = models.ForeignKey(auth_models.User, blank=True, null=True)
    # Fix: max_length must be an integer, not the string "200".
    title = models.CharField(max_length=200)
    description = models.TextField()
    created_at = models.DateTimeField()
    in_reply_to = models.ForeignKey("Comment", blank=True, null=True, related_name='children')
    top_of_tree = models.ForeignKey("Comment", blank=True, null=True, related_name='related')

    def __unicode__(self):
        return 'author=%r, title=%r, description=%r' % (self.author, self.title, self.description)

    def most_recent_comment(self):
        """Return the newest comment in this tree, or None if empty.

        Only meaningful on a tree root, where ``related`` holds all nodes.
        """
        if self.related.count():
            # Fix: order newest-first; the previous ascending order made
            # "most recent" return the *oldest* comment in the tree.
            return self.related.order_by('-created_at')[0]
        else:
            return None

    def descendant_count(self):
        """Return the number of comments below this one in the tree."""
        if self == self.top_of_tree:  # Easy case: the root knows the whole tree.
            return self.related.count() - 1
        else:
            # Direct children plus, recursively, their own descendants.
            return (self.children.count() +
                    sum(child.descendant_count() for child in self.children.all()))

    def save(self, *args, **kwargs):
        """Persist the comment, stamping ``created_at`` and ``top_of_tree``.

        Fix: accept and forward Django's standard ``save`` arguments
        (``using``, ``update_fields``, ...), which the previous
        zero-argument signature rejected with a TypeError.
        """
        if self.pk is None:
            self.created_at = now()
        ptr = self
        while ptr.in_reply_to is not None:
            ptr = ptr.in_reply_to
        # This ensures that the first time a top-level comment is saved,
        # its top_of_tree attribute can be saved correctly.
        if ptr == self:
            super(Comment, self).save()
        self.top_of_tree = ptr
        return super(Comment, self).save(*args, **kwargs)

    objects = models.Manager()
    suggestions = SuggestionManager()
|
{
"content_hash": "521c9e9791372b51b999ffdcd1aa8df3",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 98,
"avg_line_length": 35.58,
"alnum_prop": 0.6312535132096684,
"repo_name": "sorenh/python-django-suggestionbox",
"id": "82169f88ed831fff5e86a7c53701a9427b38a730",
"size": "1779",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "suggestionbox/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7000"
}
],
"symlink_target": ""
}
|
"""Privileges that can be granted by user to tasks and pools"""
from typing import Dict
class Privileges(object):
    """Privileges granted by the user to a task or pool."""

    # Whether the API and storage credentials are exported into the
    # task/pool environment.
    _exportApiAndStorageCredentialsInEnvironment: bool = False

    def __init__(self, exportCredentialsInEnv: bool = False):
        """Create a new :class:``Privileges``.

        :param bool exportCredentialsInEnv: export the API and storage
            credentials to the task/pool environment.
        """
        self._exportApiAndStorageCredentialsInEnvironment = exportCredentialsInEnv

    @classmethod
    def from_json(cls, json: Dict[str, str]):
        """Create the privileges from json.

        :param dict json: Dictionary representing the privileges
        :returns: The created :class:``Privileges``
        """
        shouldExportCredentialsInEnvironment: bool = json["exportApiAndStorageCredentialsInEnvironment"]
        # Use cls() rather than the class name so subclasses round-trip too.
        return cls(shouldExportCredentialsInEnvironment)

    def to_json(self) -> Dict[str, object]:
        """Get a dict ready to be json packed.

        :return: the json elements of the class.
        :rtype: `dict`
        """
        return {
            "exportApiAndStorageCredentialsInEnvironment": self._exportApiAndStorageCredentialsInEnvironment
        }

    def __eq__(self, other):
        if other is None:
            return False
        # Robustness fix: comparing against a non-Privileges value used to
        # raise AttributeError; NotImplemented lets Python fall back to its
        # default handling (ultimately False for unrelated types).
        if not isinstance(other, Privileges):
            return NotImplemented
        return (self._exportApiAndStorageCredentialsInEnvironment ==
                other._exportApiAndStorageCredentialsInEnvironment)

    def __str__(self) -> str:
        return "privileges: exportCredentialsInEnvironnement {}.".format(self._exportApiAndStorageCredentialsInEnvironment)

    def __repr__(self) -> str:
        return "privileges.Privileges(exportCredentialsInEnv: {})".format(self._exportApiAndStorageCredentialsInEnvironment)
|
{
"content_hash": "56ab21c58c5523bb40b57b00d9ad3233",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 133,
"avg_line_length": 40.90243902439025,
"alnum_prop": 0.7006559332140727,
"repo_name": "qarnot/qarnot-sdk-python",
"id": "1665ef4fe5591f75cbd362ac05fd720eb3201721",
"size": "1677",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qarnot/privileges.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2281"
},
{
"name": "Python",
"bytes": "412896"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import json
import six
import re
from moto.core.responses import BaseResponse
from moto.core.utils import camelcase_to_underscores, amzn_request_id
from .models import dynamodb_backends, dynamo_json_dump
def has_empty_keys_or_values(_dict):
    """Return True if *_dict* is an empty string, or is a dict that
    (recursively) contains an empty-string key or value.

    Non-dict, non-empty-string values always yield False.
    """
    if _dict == "":
        return True
    if not isinstance(_dict, dict):
        return False
    for key, value in _dict.items():
        if key == '' or value == '':
            return True
        if has_empty_keys_or_values(value):
            return True
    return False
def get_empty_str_error():
    """Build the (status, headers, body) tuple for the ValidationException
    raised when an AttributeValue contains an empty string.
    """
    error_type = 'com.amazonaws.dynamodb.v20111205#ValidationException'
    message = ('One or more parameter values were invalid: '
               'An AttributeValue may not contain an empty string')
    body = dynamo_json_dump({'__type': error_type, 'message': message})
    return (400, {'server': 'amazon.com'}, body)
class DynamoHandler(BaseResponse):
    """Routes DynamoDB wire-protocol requests to the moto in-memory backend.

    Each action method returns either a JSON string (sent with status 200)
    or a (status_code, headers, body) tuple for non-200 responses.
    """
    def get_endpoint_name(self, headers):
        """Parses request headers and extracts part of the X-Amz-Target
        that corresponds to a method of DynamoHandler
        ie: X-Amz-Target: DynamoDB_20111205.ListTables -> ListTables
        """
        # Headers are case-insensitive. Probably a better way to do this.
        match = headers.get('x-amz-target') or headers.get('X-Amz-Target')
        if match:
            return match.split(".")[1]
    def error(self, type_, message, status=400):
        # Build an error response tuple in the DynamoDB JSON error format.
        return status, self.response_headers, dynamo_json_dump({'__type': type_, 'message': message})
    @property
    def dynamodb_backend(self):
        """
        :return: DynamoDB2 Backend
        :rtype: moto.dynamodb2.models.DynamoDBBackend
        """
        return dynamodb_backends[self.region]
    @amzn_request_id
    def call_action(self):
        """Dispatch the request to the handler method named by X-Amz-Target."""
        self.body = json.loads(self.body or '{}')
        endpoint = self.get_endpoint_name(self.headers)
        if endpoint:
            # e.g. "ListTables" -> "list_tables" -> self.list_tables()
            endpoint = camelcase_to_underscores(endpoint)
            response = getattr(self, endpoint)()
            if isinstance(response, six.string_types):
                return 200, self.response_headers, response
            else:
                # Handler returned a (status, headers, body) tuple.
                status_code, new_headers, response_content = response
                self.response_headers.update(new_headers)
                return status_code, self.response_headers, response_content
        else:
            return 404, self.response_headers, ""
    def list_tables(self):
        """List table names, paginated via ExclusiveStartTableName / Limit."""
        body = self.body
        limit = body.get('Limit', 100)
        if body.get("ExclusiveStartTableName"):
            last = body.get("ExclusiveStartTableName")
            start = list(self.dynamodb_backend.tables.keys()).index(last) + 1
        else:
            start = 0
        all_tables = list(self.dynamodb_backend.tables.keys())
        if limit:
            tables = all_tables[start:start + limit]
        else:
            tables = all_tables[start:]
        response = {"TableNames": tables}
        # Only emit a continuation token when more tables remain.
        if limit and len(all_tables) > start + limit:
            response["LastEvaluatedTableName"] = tables[-1]
        return dynamo_json_dump(response)
    def create_table(self):
        """Create a table from the schema description in the request body."""
        body = self.body
        # get the table name
        table_name = body['TableName']
        # get the throughput
        throughput = body["ProvisionedThroughput"]
        # getting the schema
        key_schema = body['KeySchema']
        # getting attribute definition
        attr = body["AttributeDefinitions"]
        # getting the indexes
        global_indexes = body.get("GlobalSecondaryIndexes", [])
        local_secondary_indexes = body.get("LocalSecondaryIndexes", [])
        table = self.dynamodb_backend.create_table(table_name,
                                                   schema=key_schema,
                                                   throughput=throughput,
                                                   attr=attr,
                                                   global_indexes=global_indexes,
                                                   indexes=local_secondary_indexes)
        if table is not None:
            return dynamo_json_dump(table.describe())
        else:
            # Backend returns None when the table already exists.
            er = 'com.amazonaws.dynamodb.v20111205#ResourceInUseException'
            return self.error(er, 'Resource in use')
    def delete_table(self):
        """Delete a table; error if it does not exist."""
        name = self.body['TableName']
        table = self.dynamodb_backend.delete_table(name)
        if table is not None:
            return dynamo_json_dump(table.describe())
        else:
            er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
            return self.error(er, 'Requested resource not found')
    def tag_resource(self):
        """Attach tags to the table identified by ResourceArn."""
        table_arn = self.body['ResourceArn']
        tags = self.body['Tags']
        self.dynamodb_backend.tag_resource(table_arn, tags)
        return ''
    def untag_resource(self):
        """Remove the given tag keys from the table identified by ResourceArn."""
        table_arn = self.body['ResourceArn']
        tags = self.body['TagKeys']
        self.dynamodb_backend.untag_resource(table_arn, tags)
        return ''
    def list_tags_of_resource(self):
        """List a table's tags, paginated via NextToken (fixed page size 10)."""
        try:
            table_arn = self.body['ResourceArn']
            all_tags = self.dynamodb_backend.list_tags_of_resource(table_arn)
            all_tag_keys = [tag['Key'] for tag in all_tags]
            marker = self.body.get('NextToken')
            if marker:
                start = all_tag_keys.index(marker) + 1
            else:
                start = 0
            max_items = 10  # there is no default, but using 10 to make testing easier
            tags_resp = all_tags[start:start + max_items]
            next_marker = None
            if len(all_tags) > start + max_items:
                next_marker = tags_resp[-1]['Key']
            if next_marker:
                return json.dumps({'Tags': tags_resp,
                                   'NextToken': next_marker})
            return json.dumps({'Tags': tags_resp})
        except AttributeError:
            # Backend raises AttributeError for an unknown ARN.
            er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
            return self.error(er, 'Requested resource not found')
    def update_table(self):
        """Apply GSI updates and/or throughput changes to an existing table."""
        name = self.body['TableName']
        if 'GlobalSecondaryIndexUpdates' in self.body:
            table = self.dynamodb_backend.update_table_global_indexes(
                name, self.body['GlobalSecondaryIndexUpdates'])
        if 'ProvisionedThroughput' in self.body:
            throughput = self.body["ProvisionedThroughput"]
            table = self.dynamodb_backend.update_table_throughput(name, throughput)
        return dynamo_json_dump(table.describe())
    def describe_table(self):
        """Return a table's description, or ResourceNotFound."""
        name = self.body['TableName']
        try:
            table = self.dynamodb_backend.tables[name]
        except KeyError:
            er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
            return self.error(er, 'Requested resource not found')
        return dynamo_json_dump(table.describe(base_key='Table'))
    def put_item(self):
        """Handle PutItem, including Expected / simple ConditionExpression
        conditional writes (AND-joined attribute_exists / attribute_not_exists).
        """
        name = self.body['TableName']
        item = self.body['Item']
        if has_empty_keys_or_values(item):
            return get_empty_str_error()
        overwrite = 'Expected' not in self.body
        if not overwrite:
            expected = self.body['Expected']
        else:
            expected = None
        # Attempt to parse simple ConditionExpressions into an Expected
        # expression
        if not expected:
            condition_expression = self.body.get('ConditionExpression')
            if condition_expression and 'OR' not in condition_expression:
                cond_items = [c.strip()
                              for c in condition_expression.split('AND')]
                if cond_items:
                    expected = {}
                    overwrite = False
                    exists_re = re.compile('^attribute_exists\s*\((.*)\)$')
                    not_exists_re = re.compile(
                        '^attribute_not_exists\s*\((.*)\)$')
                    for cond in cond_items:
                        exists_m = exists_re.match(cond)
                        not_exists_m = not_exists_re.match(cond)
                        if exists_m:
                            expected[exists_m.group(1)] = {'Exists': True}
                        elif not_exists_m:
                            expected[not_exists_m.group(1)] = {'Exists': False}
        try:
            result = self.dynamodb_backend.put_item(name, item, expected, overwrite)
        except ValueError:
            # Backend signals a failed conditional write with ValueError.
            er = 'com.amazonaws.dynamodb.v20111205#ConditionalCheckFailedException'
            return self.error(er, 'A condition specified in the operation could not be evaluated.')
        if result:
            item_dict = result.to_json()
            item_dict['ConsumedCapacity'] = {
                'TableName': name,
                'CapacityUnits': 1
            }
            return dynamo_json_dump(item_dict)
        else:
            er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
            return self.error(er, 'Requested resource not found')
    def batch_write_item(self):
        """Handle BatchWriteItem: apply Put/Delete requests table by table."""
        table_batches = self.body['RequestItems']
        for table_name, table_requests in table_batches.items():
            for table_request in table_requests:
                # Each request dict has a single key: PutRequest or DeleteRequest.
                request_type = list(table_request.keys())[0]
                request = list(table_request.values())[0]
                if request_type == 'PutRequest':
                    item = request['Item']
                    self.dynamodb_backend.put_item(table_name, item)
                elif request_type == 'DeleteRequest':
                    keys = request['Key']
                    item = self.dynamodb_backend.delete_item(table_name, keys)
        response = {
            "ConsumedCapacity": [
                {
                    'TableName': table_name,
                    'CapacityUnits': 1.0,
                    'Table': {'CapacityUnits': 1.0}
                } for table_name, table_requests in table_batches.items()
            ],
            "ItemCollectionMetrics": {},
            "UnprocessedItems": {}
        }
        return dynamo_json_dump(response)
    def get_item(self):
        """Handle GetItem; a missing item yields 200 with an empty body."""
        name = self.body['TableName']
        key = self.body['Key']
        try:
            item = self.dynamodb_backend.get_item(name, key)
        except ValueError:
            er = 'com.amazon.coral.validate#ValidationException'
            return self.error(er, 'Validation Exception')
        if item:
            item_dict = item.describe_attrs(attributes=None)
            item_dict['ConsumedCapacity'] = {
                'TableName': name,
                'CapacityUnits': 0.5
            }
            return dynamo_json_dump(item_dict)
        else:
            # Item not found
            return 200, self.response_headers, '{}'
    def batch_get_item(self):
        """Handle BatchGetItem across multiple tables in one request."""
        table_batches = self.body['RequestItems']
        results = {
            "ConsumedCapacity": [],
            "Responses": {
            },
            "UnprocessedKeys": {
            }
        }
        for table_name, table_request in table_batches.items():
            keys = table_request['Keys']
            attributes_to_get = table_request.get('AttributesToGet')
            results["Responses"][table_name] = []
            for key in keys:
                item = self.dynamodb_backend.get_item(table_name, key)
                if item:
                    item_describe = item.describe_attrs(attributes_to_get)
                    results["Responses"][table_name].append(
                        item_describe["Item"])
            results["ConsumedCapacity"].append({
                "CapacityUnits": len(keys),
                "TableName": table_name
            })
        return dynamo_json_dump(results)
    def query(self):
        """Handle Query, supporting both KeyConditionExpression and the
        legacy KeyConditions syntax, plus projections, filters and indexes.
        """
        name = self.body['TableName']
        # {u'KeyConditionExpression': u'#n0 = :v0', u'ExpressionAttributeValues': {u':v0': {u'S': u'johndoe'}}, u'ExpressionAttributeNames': {u'#n0': u'username'}}
        key_condition_expression = self.body.get('KeyConditionExpression')
        projection_expression = self.body.get('ProjectionExpression')
        expression_attribute_names = self.body.get('ExpressionAttributeNames', {})
        filter_expression = self.body.get('FilterExpression')
        expression_attribute_values = self.body.get('ExpressionAttributeValues', {})
        # Substitute #name aliases in the projection with real attribute names.
        if projection_expression and expression_attribute_names:
            expressions = [x.strip() for x in projection_expression.split(',')]
            for expression in expressions:
                if expression in expression_attribute_names:
                    projection_expression = projection_expression.replace(expression, expression_attribute_names[expression])
        filter_kwargs = {}
        if key_condition_expression:
            value_alias_map = self.body.get('ExpressionAttributeValues', {})
            table = self.dynamodb_backend.get_table(name)
            # If table does not exist
            if table is None:
                return self.error('com.amazonaws.dynamodb.v20120810#ResourceNotFoundException',
                                  'Requested resource not found')
            index_name = self.body.get('IndexName')
            if index_name:
                all_indexes = (table.global_indexes or []) + \
                    (table.indexes or [])
                indexes_by_name = dict((i['IndexName'], i)
                                       for i in all_indexes)
                if index_name not in indexes_by_name:
                    raise ValueError('Invalid index: %s for table: %s. Available indexes are: %s' % (
                        index_name, name, ', '.join(indexes_by_name.keys())
                    ))
                index = indexes_by_name[index_name]['KeySchema']
            else:
                index = table.schema
            # Map real attribute names back to their #alias, if one was used.
            reverse_attribute_lookup = dict((v, k) for k, v in
                                            six.iteritems(self.body.get('ExpressionAttributeNames', {})))
            if " AND " in key_condition_expression:
                # Split into hash-key condition and (single) range condition.
                expressions = key_condition_expression.split(" AND ", 1)
                index_hash_key = [key for key in index if key['KeyType'] == 'HASH'][0]
                hash_key_var = reverse_attribute_lookup.get(index_hash_key['AttributeName'],
                                                            index_hash_key['AttributeName'])
                hash_key_regex = r'(^|[\s(]){0}\b'.format(hash_key_var)
                i, hash_key_expression = next((i, e) for i, e in enumerate(expressions)
                                              if re.search(hash_key_regex, e))
                hash_key_expression = hash_key_expression.strip('()')
                expressions.pop(i)
                # TODO implement more than one range expression and OR operators
                range_key_expression = expressions[0].strip('()')
                range_key_expression_components = range_key_expression.split()
                range_comparison = range_key_expression_components[1]
                if 'AND' in range_key_expression:
                    range_comparison = 'BETWEEN'
                    range_values = [
                        value_alias_map[range_key_expression_components[2]],
                        value_alias_map[range_key_expression_components[4]],
                    ]
                elif 'begins_with' in range_key_expression:
                    range_comparison = 'BEGINS_WITH'
                    range_values = [
                        value_alias_map[range_key_expression_components[1]],
                    ]
                else:
                    range_values = [value_alias_map[
                        range_key_expression_components[2]]]
            else:
                hash_key_expression = key_condition_expression
                range_comparison = None
                range_values = []
            hash_key_value_alias = hash_key_expression.split("=")[1].strip()
            # Temporary fix until we get proper KeyConditionExpression function
            hash_key = value_alias_map.get(hash_key_value_alias, {'S': hash_key_value_alias})
        else:
            # 'KeyConditions': {u'forum_name': {u'ComparisonOperator': u'EQ', u'AttributeValueList': [{u'S': u'the-key'}]}}
            key_conditions = self.body.get('KeyConditions')
            query_filters = self.body.get("QueryFilter")
            if key_conditions:
                hash_key_name, range_key_name = self.dynamodb_backend.get_table_keys_name(
                    name, key_conditions.keys())
                # Non-key conditions become filters on the result set.
                for key, value in key_conditions.items():
                    if key not in (hash_key_name, range_key_name):
                        filter_kwargs[key] = value
                if hash_key_name is None:
                    er = "'com.amazonaws.dynamodb.v20120810#ResourceNotFoundException"
                    return self.error(er, 'Requested resource not found')
                hash_key = key_conditions[hash_key_name][
                    'AttributeValueList'][0]
                if len(key_conditions) == 1:
                    range_comparison = None
                    range_values = []
                else:
                    if range_key_name is None and not filter_kwargs:
                        er = "com.amazon.coral.validate#ValidationException"
                        return self.error(er, 'Validation Exception')
                    else:
                        range_condition = key_conditions.get(range_key_name)
                        if range_condition:
                            range_comparison = range_condition[
                                'ComparisonOperator']
                            range_values = range_condition[
                                'AttributeValueList']
                        else:
                            range_comparison = None
                            range_values = []
            if query_filters:
                filter_kwargs.update(query_filters)
        index_name = self.body.get('IndexName')
        exclusive_start_key = self.body.get('ExclusiveStartKey')
        limit = self.body.get("Limit")
        scan_index_forward = self.body.get("ScanIndexForward")
        items, scanned_count, last_evaluated_key = self.dynamodb_backend.query(
            name, hash_key, range_comparison, range_values, limit,
            exclusive_start_key, scan_index_forward, projection_expression, index_name=index_name,
            expr_names=expression_attribute_names, expr_values=expression_attribute_values,
            filter_expression=filter_expression, **filter_kwargs
        )
        if items is None:
            er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
            return self.error(er, 'Requested resource not found')
        result = {
            "Count": len(items),
            'ConsumedCapacity': {
                'TableName': name,
                'CapacityUnits': 1,
            },
            "ScannedCount": scanned_count
        }
        # Select=COUNT omits the item bodies from the response.
        if self.body.get('Select', '').upper() != 'COUNT':
            result["Items"] = [item.attrs for item in items]
        if last_evaluated_key is not None:
            result["LastEvaluatedKey"] = last_evaluated_key
        return dynamo_json_dump(result)
    def scan(self):
        """Handle Scan with the legacy ScanFilter and/or FilterExpression."""
        name = self.body['TableName']
        filters = {}
        scan_filters = self.body.get('ScanFilter', {})
        for attribute_name, scan_filter in scan_filters.items():
            # Keys are attribute names. Values are tuples of (comparison,
            # comparison_value)
            comparison_operator = scan_filter["ComparisonOperator"]
            comparison_values = scan_filter.get("AttributeValueList", [])
            filters[attribute_name] = (comparison_operator, comparison_values)
        filter_expression = self.body.get('FilterExpression')
        expression_attribute_values = self.body.get('ExpressionAttributeValues', {})
        expression_attribute_names = self.body.get('ExpressionAttributeNames', {})
        exclusive_start_key = self.body.get('ExclusiveStartKey')
        limit = self.body.get("Limit")
        try:
            items, scanned_count, last_evaluated_key = self.dynamodb_backend.scan(name, filters,
                                                                                  limit,
                                                                                  exclusive_start_key,
                                                                                  filter_expression,
                                                                                  expression_attribute_names,
                                                                                  expression_attribute_values)
        except ValueError as err:
            er = 'com.amazonaws.dynamodb.v20111205#ValidationError'
            return self.error(er, 'Bad Filter Expression: {0}'.format(err))
        except Exception as err:
            er = 'com.amazonaws.dynamodb.v20111205#InternalFailure'
            return self.error(er, 'Internal error. {0}'.format(err))
        # Items should be a list, at least an empty one. Is None if table does not exist.
        # Should really check this at the beginning
        if items is None:
            er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
            return self.error(er, 'Requested resource not found')
        result = {
            "Count": len(items),
            "Items": [item.attrs for item in items],
            'ConsumedCapacity': {
                'TableName': name,
                'CapacityUnits': 1,
            },
            "ScannedCount": scanned_count
        }
        if last_evaluated_key is not None:
            result["LastEvaluatedKey"] = last_evaluated_key
        return dynamo_json_dump(result)
    def delete_item(self):
        """Handle DeleteItem; honors ReturnValues=ALL_OLD."""
        name = self.body['TableName']
        keys = self.body['Key']
        return_values = self.body.get('ReturnValues', '')
        table = self.dynamodb_backend.get_table(name)
        if not table:
            er = 'com.amazonaws.dynamodb.v20120810#ConditionalCheckFailedException'
            return self.error(er, 'A condition specified in the operation could not be evaluated.')
        item = self.dynamodb_backend.delete_item(name, keys)
        if item and return_values == 'ALL_OLD':
            item_dict = item.to_json()
        else:
            item_dict = {'Attributes': {}}
        item_dict['ConsumedCapacityUnits'] = 0.5
        return dynamo_json_dump(item_dict)
    def update_item(self):
        """Handle UpdateItem with UpdateExpression or legacy AttributeUpdates,
        including simple AND-joined ConditionExpression support.
        """
        name = self.body['TableName']
        key = self.body['Key']
        update_expression = self.body.get('UpdateExpression')
        attribute_updates = self.body.get('AttributeUpdates')
        expression_attribute_names = self.body.get(
            'ExpressionAttributeNames', {})
        expression_attribute_values = self.body.get(
            'ExpressionAttributeValues', {})
        existing_item = self.dynamodb_backend.get_item(name, key)
        if has_empty_keys_or_values(expression_attribute_values):
            return get_empty_str_error()
        if 'Expected' in self.body:
            expected = self.body['Expected']
        else:
            expected = None
        # Attempt to parse simple ConditionExpressions into an Expected
        # expression
        if not expected:
            condition_expression = self.body.get('ConditionExpression')
            if condition_expression and 'OR' not in condition_expression:
                cond_items = [c.strip()
                              for c in condition_expression.split('AND')]
                if cond_items:
                    expected = {}
                    exists_re = re.compile('^attribute_exists\s*\((.*)\)$')
                    not_exists_re = re.compile(
                        '^attribute_not_exists\s*\((.*)\)$')
                    for cond in cond_items:
                        exists_m = exists_re.match(cond)
                        not_exists_m = not_exists_re.match(cond)
                        if exists_m:
                            expected[exists_m.group(1)] = {'Exists': True}
                        elif not_exists_m:
                            expected[not_exists_m.group(1)] = {'Exists': False}
        # Support spaces between operators in an update expression
        # E.g. `a = b + c` -> `a=b+c`
        if update_expression:
            update_expression = re.sub(
                '\s*([=\+-])\s*', '\\1', update_expression)
        try:
            item = self.dynamodb_backend.update_item(
                name, key, update_expression, attribute_updates, expression_attribute_names,
                expression_attribute_values, expected
            )
        except ValueError:
            er = 'com.amazonaws.dynamodb.v20111205#ConditionalCheckFailedException'
            return self.error(er, 'A condition specified in the operation could not be evaluated.')
        except TypeError:
            er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
            return self.error(er, 'Validation Exception')
        item_dict = item.to_json()
        item_dict['ConsumedCapacity'] = {
            'TableName': name,
            'CapacityUnits': 0.5
        }
        if not existing_item:
            item_dict['Attributes'] = {}
        return dynamo_json_dump(item_dict)
    def describe_limits(self):
        """Return the static account/table capacity limits."""
        return json.dumps({
            'AccountMaxReadCapacityUnits': 20000,
            'TableMaxWriteCapacityUnits': 10000,
            'AccountMaxWriteCapacityUnits': 20000,
            'TableMaxReadCapacityUnits': 10000
        })
    def update_time_to_live(self):
        """Store a table's TTL specification and echo it back."""
        name = self.body['TableName']
        ttl_spec = self.body['TimeToLiveSpecification']
        self.dynamodb_backend.update_ttl(name, ttl_spec)
        return json.dumps({'TimeToLiveSpecification': ttl_spec})
    def describe_time_to_live(self):
        """Return a table's stored TTL specification."""
        name = self.body['TableName']
        ttl_spec = self.dynamodb_backend.describe_ttl(name)
        return json.dumps({'TimeToLiveDescription': ttl_spec})
|
{
"content_hash": "0c227be32a32575f7f65af9a9b046306",
"timestamp": "",
"source": "github",
"line_count": 620,
"max_line_length": 163,
"avg_line_length": 42.325806451612905,
"alnum_prop": 0.5459568630439753,
"repo_name": "rocky4570/moto",
"id": "e2f1ef1cc5bbf122eecdc2ea6498e9fc6addd530",
"size": "26242",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "moto/dynamodb2/responses.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "443"
},
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "1143"
},
{
"name": "Python",
"bytes": "4654762"
},
{
"name": "Ruby",
"bytes": "188"
},
{
"name": "Shell",
"bytes": "797"
}
],
"symlink_target": ""
}
|
from jsonschema import exceptions as js_exceptions
from jsonschema import validate
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
from stevedore import driver as stevedore_driver
from octavia_lib.api.drivers import data_models as driver_dm
from octavia_lib.api.drivers import exceptions
from octavia_lib.api.drivers import provider_base as driver_base
from octavia_lib.common import constants as lib_consts
from octavia.api.drivers.amphora_driver import availability_zone_schema
from octavia.api.drivers.amphora_driver import flavor_schema
from octavia.api.drivers import utils as driver_utils
from octavia.common import constants as consts
from octavia.common import data_models
from octavia.common import rpc
from octavia.common import utils
from octavia.db import api as db_apis
from octavia.db import repositories
from octavia.network import base as network_base
CONF = cfg.CONF
# Ensure the oslo_messaging option group is registered before use.
CONF.import_group('oslo_messaging', 'octavia.common.config')
LOG = logging.getLogger(__name__)
# Load-balancing algorithms the amphora driver implements.
AMPHORA_SUPPORTED_LB_ALGORITHMS = [
    consts.LB_ALGORITHM_ROUND_ROBIN,
    consts.LB_ALGORITHM_SOURCE_IP,
    consts.LB_ALGORITHM_LEAST_CONNECTIONS]
# Listener protocols the amphora driver can terminate or forward.
AMPHORA_SUPPORTED_PROTOCOLS = [
    lib_consts.PROTOCOL_TCP,
    lib_consts.PROTOCOL_HTTP,
    lib_consts.PROTOCOL_HTTPS,
    lib_consts.PROTOCOL_TERMINATED_HTTPS,
    lib_consts.PROTOCOL_PROXY,
    lib_consts.PROTOCOL_PROXYV2,
    lib_consts.PROTOCOL_UDP,
    lib_consts.PROTOCOL_SCTP,
    lib_consts.PROTOCOL_PROMETHEUS,
]
# Listener protocols on which L7 policies may be configured.
VALID_L7POLICY_LISTENER_PROTOCOLS = [
    lib_consts.PROTOCOL_HTTP,
    lib_consts.PROTOCOL_TERMINATED_HTTPS
]
class AmphoraProviderDriver(driver_base.ProviderDriver):
    """Octavia provider driver for the reference amphora (v1) backend.

    Validates incoming requests against what the amphora data plane
    supports, then dispatches create/update/delete operations to the
    controller worker as one-way oslo.messaging RPC casts.  Update
    methods also translate the octavia-lib provider field names into
    the names the v1 controller worker expects (e.g. ``admin_state_up``
    becomes ``enabled``).
    """
    def __init__(self):
        super().__init__()
        # RPC target for the controller worker queue; the topic is
        # configurable via the [oslo_messaging] config section.
        topic = cfg.CONF.oslo_messaging.topic
        self.target = messaging.Target(
            namespace=consts.RPC_NAMESPACE_CONTROLLER_AGENT,
            topic=topic, version="1.0", fanout=False)
        self.client = rpc.get_client(self.target)
        self.repositories = repositories.Repositories()
    def _validate_pool_algorithm(self, pool):
        # Reject LB algorithms the amphora backend cannot program.
        if pool.lb_algorithm not in AMPHORA_SUPPORTED_LB_ALGORITHMS:
            msg = ('Amphora provider does not support %s algorithm.'
                   % pool.lb_algorithm)
            raise exceptions.UnsupportedOptionError(
                user_fault_string=msg,
                operator_fault_string=msg)
    def _validate_listener_protocol(self, listener):
        # Reject listener protocols the amphora image does not support.
        if listener.protocol not in AMPHORA_SUPPORTED_PROTOCOLS:
            msg = ('Amphora provider does not support %s protocol. '
                   'Supported: %s'
                   % (listener.protocol,
                      ", ".join(AMPHORA_SUPPORTED_PROTOCOLS)))
            raise exceptions.UnsupportedOptionError(
                user_fault_string=msg,
                operator_fault_string=msg)
    def _validate_alpn_protocols(self, obj):
        # Reject ALPN protocol ids outside the supported set.  ``obj``
        # may be a listener or a pool; both carry ``alpn_protocols``.
        if not obj.alpn_protocols:
            return
        supported = consts.AMPHORA_SUPPORTED_ALPN_PROTOCOLS
        not_supported = set(obj.alpn_protocols) - set(supported)
        if not_supported:
            msg = ('Amphora provider does not support %s ALPN protocol(s). '
                   'Supported: %s'
                   % (", ".join(not_supported), ", ".join(supported)))
            raise exceptions.UnsupportedOptionError(
                user_fault_string=msg,
                operator_fault_string=msg)
    # Load Balancer
    def create_vip_port(self, loadbalancer_id, project_id, vip_dictionary,
                        additional_vip_dicts=None):
        """Allocate the VIP port via the network driver.

        Returns a (provider VIP dict, additional VIP list) tuple; the
        list is always empty because the v1 driver does not support
        additional VIPs.
        """
        if additional_vip_dicts:
            msg = ('Amphora v1 driver does not support additional_vips.')
            raise exceptions.UnsupportedOptionError(
                user_fault_string=msg,
                operator_fault_string=msg)
        vip_obj = driver_utils.provider_vip_dict_to_vip_obj(vip_dictionary)
        lb_obj = data_models.LoadBalancer(id=loadbalancer_id,
                                          project_id=project_id, vip=vip_obj)
        network_driver = utils.get_network_driver()
        vip_network = network_driver.get_network(
            vip_dictionary[lib_consts.VIP_NETWORK_ID])
        # The amphora relies on port security (security groups) on the
        # VIP network, so refuse networks that have it disabled.
        if not vip_network.port_security_enabled:
            message = "Port security must be enabled on the VIP network."
            raise exceptions.DriverError(user_fault_string=message,
                                         operator_fault_string=message)
        try:
            # allocated_vip returns (vip, add_vips), skipping the 2nd element
            # as amphorav1 doesn't support add_vips
            vip = network_driver.allocate_vip(lb_obj)[0]
        except network_base.AllocateVIPException as e:
            # Prefer the original neutron error message when available.
            message = str(e)
            if getattr(e, 'orig_msg', None) is not None:
                message = e.orig_msg
            raise exceptions.DriverError(user_fault_string=message,
                                         operator_fault_string=message)
        LOG.info('Amphora provider created VIP port %s for load balancer %s.',
                 vip.port_id, loadbalancer_id)
        return driver_utils.vip_dict_to_provider_dict(vip.to_dict()), []
    # TODO(johnsom) convert this to octavia_lib constant flavor
    # once octavia is transitioned to use octavia_lib
    def loadbalancer_create(self, loadbalancer):
        # Normalize Unset sentinel values to None before casting.
        if loadbalancer.flavor == driver_dm.Unset:
            loadbalancer.flavor = None
        if loadbalancer.availability_zone == driver_dm.Unset:
            loadbalancer.availability_zone = None
        payload = {consts.LOAD_BALANCER_ID: loadbalancer.loadbalancer_id,
                   consts.FLAVOR: loadbalancer.flavor,
                   consts.AVAILABILITY_ZONE: loadbalancer.availability_zone}
        self.client.cast({}, 'create_load_balancer', **payload)
    def loadbalancer_delete(self, loadbalancer, cascade=False):
        # ``cascade`` deletes all child objects along with the LB.
        loadbalancer_id = loadbalancer.loadbalancer_id
        payload = {consts.LOAD_BALANCER_ID: loadbalancer_id,
                   'cascade': cascade}
        self.client.cast({}, 'delete_load_balancer', **payload)
    def loadbalancer_failover(self, loadbalancer_id):
        payload = {consts.LOAD_BALANCER_ID: loadbalancer_id}
        self.client.cast({}, 'failover_load_balancer', **payload)
    def loadbalancer_update(self, old_loadbalancer, new_loadbalancer):
        # Adapt the provider data model to the queue schema
        lb_dict = new_loadbalancer.to_dict()
        if 'admin_state_up' in lb_dict:
            lb_dict['enabled'] = lb_dict.pop('admin_state_up')
        lb_id = lb_dict.pop('loadbalancer_id')
        # Put the qos_policy_id back under the vip element the controller
        # expects
        vip_qos_policy_id = lb_dict.pop('vip_qos_policy_id', None)
        if vip_qos_policy_id:
            vip_dict = {"qos_policy_id": vip_qos_policy_id}
            lb_dict["vip"] = vip_dict
        payload = {consts.LOAD_BALANCER_ID: lb_id,
                   consts.LOAD_BALANCER_UPDATES: lb_dict}
        self.client.cast({}, 'update_load_balancer', **payload)
    # Listener
    def listener_create(self, listener):
        self._validate_listener_protocol(listener)
        self._validate_alpn_protocols(listener)
        payload = {consts.LISTENER_ID: listener.listener_id}
        self.client.cast({}, 'create_listener', **payload)
    def listener_delete(self, listener):
        listener_id = listener.listener_id
        payload = {consts.LISTENER_ID: listener_id}
        self.client.cast({}, 'delete_listener', **payload)
    def listener_update(self, old_listener, new_listener):
        self._validate_alpn_protocols(new_listener)
        listener_dict = new_listener.to_dict()
        if 'admin_state_up' in listener_dict:
            listener_dict['enabled'] = listener_dict.pop('admin_state_up')
        listener_id = listener_dict.pop('listener_id')
        # Rename the *_ref keys to the *_id names the controller worker
        # expects, and drop the raw certificate data which it must not
        # receive over the queue.
        if 'client_ca_tls_container_ref' in listener_dict:
            listener_dict['client_ca_tls_container_id'] = listener_dict.pop(
                'client_ca_tls_container_ref')
        listener_dict.pop('client_ca_tls_container_data', None)
        if 'client_crl_container_ref' in listener_dict:
            listener_dict['client_crl_container_id'] = listener_dict.pop(
                'client_crl_container_ref')
        listener_dict.pop('client_crl_container_data', None)
        payload = {consts.LISTENER_ID: listener_id,
                   consts.LISTENER_UPDATES: listener_dict}
        self.client.cast({}, 'update_listener', **payload)
    # Pool
    def pool_create(self, pool):
        self._validate_pool_algorithm(pool)
        self._validate_alpn_protocols(pool)
        payload = {consts.POOL_ID: pool.pool_id}
        self.client.cast({}, 'create_pool', **payload)
    def pool_delete(self, pool):
        pool_id = pool.pool_id
        payload = {consts.POOL_ID: pool_id}
        self.client.cast({}, 'delete_pool', **payload)
    def pool_update(self, old_pool, new_pool):
        self._validate_alpn_protocols(new_pool)
        # Only validate the algorithm if the update actually changes it.
        if new_pool.lb_algorithm:
            self._validate_pool_algorithm(new_pool)
        pool_dict = new_pool.to_dict()
        if 'admin_state_up' in pool_dict:
            pool_dict['enabled'] = pool_dict.pop('admin_state_up')
        pool_id = pool_dict.pop('pool_id')
        # Translate TLS *_ref keys to controller-side names and strip
        # raw certificate payloads (see listener_update).
        if 'tls_container_ref' in pool_dict:
            pool_dict['tls_certificate_id'] = pool_dict.pop(
                'tls_container_ref')
        pool_dict.pop('tls_container_data', None)
        if 'ca_tls_container_ref' in pool_dict:
            pool_dict['ca_tls_certificate_id'] = pool_dict.pop(
                'ca_tls_container_ref')
        pool_dict.pop('ca_tls_container_data', None)
        if 'crl_container_ref' in pool_dict:
            pool_dict['crl_container_id'] = pool_dict.pop('crl_container_ref')
        pool_dict.pop('crl_container_data', None)
        payload = {consts.POOL_ID: pool_id,
                   consts.POOL_UPDATES: pool_dict}
        self.client.cast({}, 'update_pool', **payload)
    # Member
    def member_create(self, member):
        # Validate against the member's pool (IPv4/IPv6 mixing rules).
        pool_id = member.pool_id
        db_pool = self.repositories.pool.get(db_apis.get_session(),
                                             id=pool_id)
        self._validate_members(db_pool, [member])
        payload = {consts.MEMBER_ID: member.member_id}
        self.client.cast({}, 'create_member', **payload)
    def member_delete(self, member):
        member_id = member.member_id
        payload = {consts.MEMBER_ID: member_id}
        self.client.cast({}, 'delete_member', **payload)
    def member_update(self, old_member, new_member):
        member_dict = new_member.to_dict()
        if 'admin_state_up' in member_dict:
            member_dict['enabled'] = member_dict.pop('admin_state_up')
        member_id = member_dict.pop('member_id')
        payload = {consts.MEMBER_ID: member_id,
                   consts.MEMBER_UPDATES: member_dict}
        self.client.cast({}, 'update_member', **payload)
    def member_batch_update(self, pool_id, members):
        """Diff the requested member set against the DB and cast one batch.

        Splits ``members`` into new / updated / deleted relative to the
        pool's current members and sends all three lists in a single
        ``batch_update_members`` cast.
        """
        # The DB should not have updated yet, so we can still use the pool
        db_pool = self.repositories.pool.get(db_apis.get_session(), id=pool_id)
        self._validate_members(db_pool, members)
        old_members = db_pool.members
        old_member_ids = [m.id for m in old_members]
        # The driver will always pass objects with IDs.
        new_member_ids = [m.member_id for m in members]
        # Find members that are brand new or updated
        new_members = []
        updated_members = []
        for m in members:
            if m.member_id not in old_member_ids:
                new_members.append(m)
            else:
                # render_unsets=False keeps untouched fields out of the
                # update so the controller only changes what was sent.
                member_dict = m.to_dict(render_unsets=False)
                member_dict['id'] = member_dict.pop('member_id')
                if 'address' in member_dict:
                    member_dict['ip_address'] = member_dict.pop('address')
                if 'admin_state_up' in member_dict:
                    member_dict['enabled'] = member_dict.pop('admin_state_up')
                updated_members.append(member_dict)
        # Find members that are deleted
        deleted_members = []
        for m in old_members:
            if m.id not in new_member_ids:
                deleted_members.append(m)
        payload = {'old_member_ids': [m.id for m in deleted_members],
                   'new_member_ids': [m.member_id for m in new_members],
                   'updated_members': updated_members}
        self.client.cast({}, 'batch_update_members', **payload)
    def _validate_members(self, db_pool, members):
        """Reject member sets this backend cannot serve.

        :raises UnsupportedOptionError: when an LVS (SCTP/UDP) pool would
            mix IPv4 and IPv6 between the VIP and a member.
        """
        if db_pool.protocol in consts.LVS_PROTOCOLS:
            # For SCTP/UDP LBs, check that we are not mixing IPv4 and IPv6
            for member in members:
                member_is_ipv6 = utils.is_ipv6(member.address)
                for listener in db_pool.listeners:
                    lb = listener.load_balancer
                    vip_is_ipv6 = utils.is_ipv6(lb.vip.ip_address)
                    if member_is_ipv6 != vip_is_ipv6:
                        msg = ("This provider doesn't support mixing IPv4 and "
                               "IPv6 addresses for its VIP and members in {} "
                               "load balancers.".format(db_pool.protocol))
                        raise exceptions.UnsupportedOptionError(
                            user_fault_string=msg,
                            operator_fault_string=msg)
    # Health Monitor
    def health_monitor_create(self, healthmonitor):
        payload = {consts.HEALTH_MONITOR_ID: healthmonitor.healthmonitor_id}
        self.client.cast({}, 'create_health_monitor', **payload)
    def health_monitor_delete(self, healthmonitor):
        healthmonitor_id = healthmonitor.healthmonitor_id
        payload = {consts.HEALTH_MONITOR_ID: healthmonitor_id}
        self.client.cast({}, 'delete_health_monitor', **payload)
    def health_monitor_update(self, old_healthmonitor, new_healthmonitor):
        healthmon_dict = new_healthmonitor.to_dict()
        # Translate provider field names to the controller's schema.
        if 'admin_state_up' in healthmon_dict:
            healthmon_dict['enabled'] = healthmon_dict.pop('admin_state_up')
        if 'max_retries_down' in healthmon_dict:
            healthmon_dict['fall_threshold'] = healthmon_dict.pop(
                'max_retries_down')
        if 'max_retries' in healthmon_dict:
            healthmon_dict['rise_threshold'] = healthmon_dict.pop(
                'max_retries')
        healthmon_id = healthmon_dict.pop('healthmonitor_id')
        payload = {consts.HEALTH_MONITOR_ID: healthmon_id,
                   consts.HEALTH_MONITOR_UPDATES: healthmon_dict}
        self.client.cast({}, 'update_health_monitor', **payload)
    # L7 Policy
    def l7policy_create(self, l7policy):
        # L7 policies are only valid on HTTP-aware listener protocols.
        db_listener = self.repositories.listener.get(db_apis.get_session(),
                                                     id=l7policy.listener_id)
        if db_listener.protocol not in VALID_L7POLICY_LISTENER_PROTOCOLS:
            msg = ('%s protocol listeners do not support L7 policies' % (
                db_listener.protocol))
            raise exceptions.UnsupportedOptionError(
                user_fault_string=msg,
                operator_fault_string=msg)
        payload = {consts.L7POLICY_ID: l7policy.l7policy_id}
        self.client.cast({}, 'create_l7policy', **payload)
    def l7policy_delete(self, l7policy):
        l7policy_id = l7policy.l7policy_id
        payload = {consts.L7POLICY_ID: l7policy_id}
        self.client.cast({}, 'delete_l7policy', **payload)
    def l7policy_update(self, old_l7policy, new_l7policy):
        l7policy_dict = new_l7policy.to_dict()
        if 'admin_state_up' in l7policy_dict:
            l7policy_dict['enabled'] = l7policy_dict.pop('admin_state_up')
        l7policy_id = l7policy_dict.pop('l7policy_id')
        payload = {consts.L7POLICY_ID: l7policy_id,
                   consts.L7POLICY_UPDATES: l7policy_dict}
        self.client.cast({}, 'update_l7policy', **payload)
    # L7 Rule
    def l7rule_create(self, l7rule):
        payload = {consts.L7RULE_ID: l7rule.l7rule_id}
        self.client.cast({}, 'create_l7rule', **payload)
    def l7rule_delete(self, l7rule):
        l7rule_id = l7rule.l7rule_id
        payload = {consts.L7RULE_ID: l7rule_id}
        self.client.cast({}, 'delete_l7rule', **payload)
    def l7rule_update(self, old_l7rule, new_l7rule):
        l7rule_dict = new_l7rule.to_dict()
        if 'admin_state_up' in l7rule_dict:
            l7rule_dict['enabled'] = l7rule_dict.pop('admin_state_up')
        l7rule_id = l7rule_dict.pop('l7rule_id')
        payload = {consts.L7RULE_ID: l7rule_id,
                   consts.L7RULE_UPDATES: l7rule_dict}
        self.client.cast({}, 'update_l7rule', **payload)
    # Flavor
    def get_supported_flavor_metadata(self):
        """Returns the valid flavor metadata keys and descriptions.

        This extracts the valid flavor metadata keys and descriptions
        from the JSON validation schema and returns it as a dictionary.

        :return: Dictionary of flavor metadata keys and descriptions.
        :raises DriverError: An unexpected error occurred.
        """
        try:
            props = flavor_schema.SUPPORTED_FLAVOR_SCHEMA['properties']
            return {k: v.get('description', '') for k, v in props.items()}
        except Exception as e:
            raise exceptions.DriverError(
                user_fault_string='Failed to get the supported flavor '
                                  'metadata due to: {}'.format(str(e)),
                operator_fault_string='Failed to get the supported flavor '
                                      'metadata due to: {}'.format(str(e)))
    def validate_flavor(self, flavor_dict):
        """Validates flavor profile data.

        This will validate a flavor profile dataset against the flavor
        settings the amphora driver supports.

        :param flavor_dict: The flavor dictionary to validate.
        :type flavor: dict
        :return: None
        :raises DriverError: An unexpected error occurred.
        :raises UnsupportedOptionError: If the driver does not support
          one of the flavor settings.
        """
        try:
            validate(flavor_dict, flavor_schema.SUPPORTED_FLAVOR_SCHEMA)
        except js_exceptions.ValidationError as e:
            error_object = ''
            if e.relative_path:
                error_object = '{} '.format(e.relative_path[0])
            raise exceptions.UnsupportedOptionError(
                user_fault_string='{0}{1}'.format(error_object, e.message),
                operator_fault_string=str(e))
        except Exception as e:
            raise exceptions.DriverError(
                user_fault_string='Failed to validate the flavor metadata '
                                  'due to: {}'.format(str(e)),
                operator_fault_string='Failed to validate the flavor metadata '
                                      'due to: {}'.format(str(e)))
        # Beyond schema validation, check the referenced compute flavor
        # and glance image tag actually exist.
        compute_flavor = flavor_dict.get(consts.COMPUTE_FLAVOR, None)
        if compute_flavor:
            compute_driver = stevedore_driver.DriverManager(
                namespace='octavia.compute.drivers',
                name=CONF.controller_worker.compute_driver,
                invoke_on_load=True
            ).driver
            # TODO(johnsom) Fix this to raise a NotFound error
            # when the octavia-lib supports it.
            compute_driver.validate_flavor(compute_flavor)
        amp_image_tag = flavor_dict.get(consts.AMP_IMAGE_TAG, None)
        if amp_image_tag:
            image_driver = stevedore_driver.DriverManager(
                namespace='octavia.image.drivers',
                name=CONF.controller_worker.image_driver,
                invoke_on_load=True
            ).driver
            try:
                image_driver.get_image_id_by_tag(
                    amp_image_tag, CONF.controller_worker.amp_image_owner_id)
            except Exception as e:
                raise exceptions.NotFound(
                    user_fault_string='Failed to find an image with tag {} '
                                      'due to: {}'.format(
                                          amp_image_tag, str(e)),
                    operator_fault_string='Failed to find an image with tag '
                                          '{} due to: {}'.format(
                                              amp_image_tag, str(e)))
    # Availability Zone
    def get_supported_availability_zone_metadata(self):
        """Returns the valid availability zone metadata keys and descriptions.

        This extracts the valid availability zone metadata keys and
        descriptions from the JSON validation schema and returns it as a
        dictionary.

        :return: Dictionary of availability zone metadata keys and descriptions
        :raises DriverError: An unexpected error occurred.
        """
        try:
            props = (
                availability_zone_schema.SUPPORTED_AVAILABILITY_ZONE_SCHEMA[
                    'properties'])
            return {k: v.get('description', '') for k, v in props.items()}
        except Exception as e:
            raise exceptions.DriverError(
                user_fault_string='Failed to get the supported availability '
                                  'zone metadata due to: {}'.format(str(e)),
                operator_fault_string='Failed to get the supported '
                                      'availability zone metadata due to: '
                                      '{}'.format(str(e)))
    def validate_availability_zone(self, availability_zone_dict):
        """Validates availability zone profile data.

        This will validate an availability zone profile dataset against the
        availability zone settings the amphora driver supports.

        :param availability_zone_dict: The availability zone dict to validate.
        :type availability_zone_dict: dict
        :return: None
        :raises DriverError: An unexpected error occurred.
        :raises UnsupportedOptionError: If the driver does not support
          one of the availability zone settings.
        """
        try:
            validate(
                availability_zone_dict,
                availability_zone_schema.SUPPORTED_AVAILABILITY_ZONE_SCHEMA)
        except js_exceptions.ValidationError as e:
            error_object = ''
            if e.relative_path:
                error_object = '{} '.format(e.relative_path[0])
            raise exceptions.UnsupportedOptionError(
                user_fault_string='{0}{1}'.format(error_object, e.message),
                operator_fault_string=str(e))
        except Exception as e:
            raise exceptions.DriverError(
                user_fault_string='Failed to validate the availability zone '
                                  'metadata due to: {}'.format(str(e)),
                operator_fault_string='Failed to validate the availability '
                                      'zone metadata due to: {}'.format(str(e))
            )
        # Also confirm the referenced compute availability zone exists.
        compute_zone = availability_zone_dict.get(consts.COMPUTE_ZONE, None)
        if compute_zone:
            compute_driver = stevedore_driver.DriverManager(
                namespace='octavia.compute.drivers',
                name=CONF.controller_worker.compute_driver,
                invoke_on_load=True
            ).driver
            # TODO(johnsom) Fix this to raise a NotFound error
            # when the octavia-lib supports it.
            compute_driver.validate_availability_zone(compute_zone)
|
{
"content_hash": "628ce8b90d295c8394778b187e84885f",
"timestamp": "",
"source": "github",
"line_count": 533,
"max_line_length": 79,
"avg_line_length": 44.36210131332083,
"alnum_prop": 0.6016071050962148,
"repo_name": "openstack/octavia",
"id": "420ca1b7789502bababb22d4e1bfb7c600a23b3f",
"size": "24259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "octavia/api/drivers/amphora_driver/v1/driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "60600"
},
{
"name": "Mako",
"bytes": "922"
},
{
"name": "Python",
"bytes": "6651664"
},
{
"name": "Ruby",
"bytes": "531"
},
{
"name": "Shell",
"bytes": "117966"
}
],
"symlink_target": ""
}
|
import unittest
from walky.messenger import *
from _common import *
class Test(unittest.TestCase):
    """Exercise Messenger publish/subscribe routing."""

    def test_messenger(self):
        messenger = Messenger()
        catch_all = messenger.subscribe_all()
        sub_22 = messenger.subscribe_message_id(22)
        sub_123 = messenger.subscribe_message_id(123)

        # A published message reaches the catch-all subscriber and the
        # subscriber for its id, but not other id-specific subscribers.
        messenger.put('msg', 123)
        data, msg_id = catch_all.get()
        self.assertIsInstance(catch_all, MessengerSubscriber)
        self.assertEqual(data, 'msg')
        self.assertEqual(msg_id, 123)
        self.assertTrue(sub_22.empty())
        self.assertFalse(sub_123.empty())

        messenger.put('msg2', 22)
        self.assertFalse(sub_22.empty())
        data_22, id_22 = sub_22.get()
        self.assertEqual(data_22, 'msg2')
        self.assertEqual(id_22, 22)

        # flush() discards whatever is still queued on a subscriber.
        self.assertFalse(sub_123.empty())
        sub_123.flush()
        self.assertTrue(sub_123.empty())

        # get_single_message() returns just the payload, no id.
        messenger.put('msg3', 123)
        self.assertFalse(sub_123.empty())
        self.assertEqual(sub_123.get_single_message(), 'msg3')


if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "a0621ccd2a83823543aa7851b19da8a6",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 52,
"avg_line_length": 21.847826086956523,
"alnum_prop": 0.5791044776119403,
"repo_name": "amimoto/walky",
"id": "17291c51e902f15ea28e971c84ff18f6747c4cb5",
"size": "1005",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/081-messenger.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1415"
},
{
"name": "Python",
"bytes": "94380"
},
{
"name": "Shell",
"bytes": "465"
}
],
"symlink_target": ""
}
|
import wx
from cairis.core.armid import *
import WidgetFactory
__author__ = 'Shamal Faily'
class ChannelDialog(wx.Dialog):
  """Modal dialog collecting a channel name and its data type."""

  def __init__(self,parent):
    wx.Dialog.__init__(self,parent,CHANNEL_ID,'Add Channel',style=wx.DEFAULT_DIALOG_STYLE|wx.MAXIMIZE_BOX|wx.THICK_FRAME|wx.RESIZE_BORDER,size=(400,150))
    self.theChannelName = ''
    self.theDataType = ''
    # Two labelled text fields plus the Add/Cancel button row.
    sizer = wx.BoxSizer(wx.VERTICAL)
    sizer.Add(WidgetFactory.buildTextSizer(self,'Channel',(87,30),CHANNEL_TEXTCHANNEL_ID),0,wx.EXPAND)
    sizer.Add(WidgetFactory.buildTextSizer(self,'Data Type',(87,30),CHANNEL_TEXTDATATYPE_ID),0,wx.EXPAND)
    sizer.Add(WidgetFactory.buildAddCancelButtonSizer(self,CHANNEL_BUTTONADD_ID),0,wx.ALIGN_CENTER)
    self.SetSizer(sizer)
    wx.EVT_BUTTON(self,CHANNEL_BUTTONADD_ID,self.onAdd)

  def onAdd(self,evt):
    """Capture the field values; close the dialog only if both are set."""
    self.theChannelName = self.FindWindowById(CHANNEL_TEXTCHANNEL_ID).GetValue()
    self.theDataType = self.FindWindowById(CHANNEL_TEXTDATATYPE_ID).GetValue()
    if not self.theChannelName:
      self._reportError('No channel entered')
      return
    if not self.theDataType:
      self._reportError('No data type')
      return
    self.EndModal(CHANNEL_BUTTONADD_ID)

  def _reportError(self,message):
    # Show a modal warning and leave this dialog open for correction.
    dlg = wx.MessageDialog(self,message,'Add Channel',wx.OK)
    dlg.ShowModal()
    dlg.Destroy()

  def channel(self):
    """Return the channel name entered by the user."""
    return self.theChannelName

  def dataType(self):
    """Return the data type entered by the user."""
    return self.theDataType
|
{
"content_hash": "dbd176ddf022067b6fd58d9d5aff4892",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 153,
"avg_line_length": 37.85,
"alnum_prop": 0.7120211360634082,
"repo_name": "nathanbjenx/cairis",
"id": "8a58863165e2bddffee82adac6a65e0032cea004",
"size": "2313",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cairis/gui/ChannelDialog.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "588306"
},
{
"name": "Dockerfile",
"bytes": "829"
},
{
"name": "Gherkin",
"bytes": "1615"
},
{
"name": "HTML",
"bytes": "1664076"
},
{
"name": "JavaScript",
"bytes": "416319"
},
{
"name": "Mako",
"bytes": "13226"
},
{
"name": "PLpgSQL",
"bytes": "1494775"
},
{
"name": "Python",
"bytes": "4006311"
},
{
"name": "Shell",
"bytes": "7035"
}
],
"symlink_target": ""
}
|
from django.db import models
# Create your models here.
class Word(models.Model):
    # A single token from the indexed corpus.
    word = models.CharField(max_length=30)
class Document(models.Model):
    # External document identifier; -1 marks an unassigned document.
    docid = models.IntegerField(default=-1)
class Entry(models.Model):
    """Inverted-index posting: how often a word occurs in a document.

    The original declaration marked both foreign keys ``primary_key=True``,
    but Django does not support composite primary keys — declaring two
    primary-key fields on one model is invalid.  The (word, docid) pair is
    instead enforced unique via ``Meta.unique_together``, and Django adds
    its implicit auto ``id`` primary key.
    """
    word = models.ForeignKey('Word')
    docid = models.ForeignKey('Document')
    # Number of occurrences of `word` in `docid`.
    freq = models.IntegerField(default=0)

    class Meta:
        # One posting row per (word, document) pair.
        unique_together = ('word', 'docid')
|
{
"content_hash": "a4d17ccfce5ea19540d97393058fc4b7",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 56,
"avg_line_length": 22,
"alnum_prop": 0.7513368983957219,
"repo_name": "nh0815/PySearch",
"id": "97b293dbe0a2693b4ef931c57a6f3485249332a4",
"size": "374",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "search/models.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "341"
},
{
"name": "JavaScript",
"bytes": "1476"
},
{
"name": "Python",
"bytes": "14552"
}
],
"symlink_target": ""
}
|
from os.path import dirname, join
import subprocess
from dxr.testing import DxrInstanceTestCaseMakeFirst
from nose import SkipTest
from nose.tools import ok_
class MercurialTests(DxrInstanceTestCaseMakeFirst):
    """Test our Mercurial integration, both core and omniglot."""
    def test_diff_file1(self):
        """Make sure the diff link goes to the first after-initial commit."""
        response = self.client().get('/code/source/ChangedInCommit1')
        ok_('/diff/2e86c4e11a82f3ec17867468e499e85ec3cbf441/ChangedInCommit1" title="Diff" class="diff icon">Diff</a>' in response.data)
    def test_diff_file2(self):
        """Make sure the diff link goes to the second after-initial commit."""
        response = self.client().get('/code/source/ChangedInCommit2')
        ok_('/diff/cd18424a4dab95361e25e86398e557d3d889e2c8/ChangedInCommit2" title="Diff" class="diff icon">Diff</a>' in response.data)
    def test_diff_file3(self):
        """Make sure the diff link goes to the third after-initial commit."""
        response = self.client().get('/code/source/Filename With Space')
        ok_('/diff/1be3fc90ef0104cf186fac7bc0bbfea17ba6ebdc/Filename With Space" title="Diff" class="diff icon">Diff</a>' in response.data)
    def test_blame(self):
        """Make sure the blame link goes to the right place."""
        response = self.client().get('/code/source/ChangedInCommit1')
        ok_('/annotate/84798105c9ab5897f8c7d630d133d9003b44a62f/ChangedInCommit1" title="Blame" class="blame icon">Blame</a>' in response.data)
    def test_raw(self):
        """Make sure the raw link goes to the right place."""
        response = self.client().get('/code/source/ChangedInCommit2')
        ok_('/raw-file/84798105c9ab5897f8c7d630d133d9003b44a62f/ChangedInCommit2" title="Raw" class="raw icon">Raw</a>' in response.data)
    def test_log(self):
        """Make sure the log link goes to the right place."""
        response = self.client().get('/code/source/Filename With Space')
        ok_('/filelog/84798105c9ab5897f8c7d630d133d9003b44a62f/Filename With Space" title="Log" class="log icon">Log</a>' in response.data)
    def test_permalink(self):
        """Make sure the permalink exists, and that the response is ok."""
        # Flask's url_for will escape the url, so spaces become %20
        response = self.client().get('/code/source/Colon: name')
        ok_('/rev/84798105c9ab5897f8c7d630d133d9003b44a62f/Colon:%20name" title="Permalink" class="permalink icon">Permalink</a>' in response.data)
        response = self.client().get('/code/rev/84798105c9ab5897f8c7d630d133d9003b44a62f/Colon: name')
        # Bug fix: the original `ok_(response.status_code, 200)` passed 200
        # as the failure *message*, so the assertion could never fail.
        # Compare the status code for real.
        ok_(response.status_code == 200)
|
{
"content_hash": "2ffdd9a930898103df1819e583b39bf5",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 147,
"avg_line_length": 54.42857142857143,
"alnum_prop": 0.7022872140982377,
"repo_name": "bozzmob/dxr",
"id": "79c4927dd21f22c89b7fb01ebb4bfe11235b01d3",
"size": "2667",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_vcs_hg/test_vcs_hg.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1473"
},
{
"name": "C++",
"bytes": "83030"
},
{
"name": "CSS",
"bytes": "23369"
},
{
"name": "HTML",
"bytes": "42260"
},
{
"name": "IDL",
"bytes": "8448"
},
{
"name": "JavaScript",
"bytes": "48549"
},
{
"name": "Makefile",
"bytes": "5494"
},
{
"name": "Python",
"bytes": "644550"
},
{
"name": "Rust",
"bytes": "11539"
},
{
"name": "Shell",
"bytes": "5890"
}
],
"symlink_target": ""
}
|
from debile.slave.wrappers.perlcritic import parse_perlcritic
from debile.slave.utils import cd
from debile.utils.commands import run_command
def perlcritic(dsc, analysis):
    """Unpack a Debian source package and run perlcritic over it.

    Appends each parsed issue to ``analysis.results`` and returns the
    (analysis, raw output, failed flag, None, None) tuple the slave
    runner protocol expects.
    """
    run_command(["dpkg-source", "-x", dsc, "source-perlcritic"])
    with cd('source-perlcritic'):
        output, _, status = run_command([
            'perlcritic', '--brutal', '.', '--verbose',
            '%f:%l:%c %s %p %m\n'
        ])
        # Exit status 1 means perlcritic itself blew up; 2 means it ran
        # fine but found policy violations.
        if status == 1:
            raise Exception("Perlcritic had an internal error")
        failed = (status == 2)
        for result in parse_perlcritic(output.splitlines()):
            analysis.results.append(result)
        return (analysis, output, failed, None, None)
def version():
    """Return ('perlcritic', <version string>) or raise if not installed."""
    output, _, status = run_command(['perlcritic', '--version'])
    if status != 0:
        raise Exception("perlcritic is not installed")
    return ('perlcritic', output.strip())
|
{
"content_hash": "4c9b98aea92aba714420237e894f33d3",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 64,
"avg_line_length": 32.148148148148145,
"alnum_prop": 0.5956221198156681,
"repo_name": "opencollab/debile",
"id": "4b93e4d10eb6a63537b18b721eaf898c36958363",
"size": "2044",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "debile/slave/runners/perlcritic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1710"
},
{
"name": "Nginx",
"bytes": "1734"
},
{
"name": "Python",
"bytes": "366543"
},
{
"name": "Shell",
"bytes": "14590"
}
],
"symlink_target": ""
}
|
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import os.path as p
import swutils
import config
from subprocess import call
from functools import partial
from flask import current_app as app
from flask.ext.script import Manager
from tabutils.process import merge
from app import create_app, db, utils, models, __title__
# Flask-Script manager wrapping the app factory; --mode selects the
# config class (defaults to Development).
manager = Manager(create_app)
manager.add_option('-m', '--mode', default='Development')
manager.main = manager.run
# Directory containing this file; used to locate the bin/ scripts.
_basedir = p.dirname(__file__)
@manager.command
def check():
    """Check staged changes for lint errors"""
    # Delegates to the repo's bin/check-stage shell script.
    call(p.join(_basedir, 'bin', 'check-stage'), shell=True)
@manager.command
def lint():
    """Check style with flake8"""
    call('flake8')
@manager.command
def pipme():
    """Install requirements.txt"""
    call('sudo pip install -r requirements.txt', shell=True)
@manager.command
def require():
    """Create requirements.txt"""
    # Freeze everything except the dev-only packages.
    cmd = 'pip freeze -l | grep -vxFf dev-requirements.txt > requirements.txt'
    call(cmd, shell=True)
@manager.command
def test():
    """Run nose and script tests"""
    call('nosetests -xv', shell=True)
@manager.command
def createdb():
    """Creates database if it doesn't already exist"""
    # create_all() is a no-op for tables that already exist.
    with app.app_context():
        db.create_all()
        print('Database created')
@manager.command
def cleardb():
    """Removes all content from database"""
    # Drops every table, not just the rows.
    with app.app_context():
        db.drop_all()
        print('Database cleared')
@manager.command
def setup():
    """Removes all content from database and creates new tables"""
    with app.app_context():
        cleardb()
        createdb()
@manager.command
def run():
    """Populates all tables in db with most recent data"""
    with app.app_context():
        # Exceptions during scraping are emailed to RECIPIENT and logged.
        args = (config.RECIPIENT, app.config['LOGFILE'], __title__)
        exception_handler = swutils.ExceptionHandler(*args).handler
        kwargs = merge([app.config, {'models': models}])
        # Bind the scraper job, then run it immediately or on the
        # schedule configured in app.config['SW'].
        job = partial(swutils.populate, utils.gen_data, db.engine, **kwargs)
        swutils.run_or_schedule(job, app.config['SW'], exception_handler)
@manager.option(
    '-s', '--stag', help='upload to staging site', action='store_true')
def upload(stag=False):
    """Upload files to HDX"""
    # The bin/upload script takes the target site as its argument.
    site = 'stag' if stag else 'prod'
    call([p.join(_basedir, 'bin', 'upload'), site])
@manager.option(
    '-s', '--stag', help='update staging site', action='store_true')
def update(stag=False):
    """Update dataset metadata"""
    # NOTE: the help text previously read 'upload to staging site' — a
    # copy-paste from the upload command; the flag selects the staging
    # target for the update script.
    call([p.join(_basedir, 'bin', 'update'), 'stag' if stag else 'prod'])


if __name__ == '__main__':
    manager.run()
|
{
"content_hash": "c617646fcd713079ed6844b609d0413b",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 78,
"avg_line_length": 24.03669724770642,
"alnum_prop": 0.6541984732824427,
"repo_name": "reubano/hdxscraper-fao",
"id": "0cb9484762a3f135ca0fecf0f9b9201acd9018e7",
"size": "2642",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1504"
},
{
"name": "Makefile",
"bytes": "614"
},
{
"name": "Python",
"bytes": "16367"
},
{
"name": "Shell",
"bytes": "6237"
}
],
"symlink_target": ""
}
|
"""
Example of client query
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import ga4gh.client.client as client
def runDemo():
    """Query a local ga4gh server for variants and print one per line."""
    http_client = client.HttpClient("http://localhost:8000")
    variants = http_client.search_variants(
        "WyIxa2ctcDMtc3Vic2V0IiwidnMiLCJtdm5jYWxsIl0",
        reference_name="1", start=45000, end=50000)
    # Tab-separated: name, start, end, reference bases, alternate bases.
    for variant in variants:
        fields = (variant.reference_name, variant.start, variant.end,
                  variant.reference_bases, variant.alternate_bases)
        print(*fields, sep="\t")


if __name__ == '__main__':
    runDemo()
|
{
"content_hash": "957a5cf704f4ddb7a72b934f38091485",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 26.291666666666668,
"alnum_prop": 0.6735340729001584,
"repo_name": "ga4gh/server",
"id": "206f88c404a2d585d84e87c0051770b205dfd770",
"size": "653",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "scripts/demo_example.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7925"
},
{
"name": "Python",
"bytes": "1188360"
},
{
"name": "Shell",
"bytes": "973"
}
],
"symlink_target": ""
}
|
import os, sys
from pprint import pformat
from colourize import colourize, RED, BLACK, YELLOW
from properties import parse_properties
from ear import Ear, WebModule
### begin logging stuff
import logging
logging.basicConfig(format="%(message)s")
log = logging.getLogger(__name__)
# Thin wrappers over the module logger that add terminal colour.
def debug(s):
    # Debug output is dimmed (black) so it stands apart from info lines.
    log.debug(colourize(s, BLACK))
def info(s):
    log.info(s)
def error(s, code=1):
    # Errors print in red; a non-zero code also terminates the process.
    log.error(colourize(s, RED))
    if code:
        exit(code)
### end of logging stuff
def mkpath(*args):
    # Convenience alias for os.path.join.
    return os.path.join(*args)
def p_not_empty_nor_jar(s):
    """Predicate: true for non-empty paths that are not a '*.jar' glob."""
    if not s:
        return False
    return not s.endswith('*.jar')
def resolve_paths(env):
    """Derive the Tomcat directory layout from an environment mapping.

    CATALINA_HOME is required; CATALINA_BASE falls back to it, and the
    deploy directory falls back to <base>/webapps unless CATALINA_DEPLOY
    is set.  Returns a dict of resolved paths.
    """
    # NOTE: has_key() is a Python 2 idiom; this script targets Python 2
    # (it also uses raw_input elsewhere).
    if not env.has_key('CATALINA_HOME'):
        error("CATALINA_HOME must be set!")
    catalina_home = env.get('CATALINA_HOME')
    catalina_base = env.get('CATALINA_BASE', catalina_home)
    return {'catalina_home': catalina_home,
            'catalina_base': catalina_base,
            'catalina_deploy': env.get('CATALINA_DEPLOY', mkpath(catalina_base, 'webapps')),
            'catalina.properties': mkpath(catalina_base, 'conf', 'catalina.properties'),}
# Accepted spellings for yes/no answers.
YES = ["y", "yes"]
NO = ["n", "no"]
def prompt(message, validate_input, convert=str):
    """Repeatedly ask `message` until `validate_input` accepts the answer.

    The raw answer is passed through `convert` (e.g. int); a ValueError
    from the conversion simply re-prompts.  Python 2 only (raw_input).
    """
    input = None
    while not validate_input(input):
        sys.stdout.write(message + " ")
        try:
            input = convert(raw_input())
        except ValueError:
            pass
    return input
def overwrite_callback(dest, old_crc, new_crc):
    """Decide whether to overwrite `dest` when its checksum differs.

    Identical checksums mean the file is unchanged, so skip; otherwise
    ask the user interactively.
    """
    if (old_crc == new_crc):
        return False
    else:
        info("old_crc = %s, new_crc = %s" % (old_crc, new_crc))
        return prompt("File %s already exists and differs, overwrite? (yes|no)" % dest,
                      lambda x: x in YES + NO) in YES
def write_obj(fn, obj, path, overwrite_callback):
    """Write *obj* via fn(path, obj, overwrite_callback) and log the outcome."""
    written = fn(path, obj, overwrite_callback)
    prefix = "" if written else "SKIPPED "
    info("\t%s%s" % (prefix, obj))
if __name__ == "__main__":
    log.setLevel(logging.INFO)
    if len(sys.argv) != 2:
        error("Script expects an EAR file as it's single argument.")
    ear = Ear(sys.argv[1])
    debug("EAR libraries: " + pformat(ear.libraries))
    path = resolve_paths(os.environ)
    debug("Paths: " + pformat(path))
    # these need to be applied to property files
    env = {'catalina.home': path['catalina_home'],
           'catalina.base': path['catalina_base']}
    with open(path['catalina.properties']) as f:
        props = parse_properties(f, env)
    debug(pformat(props))
    commonl = props['common.loader']
    sharedl = props['shared.loader']
    serverl = props['server.loader']
    # NOTE(review): Python 2 semantics — filter() must return a list here,
    # because library_paths is indexed below; under Python 3 this breaks.
    library_paths = filter(p_not_empty_nor_jar,
                           set(list(commonl) + list(sharedl) + list(serverl)))
    debug("Library paths: " + pformat(library_paths))
    ### begin of user input
    print("These are the libraries contained in the EAR file, that need to be deployed:")
    print(pformat(ear.libraries) + "\n")
    print("Possible deployment targets are (read from catalina.properties):")
    # NOTE(review): the loop variable shadows the builtin `tuple`.
    for tuple in zip(range(len(library_paths)), library_paths):
        print("\t%s -> %s" % tuple)
    index = prompt("Where do you want the libraries to be deployed?",
                   lambda x: x in range(len(library_paths)),
                   int)
    library_path = library_paths[index]
    # print summary and let user decide whether to continue
    print("\nLibraries will be deployed here: %s" % colourize(library_path, YELLOW))
    web_path = path['catalina_deploy']
    print("Web modules will be deployed here: %s\n" % colourize(web_path, YELLOW))
    print("If you want to deploy the WEB(s) to a different directory, please")
    print("set the CATALINA_DEPLOY environment variable.\n")
    if prompt("Do you want to continue? (yes|no)",
              lambda x: x in YES + NO) in NO:
        error("Exiting, user decided not to continue.")
    # extract all library JARs
    info("Extracting libraries to %s..." % library_path)
    for library in ear.libraries:
        write_obj(ear.extract_library, library, library_path, overwrite_callback)
    # extract all WEBs
    info("Extracting WEBs to %s..." % web_path)
    for module in filter(lambda x: isinstance(x, WebModule), ear.modules):
        write_obj(ear.extract_module, module, web_path, overwrite_callback)
|
{
"content_hash": "99c3583b42234f538ce6bf7abeb24eb1",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 92,
"avg_line_length": 33.224806201550386,
"alnum_prop": 0.6297246850209987,
"repo_name": "MnM/tomcat-ear",
"id": "611039744f2f4ee2362d16a8bad2e4425c026d24",
"size": "4286",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/cli.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "14741"
},
{
"name": "Shell",
"bytes": "235"
}
],
"symlink_target": ""
}
|
import sys
import inspect
from collections import OrderedDict
class TracebackFancy:
    """None-tolerant wrapper around a traceback object."""

    def __init__(self, traceback):
        self.t = traceback

    def getFrame(self):
        """FrameFancy for this traceback entry's frame."""
        return FrameFancy(self.t.tb_frame)

    def getLineNumber(self):
        """Line number of this entry, or None for an empty wrapper."""
        if self.t is None:
            return None
        return self.t.tb_lineno

    def getNext(self):
        """Wrapper around the next (inner) traceback entry."""
        return TracebackFancy(self.t.tb_next)

    def __str__(self):
        # Recursively renders "name @ line" per entry; empty wrapper ends it.
        if self.t is None:
            return ""
        header = "%s @ %s" % (self.getFrame().getName(), self.getLineNumber())
        return header + "\n" + str(self.getNext())
class ExceptionFancy:
    """Wrapper around an exception's (traceback, type, value) triple.

    Supports both construction forms the original exposed: from a Python 2
    frame (which carries f_exc_* attributes) or from an explicit
    (traceback, type, value) triple.
    """

    def __init__(self, tb, ty=None, va=None):
        # BUG FIX: the original defined __init__ twice; the second definition
        # silently shadowed the first, making the frame-based form unreachable
        # (and it also read frame.exc_type instead of frame.f_exc_type).
        # Merge both forms: a frame argument is recognised by its
        # f_exc_traceback attribute (CPython 2 frames only).
        if ty is None and va is None and hasattr(tb, "f_exc_traceback"):
            self.etraceback = tb.f_exc_traceback
            self.etype = tb.f_exc_type
            self.evalue = tb.f_exc_value
        else:
            self.etraceback = tb
            self.etype = ty
            self.evalue = va

    def getTraceback(self):
        """TracebackFancy over the stored traceback."""
        return TracebackFancy(self.etraceback)

    def __nonzero__(self):  # Python 2 truthiness hook
        return self.etraceback is not None or self.etype is not None or self.evalue is not None

    __bool__ = __nonzero__  # Python 3 compatibility alias

    def getType(self):
        """String form of the exception type."""
        return str(self.etype)

    def getValue(self):
        """The exception value, unchanged."""
        return self.evalue
class CodeFancy:
    """None-tolerant accessors for a code object's common fields."""

    def __init__(self, code):
        self.c = code

    def getArgCount(self):
        """co_argcount, or 0 for an empty wrapper."""
        return 0 if self.c is None else self.c.co_argcount

    def getFilename(self):
        """co_filename, or '' for an empty wrapper."""
        return "" if self.c is None else self.c.co_filename

    def getVariables(self):
        """co_varnames, or [] for an empty wrapper."""
        return [] if self.c is None else self.c.co_varnames

    def getName(self):
        """co_name, or '' for an empty wrapper."""
        return "" if self.c is None else self.c.co_name

    def getFileName(self):
        # Kept for callers: duplicates getFilename().
        return "" if self.c is None else self.c.co_filename
class ArgsFancy:
    """Pretty accessors over a frame plus its inspect.getargvalues() result.

    *arginfo* is the (args, varargs, keywords, locals) tuple returned by
    inspect.getargvalues().
    """

    def __init__(self, frame, arginfo):
        self.f = frame
        self.a = arginfo

    def __str__(self):
        args, varargs, kwargs = self.getArgs(), self.getVarArgs(), self.getKWArgs()
        ret = ""
        count = 0
        size = len(args)
        for arg in args:
            ret = ret + ("%s = %s" % (arg, args[arg]))
            count = count + 1
            if count < size:
                ret = ret + ", "
        if varargs:
            if size > 0:
                ret = ret + " "
            ret = ret + "varargs are " + str(varargs)
        if kwargs:
            if size > 0:
                ret = ret + " "
            ret = ret + "kwargs are " + str(kwargs)
        return ret

    def getNumArgs(self, wantVarargs=False, wantKWArgs=False):
        """Number of declared args, optionally counting *args/**kwargs entries.

        BUG FIX: the original signature omitted *self*, so every call raised.
        """
        args, varargs, keywords, values = self.a
        size = len(args)
        if varargs and wantVarargs:
            size = size + len(self.getVarArgs())
        if keywords and wantKWArgs:
            size = size + len(self.getKWArgs())
        return size

    def getArgs(self):
        """OrderedDict of declared argument names to their current values."""
        args, _, _, values = self.a
        argWValues = OrderedDict()
        for arg in args:
            argWValues[arg] = values[arg]
        return argWValues

    def getVarArgs(self):
        """The *args tuple from the frame locals, or () if none declared."""
        _, vargs, _, _ = self.a
        if vargs:
            return self.f.f_locals[vargs]
        return ()

    def getKWArgs(self):
        """The **kwargs dict from the frame locals, or {} if none declared."""
        _, _, kwargs, _ = self.a
        if kwargs:
            return self.f.f_locals[kwargs]
        return {}
class FrameFancy:
    """None-tolerant accessors for a frame object."""

    def __init__(self, frame):
        self.f = frame

    def getCaller(self):
        """FrameFancy for the calling frame."""
        return FrameFancy(self.f.f_back)

    def getLineNumber(self):
        """Current line number, or 0 for an empty wrapper."""
        return 0 if self.f is None else self.f.f_lineno

    def getCodeInformation(self):
        """CodeFancy for this frame's code object, or None."""
        if self.f is None:
            return None
        return CodeFancy(self.f.f_code)

    def getExceptionInfo(self):
        """ExceptionFancy built from this frame, or None."""
        if self.f is None:
            return None
        return ExceptionFancy(self.f)

    def getName(self):
        """Function name from the code object, or '' for an empty wrapper."""
        return "" if self.f is None else self.getCodeInformation().getName()

    def getFileName(self):
        """Source filename from the code object, or '' for an empty wrapper."""
        return "" if self.f is None else self.getCodeInformation().getFileName()

    def getLocals(self):
        """The frame's locals mapping, or {} for an empty wrapper."""
        return {} if self.f is None else self.f.f_locals

    def getArgumentInfo(self):
        """ArgsFancy over this frame's argument values, or None."""
        if self.f is None:
            return None
        return ArgsFancy(self.f, inspect.getargvalues(self.f))
class TracerClass:
    """Base tracer: every hook is a no-op; subclasses override what they need."""

    def callEvent(self, frame):
        """Invoked on function entry."""

    def lineEvent(self, frame):
        """Invoked before each line executes."""

    def returnEvent(self, frame, retval):
        """Invoked on function return."""

    def exceptionEvent(self, frame, exception, value, traceback):
        """Invoked when an exception is raised."""

    def cCallEvent(self, frame, cfunct):
        """Invoked when a C function is called."""

    def cReturnEvent(self, frame, cfunct):
        """Invoked when a C function returns."""

    def cExceptionEvent(self, frame, cfunct):
        """Invoked when a C function raises."""
# Module-level tracer consulted by the trace callback below; swapped via enable().
tracer_impl = TracerClass()
def the_tracer_entrypoint(frame, event, args):
    """sys.settrace callback: dispatch each trace event to tracer_impl.

    For scope-opening events, returning this function keeps local tracing
    active for the frame; returning None stops tracing that scope (so a
    falsy return from the handler disables further tracing there).
    """
    if tracer_impl is None:
        return None
    if event == "call":
        call_retval = tracer_impl.callEvent(FrameFancy(frame))
        if not call_retval:
            return None
        return the_tracer_entrypoint
    elif event == "line":
        line_retval = tracer_impl.lineEvent(FrameFancy(frame))
        if not line_retval:
            return None
        return the_tracer_entrypoint
    elif event == "return":
        tracer_impl.returnEvent(FrameFancy(frame), args)
    elif event == "exception":
        # For "exception" events args is the (type, value, traceback) triple.
        exty, exva, extb = args
        exception_retval = tracer_impl.exceptionEvent(
            FrameFancy(frame), ExceptionFancy(extb, exty, exva))
        if not exception_retval:
            return None
        return the_tracer_entrypoint
    elif event == "c_call":
        tracer_impl.cCallEvent(FrameFancy(frame), args)
    elif event == "c_return":
        tracer_impl.cReturnEvent(FrameFancy(frame), args)
    elif event == "c_exception":
        tracer_impl.cExceptionEvent(FrameFancy(frame), args)
    return None
def enable(t=None):
    """Install the trace hook; optionally swap in tracer *t* first.

    A falsy *t* leaves the current tracer_impl in place (truthiness check
    is deliberate here, not an `is None` test).
    """
    global tracer_impl
    if t:
        tracer_impl = t
    sys.settrace(the_tracer_entrypoint)
def disable():
    """Remove the trace hook installed by enable()."""
    sys.settrace(None)
class LoggingTracer:
    """Tracer that prints every call/line/return/exception event (Python 2)."""
    def callEvent(self, frame):
        print "call " + frame.getName() + " from " + frame.getCaller().getName() + " @ " + str(frame.getCaller().getLineNumber()) + " args are " + str(frame.getArgumentInfo())
    def lineEvent(self, frame):
        print "running " + frame.getName() + " @ " + str(frame.getLineNumber()) + " locals are " + str(frame.getLocals()) + " in " + frame.getFileName()
    def returnEvent(self, frame, retval):
        print "return from " + frame.getName() + " value is " + str(retval) + " locals are " + str(frame.getLocals())
    def exceptionEvent(self, frame, exception):
        print "exception %s %s raised from %s @ %s" % (exception.getType(), str(exception.getValue()), frame.getName(), frame.getLineNumber())
        print "tb: " + str(exception.getTraceback())
# the same functionality as LoggingTracer, but with a little more
# lldb-specific smarts
class LLDBAwareTracer:
    """LoggingTracer variant that special-cases lldb's script interpreter.

    Skips <module> frames, summarises run_one_line() calls, abbreviates
    frames inside Python.framework, and filters the lldb-injected
    dict/internal_dict locals out of the printed state (Python 2).
    """
    def callEvent(self, frame):
        if frame.getName() == "<module>":
            return
        if frame.getName() == "run_one_line":
            print "call run_one_line(%s)" % (frame.getArgumentInfo().getArgs()["input_string"])
            return
        if "Python.framework" in frame.getFileName():
            print "call into Python at " + frame.getName()
            return
        # NOTE(review): hard-coded caller line 101 — presumably a known noisy
        # __init__ inside run_one_line; returning False stops tracing it.
        if frame.getName() == "__init__" and frame.getCaller().getName(
        ) == "run_one_line" and frame.getCaller().getLineNumber() == 101:
            return False
        strout = "call " + frame.getName()
        # An empty caller filename marks a call originating from LLDB itself.
        if (frame.getCaller().getFileName() == ""):
            strout += " from LLDB - args are "
            args = frame.getArgumentInfo().getArgs()
            for arg in args:
                if arg == "dict" or arg == "internal_dict":
                    continue
                strout = strout + ("%s = %s " % (arg, args[arg]))
        else:
            strout += " from " + frame.getCaller().getName() + " @ " + \
                str(frame.getCaller().getLineNumber()) + " args are " + str(frame.getArgumentInfo())
        print strout
    def lineEvent(self, frame):
        if frame.getName() == "<module>":
            return
        if frame.getName() == "run_one_line":
            print "running run_one_line(%s) @ %s" % (frame.getArgumentInfo().getArgs()["input_string"], frame.getLineNumber())
            return
        if "Python.framework" in frame.getFileName():
            print "running into Python at " + frame.getName() + " @ " + str(frame.getLineNumber())
            return
        strout = "running " + frame.getName() + " @ " + str(frame.getLineNumber()) + \
            " locals are "
        if (frame.getCaller().getFileName() == ""):
            locals = frame.getLocals()
            for local in locals:
                if local == "dict" or local == "internal_dict":
                    continue
                strout = strout + ("%s = %s " % (local, locals[local]))
        else:
            strout = strout + str(frame.getLocals())
        strout = strout + " in " + frame.getFileName()
        print strout
    def returnEvent(self, frame, retval):
        if frame.getName() == "<module>":
            return
        if frame.getName() == "run_one_line":
            print "return from run_one_line(%s) return value is %s" % (frame.getArgumentInfo().getArgs()["input_string"], retval)
            return
        if "Python.framework" in frame.getFileName():
            print "return from Python at " + frame.getName() + " return value is " + str(retval)
            return
        strout = "return from " + frame.getName() + " return value is " + \
            str(retval) + " locals are "
        if (frame.getCaller().getFileName() == ""):
            locals = frame.getLocals()
            for local in locals:
                if local == "dict" or local == "internal_dict":
                    continue
                strout = strout + ("%s = %s " % (local, locals[local]))
        else:
            strout = strout + str(frame.getLocals())
        strout = strout + " in " + frame.getFileName()
        print strout
    def exceptionEvent(self, frame, exception):
        if frame.getName() == "<module>":
            return
        print "exception %s %s raised from %s @ %s" % (exception.getType(), str(exception.getValue()), frame.getName(), frame.getLineNumber())
        print "tb: " + str(exception.getTraceback())
def f(x, y=None):
    """Demo recursion: add 2 per step while counting *x* down by 2; base case 35.

    *y* is accepted but unused (kept for the tracing demo's call signatures).
    """
    if x <= 0:
        return 35
    return 2 + f(x - 2)
def g(x):
    """Divide a fixed constant by *x*; x == 0 raises ZeroDivisionError (demo)."""
    numerator = 1.134
    return numerator / x
def print_keyword_args(**kwargs):
    """Print each keyword argument as "name = value" (Python 2)."""
    # kwargs is a dict of the keyword args passed to the function
    for key, value in kwargs.iteritems():
        print "%s = %s" % (key, value)
def total(initial=5, *numbers, **keywords):
    """Return *initial* plus every extra positional and keyword value."""
    count = initial
    for extra in list(numbers) + list(keywords.values()):
        count += extra
    return count
if __name__ == "__main__":
    # Demo: trace a handful of calls with the LoggingTracer installed.
    enable(LoggingTracer())
    f(5)
    f(5, 1)
    print_keyword_args(first_name="John", last_name="Doe")
    total(10, 1, 2, 3, vegetables=50, fruits=100)
    try:
        g(0)
    except:
        # NOTE(review): bare except deliberately swallows the
        # ZeroDivisionError from g(0) so the exception event is traced.
        pass
    disable()
|
{
"content_hash": "381acd8c88fe427ab8837c12356619b0",
"timestamp": "",
"source": "github",
"line_count": 360,
"max_line_length": 175,
"avg_line_length": 30.53611111111111,
"alnum_prop": 0.567361047939598,
"repo_name": "youtube/cobalt_sandbox",
"id": "a166f21e98392a6f8c06062c1c581e6545bce4c8",
"size": "10993",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "third_party/llvm-project/lldb/examples/python/pytracer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import pytest
from configuration.models import Configuration
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from accounts.models import User
from accounts.tests.test_views import accounts_route_questionnaires
from model_mommy import mommy
from functional_tests.base import FunctionalTest
from questionnaire.models import Questionnaire, QuestionnaireMembership, Lock
from sample.tests.test_views import (
route_questionnaire_details,
route_questionnaire_new,
get_categories)
from django.contrib.auth.models import Group
from accounts.tests.test_models import create_new_user
from functional_tests.pages.qcat import MyDataPage
from functional_tests.pages.sample import SampleDetailPage, SampleEditPage, \
SampleStepPage, SampleNewPage
def has_old_version_step(browser):
    """Assert (via findBy) that the step page shows the old-version notice."""
    notice_xpath = ('//p[contains(text(), "There is an old version with changes '
                    'in this section.")]')
    browser.findBy('xpath', notice_xpath)
def has_no_old_version_step(browser):
    """Assert (via findByNot) that the step page lacks the old-version notice."""
    notice_xpath = ('//p[contains(text(), "There is an old version with changes '
                    'in this section.")]')
    browser.findByNot('xpath', notice_xpath)
def has_old_version_overview(browser):
    """Assert (via findBy) that the overview page shows the old-version notice."""
    notice_xpath = ('//p[contains(text(), "There is an old version which is '
                    'different than the current one.")]')
    browser.findBy('xpath', notice_xpath)
def has_no_old_version_overview(browser):
    """Assert (via findByNot) that the overview page lacks the old-version notice."""
    notice_xpath = ('//p[contains(text(), "There is an old version which is '
                    'different than the current one.")]')
    browser.findByNot('xpath', notice_xpath)
def get_sample_4_5_options(testcase, index=0):
    """Return the non-empty options of the question 4.5 select at *index*."""
    xpath = '//select[@id="id_qg_43-{}-key_58"]/option[not(@value="")]'.format(index)
    return testcase.findManyBy('xpath', xpath)
def get_sample_4_6_options(testcase, index=0):
    """Return the non-empty options of the question 4.6 select at *index*."""
    xpath = '//select[@id="id_qg_46-{}-key_63"]/option[not(@value="")]'.format(index)
    return testcase.findManyBy('xpath', xpath)
def get_sample_5_4_options(testcase, index=0):
    """Return the non-empty options of the question 5.4 select at *index*."""
    xpath = '//select[@id="id_qg_44-{}-key_60"]/option[not(@value="")]'.format(index)
    return testcase.findManyBy('xpath', xpath)
def get_sample_5_5_options(testcase, index=0):
    """Return the non-empty options of the question 5.5 select at *index*."""
    xpath = '//select[@id="id_qg_45-{}-key_62"]/option[not(@value="")]'.format(index)
    return testcase.findManyBy('xpath', xpath)
class EditTest(FunctionalTest):
    """Browser tests for editing SAMPLE questionnaires (draft/public flows)."""
    fixtures = [
        'groups_permissions',
        'global_key_values',
        'sample',
        'sample_questionnaire_status',
    ]
    def test_creation_date_does_not_change(self):
        """Creation date stays fixed across new versions/edits; update date moves."""
        # Alice logs in
        user = User.objects.get(pk=102)
        # She goes to the details of an existing questionnaire and takes
        # note of the creation and update dates
        detail_page = SampleDetailPage(self)
        detail_page.route_kwargs = {'identifier': 'sample_3'}
        detail_page.open(login=True, user=user)
        creation_date = detail_page.get_el(detail_page.LOC_CREATION_DATE).text
        update_date = detail_page.get_el(detail_page.LOC_UPDATE_DATE).text
        # She creates a new version
        detail_page.create_new_version()
        # She notices that the creation date did not change while the
        # update date changed.
        creation_date_1 = detail_page.get_el(detail_page.LOC_CREATION_DATE).text
        update_date_1 = detail_page.get_el(detail_page.LOC_UPDATE_DATE).text
        assert creation_date == creation_date_1
        assert update_date != update_date_1
        # Alice logs in as a different user
        # She also opens a draft version of a questionnaire and takes
        # note of the creation and update dates
        user = User.objects.get(pk=101)
        detail_page.route_kwargs = {'identifier': 'sample_1'}
        detail_page.open(login=True, user=user)
        creation_date = detail_page.get_el(detail_page.LOC_CREATION_DATE).text
        update_date = detail_page.get_el(detail_page.LOC_UPDATE_DATE).text
        # She makes an edit
        detail_page.edit_questionnaire()
        edit_page = SampleEditPage(self)
        edit_page.click_edit_category('cat_1')
        step_page = SampleStepPage(self)
        step_page.enter_text(step_page.LOC_FORM_INPUT_KEY_1, ' (changed)')
        # She submits the questionnaire
        step_page.submit_step()
        # She sees the changes were submitted
        assert edit_page.has_text(' (changed)')
        # She notices that the creation date did not change while the
        # update date changed.
        creation_date_1 = edit_page.get_el(edit_page.LOC_CREATION_DATE).text
        update_date_1 = edit_page.get_el(edit_page.LOC_UPDATE_DATE).text
        assert creation_date == creation_date_1
        assert update_date != update_date_1
    def test_edit_draft(self):
        """Editing a draft keeps its code and creates no extra DB version."""
        code = 'sample_1'
        # Alice logs in
        user = User.objects.get(pk=101)
        self.doLogin(user=user)
        # She goes to the detail page of a "draft" Questionnaire
        self.browser.get(self.live_server_url + reverse(
            route_questionnaire_details, kwargs={'identifier': code}))
        self.assertIn(code, self.browser.current_url)
        self.findBy('xpath', '//*[text()[contains(.,"Foo 1")]]')
        self.findByNot('xpath', '//*[text()[contains(.,"asdf")]]')
        # She edits the Questionnaire and sees that the URL contains the
        # code of the questionnaire
        self.findBy('xpath', '//a[contains(text(), "Edit")]').click()
        self.assertIn(code, self.browser.current_url)
        # She edits a category and sees that the URL still contains the
        # code of the Questionnaire
        self.hide_notifications()
        self.click_edit_section('cat_1')
        self.assertIn(code, self.browser.current_url)
        # She makes some changes and submits the category
        key_1 = self.findBy('name', 'qg_1-0-original_key_1')
        key_1.clear()
        self.findBy('name', 'qg_1-0-original_key_1').send_keys('asdf')
        self.findBy('id', 'button-submit').click()
        # She is back on the overview page and sees that the URL still
        # contains the code of the Questionnaire
        self.assertIn(code, self.browser.current_url)
        # She sees that no new code was created.
        self.assertIn(code, self.browser.current_url)
        # She sees that the value of Key 1 was updated
        self.findByNot('xpath', '//*[text()[contains(.,"Foo 1")]]')
        self.findBy('xpath', '//*[text()[contains(.,"asdf")]]')
        # Also there was no additional version created in the database
        # (the fixtures load 10 questionnaires in total).
        self.assertEqual(Questionnaire.objects.count(), 10)
    def test_edit_public(self):
        """Editing a public questionnaire creates exactly one new draft version."""
        code = 'sample_3'
        user = User.objects.get(pk=101)
        old_text = 'Faz 3'
        new_text = 'asdf'
        # User logs in and goes to the detail page of a "public" Questionnaire
        detail_page = SampleDetailPage(self)
        detail_page.route_kwargs = {'identifier': code}
        detail_page.open(login=True, user=user)
        assert code in self.browser.current_url
        assert detail_page.has_text(old_text)
        assert not detail_page.has_text(new_text)
        # There is only one version of this questionnaire in the db
        assert Questionnaire.objects.filter(code=code).count() == 1
        # User uses the direct link to go to the edit page of the questionnaire
        # and sees no new version of the questionnaire is created in the DB.
        # This prevents the issue when new versions were created upon GET of the
        # edit page, which should be fixed now.
        edit_page = SampleEditPage(self)
        edit_page.route_kwargs = {'identifier': code}
        edit_page.open()
        assert Questionnaire.objects.filter(code=code).count() == 1
        # User edits the Questionnaire (creates a new version) and sees that
        # the URL contains the code of the Questionnaire
        detail_page.open()
        detail_page.create_new_version()
        assert code in self.browser.current_url
        # User edits a category and sees that the URL still contains the
        # code of the Questionnaire
        edit_page.click_edit_category('cat_2')
        assert code in self.browser.current_url
        # User makes some changes and submits the category
        step_page = SampleStepPage(self)
        step_page.enter_text(
            step_page.LOC_FORM_INPUT_KEY_5, new_text, clear=True)
        step_page.submit_step()
        # User is back on the overview page and sees that the URL still
        # contains the code of the Questionnaire
        assert code in self.browser.current_url
        assert edit_page.has_text(code)
        # She sees that the value of Key 1 was updated
        assert not edit_page.has_text(old_text)
        assert edit_page.has_text(new_text)
        # Also there was an additional version created in the database
        assert Questionnaire.objects.count() == 11
        # The newly created version has the same code
        assert Questionnaire.objects.filter(code=code).count() == 2
        # She goes to see her own questionnaire and sees sample_3 appears only
        # once
        my_page = MyDataPage(self)
        my_page.open()
        my_page.wait_for_lists()
        expected_list = [
            {
                'description': new_text,
            },
            {
                # Just to check that description is there ...
                'description': 'Faz 1'
            },
            # ... the rest does not really matter
        ]
        assert my_page.count_list_results() == 6
        my_page.check_list_results(expected_list, count=False)
        # She clicks the first entry and sees that she is taken to the
        # details page of the latest (pending) version.
        my_page.click_list_entry(index=0)
        assert detail_page.has_text(new_text)
    def test_edit_questionnaire(self):
        """Full create → review → publish → new-version-edit round trip."""
        user = self.create_new_user(
            email='mod@bar.com', groups=['Reviewers', 'Publishers'])
        # Alice logs in
        # She enters a Questionnaire
        new_page = SampleNewPage(self)
        new_page.open(login=True, user=user)
        new_page.click_edit_category('cat_1')
        step_page = SampleStepPage(self)
        step_page.enter_text(step_page.LOC_FORM_INPUT_KEY_1, 'Foo')
        step_page.enter_text(step_page.LOC_FORM_INPUT_KEY_3, 'Bar')
        step_page.submit_step()
        # The questionnaire is already saved as draft
        # She submits it for review
        edit_page = SampleEditPage(self)
        edit_page.submit_questionnaire()
        # She reviews it
        detail_page = SampleDetailPage(self)
        detail_page.review_questionnaire()
        # She publishes it
        detail_page.publish_questionnaire()
        # She sees it is public and visible
        assert detail_page.has_text('Foo')
        assert detail_page.has_text('Bar')
        url = self.browser.current_url
        # She creates a new version
        detail_page.create_new_version()
        # She edits it
        edit_page.click_edit_category('cat_1')
        # She changes some values
        step_page.enter_text(step_page.LOC_FORM_INPUT_KEY_1, 'asdf', clear=True)
        step_page.submit_step()
        # The questionnaire is already saved as draft
        # She is taken to the overview page where she sees the latest
        # (pending) changes of the draft
        edit_page.check_status('draft')
        assert not edit_page.has_text('Foo')
        assert edit_page.has_text('asdf')
        assert edit_page.has_text('Bar')
        # She sees the edit buttons
        assert edit_page.exists_el(edit_page.format_locator(
            edit_page.LOC_BUTTON_EDIT_CATEGORY, keyword='cat_1'))
        # She sees the possibility to view the questionnaire
        edit_page.view_questionnaire()
        self.assertIn(url, self.browser.current_url + '#top')
        # All the changes are there
        assert not detail_page.has_text('Foo')
        assert detail_page.has_text('asdf')
        assert detail_page.has_text('Bar')
        # There are no buttons to edit the sections anymore
        assert not detail_page.exists_el(detail_page.format_locator(
            edit_page.LOC_BUTTON_EDIT_CATEGORY, keyword='cat_1'))
class CustomToOptionsTest(FunctionalTest):
    """Browser test: key_64 selection drives the read-only labels key_65/key_67."""
    fixtures = [
        'sample_global_key_values',
        'sample',
    ]
    def test_custom_to_options(self):
        """Selecting key_64 values updates/enables the linked label dropdowns."""
        # Alice logs in
        self.doLogin()
        # She goes to step 5 of the SAMPLE form
        self.browser.get(
            self.live_server_url + reverse(route_questionnaire_new))
        self.click_edit_section('cat_5')
        # She sees that no labels are selected in key_65 and key_67
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]/a/span[text()="-"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen"]/a/span[text()="-"]')
        # She sees that the label fields are disabled
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen" and contains(@class, '
                    '"disabled")]/a/span[text()="-"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen" and contains(@class, '
                    '"disabled")]/a/span[text()="-"]')
        # She selects Value 1 of key 64
        self.select_chosen_element('id_qg_47_0_key_64_chosen', 'Value 64 1')
        # She sees the labels were updated in key_65 and key_67
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]/a/span[text()="'
                    'Value 66 1 Left"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen"]/a/span[text()="'
                    'Value 66 1 Right"]')
        # She deselects the value in key 64
        self.select_chosen_element('id_qg_47_0_key_64_chosen', '-')
        # She sees the labels were reset
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]/a/span[text()="-"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen"]/a/span[text()="-"]')
        # She selects Value 1 of key 64 again
        self.select_chosen_element('id_qg_47_0_key_64_chosen', 'Value 64 1')
        # She submits the step and sees the values were submitted correctly
        self.submit_form_step()
        self.findBy('xpath',
                    '//span[contains(@class, "chart-measure-label-left") and '
                    'contains(text(), "Value 66 1 Left")]')
        self.findBy('xpath',
                    '//span[contains(@class, "chart-measure-label-right") and '
                    'contains(text(), "Value 66 1 Right")]')
        # She goes back to the form
        self.click_edit_section('cat_5')
        # She sees the fields were populated correctly
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen" and contains(@class, '
                    '"disabled")]/a/span[text()="Value 66 1 Left"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen" and contains(@class, '
                    '"disabled")]/a/span[text()="Value 66 1 Right"]')
        # She sees the labels were updated in key_65 and key_67
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]/a/span[text()="'
                    'Value 66 1 Left"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen"]/a/span[text()="'
                    'Value 66 1 Right"]')
        # She selects Value 2 of key 64
        self.select_chosen_element('id_qg_47_0_key_64_chosen', 'Value 64 2')
        # She sees the labels were updated in key_65 and key_67
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]/a/span[text()="'
                    'Value 66 2 Left"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen"]/a/span[text()="'
                    'Value 66 2 Right"]')
        # She sees that the label fields are disabled
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen" and contains(@class, '
                    '"disabled")]/a/span[text()="Value 66 2 Left"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen" and contains(@class, '
                    '"disabled")]/a/span[text()="Value 66 2 Right"]')
        # She selects Value 3 of key 64
        self.select_chosen_element('id_qg_47_0_key_64_chosen', 'Value 64 3')
        # She sees the labels were updated and show the first value possible
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]/a/span[text()="'
                    'Value 66 3A Left"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen"]/a/span[text()="'
                    'Value 66 3A Right"]')
        # She sees the field is not disabled anymore
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen" and not(contains('
                    '@class, "disabled"))]/a/span[text()="Value 66 3A Left"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen" and not(contains('
                    '@class, "disabled"))]/a/span[text()="Value 66 3A Right"]')
        # She sees she cannot select "Value 66 1 Left"
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]').click()
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]//ul[@class="chosen-'
                    'results"]/li[contains(text(), "Value 66 2 Left")]').click()
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]/a/span[text()="'
                    'Value 66 3A Left"]')
        # However, she can select "Value 66 3B Left"
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]//ul[@class="chosen-'
                    'results"]/li[contains(text(), "Value 66 3B Left")]').click()
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen"]/a/span[text()="'
                    'Value 66 3B Left"]')
        # She submits the step and sees the values were submitted correctly
        self.submit_form_step()
        self.findBy('xpath',
                    '//span[contains(@class, "chart-measure-label-left") and '
                    'contains(text(), "Value 66 3B Left")]')
        self.findBy('xpath',
                    '//span[contains(@class, "chart-measure-label-right") and '
                    'contains(text(), "Value 66 3A Right")]')
        # She goes back to step 5 of the form and sees the values were
        # initialized correctly, the label fields are not disabled
        self.click_edit_section('cat_5')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_65_chosen" and not(contains('
                    '@class, "disabled"))]/a/span[text()="Value 66 3B Left"]')
        self.findBy('xpath',
                    '//div[@id="id_qg_47_0_key_67_chosen" and not(contains('
                    '@class, "disabled"))]/a/span[text()="Value 66 3A Right"]')
class LinkedChoicesTest(FunctionalTest):
fixtures = [
'sample_global_key_values',
'sample',
]
    def test_linked_across_step(self):
        """Options picked in step 4 become (and stay) the choices in step 5."""
        # Alice logs in
        self.doLogin()
        # She goes to step 5 of the SAMPLE form
        self.browser.get(
            self.live_server_url + reverse(route_questionnaire_new))
        self.click_edit_section('cat_5')
        # She sees that there are no choices available for 5.4
        self.assertEqual(len(get_sample_5_4_options(self)), 0)
        # She goes to section 4 of the SAMPLE form
        self.submit_form_step()
        self.click_edit_section('cat_4')
        # She selects some options in 4.4
        self.findBy('xpath', '//input[@id="subcat_4_4"]').click()
        self.findBy(
            'xpath', '//input[@data-container="qg_40"]', wait=True).click()
        xpath = '//select[@id="id_qg_40-0-key_57"]/option[@value="value_57_1"]'
        self.findBy('xpath', xpath, wait=True).click()
        self.findBy(
            'xpath', '//input[@data-container="qg_41"]', wait=True).click()
        xpath = '//select[@id="id_qg_41-0-key_57"]/option[@value="value_57_2"]'
        self.findBy('xpath', xpath, wait=True).click()
        self.findBy(
            'xpath', '//input[@data-container="qg_42"]', wait=True).click()
        xpath = '//select[@id="id_qg_42-0-key_57"]/option[@value="value_57_3"]'
        self.findBy('xpath', xpath, wait=True).click()
        # She submits the step and goes to step 5 again
        self.submit_form_step()
        self.click_edit_section('cat_5')
        # She sees that in 5.4, there are now 3 choices available.
        self.assertEqual(len(get_sample_5_4_options(self)), 3)
        # She fills out a first questiongroup
        self.select_chosen_element('id_qg_44_0_key_60_chosen', 'QG 40')
        self.findBy('id', 'id_qg_44-0-original_key_61').send_keys('Foo')
        # She also fills out a second questiongroup
        self.form_click_add_more('qg_44')
        self.assertEqual(len(get_sample_5_4_options(self, index=1)), 3)
        self.findBy('xpath',
                    '//div[@id="id_qg_44_1_key_60_chosen"]').click()
        self.findBy('xpath',
                    '//div[@id="id_qg_44_1_key_60_chosen"]//ul[@class="chosen-'
                    'results"]/li[contains(text(), "QG 42")]').click()
        self.findBy('id', 'id_qg_44-1-original_key_61').send_keys('Bar')
        # She sees that in 5.5, there are only 2 choices available (no qg_42)
        self.assertEqual(len(get_sample_5_5_options(self)), 2)
        # She submits the form step and sees that the values are there.
        self.submit_form_step()
        self.findBy('xpath',
                    '//h3[contains(text(), "Subcategory 5_4")]/following::p['
                    'contains(text(), "QG 40")]')
        # She goes back to step 4
        self.click_edit_section('cat_4')
        # She deselects a value
        xpath = '//select[@id="id_qg_40-0-key_57"]/option[@value=""]'
        self.findBy('xpath', xpath, wait=True).click()
        # She submits the step and goes back to step 5
        self.submit_form_step()
        self.findByNot('xpath',
                       '//h3[contains(text(), "Subcategory 5_4")]/following::p['
                       'contains(text(), "QG 40")]')
        self.click_edit_section('cat_5')
        # She sees that the option is not selected anymore, all the other
        # values are there
        self.assertEqual(len(get_sample_5_4_options(self)), 2)
        self.assertEqual(len(get_sample_5_4_options(self, index=1)), 2)
        self.findBy('xpath',
                    '//div[@id="id_qg_44_0_key_60_chosen"]/a/span[text()="-"]')
        self.assertEqual(
            self.findBy('id', 'id_qg_44-0-original_key_61').get_attribute(
                'value'), 'Foo')
        self.findBy('xpath',
                    '//div[@id="id_qg_44_1_key_60_chosen"]/a/span['
                    'text()="QG 42"]')
        self.assertEqual(
            self.findBy('id', 'id_qg_44-1-original_key_61').get_attribute(
                'value'), 'Bar')
        # She sees that in 5.5, there is only one option left
        self.assertEqual(len(get_sample_5_5_options(self)), 1)
def test_linked_choices_within_step(self):
    """The options offered in subcategories 4.5 and 4.6 follow the values
    selected in 4.4 live (without saving), within a single form step.

    Also checks that deselecting a 4.4 value clears the linked selection
    in 4.5 while leaving the free-text key untouched, and that the
    restrictions survive a submit/re-edit cycle.
    """
    # Alice logs in
    self.doLogin()
    # She goes to step 4 of the SAMPLE form
    self.browser.get(
        self.live_server_url + reverse(route_questionnaire_new))
    self.click_edit_section('cat_4')
    # She sees that no extremes can be selected in 4.5
    self.assertEqual(len(get_sample_4_5_options(self)), 0)
    # She selects some questiongroups in 4.4 and sees that they are now
    # available for selection in 4.5
    self.findBy('xpath', '//input[@id="subcat_4_4"]').click()
    self.findBy(
        'xpath', '//input[@data-container="qg_40"]', wait=True).click()
    # It is not sufficient to click the checkbox of the questiongroup, an
    # actual value of the questiongroup must be selected.
    self.assertEqual(len(get_sample_4_5_options(self)), 0)
    self.findBy('xpath',
                '//select[@id="id_qg_40-0-key_57"]/option['
                '@value="value_57_1"]').click()
    self.assertEqual(len(get_sample_4_5_options(self)), 1)
    # The same option is also available in 4.6
    self.assertEqual(len(get_sample_4_6_options(self)), 1)
    # She deselects the value again and sees the option disappears in 4.5
    self.findBy('xpath',
                '//select[@id="id_qg_40-0-key_57"]/option['
                '@value=""]').click()
    self.assertEqual(len(get_sample_4_5_options(self)), 0)
    self.assertEqual(len(get_sample_4_6_options(self)), 0)
    # She selects a value again, the option appears in 4.5
    self.findBy('xpath',
                '//select[@id="id_qg_40-0-key_57"]/option['
                '@value="value_57_2"]').click()
    self.assertEqual(len(get_sample_4_5_options(self)), 1)
    # The same option is also available in 4.6
    self.assertEqual(len(get_sample_4_6_options(self)), 1)
    # She changes the value of 4.5, still the option appears only once
    self.findBy('xpath',
                '//select[@id="id_qg_40-0-key_57"]/option['
                '@value="value_57_1"]').click()
    self.assertEqual(len(get_sample_4_5_options(self)), 1)
    self.assertEqual(len(get_sample_4_6_options(self)), 1)
    # She also selects another value in 4.4
    self.findBy('xpath', '//input[@data-container="qg_41"]').click()
    self.assertEqual(len(get_sample_4_5_options(self)), 1)
    xpath = '//select[@id="id_qg_41-0-key_57"]/option[@value="value_57_1"]'
    self.findBy('xpath', xpath, wait=True).click()
    self.assertEqual(len(get_sample_4_5_options(self)), 2)
    # The same option is also available in 4.6
    self.assertEqual(len(get_sample_4_6_options(self)), 2)
    # She selects an option in 4.6
    self.select_chosen_element('id_qg_46_0_key_63_chosen', 'QG 41')
    # She also selects value 3 of 4.4, but sees that this one is not in the
    # list of options for 4.5
    self.findBy('xpath', '//input[@data-container="qg_42"]').click()
    self.assertEqual(len(get_sample_4_5_options(self)), 2)
    xpath = '//select[@id="id_qg_42-0-key_57"]/option[@value="value_57_1"]'
    self.findBy('xpath', xpath, wait=True).click()
    self.assertEqual(len(get_sample_4_5_options(self)), 2)
    # However, the same option appears in 4.6
    self.assertEqual(len(get_sample_4_6_options(self)), 3)
    # In 4.6, the selected option is still qg_41
    self.findBy('xpath', '//div[@id="id_qg_46_0_key_63_chosen"]/a/span['
                'text()="QG 41"]')
    # She selects an option in 4.5 and fills out the additional key.
    self.findBy('xpath',
                '//div[@id="id_qg_43_0_key_58_chosen"]').click()
    self.findBy('xpath',
                '//div[@id="id_qg_43_0_key_58_chosen"]//ul[@class="chosen-'
                'results"]/li[contains(text(), "QG 41")]').click()
    self.findBy('id', 'id_qg_43-0-original_key_59').send_keys('Foo')
    # She also adds another option in 4.5 by clicking "Add more".
    self.form_click_add_more('qg_43')
    self.assertEqual(len(get_sample_4_5_options(self, index=1)), 2)
    self.findBy('xpath',
                '//div[@id="id_qg_43_1_key_58_chosen"]').click()
    self.findBy('xpath',
                '//div[@id="id_qg_43_1_key_58_chosen"]//ul[@class="chosen-'
                'results"]/li[contains(text(), "QG 40")]').click()
    self.findBy('id', 'id_qg_43-1-original_key_59').send_keys('Bar')
    # She deselects a value in 4.4
    self.findBy('xpath',
                '//select[@id="id_qg_41-0-key_57"]/option['
                '@value=""]').click()
    self.assertEqual(len(get_sample_4_5_options(self)), 1)
    self.assertEqual(len(get_sample_4_5_options(self, index=1)), 1)
    # She sees that the first questiongroup of 4.5 now has no value selected
    # but the additional text field is still there.
    self.findBy('xpath', '//div[@id="id_qg_43_0_key_58_chosen"]/a/span['
                'text()="-"]')
    self.assertEqual(
        self.findBy('id', 'id_qg_43-0-original_key_59').get_attribute(
            'value'), 'Foo')
    # The second questiongroup of 4.5 is untouched
    self.findBy('xpath', '//div[@id="id_qg_43_1_key_58_chosen"]/a/span['
                'text()="QG 40"]')
    self.assertEqual(
        self.findBy('id', 'id_qg_43-1-original_key_59').get_attribute(
            'value'), 'Bar')
    # She submits the step and sees the values are submitted correctly
    self.submit_form_step()
    # She opens section 4 again and sees that she can still only select
    # certain options in 4.5
    self.click_edit_section('cat_4')
    self.assertEqual(len(get_sample_4_5_options(self)), 1)
    self.assertEqual(len(get_sample_4_5_options(self, index=1)), 1)
    # The same option is also available in 4.6 (plus qg_42)
    self.assertEqual(len(get_sample_4_6_options(self)), 2)
    self.findBy('xpath',
                '//div[@id="id_qg_43_0_key_58_chosen"]/a/span[text()="-"]')
    self.assertEqual(
        self.findBy('id', 'id_qg_43-0-original_key_59').get_attribute(
            'value'), 'Foo')
    self.findBy('xpath',
                '//div[@id="id_qg_43_1_key_58_chosen"]/a/span['
                'text()="QG 40"]')
    self.assertEqual(
        self.findBy('id', 'id_qg_43-1-original_key_59').get_attribute(
            'value'), 'Bar')
def test_linked_choices_order(self):
    """The linked options in 4.5 are listed in questiongroup order
    (QG 40 before QG 41), not in the order their values were selected.
    """
    # Alice logs in
    self.doLogin()
    # She goes to step 4 of the SAMPLE form
    self.browser.get(
        self.live_server_url + reverse(route_questionnaire_new))
    self.click_edit_section('cat_4')
    # She sees that no extremes can be selected in 4.5
    self.assertEqual(len(get_sample_4_5_options(self)), 0)
    # She selects some questiongroups in 4.4 and sees that they are now
    # available for selection in 4.5.  qg_41 is selected first ...
    self.findBy('xpath', '//input[@id="subcat_4_4"]').click()
    self.findBy(
        'xpath', '//input[@data-container="qg_41"]', wait=True).click()
    self.findBy('xpath',
                '//select[@id="id_qg_41-0-key_57"]/option['
                '@value="value_57_1"]', wait=True).click()
    self.assertEqual(len(get_sample_4_5_options(self)), 1)
    # ... and qg_40 second.
    # She selects another option
    self.findBy('xpath', '//input[@data-container="qg_40"]').click()
    self.findBy('xpath',
                '//select[@id="id_qg_40-0-key_57"]/option['
                '@value="value_57_2"]', wait=True).click()
    self.assertEqual(len(get_sample_4_5_options(self)), 2)
    # Despite the selection order, QG 40 comes before QG 41 in the
    # chosen dropdown (list items 2 and 3).
    self.findBy('xpath',
                '//div[@id="id_qg_43_0_key_58_chosen"]').click()
    self.findBy('xpath',
                '//div[@id="id_qg_43_0_key_58_chosen"]//ul[@class="chosen-'
                'results"]/li[2][contains(text(), "QG 40")]')
    self.findBy('xpath',
                '//div[@id="id_qg_43_0_key_58_chosen"]//ul[@class="chosen-'
                'results"]/li[3][contains(text(), "QG 41")]')
class LockTest(FunctionalTest):
    """
    Tests for questionnaire locking.
    """
    fixtures = [
        'sample_global_key_values',
        'sample',
    ]

    def setUp(self):
        """Create a draft questionnaire ('sample_1') with jay as compiler
        and robin as editor, and remember its edit/view URLs."""
        super().setUp()
        self.jay = mommy.make(
            _model=get_user_model(),
            firstname='jay',
            # PEP 8: no spaces around '=' in keyword arguments.
            email='jay@spam.com'
        )
        self.robin = mommy.make(
            _model=get_user_model(),
            firstname='robin',
            email='robin@spam.com'
        )
        # Create a valid questionnaire with the least required data.
        self.questionnaire = mommy.make(
            _model=Questionnaire,
            data={},
            code='sample_1',
            status=settings.QUESTIONNAIRE_DRAFT,
            configuration=Configuration.objects.filter(code='sample').first()
        )
        mommy.make(
            _model=QuestionnaireMembership,
            user=self.jay,
            questionnaire=self.questionnaire,
            role='compiler'
        )
        mommy.make(
            _model=QuestionnaireMembership,
            user=self.robin,
            questionnaire=self.questionnaire,
            role='editor'
        )
        self.questionnaire_edit_url = '{}{}'.format(
            self.live_server_url,
            reverse('sample:questionnaire_edit', args=['sample_1'])
        )
        self.questionnaire_view_url = '{}{}'.format(
            self.live_server_url,
            self.questionnaire.get_absolute_url()
        )

    def test_edit_adds_lock(self):
        """Opening a section for editing locks the questionnaire for
        other users and disables their edit button."""
        # Jay logs in and starts editing a section.
        self.doLogin(user=self.jay)
        self.browser.get(self.questionnaire_edit_url)
        self.hide_notifications()
        self.findManyBy('link_text', 'Edit this section')[0].click()
        # Robin logs in and views the questionnaire
        self.doLogin(user=self.robin)
        self.browser.get(self.questionnaire_view_url)
        # but the questionnaire is locked
        self.assertTrue(
            Lock.with_status.is_blocked('sample_1').exists()
        )
        # and the edit button has no url, but a message about the locked status
        edit_button = self.findBy('link_text', 'Edit')
        self.assertTrue(edit_button.get_attribute('disabled'))
        # 'jay None' is the display name of a user without a last name.
        self.findBy('xpath', '//*[text()[contains(.,"This questionnaire is '
                             'locked for editing by jay None.")]]')
        # if the url is accessed directly, a notification is shown
        self.browser.get(self.questionnaire_edit_url)
        self.findBy('xpath', '//div[contains(@class, "notification") '
                             'and contains(@class, "warning")]')
        # maybe: should the edit buttons be disabled?

    def test_edit_locked(self):
        """A user other than the lock holder is redirected to the detail
        view when trying to edit a section directly."""
        # The questionnaire is locked for Jay
        Lock.objects.create(
            questionnaire_code='sample_1', user=self.jay
        )
        self.doLogin(user=self.robin)
        # Viewing the questionnaire is fine
        self.browser.get(self.questionnaire_view_url)
        # When Robin tries to edit a section, the browser gets redirected
        self.browser.get('{}cat_1'.format(self.questionnaire_edit_url))
        self.browser.implicitly_wait(3)
        self.assertEqual(self.browser.current_url, self.questionnaire_view_url)

    def test_refresh_lock(self):
        """The edit page schedules a lock refresh shortly before the lock
        would expire (lock time minus one minute, in milliseconds)."""
        self.doLogin(user=self.jay)
        self.browser.get(self.questionnaire_edit_url)
        self.hide_notifications()
        self.findManyBy('link_text', 'Edit this section')[0].click()
        interval = (settings.QUESTIONNAIRE_LOCK_TIME - 1) * 60 * 1000
        self.findBy('xpath', '//*[text()[contains(.,"{}")]]'.format(interval))

    def test_delete_with_lock(self):
        """Deleting is refused while another user holds the lock and
        succeeds once that lock is released."""
        # The editor logs in and starts editing, this creates a lock.
        detail_page = SampleDetailPage(self)
        detail_page.route_kwargs = {'identifier': self.questionnaire.code}
        detail_page.open(login=True, user=self.robin)
        detail_page.edit_questionnaire()
        edit_page = SampleEditPage(self)
        edit_page.click_edit_category('cat_1')
        # The compiler logs in and wants to delete the questionnaire
        detail_page.open(login=True, user=self.jay)
        assert Questionnaire.objects.get(
            code=self.questionnaire.code).is_deleted is False
        assert detail_page.has_text(self.questionnaire.code)
        detail_page.delete_questionnaire(check_success=False)
        # He receives an error message, the questionnaire was not deleted.
        assert detail_page.has_warning_message(
            f'This questionnaire is locked for editing by '
            f'{self.robin.get_display_name()}.')
        assert detail_page.has_text(self.questionnaire.code)
        assert Questionnaire.objects.get(
            code=self.questionnaire.code).is_deleted is False
        # After a while, the lock expires
        Lock.objects.filter(
            questionnaire_code=self.questionnaire.code
        ).update(is_finished=True)
        # Now the questionnaire can be deleted.
        detail_page.delete_questionnaire(check_success=True)
        assert Questionnaire.objects.get(
            code=self.questionnaire.code).is_deleted is True

    def test_delete_with_lock_by_own_user(self):
        """A compiler can delete despite his own active lock, because the
        lock is released as part of the deletion."""
        # The compiler (!) logs in and starts editing, this creates a lock.
        detail_page = SampleDetailPage(self)
        detail_page.route_kwargs = {'identifier': self.questionnaire.code}
        detail_page.open(login=True, user=self.jay)
        detail_page.edit_questionnaire()
        edit_page = SampleEditPage(self)
        edit_page.click_edit_category('cat_1')
        # The compiler opens the detail page (= back without saving) and wants
        # to delete the questionnaire while his own lock is still active. This
        # works because his own lock is released when deleting the
        # questionnaire.
        detail_page.open()
        detail_page.delete_questionnaire(check_success=True)
        assert Questionnaire.objects.get(
            code=self.questionnaire.code).is_deleted is True
|
{
"content_hash": "5fc185579c37b9e31dd303a14e2a9761",
"timestamp": "",
"source": "github",
"line_count": 935,
"max_line_length": 81,
"avg_line_length": 40.510160427807484,
"alnum_prop": 0.5793225440240779,
"repo_name": "CDE-UNIBE/qcat",
"id": "94d93d50216063494d1206818f2105dab8c831a9",
"size": "37877",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "functional_tests/sample/test_edit.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1098"
},
{
"name": "HTML",
"bytes": "823938"
},
{
"name": "Handlebars",
"bytes": "224139"
},
{
"name": "JavaScript",
"bytes": "153067"
},
{
"name": "Python",
"bytes": "3515948"
},
{
"name": "SCSS",
"bytes": "165400"
},
{
"name": "Shell",
"bytes": "1943"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from bokeh.browserlib import view
from bokeh.models import ColumnDataSource, DataRange1d, Plot, LinearAxis, Grid, Circle, HoverTool, BoxSelectTool
from bokeh.models.widgets import (
Select, DataTable, TableColumn, StringFormatter, HBox, VBox,
NumberFormatter, StringEditor, IntEditor, NumberEditor, SelectEditor)
# from bokeh.io import vplot, hplot
from bokeh.document import Document
from bokeh.plotting import curdoc
from bokeh.client import push_session
from bokeh.sampledata.autompg2 import autompg2 as mpg
class DataTables(object):
    """Bokeh server app showing the autompg2 sample data in an editable
    DataTable, linked to a scatter plot of city/highway MPG and filtered
    by five Select widgets (manufacturer, model, transmission, drive,
    class).
    """

    def __init__(self):
        self.document = Document()
        # Active filter values; None means "All" (no filtering).
        self.manufacturer_filter = None
        self.model_filter = None
        self.transmission_filter = None
        self.drive_filter = None
        self.class_filter = None
        self.source = ColumnDataSource()
        self.update_data()
        # NOTE(review): double parentheses are redundant here.
        self.document.add_root((self.create()))
        # Push the populated document to a running Bokeh server session.
        self.session = push_session(self.document)

    def create(self):
        """Build and return the full layout: filter selects, the plot and
        the editable data table."""
        manufacturers = sorted(mpg["manufacturer"].unique())
        models = sorted(mpg["model"].unique())
        transmissions = sorted(mpg["trans"].unique())
        drives = sorted(mpg["drv"].unique())
        classes = sorted(mpg["class"].unique())

        # One Select per filter column; "All" disables the filter.
        manufacturer_select = Select(title="Manufacturer:", value="All", options=["All"] + manufacturers)
        manufacturer_select.on_change('value', self.on_manufacturer_change)
        model_select = Select(title="Model:", value="All", options=["All"] + models)
        model_select.on_change('value', self.on_model_change)
        transmission_select = Select(title="Transmission:", value="All", options=["All"] + transmissions)
        transmission_select.on_change('value', self.on_transmission_change)
        drive_select = Select(title="Drive:", value="All", options=["All"] + drives)
        drive_select.on_change('value', self.on_drive_change)
        class_select = Select(title="Class:", value="All", options=["All"] + classes)
        class_select.on_change('value', self.on_class_change)

        columns = [
            TableColumn(field="manufacturer", title="Manufacturer", editor=SelectEditor(options=manufacturers), formatter=StringFormatter(font_style="bold")),
            TableColumn(field="model", title="Model", editor=StringEditor(completions=models)),
            TableColumn(field="displ", title="Displacement", editor=NumberEditor(step=0.1), formatter=NumberFormatter(format="0.0")),
            TableColumn(field="year", title="Year", editor=IntEditor()),
            TableColumn(field="cyl", title="Cylinders", editor=IntEditor()),
            TableColumn(field="trans", title="Transmission", editor=SelectEditor(options=transmissions)),
            TableColumn(field="drv", title="Drive", editor=SelectEditor(options=drives)),
            TableColumn(field="class", title="Class", editor=SelectEditor(options=classes)),
            TableColumn(field="cty", title="City MPG", editor=IntEditor()),
            TableColumn(field="hwy", title="Highway MPG", editor=IntEditor()),
        ]
        data_table = DataTable(source=self.source, columns=columns, editable=True)

        plot = Plot(title=None, x_range= DataRange1d(), y_range=DataRange1d(), plot_width=1000, plot_height=300)

        # Set up x & y axis
        plot.add_layout(LinearAxis(), 'below')
        yaxis = LinearAxis()
        plot.add_layout(yaxis, 'left')
        plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker))

        # Add Glyphs: one circle series per MPG column, plotted against the
        # dataframe index.
        cty_glyph = Circle(x="index", y="cty", fill_color="#396285", size=8, fill_alpha=0.5, line_alpha=0.5)
        hwy_glyph = Circle(x="index", y="hwy", fill_color="#CE603D", size=8, fill_alpha=0.5, line_alpha=0.5)
        cty = plot.add_glyph(self.source, cty_glyph)
        hwy = plot.add_glyph(self.source, hwy_glyph)

        # Add the tools: one hover tool per glyph (shared tooltip fields
        # plus the series-specific MPG value) and a horizontal box select.
        tooltips = [
            ("Manufacturer", "@manufacturer"),
            ("Model", "@model"),
            ("Displacement", "@displ"),
            ("Year", "@year"),
            ("Cylinders", "@cyl"),
            ("Transmission", "@trans"),
            ("Drive", "@drv"),
            ("Class", "@class"),
        ]
        cty_hover_tool = HoverTool(renderers=[cty], tooltips=tooltips + [("City MPG", "@cty")])
        hwy_hover_tool = HoverTool(renderers=[hwy], tooltips=tooltips + [("Highway MPG", "@hwy")])
        select_tool = BoxSelectTool(renderers=[cty, hwy], dimensions=['width'])
        plot.add_tools(cty_hover_tool, hwy_hover_tool, select_tool)

        controls = VBox(children=[
            manufacturer_select, model_select, transmission_select,
            drive_select, class_select])
        top_panel = HBox(children=[controls, plot])
        layout = VBox(children=[top_panel, data_table])
        return layout

    def on_manufacturer_change(self, attr, _, value):
        self.manufacturer_filter = None if value == "All" else value
        self.update_data()

    def on_model_change(self, attr, _, value):
        self.model_filter = None if value == "All" else value
        self.update_data()

    def on_transmission_change(self, attr, _, value):
        self.transmission_filter = None if value == "All" else value
        self.update_data()

    def on_drive_change(self, attr, _, value):
        self.drive_filter = None if value == "All" else value
        self.update_data()

    def on_class_change(self, attr, _, value):
        self.class_filter = None if value == "All" else value
        self.update_data()

    def update_data(self):
        """Apply all active filters to the mpg frame and push the result
        into the shared ColumnDataSource."""
        df = mpg
        if self.manufacturer_filter:
            df = df[df["manufacturer"] == self.manufacturer_filter]
        if self.model_filter:
            df = df[df["model"] == self.model_filter]
        if self.transmission_filter:
            df = df[df["trans"] == self.transmission_filter]
        if self.drive_filter:
            df = df[df["drv"] == self.drive_filter]
        if self.class_filter:
            df = df[df["class"] == self.class_filter]
        self.source.data = ColumnDataSource.from_df(df)

    def run(self, do_view=False, poll_interval=0.5):
        """Optionally open the session in a browser, then block until the
        session is closed.

        NOTE(review): poll_interval is accepted but never used —
        loop_until_closed() takes no argument here; confirm before removing.
        """
        if do_view:
            self.session.show()
        self.session.loop_until_closed()
if __name__ == "__main__":
    data_tables = DataTables()
    # True -> open a browser tab; run() then blocks until the session closes.
    data_tables.run(True)
|
{
"content_hash": "f9e559eb394299046dc8c80ef08901fb",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 158,
"avg_line_length": 44.37241379310345,
"alnum_prop": 0.6114392290954305,
"repo_name": "htygithub/bokeh",
"id": "4d9c7e69a246f68dec7214ef7301fe940c73dc57",
"size": "6434",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/glyphs/data_tables_server.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5455"
},
{
"name": "CSS",
"bytes": "413523"
},
{
"name": "CoffeeScript",
"bytes": "2166306"
},
{
"name": "HTML",
"bytes": "72855"
},
{
"name": "JavaScript",
"bytes": "7847"
},
{
"name": "Makefile",
"bytes": "5894"
},
{
"name": "Python",
"bytes": "1555759"
},
{
"name": "Shell",
"bytes": "18074"
}
],
"symlink_target": ""
}
|
"""Tests for lmod."""
import unittest
import mock
from perfkitbenchmarker import errors
from perfkitbenchmarker.linux_packages import lmod
from tests import pkb_common_test_case
def MockVm():
    """Return a fresh mock VM object for exercising the lmod installers."""
    vm = mock.Mock()
    return vm
class LmodTest(pkb_common_test_case.PkbCommonTestCase):
    """Unit tests for the lmod package's install functions, using a mocked
    VM to record the package and shell commands issued."""

    def testYumInstall(self):
        # Yum installs the 'Lmod' package and runs no extra shell commands.
        vm = MockVm()
        lmod.YumInstall(vm)
        vm.InstallPackages.assert_called_with('Lmod')
        vm.RemoteCommand.assert_not_called()

    def testAptInstall(self):
        vm = MockVm()
        # Both existence probes on the VM succeed.
        vm.TryRemoteCommand.side_effect = [
            True,  # lua directory exists
            True,  # lua posix.so file exists
        ]
        lmod.AptInstall(vm)
        vm.InstallPackages.assert_called_with('lmod')
        # On apt systems a posix.so symlink is additionally created.
        lua_dir = '/usr/lib/x86_64-linux-gnu/lua/5.2'
        vm.RemoteCommand.assert_has_calls([
            mock.call(f'sudo ln -s {lua_dir}/posix_c.so {lua_dir}/posix.so'),
        ])

    def testAptInstallFails(self):
        vm = MockVm()
        # The first probe fails, so the install must raise.
        vm.TryRemoteCommand.side_effect = [
            False,  # lua directory does not exist
        ]
        with self.assertRaises(errors.Setup.InvalidSetupError):
            lmod.AptInstall(vm)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "64d3ee5ad4177eef9f66525c6410a9d5",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 73,
"avg_line_length": 22.74,
"alnum_prop": 0.671943711521548,
"repo_name": "GoogleCloudPlatform/PerfKitBenchmarker",
"id": "1b2b1b3781771cbdebc2d97e32104aaac470c9d5",
"size": "1137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/linux_packages/lmod_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3420"
},
{
"name": "HTML",
"bytes": "113073"
},
{
"name": "Jinja",
"bytes": "62005"
},
{
"name": "Lua",
"bytes": "1547"
},
{
"name": "Python",
"bytes": "6076512"
},
{
"name": "R",
"bytes": "1017"
},
{
"name": "Shell",
"bytes": "76164"
},
{
"name": "Tcl",
"bytes": "14601"
}
],
"symlink_target": ""
}
|
import argparse
import os
import sys

# Make the repository root importable so the 'scripts' and 'zerver'
# packages below can be found when this file is executed directly.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(BASE_DIR)

# NOTE: statement order matters from here on — setup_path() runs before
# the Django setup, which in turn must complete before zerver is imported.
from scripts.lib.setup_path import setup_path

setup_path()

os.environ['DJANGO_SETTINGS_MODULE'] = 'zproject.settings'
import django

django.setup()

from zerver.worker.queue_processors import get_active_worker_queues
if __name__ == "__main__":
    # CLI entry point: print the names of the active worker queues, one
    # per line, optionally restricted to a single queue type.
    parser = argparse.ArgumentParser()
    # 'store' is argparse's default action and dest='queue_type' is derived
    # automatically from '--queue-type', so neither needs to be spelled out.
    parser.add_argument('--queue-type', default=None,
                        help="Specify which types of queues to list")
    args = parser.parse_args()

    for worker in sorted(get_active_worker_queues(args.queue_type)):
        print(worker)
|
{
"content_hash": "50d43ec85ef0eb17fd323c4ee586ecc9",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 88,
"avg_line_length": 28.8,
"alnum_prop": 0.6972222222222222,
"repo_name": "brainwane/zulip",
"id": "68b337c38a1f4c2368e8ef2f70f6593caff1acfa",
"size": "743",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "scripts/lib/queue_workers.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "423578"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "647926"
},
{
"name": "JavaScript",
"bytes": "2886792"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "398747"
},
{
"name": "Puppet",
"bytes": "90558"
},
{
"name": "Python",
"bytes": "6000548"
},
{
"name": "Ruby",
"bytes": "249744"
},
{
"name": "Shell",
"bytes": "110849"
},
{
"name": "TypeScript",
"bytes": "9543"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, url
from django.utils.translation import ugettext as _
from know.core.plugins import registry
from know.core.plugins.base import BasePlugin
from know.plugins.attachments import views
from know.plugins.attachments import models
from know.plugins.attachments import settings
from know.plugins.attachments.markdown_extensions import AttachmentExtension
from know.plugins.notifications.settings import ARTICLE_EDIT
from know.plugins.notifications.util import truncate_title
class AttachmentPlugin(BasePlugin):
    """Wiki plugin that wires up article file attachments: URL routes, the
    article tab, edit notifications and the markdown extension."""

    #settings_form = 'wiki.plugins.notifications.forms.SubscriptionForm'
    slug = settings.SLUG

    urlpatterns = patterns('',
        url(r'^$', views.AttachmentView.as_view(), name='attachments_index'),
        url(r'^search/$', views.AttachmentSearchView.as_view(), name='attachments_search'),
        url(r'^add/(?P<attachment_id>\d+)/$', views.AttachmentAddView.as_view(), name='attachments_add'),
        url(r'^replace/(?P<attachment_id>\d+)/$', views.AttachmentReplaceView.as_view(), name='attachments_replace'),
        url(r'^history/(?P<attachment_id>\d+)/$', views.AttachmentHistoryView.as_view(), name='attachments_history'),
        url(r'^download/(?P<attachment_id>\d+)/$', views.AttachmentDownloadView.as_view(), name='attachments_download'),
        url(r'^delete/(?P<attachment_id>\d+)/$', views.AttachmentDeleteView.as_view(), name='attachments_delete'),
        # NOTE(review): the name 'attachments_download' is used twice —
        # presumably intentional so reverse() resolves with or without a
        # revision_id; confirm.
        url(r'^download/(?P<attachment_id>\d+)/revision/(?P<revision_id>\d+)/$', views.AttachmentDownloadView.as_view(), name='attachments_download'),
        url(r'^change/(?P<attachment_id>\d+)/revision/(?P<revision_id>\d+)/$', views.AttachmentChangeRevisionView.as_view(), name='attachments_revision_change'),
    )

    # Tab label and icon shown on the article page.
    article_tab = (_(u'Attachments'), "icon-file")
    article_view = views.AttachmentView().dispatch

    # List of notifications to construct signal handlers for. This
    # is handled inside the notifications plugin.
    notifications = [{'model': models.AttachmentRevision,
                      'message': lambda obj: (_(u"A file was changed: %s") if not obj.deleted else _(u"A file was deleted: %s")) % truncate_title(obj.get_filename()),
                      'key': ARTICLE_EDIT,
                      'created': True,
                      'get_article': lambda obj: obj.attachment.article}
                     ]

    markdown_extensions = [AttachmentExtension()]

    def __init__(self):
        # Nothing to initialise; all configuration lives in class attributes.
        pass
# Register this plugin with the plugin registry at module import time.
registry.register(AttachmentPlugin)
|
{
"content_hash": "d04b24f498c60a88e68cefad3d901518",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 166,
"avg_line_length": 52.625,
"alnum_prop": 0.6828978622327792,
"repo_name": "indexofire/gork",
"id": "7da3d4c8c3dd4317f0431355a1c48dc587f6452c",
"size": "2550",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/gork/application/know/plugins/attachments/wiki_plugin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "199039"
},
{
"name": "JavaScript",
"bytes": "89817"
},
{
"name": "Python",
"bytes": "1120919"
},
{
"name": "Shell",
"bytes": "6713"
}
],
"symlink_target": ""
}
|
import contextlib
import errno
import os
from glance.openstack.common import excutils
from glance.openstack.common.gettextutils import _ # noqa
from glance.openstack.common import log as logging
LOG = logging.getLogger(__name__)

# Module-level cache used by read_cached_file(): maps a filename to a dict
# with the file's 'data' and the 'mtime' it was read at.
_FILE_CACHE = {}
def ensure_tree(path):
    """Create a directory (and any ancestor directories required).

    :param path: Directory to create
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Losing a creation race (the directory already exists) is fine;
        # any other failure — including EEXIST for a non-directory —
        # propagates to the caller.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def read_cached_file(filename, force_reload=False):
    """Read from a file if it has been modified.

    :param force_reload: Whether to reload the file.
    :returns: A tuple with a boolean specifying if the data is fresh
              or not.
    """
    global _FILE_CACHE

    if force_reload and filename in _FILE_CACHE:
        del _FILE_CACHE[filename]

    mtime = os.path.getmtime(filename)
    entry = _FILE_CACHE.setdefault(filename, {})
    # The cached copy is up to date when an entry exists and the file has
    # not been modified since it was recorded.
    up_to_date = bool(entry) and mtime <= entry.get('mtime', 0)
    if not up_to_date:
        LOG.debug(_("Reloading cached file %s") % filename)
        with open(filename) as fap:
            entry['data'] = fap.read()
        entry['mtime'] = mtime
    return (not up_to_date, entry['data'])
def delete_if_exists(path):
    """Delete a file, but ignore file not found error.

    :param path: File to delete
    """
    try:
        os.unlink(path)
    except OSError as e:
        # A missing file is the desired end state; re-raise anything else.
        if e.errno != errno.ENOENT:
            raise
@contextlib.contextmanager
def remove_path_on_error(path):
    """Protect code that wants to operate on PATH atomically.

    Any exception will cause PATH to be removed.

    :param path: File to work with
    """
    try:
        yield
    except Exception:
        # NOTE: save_and_reraise_exception is expected to re-raise the
        # original exception after the cleanup below runs — confirm
        # against glance.openstack.common.excutils.
        with excutils.save_and_reraise_exception():
            delete_if_exists(path)
def file_open(*args, **kwargs):
    """Open a file, returning a file object.

    See the built-in open() documentation for more details.

    Note: The reason this is kept in a separate module is to easily
    be able to provide a stub module that doesn't alter system
    state at all (for unit tests)
    """
    # The file() builtin was removed in Python 3; open() constructs the
    # same file objects in Python 2, so this is compatible with both.
    return open(*args, **kwargs)
|
{
"content_hash": "7d410e8da3af7226b13016f9f618218f",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 68,
"avg_line_length": 24.967391304347824,
"alnum_prop": 0.629516760992599,
"repo_name": "SUSE-Cloud/glance",
"id": "72ae50c37e170da7478f2fe93d52934b1acc836c",
"size": "2980",
"binary": false,
"copies": "5",
"ref": "refs/heads/stable/havana",
"path": "glance/openstack/common/fileutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2557049"
},
{
"name": "Shell",
"bytes": "3488"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest
class TestElementwiseAddOp(OpTest):
    """Base test for the elementwise_add operator.

    Subclasses override the init_* hooks (called in a fixed order from
    setUp) to vary input shapes, dtype, broadcast axis and kernel type.
    """

    def init_kernel_type(self):
        # Default: do not use the MKLDNN kernel.
        self.use_mkldnn = False

    def setUp(self):
        self.op_type = "elementwise_add"
        # Defaults; the init_* hooks below may override them.
        self.dtype = np.float32
        self.axis = -1
        self.init_dtype()
        self.init_input_output()
        self.init_kernel_type()
        self.init_axis()

        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(self.x),
            'Y': OpTest.np_dtype_to_fluid_dtype(self.y)
        }
        self.attrs = {'axis': self.axis, 'use_mkldnn': self.use_mkldnn}
        self.outputs = {'Out': self.out}

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        # Gradient checks are skipped for float16 inputs.
        if self.dtype == np.float16:
            return
        self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.005)

    def test_check_grad_ingore_x(self):
        if self.dtype == np.float16:
            return
        self.check_grad(
            ['Y'], 'Out', max_relative_error=0.005, no_grad_set=set("X"))

    def test_check_grad_ingore_y(self):
        if self.dtype == np.float16:
            return
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))

    def init_input_output(self):
        # Default case: two (13, 17) tensors, no broadcasting.
        self.x = np.random.uniform(0.1, 1, [13, 17]).astype(self.dtype)
        self.y = np.random.uniform(0.1, 1, [13, 17]).astype(self.dtype)
        self.out = np.add(self.x, self.y)

    def init_dtype(self):
        pass

    def init_axis(self):
        pass
class TestFP16ElementwiseAddOp(TestElementwiseAddOp):
    """float16 variant; output is only checked on CUDA devices that
    support float16."""

    def init_dtype(self):
        self.dtype = np.float16

    def test_check_output(self):
        # Nothing to check without a CUDA build.
        if not core.is_compiled_with_cuda():
            return
        place = core.CUDAPlace(0)
        if core.is_float16_supported(place):
            self.check_output_with_place(place, atol=1e-3)
class TestElementwiseAddOp_scalar(TestElementwiseAddOp):
    """(2, 3, 4) tensor plus a 1-element tensor (scalar broadcast)."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(1).astype(dt)
        self.out = np.add(self.x, self.y)
class TestFP16ElementwiseAddOp_scalar(TestFP16ElementwiseAddOp):
    """FP16: (2, 3, 4) tensor plus a 1-element tensor."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(1).astype(dt)
        self.out = np.add(self.x, self.y)
class TestElementwiseAddOp_scalar2(TestElementwiseAddOp):
    """(2, 3, 4) tensor plus a (1, 1) tensor (2-D scalar broadcast)."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(1, 1).astype(dt)
        self.out = np.add(self.x, self.y)
class TestFP16ElementwiseAddOp_scalar2(TestFP16ElementwiseAddOp):
    """FP16: (2, 3, 4) tensor plus a (1, 1) tensor."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(1, 1).astype(dt)
        self.out = np.add(self.x, self.y)
class TestElementwiseAddOp_Vector(TestElementwiseAddOp):
    """Two 1-D vectors of length 32 (no broadcasting)."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.random((32, )).astype(dt)
        self.y = np.random.random((32, )).astype(dt)
        self.out = self.x + self.y
class TestFP16ElementwiseAddOp_Vector(TestFP16ElementwiseAddOp):
    """FP16: two 1-D vectors of length 32."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.random((32, )).astype(dt)
        self.y = np.random.random((32, )).astype(dt)
        self.out = self.x + self.y
class TestElementwiseAddOp_broadcast_0(TestElementwiseAddOp):
    """Broadcast y of shape (2,) along axis 0 of x with shape (2, 3, 4)."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(2).astype(dt)
        self.out = np.add(self.x, self.y.reshape((2, 1, 1)))

    def init_axis(self):
        self.axis = 0
class TestFP16ElementwiseAddOp_broadcast_0(TestFP16ElementwiseAddOp):
    """FP16: broadcast y of shape (2,) along axis 0 of a (2, 3, 4) x."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(2).astype(dt)
        self.out = np.add(self.x, self.y.reshape((2, 1, 1)))

    def init_axis(self):
        self.axis = 0
class TestElementwiseAddOp_broadcast_1(TestElementwiseAddOp):
    """Broadcast y of shape (3,) along axis 1 of x with shape (2, 3, 4)."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(3).astype(dt)
        self.out = np.add(self.x, self.y.reshape((1, 3, 1)))

    def init_axis(self):
        self.axis = 1
class TestFP16ElementwiseAddOp_broadcast_1(TestFP16ElementwiseAddOp):
    """FP16: broadcast y of shape (3,) along axis 1 of a (2, 3, 4) x."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(3).astype(dt)
        self.out = np.add(self.x, self.y.reshape((1, 3, 1)))

    def init_axis(self):
        self.axis = 1
class TestElementwiseAddOp_broadcast_2(TestElementwiseAddOp):
    """Broadcast y of shape (4,) over the last axis of a (2, 3, 4) x
    (default axis)."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(4).astype(dt)
        self.out = np.add(self.x, self.y.reshape((1, 1, 4)))
class TestFP16ElementwiseAddOp_broadcast_2(TestFP16ElementwiseAddOp):
    """FP16: broadcast y of shape (4,) over the last axis of a (2, 3, 4)
    x (default axis)."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4).astype(dt)
        self.y = np.random.rand(4).astype(dt)
        self.out = np.add(self.x, self.y.reshape((1, 1, 4)))
class TestElementwiseAddOp_broadcast_3(TestElementwiseAddOp):
    """Broadcast y of shape (3, 4) over axes 1-2 of a (2, 3, 4, 5) x."""

    def init_input_output(self):
        dt = self.dtype
        self.x = np.random.rand(2, 3, 4, 5).astype(dt)
        self.y = np.random.rand(3, 4).astype(dt)
        self.out = np.add(self.x, self.y.reshape((1, 3, 4, 1)))

    def init_axis(self):
        self.axis = 1
class TestFP16ElementwiseAddOp_broadcast_3(TestFP16ElementwiseAddOp):
    """FP16 variant: broadcast (3, 4) over middle axes of (2, 3, 4, 5)."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.y = np.random.random((3, 4)).astype(self.dtype)
        self.out = np.add(self.x, self.y.reshape(1, 3, 4, 1))

    def init_axis(self):
        self.axis = 1
class TestElementwiseAddOp_broadcast_4(TestElementwiseAddOp):
    """Broadcast a (2, 1) tensor over axis 0 of a (2, 3, 4, 5) tensor."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.y = np.random.random((2, 1)).astype(self.dtype)
        self.out = np.add(self.x, self.y.reshape(2, 1, 1, 1))

    def init_axis(self):
        self.axis = 0
class TestFP16ElementwiseAddOp_broadcast_4(TestFP16ElementwiseAddOp):
    """FP16 variant: broadcast (2, 1) over axis 0 of (2, 3, 4, 5)."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.y = np.random.random((2, 1)).astype(self.dtype)
        self.out = np.add(self.x, self.y.reshape(2, 1, 1, 1))

    def init_axis(self):
        self.axis = 0
class TestElementwiseAddOp_broadcast_5(TestElementwiseAddOp):
    """Implicit broadcast: (2, 3, 4) + (2, 1, 4), no reshape needed."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4)).astype(self.dtype)
        self.y = np.random.random((2, 1, 4)).astype(self.dtype)
        self.out = np.add(self.x, self.y)
class TestFP16ElementwiseAddOp_broadcast_5(TestFP16ElementwiseAddOp):
    """FP16 variant: implicit broadcast (2, 3, 4) + (2, 1, 4)."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4)).astype(self.dtype)
        self.y = np.random.random((2, 1, 4)).astype(self.dtype)
        self.out = np.add(self.x, self.y)
class TestElementwiseAddOp_broadcast_6(TestElementwiseAddOp):
    """Implicit broadcast: (2, 3, 4, 5) + (2, 3, 1, 5), no reshape needed."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.y = np.random.random((2, 3, 1, 5)).astype(self.dtype)
        self.out = np.add(self.x, self.y)
class TestFP16ElementwiseAddOp_broadcast_6(TestFP16ElementwiseAddOp):
    """FP16 variant: implicit broadcast (2, 3, 4, 5) + (2, 3, 1, 5)."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.y = np.random.random((2, 3, 1, 5)).astype(self.dtype)
        self.out = np.add(self.x, self.y)
class TestElementwiseAddOp_rowwise_add_0(TestElementwiseAddOp):
    """Row-wise add: (2, 3, 4) + (3, 4) aligned at axis 1."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4)).astype(self.dtype)
        self.y = np.random.random((3, 4)).astype(self.dtype)
        self.out = np.add(self.x, self.y.reshape(1, 3, 4))

    def init_axis(self):
        self.axis = 1
class TestFP16ElementwiseAddOp_rowwise_add_0(TestFP16ElementwiseAddOp):
    """FP16 variant: row-wise add (2, 3, 4) + (3, 4) at axis 1."""

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4)).astype(self.dtype)
        self.y = np.random.random((3, 4)).astype(self.dtype)
        self.out = np.add(self.x, self.y.reshape(1, 3, 4))

    def init_axis(self):
        self.axis = 1
class TestElementwiseAddOp_rowwise_add_1(TestElementwiseAddOp):
    """Row-wise add with a scalar-like (1,) operand against (2, 1)."""

    def init_input_output(self):
        self.x = np.random.random((2, 1)).astype(self.dtype)
        self.y = np.random.random((1,)).astype(self.dtype)
        self.out = np.add(self.x, self.y.reshape(1, 1))

    def init_axis(self):
        self.axis = 1
class TestFP16ElementwiseAddOp_rowwise_add_1(TestFP16ElementwiseAddOp):
    """FP16 variant: row-wise add of (2, 1) and a (1,) operand."""

    def init_input_output(self):
        self.x = np.random.random((2, 1)).astype(self.dtype)
        self.y = np.random.random((1,)).astype(self.dtype)
        self.out = np.add(self.x, self.y.reshape(1, 1))

    def init_axis(self):
        self.axis = 1
class TestElementwiseAddOp_channelwise_add(TestElementwiseAddOp):
    """Channel-wise add: (3, 20, 20) + (3, 1, 1) with axis inferred (-1)."""

    def init_input_output(self):
        self.x = np.random.random((3, 20, 20)).astype(self.dtype)
        self.y = np.random.random((3, 1, 1)).astype(self.dtype)
        self.out = np.add(self.x, self.y)

    def init_axis(self):
        self.axis = -1
class TestFP16ElementwiseAddOp_channelwise_add(TestFP16ElementwiseAddOp):
    """FP16 variant: channel-wise add (3, 10, 20) + (3, 1, 1), axis -1."""

    def init_input_output(self):
        self.x = np.random.random((3, 10, 20)).astype(self.dtype)
        self.y = np.random.random((3, 1, 1)).astype(self.dtype)
        self.out = np.add(self.x, self.y)

    def init_axis(self):
        self.axis = -1
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "ba13d322a353a31165fe77d02344d14d",
"timestamp": "",
"source": "github",
"line_count": 296,
"max_line_length": 73,
"avg_line_length": 32.273648648648646,
"alnum_prop": 0.6285983460692977,
"repo_name": "tensor-tang/Paddle",
"id": "5783048f5fb68217dfbad829b33449e7951d8b0b",
"size": "10164",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/test_elementwise_add_op.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "32490"
},
{
"name": "C++",
"bytes": "10161819"
},
{
"name": "CMake",
"bytes": "290828"
},
{
"name": "Cuda",
"bytes": "1183095"
},
{
"name": "Dockerfile",
"bytes": "10002"
},
{
"name": "Python",
"bytes": "7082088"
},
{
"name": "Ruby",
"bytes": "353"
},
{
"name": "Shell",
"bytes": "200906"
}
],
"symlink_target": ""
}
|
# Legacy Django management script: `execute_manager` predates the modern
# manage.py template (it was removed in Django 1.6), so this layout targets
# old Django releases.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
|
{
"content_hash": "3b225f3465d710567c37f094a1a687d2",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 299,
"avg_line_length": 52.5,
"alnum_prop": 0.7238095238095238,
"repo_name": "fdr/tinytalk",
"id": "a49fbada81d23d475dde6f903a6239ce49eddef1",
"size": "543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webchat/manage.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "3713"
},
{
"name": "Python",
"bytes": "16938"
}
],
"symlink_target": ""
}
|
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
import json
import sys
import webbrowser
import codecs
import csv
from string import punctuation
import matplotlib.pyplot as plt
import time
class tweetlistener(StreamListener):
    """Tweepy stream listener that records tweets until a per-word quota is hit.

    Relies on module-level globals (counter, Total_tweet_count, outfile,
    search_words_list, indiv) that are initialized by main()/search_tweets().
    """

    def on_status(self, status):
        """Handle one incoming tweet: count it, log it, and persist its text."""
        global counter, Total_tweet_count, outfile, search_words_list, indiv, outfile
        counter += 1
        if counter >= Total_tweet_count:
            # Quota reached for the current search word: drop it from the
            # queue, run sentiment analysis on what was captured, then start
            # streaming the next word.
            search_words_list.pop(0)
            outfile.close()
            senti1 = Sentiment()
            senti1.sentiment_analysis()
            #time.sleep(15)
            search_tweets()
        try:
            print("----------NEW TWEET ARRIVED!-----------")
            print("Tweet Text : ", status.text)
            outfile.write(status.text)
            outfile.write(str("\n"))
            print("Author's Screen name : ", status.author.screen_name)
            print("Time of creation : ", status.created_at)
            print("Source of Tweet : ", status.source)
        except UnicodeEncodeError:
            # The console encoding may not support all characters; skip.
            print("Skipping a tweet")

    def on_error(self, status):
        """On stream error (e.g. rate limiting) plot collected results and exit."""
        drawing()
        print("Too soon reconnected . Will terminate the program")
        print(status)
        sys.exit()
class Sentiment():
    """Word-list sentiment scoring for the tweets captured for one search word."""

    def sentiment_analysis(self):
        """Score each captured tweet against positive/negative word lists.

        Reads the tweets written by the stream listener (global ``file2``),
        prints an overall verdict, writes per-tweet rows to
        'tweet_sentiment_<word>.csv', and appends aggregate counts to the
        global ``all_figs`` for later pie-chart plotting.
        """
        global file2, indiv, outfile, labels, colors, all_figs
        pos_sent = open("positive_words.txt").read()
        positive_words = pos_sent.split('\n')
        positive_counts = []
        neg_sent = open('negative_words.txt').read()
        negative_words = neg_sent.split('\n')
        outfile.close()
        negative_counts = []
        conclusion = []
        tweets_list = []
        tot_pos = 0
        tot_neu = 0
        tot_neg = 0
        all_total = 0
        tweets = codecs.open(file2, 'r', "utf-8").read()
        tweet_list_dup = []
        tweets_list = tweets.split('\n')
        for tweet in tweets_list:
            positive_counter = 0
            negative_counter = 0
            # Bug fix: the original called tweet.encode("utf-8") here, turning
            # the tweet into bytes; str-based replace() and the csv output then
            # break on Python 3 (this file uses print() throughout).
            tweet_list_dup.append(tweet)
            tweet_processed = tweet.lower()
            for p in tuple(punctuation):
                tweet_processed = tweet_processed.replace(p, '')
            words = tweet_processed.split(' ')
            word_count = len(words)
            for word in words:
                if word in positive_words:
                    positive_counter = positive_counter + 1
                elif word in negative_words:
                    negative_counter = negative_counter + 1
            positive_counts.append(positive_counter)
            negative_counts.append(negative_counter)
            # A tie between positive and negative hits counts as Neutral and
            # contributes half a point to the neutral tally.
            if positive_counter > negative_counter:
                conclusion.append("Positive")
                tot_pos += 1
            elif positive_counter == negative_counter:
                conclusion.append("Neutral")
                tot_neu += 0.5
            else:
                conclusion.append("Negative")
                tot_neg += 1
        output = zip(tweet_list_dup, positive_counts, negative_counts, conclusion)
        print("******** Overall Analysis **************")
        if tot_pos > tot_neg and tot_pos > tot_neu:
            print("Overall Sentiment - Positive")
        elif tot_neg > tot_pos and tot_neg > tot_neu:
            print("Overall Sentiment - Negative")
        elif tot_neg == tot_neu and tot_neg > tot_pos:
            print("Overall Sentiment - Negative")
        elif tot_pos + tot_neg < tot_neu:
            print("Overall Sentiment - Semi Positive ")
        else:
            print("Overall Sentiment - Neutral")
        print("%%%%%%%%%%%% End of stream - " + indiv + " %%%%%%%%%%%%%%%%%%%%%")
        file1 = 'tweet_sentiment_' + indiv + '.csv'
        # Bug fix: csv.writer needs a text-mode file on Python 3 ('wb' raised
        # TypeError); per the csv docs open with newline=''. The handle is now
        # also closed deterministically via a context manager.
        with open(file1, 'w', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerows(output)
        draw_helper = []
        draw_helper.append(tot_pos)
        draw_helper.append(tot_neg)
        draw_helper.append(tot_neu)
        draw_helper.append(indiv)
        all_figs.append(draw_helper)
def drawing():
    """Render one pie chart per analyzed search word and save it as a PNG."""
    global all_figs
    # Each entry of all_figs is [tot_pos, tot_neg, tot_neu, search_word],
    # appended by Sentiment.sentiment_analysis().
    for one_fig in all_figs:
        all_total = 0
        sentiments = {}
        sentiments["Positive"] = one_fig[0]
        sentiments["Negative"] = one_fig[1]
        sentiments["Neutral"] = one_fig[2]
        all_total = one_fig[0] + one_fig[1] + one_fig[2]
        sizes = []
        # Fractions of the pie for each sentiment class.
        sizes = [sentiments['Positive']/float(all_total), sentiments['Negative']/float(all_total), sentiments['Neutral']/float(all_total)]
        plt.pie(sizes,labels=labels, colors=colors, autopct='%1.1f%%', shadow=True)
        plt.axis('equal')
        plt.title('sentiment for the word - ' + str(one_fig[3]))
        fig_name = "fig_" + str(one_fig[3]) + ".png"
        # Save the figures
        plt.savefig(fig_name)
        plt.close()
    # NOTE(review): called after plt.close(), so this displays nothing;
    # kept as-is to preserve behavior.
    plt.show()
def main():
    """Read search words and per-word tweet quota, then start streaming."""
    global Total_tweet_count, outfile, file, search_words_list, auth, labels, colors, all_figs
    # NOTE(review): credentials are hard-coded; they should come from
    # environment variables or a config file, never source control.
    consumer_key = 'O9KXKiFmfzTNgF0eevXXXX'
    consumer_secret = 'ozgNXFyi4A0rimGGPx8bGJHLGosJibGiFASZbXXXXX'
    access_token = '300198545-EsrLh8Xh9OzkRUkjwubPomH0M4GS3pXOfGgBLXXX'
    access_secret = 'NlqKNVpnbYK1T5WuOROjSdGrmSfxy8mluggN0w36uzxXXX'
    search_words = str(input("Enter Search words - separate them by comma: "))
    Total_tweet_count = int(input("Enter tweets to be pulled for each search word: "))
    search_words_list = search_words.split(",")
    # Bug fix: the original immediately overwrote Total_tweet_count with a
    # hard-coded 10 (a leftover debug line), silently discarding the value the
    # user just typed in.
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_secret)
    labels = ['Positive', 'Negative', 'Neutral']
    colors = ['yellowgreen', 'lightcoral', 'gold']
    all_figs = []
    search_tweets()
    # Bug fix: the original reopened a hard-coded "F:\test_tweets1.txt" here,
    # but search_tweets() terminates the process, so that line was dead code
    # and has been removed.
def search_tweets():
    """Stream tweets for each remaining search word, then plot and exit."""
    global search_words_list, counter, auth, indiv, outfile, file2, plt, access
    # NOTE(review): a second set of hard-coded credentials; consolidate with
    # main() and load from configuration.
    consumer_key = 'ZkIxjbsPacixuhTg7aclkQ'
    consumer_secret = 'yme0jG3UDhG0CFgqlc50UQFSspo3EkUfPziUf2FFo'
    access_token = '1635433267-29ZpqtvpBIzVOQTnz1wgCsaotyEBTgs4V4jkUEM'
    access_secret = '33ZEGzs7pR1M0AYnD0mwOaZJ8JIF1Nc183VOFNkeug'
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_secret)
    print(search_words_list)
    for indiv in search_words_list:
        print("Search Word - " + indiv + " - is being processed")
        counter = 0
        # Bug fix: the original built the capture filename from indiv[0]
        # (just the first character), so search words sharing an initial
        # letter silently overwrote each other's tweet files. Use the whole
        # word, matching the per-word CSV naming in sentiment_analysis().
        file2 = "test_" + str(indiv) + ".txt"
        outfile = codecs.open(file2, 'w', "utf-8")
        twitterStream = Stream(auth, tweetlistener())
        one_list = []
        one_list.append(indiv)
        print(one_list)
        # Blocks until the listener's quota logic recurses or exits.
        twitterStream.filter(track=one_list, languages=["en"])
    drawing()
    sys.exit()
# Script entry point (runs on import; there is no __main__ guard).
main()
|
{
"content_hash": "ab77ae2584afca46553808824271f5a3",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 142,
"avg_line_length": 35.19900497512438,
"alnum_prop": 0.5858657243816254,
"repo_name": "Swaraj1998/MyCode",
"id": "0ecd54c9f3d6e72ab0977534d824887407ebb4d0",
"size": "7162",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ML-Workshop/Data/Twitter_Sentiment.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "303"
},
{
"name": "C",
"bytes": "26252"
},
{
"name": "C++",
"bytes": "44759"
},
{
"name": "HTML",
"bytes": "6910"
},
{
"name": "Java",
"bytes": "25674"
},
{
"name": "Perl",
"bytes": "716"
},
{
"name": "Python",
"bytes": "71899"
},
{
"name": "Shell",
"bytes": "983"
}
],
"symlink_target": ""
}
|
import requests
import json
import datetime
INPUT_FILE = "tickers.txt"
FILENAME = "stocktwits.json"
REMOVE_OLDER_THAN = 21 # in days
KEEP_NULL_SENTIMENT = False
class Sentiment():
    """Fetch StockTwits messages for a set of tickers and filter them by age."""

    def __init__(self):
        self.tickers = []   # ticker symbols loaded from the input file
        self.data = {}      # symbol -> list of StockTwits message dicts

    # write to JSON file and properly formats the file
    def write_to_file(self, nameOfFile, data):
        """Pretty-print ``data`` as JSON into ``nameOfFile``."""
        with open(nameOfFile, 'w+') as f:
            print("Dumping JSON to", nameOfFile)
            json.dump(data, f, sort_keys=True, indent=4, separators=(',', ':'))

    # get the data using StockTwits API
    def get_twits(self, ticker):
        """Return the raw StockTwits symbol-stream payload for one ticker."""
        url = "https://api.stocktwits.com/api/2/streams/symbol/{0}.json".format(ticker)
        response = requests.get(url).json()
        return response

    # Get data for each ticker in the tickers list
    def get_twits_list(self, tickers=None):
        """Fetch messages for every ticker, accumulating into self.data.

        Failures for individual tickers are logged and skipped so one bad
        symbol does not abort the whole run (deliberate best-effort).
        """
        if tickers is None:
            tickers = self.tickers
        for ticker in tickers:
            print("Getting data for", ticker)
            try:
                data = self.get_twits(ticker)
                symbol = data['symbol']['symbol']
                msgs = data['messages']
                self.data.update({symbol: msgs})
            except Exception as e:
                print(e)
                print("Error getting", ticker)
        return self.data

    def read_tickers(self, input_file=INPUT_FILE):
        """Load ticker symbols (one per line, upper-cased) from a text file."""
        print("Reading tickers from", input_file)
        self.tickers = []
        # Bug fix: close the file deterministically with a context manager
        # instead of leaking the open handle.
        with open(input_file, 'r') as f:
            for line in f:
                line = line.strip('\n\t')
                line = line.upper()
                self.tickers.append(line)
        return self.tickers

    # Removes data older than age_limit
    def remove_old(self, original=None, age_limit=REMOVE_OLDER_THAN):
        """Drop messages older than ``age_limit`` days and, unless
        KEEP_NULL_SENTIMENT is set, those whose sentiment entity is null."""
        if original is None:
            original = self.data
        result = {}
        print("Removing tweets that are more than", age_limit, "days old")
        threshold = datetime.datetime.now() - datetime.timedelta(age_limit)
        for ticker in list(original.keys()):
            result[ticker] = []
            for msg in original[ticker]:
                dt = datetime.datetime.strptime(msg["created_at"], "%Y-%m-%dT%H:%M:%SZ")
                sentiment = True if KEEP_NULL_SENTIMENT else msg["entities"]["sentiment"]
                # `is not None` is the idiomatic null check (was `!= None`).
                if dt >= threshold and sentiment is not None:
                    result[ticker].append(msg)
        return result
# Script entry point: optionally prompt for a custom output filename, then
# fetch, age-filter, and dump StockTwits data for the configured tickers.
if __name__ == "__main__":
    x = input("Do you want to specify name of output file? Type y or Y for yes.\n").lower()
    if x.startswith("y"):
        filename = input("Enter JSON file name:\n")
        # Ensure a .json extension on the user-supplied name.
        if not (filename.endswith(".json")):
            filename = filename + ".json"
        FILENAME = filename
    sentiment = Sentiment()
    codes = sentiment.read_tickers()
    twitdata = sentiment.get_twits_list()
    twitdata = sentiment.remove_old(twitdata)
    sentiment.write_to_file(FILENAME, twitdata)
|
{
"content_hash": "70e8eb6512872afff651cff26d9a8114",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 91,
"avg_line_length": 34.2093023255814,
"alnum_prop": 0.5781781101291639,
"repo_name": "Vaibhav/Stock-Analysis",
"id": "24e7d91163a141e0bd0e534d94789a1bbf04fe03",
"size": "2960",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "Sentiment/stocktwits.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "72095"
}
],
"symlink_target": ""
}
|
"""Phonological operations that depend on iso graphemes."""
import pynini as p
from nisaba.scripts.natural_translit.brahmic import iso_inventory as gr
from nisaba.scripts.natural_translit.common import rewrite_functions as rw
from nisaba.scripts.natural_translit.common import util as u
from nisaba.scripts.natural_translit.phonology import phoneme_inventory as ph
from nisaba.scripts.natural_translit.phonology.operations import syllable as syl
# Vocalic liquids
def _vocalic(vcl: p.FstLike) -> p.Fst:
    """Pronunciation of the vowel part of the vocalic Rs and Ls.

    Args:
      vcl: Vowel phoneme to substitute for the {vcl} placeholder.

    Returns:
      Rewrite fst mapping ph.VCL to the given vowel.
    """
    return rw.rewrite(ph.VCL, vcl)
# Precomposed rewrites realizing the vocalic placeholder as /i/, /u/, or
# the {ec} vowel, respectively.
VOCALIC_I = _vocalic(ph.I)
VOCALIC_U = _vocalic(ph.U)
VOCALIC_EC = _vocalic(ph.EC)
# Schwa handling
def _default_schwa(schwa: p.FstLike) -> p.Fst:
    """Pronounces unassigned schwas as the default phoneme for the langauge.

    Args:
      schwa: Phoneme to use for both plain and vocalic schwa placeholders.

    Returns:
      Rewrite fst.
    """
    return rw.rewrite(p.union(ph.SCHWA, ph.VCL_SCHWA), schwa)
# Per-language defaults for unassigned schwa: /a/ or the {ec} vowel.
SCHWA_A = _default_schwa(ph.A)
SCHWA_EC = _default_schwa(ph.EC)
def _vocal_schwa(
    preceding: p.FstLike = u.EPSILON,
    following: p.FstLike = u.EPSILON) -> p.Fst:
    """Realizes schwa as a pronounced (vocal) schwa in the given context.

    Both context arguments default to epsilon, i.e. unrestricted.
    """
    return rw.rewrite_by_context(ph.SCHWA, ph.VCL_SCHWA, preceding, following)
def _silent_schwa(
    preceding: p.FstLike = u.EPSILON,
    following: p.FstLike = u.EPSILON) -> p.Fst:
    """Silences (deletes) schwa in the given context.

    Both context arguments default to epsilon, i.e. unrestricted.
    """
    return rw.rewrite_by_context(ph.SCHWA, ph.SIL, preceding, following)
# Schwa is pronounced before coda graphemes
_SCHWA_BEFORE_CODA = _vocal_schwa(following=gr.CODA)
# Schwa is pronounced before an independent vowel grapheme.
_SCHWA_BEFORE_IND_VOWEL = _vocal_schwa(following=gr.VOWEL_I)
# Schwa is pronounced after {i}{y} and {i_l}{y}
_SCHWA_AFTER_IY = _vocal_schwa(rw.concat_r(p.union(ph.I, ph.I_L), ph.Y))
def _schwa_eow(coda_cl) -> p.Fst:
    """Deletes the word final schwa if it's preceded by a legal coda."""
    return _silent_schwa(
        preceding=syl.legal_coda(coda_cl),
        following=u.EOS)
def _schwa_between_syllables(onset_cl, coda_cl) -> p.Fst:
    """Deletes schwa between two well-formed syllables."""
    return _silent_schwa(
        preceding=syl.legal_coda(coda_cl),
        following=syl.legal_onset(onset_cl))
def process_schwa(
    onset_cl: p.FstLike = u.EPSILON,
    coda_cl: p.FstLike = u.EPSILON
) -> p.Fst:
    """Compose fsts for schwa handling.

    Applies vocalisation contexts first, then word-final and
    between-syllable deletion, in the same left-to-right order as
    sequential `@` composition.
    """
    composed = _SCHWA_BEFORE_CODA
    for stage in (
        _SCHWA_BEFORE_IND_VOWEL,
        _SCHWA_AFTER_IY,
        _schwa_eow(coda_cl),
        _schwa_between_syllables(onset_cl, coda_cl),
    ):
        composed = composed @ stage
    return composed.optimize()
# Anusvara place of articulation assimilation functions
def _assign_anusvara(
    phoneme: p.FstLike,
    place: p.FstLike = u.EPSILON) -> p.Fst:
    """Pronunciation of anusvara.

    Anusvara is mapped to nasalisation by default. The pronunciation of it
    can change across languages and it can be assimilated to the place of
    articulation of the following phoneme.

    Args:
      phoneme: Pronuncation of <ans>.
      place: Following phoneme.

    Returns:
      Rewrite fst.

    Following call:
    ```
    _assign_anusvara(ph.M, ph.LABIAL)
    ```
    would return:
    ```
    p.cdrewrite(
        p.cross('<ans>{nsl}', '<ans>{m}')
        '',
        p.union(ph.M, ph.P, ph.B),
        u.BYTE_STAR)
    ```
    """
    return rw.reassign_by_context(
        gr.ANS,
        ph.NSL,
        phoneme,
        following=place)
# Context-free language defaults: anusvara realized as /m/ or dental /n/.
DEFAULT_ANUSVARA_LABIAL = _assign_anusvara(ph.M)
DEFAULT_ANUSVARA_DENTAL = _assign_anusvara(ph.NI)
# Place-of-articulation assimilation rules, one per following-phoneme class.
ANUSVARA_ASSIMILATION_LABIAL = _assign_anusvara(ph.M, ph.LABIAL)
ANUSVARA_ASSIMILATION_DENTAL = _assign_anusvara(ph.NI, ph.DENTAL)
ANUSVARA_ASSIMILATION_ALVEOLAR = _assign_anusvara(ph.N, ph.ALVEOLAR)
ANUSVARA_ASSIMILATION_PALATAL = _assign_anusvara(ph.NY, ph.PALATAL)
ANUSVARA_ASSIMILATION_RETROFLEX = _assign_anusvara(ph.NN, ph.RETROFLEX)
ANUSVARA_ASSIMILATION_VELAR = _assign_anusvara(ph.NG, ph.VELAR)
# Word-final anusvara is reassigned to plain nasalisation.
FINAL_ANUSVARA_NASALIZATION = rw.reassign_word_final(gr.ANS, ph.NASAL, ph.NSL)
# Composes anusvara assimilation for all places of articulation.
ANUSVARA_ASSIMILATION = (ANUSVARA_ASSIMILATION_LABIAL @
                         ANUSVARA_ASSIMILATION_DENTAL @
                         ANUSVARA_ASSIMILATION_ALVEOLAR @
                         ANUSVARA_ASSIMILATION_PALATAL @
                         ANUSVARA_ASSIMILATION_RETROFLEX @
                         ANUSVARA_ASSIMILATION_VELAR).optimize()
# JNY clusters
def _rewrite_jny(
    j: p.FstLike,
    ny: p.FstLike) -> p.Fst:
    """Jny clusters are pronounced differently across languages.

    Args:
      j: Replacement phoneme for the <j>/{jh} alignment.
      ny: Replacement phoneme for the <ny>/{ny} alignment.
    """
    return rw.reassign_adjacent_alignments(gr.J, ph.JH, j, gr.NY, ph.NY, ny)
# Common language-specific realizations of the <j><ny> cluster.
JNY_TO_GNY = _rewrite_jny(ph.G, ph.NY)
JNY_TO_GY = _rewrite_jny(ph.G, ph.Y)
JNY_TO_NY = _rewrite_jny(ph.SIL, ph.NY)

# <ph><ph> pronounced {f}{f}. Should only occur in Perso-Arabic words.
# TODO: Move this when there is a Perso-Arabic module.
PHPH_TO_FF = rw.reassign_adjacent_alignments(
    gr.PH, ph.P + ph.ASP, ph.F,
    gr.PH, ph.P + ph.ASP, ph.F,)

# Simple phoneme substitutions: {rt} -> {r} and {a} -> {ec}.
RT_TO_R = rw.rewrite(ph.RT, ph.R)
A_TO_EC = rw.rewrite(ph.A, ph.EC)
|
{
"content_hash": "16d496cc5daf2f10cecb3ef108f76e41",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 80,
"avg_line_length": 26.92896174863388,
"alnum_prop": 0.671672077922078,
"repo_name": "google-research/nisaba",
"id": "dd4ab2c1fedc95c81624c353b0fa5c78a6437df1",
"size": "5508",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "nisaba/scripts/natural_translit/brahmic/iso2txn_ops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "75123"
},
{
"name": "Python",
"bytes": "214834"
},
{
"name": "Starlark",
"bytes": "121418"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
import logging
from dataclasses import dataclass
from typing import ClassVar
import ijson
from pants.backend.go.util_rules.build_opts import GoBuildOptions
from pants.backend.go.util_rules.sdk import GoSdkProcess
from pants.engine.fs import CreateDigest, Digest, FileContent
from pants.engine.internals.selectors import Get
from pants.engine.process import ProcessResult
from pants.engine.rules import collect_rules, rule
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
logger = logging.getLogger(__name__)
class GoStdLibImports(FrozenDict[str, str]):
    """A mapping of standard library import paths to the `.a` static file
    paths for that import path.

    For example, "net/smtp": "/absolute_path_to_goroot/pkg/darwin_arm64/net/smtp.a".
    """
@dataclass(frozen=True)
class GoStdLibImportsRequest:
    """Request key for computing the stdlib import map."""

    # Whether to list the race-detector-instrumented package archives.
    with_race_detector: bool
@rule(desc="Determine Go std lib's imports", level=LogLevel.DEBUG)
async def determine_go_std_lib_imports(request: GoStdLibImportsRequest) -> GoStdLibImports:
    """Map each Go stdlib import path to its `.a` archive via `go list`."""
    race_args = ("-race",) if request.with_race_detector else ()
    list_result = await Get(
        ProcessResult,
        GoSdkProcess(
            # "-find" skips determining dependencies and imports for each package.
            command=("list", "-find", *race_args, "-json", "std"),
            description="Ask Go for its available import paths",
        ),
    )
    mapping = {}
    for package_descriptor in ijson.items(list_result.stdout, "", multiple_values=True):
        import_path = package_descriptor.get("ImportPath")
        target = package_descriptor.get("Target")
        # Entries without both fields have no usable archive; skip them.
        if import_path and target:
            mapping[import_path] = target
    return GoStdLibImports(mapping)
@dataclass(frozen=True)
class ImportConfig:
    """An `importcfg` file associating import paths to their `__pkg__.a` files."""

    # Digest containing the rendered importcfg file.
    digest: Digest

    # Relative path at which the importcfg file is materialized.
    CONFIG_PATH: ClassVar[str] = "./importcfg"
@dataclass(frozen=True)
class ImportConfigRequest:
    """Create an `importcfg` file associating import paths to their `__pkg__.a` files."""

    # First-party/third-party mapping of import path -> `.a` archive path.
    import_paths_to_pkg_a_files: FrozenDict[str, str]
    build_opts: GoBuildOptions
    # When True, the Go standard library's archives are appended as well.
    include_stdlib: bool = True

    @classmethod
    def stdlib_only(cls, build_opts: GoBuildOptions) -> ImportConfigRequest:
        """Convenience constructor for a config that covers only the stdlib."""
        return cls(FrozenDict(), build_opts=build_opts, include_stdlib=True)
@rule
async def generate_import_config(request: ImportConfigRequest) -> ImportConfig:
    """Render an `importcfg` file and capture it as a digest."""
    lines = ["# import config"]
    for import_path, pkg_a_path in request.import_paths_to_pkg_a_files.items():
        lines.append(f"packagefile {import_path}={pkg_a_path}")
    if request.include_stdlib:
        std_lib_imports = await Get(
            GoStdLibImports,
            GoStdLibImportsRequest(with_race_detector=request.build_opts.with_race_detector),
        )
        for import_path, static_file_path in std_lib_imports.items():
            lines.append(f"packagefile {import_path}={static_file_path}")
    content = "\n".join(lines).encode("utf-8")
    digest = await Get(Digest, CreateDigest([FileContent(ImportConfig.CONFIG_PATH, content)]))
    return ImportConfig(digest)
def rules():
    """Register this module's @rule definitions with the Pants engine."""
    return collect_rules()
|
{
"content_hash": "931e42f26982b5d603efa4b2ff9951e0",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 95,
"avg_line_length": 32.82178217821782,
"alnum_prop": 0.6868778280542986,
"repo_name": "pantsbuild/pants",
"id": "5fa6bf98f2a81b6dd03b3f36b9485709c3fc328f",
"size": "3447",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/python/pants/backend/go/util_rules/import_analysis.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "688"
},
{
"name": "Dockerfile",
"bytes": "1132"
},
{
"name": "Go",
"bytes": "67315"
},
{
"name": "Java",
"bytes": "97190"
},
{
"name": "Kotlin",
"bytes": "6433"
},
{
"name": "Mustache",
"bytes": "3771"
},
{
"name": "Python",
"bytes": "7582858"
},
{
"name": "Rust",
"bytes": "1657282"
},
{
"name": "Scala",
"bytes": "21950"
},
{
"name": "Shell",
"bytes": "31400"
},
{
"name": "Starlark",
"bytes": "76892"
}
],
"symlink_target": ""
}
|
import os
import pickle
import torch
import numpy as np
import matplotlib.pyplot as plt
from deluca.lung.controllers import ResidualExplorer, PID
from deluca.lung.environments import PhysicalLung
from deluca.lung.utils.data.analyzer import Analyzer
from deluca.lung.utils.core import BreathWaveform
from deluca.lung.utils.scripts.run_calibration import run_calibration
from deluca.lung.utils.scripts.run_controller import run_controller
plt.rc("figure", figsize=(10, 3))
def collect_runs(controller, path, n_runs=1, append_to=None, env=None, **kwargs):
    """Run `controller` on `env` `n_runs` times, pickling each run's result.

    Args:
        controller: Controller passed through to run_controller.
        path: printf-style template with one %i slot for the run index.
        n_runs: Number of runs to collect.
        append_to: Optional existing results list to extend in place.
        env: Lung environment; reset() is called before each run.
        **kwargs: Forwarded to run_controller.

    Returns:
        List of per-run results (the `append_to` list itself when given).
    """
    results = [] if append_to is None else append_to
    for i in range(n_runs):
        env.reset()
        result = run_controller(controller, env=env, **kwargs)
        results.append(result)
        # Bug fix: use a context manager so each pickle file handle is closed
        # promptly (the original leaked one open handle per run).
        with open(path % i, "wb") as f:
            pickle.dump(result, f)
    return results
def run_explorer(
    controller,
    directory,
    R=50,
    C=10,
    PEEP=5,
    T=10000,
    n_runs=5,
    PIPs=None,
    abort=70,
    dt=0.03,
    env=None,
):
    """Sweep a ResidualExplorer controller over a set of PIP targets.

    Calibrates before and after the sweep, collecting `n_runs` pickled runs
    per PIP under `directory`.

    Args:
        controller: Must be a ResidualExplorer; its waveform is set per PIP.
        directory: Output directory for calibration and run pickles.
        R, C, PEEP: Lung settings used in calibration and file naming.
        T: Number of timesteps per run.
        n_runs: Runs collected per PIP value.
        PIPs: PIP targets to sweep; defaults to [10, 15, 20, 25, 30, 35].
        abort: Abort threshold forwarded to collect_runs.
        dt: Timestep, in seconds.
        env: Lung environment; defaults to a PhysicalLung.

    Returns:
        Dict mapping each PIP to its list of run results, or None when the
        controller has the wrong type.
    """
    # Bug fix: the original used a mutable list as the default for PIPs; use
    # the None-sentinel idiom while preserving the same default sweep.
    if PIPs is None:
        PIPs = [10, 15, 20, 25, 30, 35]
    if not isinstance(controller, ResidualExplorer):
        print("ERROR: expecting ResidualExplorer as controller")
        return
    env = env or PhysicalLung()
    print("Running calibration")
    run_calibration(R, C, PEEP, directory)
    all_results = {}
    for PIP in PIPs:
        path = os.path.join(directory, f"R{R}C{C}PEEP{PEEP}_PIP{PIP}_pid_triangle_residual_%i.pkl")
        controller.waveform = BreathWaveform((PEEP, PIP))
        results = collect_runs(
            controller,
            path,
            R=R,
            C=C,
            PEEP=PEEP,
            T=T,
            dt=dt,
            n_runs=n_runs,
            abort=abort,
            env=env,
        )
        all_results[PIP] = results
    print("Running calibration")
    run_calibration(R, C, PEEP, directory)
    return all_results
|
{
"content_hash": "065db2bcbd55c9cb73983ac8075fa6a0",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 99,
"avg_line_length": 24.363636363636363,
"alnum_prop": 0.6215351812366737,
"repo_name": "google/deluca-lung",
"id": "af430365b606eb3adbb39f122f015c6570be667f",
"size": "1876",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "deluca/lung/utils/scripts/run_explorer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "217970"
}
],
"symlink_target": ""
}
|
import time
from testrunner import testcase
from testutils import mock
from webob import request
from upsrv import config
from upsrv import url_sign
class UrlSignTest(testcase.TestCase):
    """Unit tests for upsrv.url_sign URL signing and verification."""

    def setUp(self):
        testcase.TestCase.setUp(self)
        self.cfg = config.UpsrvConfig()
        self.cfg.downloadSignatureExpiry = 3600
        self.cfg.downloadSignatureKey = ['default key']
        # Freeze the clock so expiry timestamps and signatures are
        # deterministic for the fixture values below.
        self.now = 1000000000.0
        mock.mock(time, 'time', self.now)

    def tearDown(self):
        mock.unmockAll()
        testcase.TestCase.tearDown(self)

    def _time(self, when):
        # Advance the mocked clock by `when` seconds relative to setUp's now.
        mock.mock(time, 'time', self.now + when)

    def _req(self, path):
        # Build a minimal WebOb request for the given path + query string.
        req = request.Request.blank(path)
        return req

    def testUrlSign(self):
        # Signing appends expiry (e=) and signature (s=) query parameters;
        # signing with no configured key must fail loudly.
        result = url_sign.sign_path(self.cfg, '/example')
        self.assertEqual(result, '/example?e=1000003600&s=4001ab09d49d0c585a06f243156a056ddde12cf4')
        self.cfg.downloadSignatureKey = []
        self.assertRaises(RuntimeError, url_sign.sign_path, self.cfg, '/example')

    def testUrlVerify(self):
        # A freshly signed URL verifies; verification with no key configured
        # must also fail loudly.
        req = self._req('/example?e=1000003600&s=4001ab09d49d0c585a06f243156a056ddde12cf4')
        result = url_sign.verify_request(self.cfg, req)
        self.assertEqual(result, True)
        self.cfg.downloadSignatureKey = []
        self.assertRaises(RuntimeError, url_sign.verify_request, self.cfg, req)

    def testVerifyOldKey(self):
        # Signing uses the first (newest) key, but verification must still
        # accept URLs signed with an older key later in the list.
        self.cfg.downloadSignatureKey = ['b']
        path = url_sign.sign_path(self.cfg, '/example')
        self.cfg.downloadSignatureKey = ['a', 'b']
        self.assertNotEqual(url_sign.sign_path(self.cfg, '/example'), path)
        req = self._req(path)
        self.assertEqual(url_sign.verify_request(self.cfg, req), True)

    def testVerifyExpired(self):
        # A URL past its expiry window (3600s) must fail verification.
        path = url_sign.sign_path(self.cfg, '/example')
        self._time(4000)
        req = self._req(path)
        result = url_sign.verify_request(self.cfg, req)
        self.assertEqual(result, False)

    def testVerifyBad(self):
        # Missing/empty signatures and a tampered expiry must all fail.
        self.assertEqual(url_sign.verify_request(self.cfg, self._req('/example?e=1000003600')), False)
        self.assertEqual(url_sign.verify_request(self.cfg, self._req('/example?e=1000003600&s=')), False)
        path = url_sign.sign_path(self.cfg, '/example')
        path += '&e=2000000000'
        self._time(4000)
        self.assertEqual(url_sign.verify_request(self.cfg, self._req(path)), False)
|
{
"content_hash": "5c5397cd185e450b2550bace294c6a7a",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 105,
"avg_line_length": 36.08955223880597,
"alnum_prop": 0.6555004135649297,
"repo_name": "sassoftware/rbm",
"id": "c4ca6302b5353c15c639faf7a57186a61b042275",
"size": "3005",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "upsrv_test/url_sign_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "88"
},
{
"name": "Makefile",
"bytes": "10875"
},
{
"name": "Nginx",
"bytes": "975"
},
{
"name": "Puppet",
"bytes": "2655"
},
{
"name": "Python",
"bytes": "103667"
},
{
"name": "Ruby",
"bytes": "2386"
},
{
"name": "Shell",
"bytes": "7886"
}
],
"symlink_target": ""
}
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ["setup_ranger_plugin", "get_audit_configs", "generate_ranger_service_config"]
import os
import ambari_simplejson as json
from datetime import datetime
from resource_management.libraries.functions.ranger_functions import Rangeradmin
from resource_management.core.resources import File, Directory, Execute
from resource_management.libraries.resources.xml_config import XmlConfig
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.core.logger import Logger
from resource_management.core.source import DownloadSource, InlineTemplate
from resource_management.libraries.functions.ranger_functions_v2 import RangeradminV2
from resource_management.core.utils import PasswordString
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
def setup_ranger_plugin(component_select_name, service_name, previous_jdbc_jar,
                        component_downloaded_custom_connector, component_driver_curl_source,
                        component_driver_curl_target, java_home,
                        repo_name, plugin_repo_dict,
                        ranger_env_properties, plugin_properties,
                        policy_user, policymgr_mgr_url,
                        plugin_enabled, conf_dict, component_user, component_group,
                        cache_service_list, plugin_audit_properties, plugin_audit_attributes,
                        plugin_security_properties, plugin_security_attributes,
                        plugin_policymgr_ssl_properties, plugin_policymgr_ssl_attributes,
                        component_list, audit_db_is_enabled, credential_file,
                        xa_audit_db_password, ssl_truststore_password,
                        ssl_keystore_password, api_version=None, stack_version_override = None, skip_if_rangeradmin_down = True,
                        is_security_enabled = False, is_stack_supports_ranger_kerberos = False,
                        component_user_principal = None, component_user_keytab = None, cred_lib_path_override = None, cred_setup_prefix_override = None):
  """
  Enable or disable the Ranger plugin for a stack component.

  When ``plugin_enabled`` is true this: optionally stages the audit-DB JDBC
  driver, ensures a Ranger repository named ``repo_name`` exists (via the v1
  or v2 admin API unless a non-empty policy cache file already references
  it), writes ``ranger-security.xml`` plus the plugin audit/security/SSL
  XML configs into ``conf_dict``, prepares the policy cache directory, and
  stores the plugin passwords in a JCEKS credential store.  When false, the
  marker file ``ranger-security.xml`` is deleted.

  NOTE: the resource_management ``format()`` calls below interpolate local
  variable names from this frame (e.g. {policycache_path}, {repo_name}), so
  local names are part of the behavior.
  """
  # Stage the JDBC driver jar only when DB auditing is on and a real source
  # URL was computed (a "/None" suffix means no custom jar was configured).
  if audit_db_is_enabled and component_driver_curl_source is not None and not component_driver_curl_source.endswith("/None"):
    if previous_jdbc_jar and os.path.isfile(previous_jdbc_jar):
      # Drop the jar left over from a previously-configured driver name.
      File(previous_jdbc_jar, action='delete')
    File(component_downloaded_custom_connector,
         content = DownloadSource(component_driver_curl_source),
         mode = 0644
    )
    Execute(('cp', '--remove-destination', component_downloaded_custom_connector, component_driver_curl_target),
            path=["/bin", "/usr/bin/"],
            sudo=True
    )
    File(component_driver_curl_target, mode=0644)
  # Normalize the admin URL: the REST paths appended later assume no
  # trailing slash.
  if policymgr_mgr_url.endswith('/'):
    policymgr_mgr_url = policymgr_mgr_url.rstrip('/')
  if stack_version_override is None:
    stack_version = get_stack_version(component_select_name)
  else:
    stack_version = stack_version_override
  component_conf_dir = conf_dict
  if plugin_enabled:
    # If any policy cache file already names this repo, the repo was created
    # earlier and the (possibly unavailable) Ranger admin can be skipped.
    service_name_exist = False
    policycache_path = os.path.join('/etc', 'ranger', repo_name, 'policycache')
    try:
      for cache_service in cache_service_list:
        policycache_json_file = format('{policycache_path}/{cache_service}_{repo_name}.json')
        if os.path.isfile(policycache_json_file) and os.path.getsize(policycache_json_file) > 0:
          with open(policycache_json_file) as json_file:
            json_data = json.load(json_file)
            if 'serviceName' in json_data and json_data['serviceName'] == repo_name:
              service_name_exist = True
              Logger.info("Skipping Ranger API calls, as policy cache file exists for {0}".format(service_name))
              Logger.warning("If service name for {0} is not created on Ranger Admin UI, then to re-create it delete policy cache file: {1}".format(service_name, policycache_json_file))
              break
    except Exception, err:
      # Best-effort check: a corrupt cache file only costs an extra API call.
      Logger.error("Error occurred while fetching service name from policy cache file.\nError: {0}".format(err))
    if not service_name_exist:
      # v2 API supports Kerberos-aware repository creation; v1 does not.
      if api_version is not None and api_version == 'v2':
        ranger_adm_obj = RangeradminV2(url=policymgr_mgr_url, skip_if_rangeradmin_down=skip_if_rangeradmin_down)
        ranger_adm_obj.create_ranger_repository(service_name, repo_name, plugin_repo_dict,
                                                ranger_env_properties['ranger_admin_username'], ranger_env_properties['ranger_admin_password'],
                                                ranger_env_properties['admin_username'], ranger_env_properties['admin_password'],
                                                policy_user, is_security_enabled, is_stack_supports_ranger_kerberos, component_user,
                                                component_user_principal, component_user_keytab)
      else:
        ranger_adm_obj = Rangeradmin(url=policymgr_mgr_url, skip_if_rangeradmin_down=skip_if_rangeradmin_down)
        ranger_adm_obj.create_ranger_repository(service_name, repo_name, plugin_repo_dict,
                                                ranger_env_properties['ranger_admin_username'], ranger_env_properties['ranger_admin_password'],
                                                ranger_env_properties['admin_username'], ranger_env_properties['admin_password'],
                                                policy_user)
    # Marker file whose timestamp records when the plugin was last enabled.
    current_datetime = datetime.now()
    File(format('{component_conf_dir}/ranger-security.xml'),
      owner = component_user,
      group = component_group,
      mode = 0644,
      content = InlineTemplate(format('<ranger>\n<enabled>{current_datetime}</enabled>\n</ranger>'))
    )
    # Policy cache directory, writable by the component so the plugin can
    # persist downloaded policies.
    Directory([os.path.join('/etc', 'ranger', repo_name), os.path.join('/etc', 'ranger', repo_name, 'policycache')],
      owner = component_user,
      group = component_group,
      mode=0775,
      create_parents = True,
      cd_access = 'a'
    )
    # Pre-create (and fix ownership of) one cache file per plugin service.
    for cache_service in cache_service_list:
      File(os.path.join('/etc', 'ranger', repo_name, 'policycache', format('{cache_service}_{repo_name}.json')),
        owner = component_user,
        group = component_group,
        mode = 0644
      )
    # remove plain-text password from xml configs
    plugin_audit_password_property = 'xasecure.audit.destination.db.password'
    plugin_audit_properties_copy = {}
    plugin_audit_properties_copy.update(plugin_audit_properties)
    if plugin_audit_password_property in plugin_audit_properties_copy:
      plugin_audit_properties_copy[plugin_audit_password_property] = "crypted"
    XmlConfig(format('ranger-{service_name}-audit.xml'),
              conf_dir=component_conf_dir,
              configurations=plugin_audit_properties_copy,
              configuration_attributes=plugin_audit_attributes,
              owner = component_user,
              group = component_group,
              mode=0744)
    XmlConfig(format('ranger-{service_name}-security.xml'),
              conf_dir=component_conf_dir,
              configurations=plugin_security_properties,
              configuration_attributes=plugin_security_attributes,
              owner = component_user,
              group = component_group,
              mode=0744)
    # remove plain-text password from xml configs
    plugin_password_properties = ['xasecure.policymgr.clientssl.keystore.password', 'xasecure.policymgr.clientssl.truststore.password']
    plugin_policymgr_ssl_properties_copy = {}
    plugin_policymgr_ssl_properties_copy.update(plugin_policymgr_ssl_properties)
    for prop in plugin_password_properties:
      if prop in plugin_policymgr_ssl_properties_copy:
        plugin_policymgr_ssl_properties_copy[prop] = "crypted"
    # YARN uses a differently-named SSL config file than every other service.
    if str(service_name).lower() == 'yarn' :
      XmlConfig("ranger-policymgr-ssl-yarn.xml",
                conf_dir=component_conf_dir,
                configurations=plugin_policymgr_ssl_properties_copy,
                configuration_attributes=plugin_policymgr_ssl_attributes,
                owner = component_user,
                group = component_group,
                mode=0744)
    else:
      XmlConfig("ranger-policymgr-ssl.xml",
                conf_dir=component_conf_dir,
                configurations=plugin_policymgr_ssl_properties_copy,
                configuration_attributes=plugin_policymgr_ssl_attributes,
                owner = component_user,
                group = component_group,
                mode=0744)
    # creating symblink should be done by rpm package
    # setup_ranger_plugin_jar_symblink(stack_version, service_name, component_list)
    # Store the real (un-scrubbed) passwords in the JCEKS credential store.
    setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, stack_version, credential_file,
                                 xa_audit_db_password, ssl_truststore_password, ssl_keystore_password,
                                 component_user, component_group, java_home, cred_lib_path_override, cred_setup_prefix_override)
  else:
    # Plugin disabled: remove the enabled-marker file.
    File(format('{component_conf_dir}/ranger-security.xml'),
      action="delete"
    )
def setup_ranger_plugin_jar_symblink(stack_version, service_name, component_list):
  """
  Symlink every Ranger plugin jar into each component's lib directory.

  Kept for reference only — the caller above notes this is now done by the
  RPM package.  ``format()`` interpolates the loop variables from this
  frame, so the local names {jar_file} and {component} are significant.
  """
  stack_root = Script.get_stack_root()
  jar_files = os.listdir(format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/lib'))
  for jar_file in jar_files:
    for component in component_list:
      # not_if: skip when the link (or a file of that name) already exists;
      # only_if: guard against the source jar disappearing mid-run.
      Execute(('ln','-sf',format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/lib/{jar_file}'),format('{stack_root}/current/{component}/lib/{jar_file}')),
        not_if=format('ls {stack_root}/current/{component}/lib/{jar_file}'),
        only_if=format('ls {stack_root}/{stack_version}/ranger-{service_name}-plugin/lib/{jar_file}'),
        sudo=True)
def setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, stack_version, credential_file, xa_audit_db_password,
                                 ssl_truststore_password, ssl_keystore_password, component_user, component_group, java_home, cred_lib_path_override = None, cred_setup_prefix_override = None):
  """
  Store the Ranger plugin secrets in the JCEKS store at ``credential_file``.

  Writes the SSL keystore/truststore passwords under the aliases
  ``sslKeyStore`` / ``sslTrustStore``, plus ``auditDBCred`` when DB auditing
  is enabled, by invoking ranger_credential_helper.py.  Passwords are
  wrapped in PasswordString so they are masked in logged command lines.
  """
  stack_root = Script.get_stack_root()
  # Lower-cased service name is interpolated into the plugin install path.
  service_name = str(service_name).lower()
  if cred_lib_path_override is not None:
    cred_lib_path = cred_lib_path_override
  else:
    cred_lib_path = format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/install/lib/*')
  if cred_setup_prefix_override is not None:
    cred_setup_prefix = cred_setup_prefix_override
  else:
    # Base command tuple; each invocation below appends its own -f/-k/-v args.
    cred_setup_prefix = (format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/ranger_credential_helper.py'), '-l', cred_lib_path)
  if audit_db_is_enabled:
    cred_setup = cred_setup_prefix + ('-f', credential_file, '-k', 'auditDBCred', '-v', PasswordString(xa_audit_db_password), '-c', '1')
    Execute(cred_setup, environment={'JAVA_HOME': java_home}, logoutput=True, sudo=True)
  cred_setup = cred_setup_prefix + ('-f', credential_file, '-k', 'sslKeyStore', '-v', PasswordString(ssl_keystore_password), '-c', '1')
  Execute(cred_setup, environment={'JAVA_HOME': java_home}, logoutput=True, sudo=True)
  cred_setup = cred_setup_prefix + ('-f', credential_file, '-k', 'sslTrustStore', '-v', PasswordString(ssl_truststore_password), '-c', '1')
  Execute(cred_setup, environment={'JAVA_HOME': java_home}, logoutput=True, sudo=True)
  # Restrict the credential store to the component user/group (0640).
  File(credential_file,
    owner = component_user,
    group = component_group,
    mode = 0640
  )
def setup_core_site_for_required_plugins(component_user, component_group, create_core_site_path, configurations = {}, configuration_attributes = {}):
XmlConfig('core-site.xml',
conf_dir = create_core_site_path,
configurations = configurations,
configuration_attributes = configuration_attributes,
owner = component_user,
group = component_group,
mode = 0644
)
def get_audit_configs(config):
  """
  Resolve JDBC settings for the Ranger audit database.

  Reads DB_FLAVOR, db_host and audit_db_name from the command's
  admin-properties and returns a 4-tuple:
  ``(jdbc_jar_name, previous_jdbc_jar_name, audit_jdbc_url, jdbc_driver)``.

  :raises Fail: when DB_FLAVOR is not one of mysql/oracle/postgres/mssql/sqla

  NOTE: local variable names (xa_db_host, xa_audit_db_name, ...) are
  interpolated by ``format()`` from this frame and must not be renamed.
  """
  # BUG FIX: Fail was raised below but never imported at module level,
  # which turned the intended Fail into a NameError.  Function-scope import
  # keeps this edit self-contained.
  from resource_management.core.exceptions import Fail
  xa_audit_db_flavor = config['configurations']['admin-properties']['DB_FLAVOR'].lower()
  xa_db_host = config['configurations']['admin-properties']['db_host']
  xa_audit_db_name = default('/configurations/admin-properties/audit_db_name', 'ranger_audits')
  if xa_audit_db_flavor == 'mysql':
    jdbc_jar_name = default("/hostLevelParams/custom_mysql_jdbc_name", None)
    previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_mysql_jdbc_name", None)
    audit_jdbc_url = format('jdbc:mysql://{xa_db_host}/{xa_audit_db_name}')
    jdbc_driver = "com.mysql.jdbc.Driver"
  elif xa_audit_db_flavor == 'oracle':
    jdbc_jar_name = default("/hostLevelParams/custom_oracle_jdbc_name", None)
    previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_oracle_jdbc_name", None)
    # 0 colons => SID-less host, 2 colons => host:port:SID -> thin "@host"
    # form; exactly 1 colon (host:port) requires the "@//host:port" form.
    colon_count = xa_db_host.count(':')
    if colon_count == 2 or colon_count == 0:
      audit_jdbc_url = format('jdbc:oracle:thin:@{xa_db_host}')
    else:
      audit_jdbc_url = format('jdbc:oracle:thin:@//{xa_db_host}')
    jdbc_driver = "oracle.jdbc.OracleDriver"
  elif xa_audit_db_flavor == 'postgres':
    jdbc_jar_name = default("/hostLevelParams/custom_postgres_jdbc_name", None)
    previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_postgres_jdbc_name", None)
    audit_jdbc_url = format('jdbc:postgresql://{xa_db_host}/{xa_audit_db_name}')
    jdbc_driver = "org.postgresql.Driver"
  elif xa_audit_db_flavor == 'mssql':
    jdbc_jar_name = default("/hostLevelParams/custom_mssql_jdbc_name", None)
    previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_mssql_jdbc_name", None)
    audit_jdbc_url = format('jdbc:sqlserver://{xa_db_host};databaseName={xa_audit_db_name}')
    jdbc_driver = "com.microsoft.sqlserver.jdbc.SQLServerDriver"
  elif xa_audit_db_flavor == 'sqla':
    jdbc_jar_name = default("/hostLevelParams/custom_sqlanywhere_jdbc_name", None)
    previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_sqlanywhere_jdbc_name", None)
    audit_jdbc_url = format('jdbc:sqlanywhere:database={xa_audit_db_name};host={xa_db_host}')
    jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"
  else:
    raise Fail(format("'{xa_audit_db_flavor}' db flavor not supported."))
  return jdbc_jar_name, previous_jdbc_jar_name, audit_jdbc_url, jdbc_driver
def generate_ranger_service_config(ranger_plugin_properties):
  """
  Extract custom Ranger service-config parameters from plugin properties.

  Every key of the form ``ranger.service.config.param.<name>`` is returned
  in a new dict keyed by ``<name>``; all other keys are ignored.  The input
  mapping is not modified.
  """
  prefix = "ranger.service.config.param."
  custom_service_config_dict = {}
  # list(...) snapshots the items so a mapping mutated elsewhere cannot
  # break iteration; .items() works on both Python 2 and 3, unlike the
  # Python-2-only iteritems() used previously.
  for key, value in list(ranger_plugin_properties.items()):
    if key.startswith(prefix):
      # Strip only the LEADING prefix.  The previous str.replace() removed
      # every occurrence, mangling keys that contain the prefix again later.
      custom_service_config_dict[key[len(prefix):]] = value
  return custom_service_config_dict
|
{
"content_hash": "472bc6780ff33f2a33306daf9c488bbf",
"timestamp": "",
"source": "github",
"line_count": 300,
"max_line_length": 190,
"avg_line_length": 50.833333333333336,
"alnum_prop": 0.6873442622950819,
"repo_name": "arenadata/ambari",
"id": "78692cba45bc8dca84a9d3ef90a75b5c5c67bd35",
"size": "15272",
"binary": false,
"copies": "1",
"ref": "refs/heads/branch-adh-1.6",
"path": "ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "46700"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "215907"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "343739"
},
{
"name": "CoffeeScript",
"bytes": "8465"
},
{
"name": "Dockerfile",
"bytes": "6387"
},
{
"name": "EJS",
"bytes": "777"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Gherkin",
"bytes": "990"
},
{
"name": "Groovy",
"bytes": "15882"
},
{
"name": "HTML",
"bytes": "717983"
},
{
"name": "Handlebars",
"bytes": "1819641"
},
{
"name": "Java",
"bytes": "29172298"
},
{
"name": "JavaScript",
"bytes": "18571926"
},
{
"name": "Jinja",
"bytes": "1490416"
},
{
"name": "Less",
"bytes": "412933"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLpgSQL",
"bytes": "287501"
},
{
"name": "PowerShell",
"bytes": "2090340"
},
{
"name": "Python",
"bytes": "18507704"
},
{
"name": "R",
"bytes": "3943"
},
{
"name": "Ruby",
"bytes": "38590"
},
{
"name": "SCSS",
"bytes": "40072"
},
{
"name": "Shell",
"bytes": "924115"
},
{
"name": "Stylus",
"bytes": "820"
},
{
"name": "TSQL",
"bytes": "42351"
},
{
"name": "Vim script",
"bytes": "5813"
},
{
"name": "sed",
"bytes": "2303"
}
],
"symlink_target": ""
}
|
from k5test import *
# Realm without host/service principals; only user principals are needed.
realm = K5Realm(create_host=False)
# Probe for the keyctl binary and for KEYRING ccache support in this build;
# both must be present for the keyring tests further down to run.
keyctl = which('keyctl')
out = realm.run([klist, '-c', 'KEYRING:process:abcd'], expected_code=1)
test_keyring = (keyctl is not None and
                'Unknown credential cache type' not in out)
# Test kdestroy and klist of a non-existent ccache.
realm.run([kdestroy])
output = realm.run([klist], expected_code=1)
if ' not found' not in output:
    fail('Expected error message not seen in klist output')
# Principals used by the collection tests below.
realm.addprinc('alice', password('alice'))
realm.addprinc('bob', password('bob'))
realm.addprinc('carol', password('carol'))
def collection_test(realm, ccname):
    """Exercise collection semantics (kinit/kswitch/klist -l/-A, kdestroy)
    against the ccache collection named by *ccname* (e.g. DIR: or
    KEYRING:session:).  The kinit/check sequence is order-sensitive: each
    step's expected line counts depend on the caches created before it.
    Leaves the collection empty on success."""
    realm.env['KRB5CCNAME'] = ccname
    realm.kinit('alice', password('alice'))
    output = realm.run([klist])
    if 'Default principal: alice@' not in output:
        fail('Initial kinit failed to get credentials for alice.')
    realm.run([kdestroy])
    output = realm.run([klist], expected_code=1)
    if ' not found' not in output:
        fail('Initial kdestroy failed to destroy primary cache.')
    # klist -l output is a two-line header ending in '---' when empty.
    output = realm.run([klist, '-l'], expected_code=1)
    if not output.endswith('---\n') or output.count('\n') != 2:
        fail('Initial kdestroy failed to empty cache collection.')
    realm.kinit('alice', password('alice'))
    realm.kinit('carol', password('carol'))
    # Most recent kinit (carol) becomes the primary cache (first data row).
    output = realm.run([klist, '-l'])
    if '---\ncarol@' not in output or '\nalice@' not in output:
        fail('klist -l did not show expected output after two kinits.')
    # Re-kinit for an existing principal reuses its cache and makes it
    # primary; the collection must still hold exactly two caches (4 lines).
    realm.kinit('alice', password('alice'))
    output = realm.run([klist, '-l'])
    if '---\nalice@' not in output or output.count('\n') != 4:
        fail('klist -l did not show expected output after re-kinit for alice.')
    realm.kinit('bob', password('bob'))
    output = realm.run([klist, '-A'])
    if 'bob@' not in output.splitlines()[1] or 'alice@' not in output or \
            'carol' not in output or output.count('Default principal:') != 3:
        fail('klist -A did not show expected output after kinit for bob.')
    # kswitch changes only the primary cache, not the collection contents.
    realm.run([kswitch, '-p', 'carol'])
    output = realm.run([klist, '-l'])
    if '---\ncarol@' not in output or output.count('\n') != 5:
        fail('klist -l did not show expected output after kswitch to carol.')
    # Plain kdestroy removes only the primary cache (carol).
    realm.run([kdestroy])
    output = realm.run([klist, '-l'])
    if 'carol@' in output or 'bob@' not in output or output.count('\n') != 4:
        fail('kdestroy failed to remove only primary ccache.')
    # kdestroy -A empties the whole collection.
    realm.run([kdestroy, '-A'])
    output = realm.run([klist, '-l'], expected_code=1)
    if not output.endswith('---\n') or output.count('\n') != 2:
        fail('kdestroy -a failed to empty cache collection.')
# Run the collection tests against a DIR: cache, then (if supported)
# against a session-keyring cache.
collection_test(realm, 'DIR:' + os.path.join(realm.testdir, 'cc'))
if test_keyring:
    def cleanup_keyring(anchor, name):
        # Unlink the named keyring from *anchor* (e.g. '@s' for the session
        # keyring) if it is currently linked there.
        out = realm.run(['keyctl', 'list', anchor])
        if ('keyring: ' + name + '\n') in out:
            keyid = realm.run(['keyctl', 'search', anchor, 'keyring', name])
            realm.run(['keyctl', 'unlink', keyid.strip(), anchor])
    # Use realm.testdir as the collection name to avoid conflicts with
    # other build trees.
    cname = realm.testdir
    col_ringname = '_krb_' + cname
    cleanup_keyring('@s', col_ringname)
    collection_test(realm, 'KEYRING:session:' + cname)
    cleanup_keyring('@s', col_ringname)
    # Test legacy keyring cache linkage.
    realm.env['KRB5CCNAME'] = 'KEYRING:' + cname
    realm.run([kdestroy, '-A'])
    realm.kinit(realm.user_princ, password('user'))
    out = realm.run([klist, '-l'])
    if 'KEYRING:legacy:' + cname + ':' + cname not in out:
        fail('Wrong initial primary name in keyring legacy collection')
    # Make sure this cache is linked to the session keyring.
    # NOTE(review): 'id' shadows the builtin; harmless here but worth
    # renaming if this script is ever reworked.
    id = realm.run([keyctl, 'search', '@s', 'keyring', cname])
    out = realm.run([keyctl, 'list', id.strip()])
    if 'user: __krb5_princ__' not in out:
        fail('Legacy cache not linked into session keyring')
    # Remove the collection keyring.  When the collection is
    # reinitialized, the legacy cache should reappear inside it
    # automatically as the primary cache.
    cleanup_keyring('@s', col_ringname)
    out = realm.run([klist])
    if realm.user_princ not in out:
        fail('Cannot see legacy cache after removing collection')
    coll_id = realm.run([keyctl, 'search', '@s', 'keyring', '_krb_' + cname])
    out = realm.run([keyctl, 'list', coll_id.strip()])
    if (id.strip() + ':') not in out:
        fail('Legacy cache did not reappear in collection after klist')
    # Destroy the cache and check that it is unlinked from the session keyring.
    realm.run([kdestroy])
    realm.run([keyctl, 'search', '@s', 'keyring', cname], expected_code=1)
    cleanup_keyring('@s', col_ringname)
# Test parameter expansion in default_ccache_name
realm.stop()
conf = {'libdefaults': {'default_ccache_name': 'testdir/%{null}abc%{uid}'}}
realm = K5Realm(krb5_conf=conf, create_kdb=False)
del realm.env['KRB5CCNAME']
uidstr = str(os.getuid())
# %{null} must expand to nothing and %{uid} to the numeric uid.
out = realm.run([klist], expected_code=1)
if 'testdir/abc%s' % uidstr not in out:
    fail('Wrong ccache in klist')
success('Credential cache tests')
|
{
"content_hash": "4ddc54027438f7ee7c5a3e395d521d99",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 79,
"avg_line_length": 43.46610169491525,
"alnum_prop": 0.6402807564827452,
"repo_name": "drankye/kerb-token",
"id": "dd20e11399a53c57ec64c815d6ab3ed4e5ee3fcf",
"size": "6340",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "krb5/src/tests/t_ccache.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "42358"
},
{
"name": "Awk",
"bytes": "10967"
},
{
"name": "C",
"bytes": "13257208"
},
{
"name": "C++",
"bytes": "1361087"
},
{
"name": "CSS",
"bytes": "50885"
},
{
"name": "Emacs Lisp",
"bytes": "6797"
},
{
"name": "Java",
"bytes": "73693"
},
{
"name": "Objective-C",
"bytes": "8596"
},
{
"name": "Perl",
"bytes": "132774"
},
{
"name": "Python",
"bytes": "422641"
},
{
"name": "Shell",
"bytes": "34053"
},
{
"name": "TeX",
"bytes": "463023"
}
],
"symlink_target": ""
}
|
import doctest
import os
import pickle
import shutil
import sys
import tempfile
import unittest
from genshi.compat import BytesIO, StringIO
from genshi.core import Markup
from genshi.input import XML
from genshi.template.base import BadDirectiveError, TemplateSyntaxError
from genshi.template.loader import TemplateLoader, TemplateNotFound
from genshi.template.markup import MarkupTemplate
class MarkupTemplateTestCase(unittest.TestCase):
"""Tests for markup template processing."""
def test_parse_fileobj(self):
fileobj = StringIO('<root> ${var} $var</root>')
tmpl = MarkupTemplate(fileobj)
self.assertEqual('<root> 42 42</root>', str(tmpl.generate(var=42)))
def test_parse_stream(self):
stream = XML('<root> ${var} $var</root>')
tmpl = MarkupTemplate(stream)
self.assertEqual('<root> 42 42</root>', str(tmpl.generate(var=42)))
def test_pickle(self):
stream = XML('<root>$var</root>')
tmpl = MarkupTemplate(stream)
buf = BytesIO()
pickle.dump(tmpl, buf, 2)
buf.seek(0)
unpickled = pickle.load(buf)
self.assertEqual('<root>42</root>', str(unpickled.generate(var=42)))
def test_interpolate_mixed3(self):
tmpl = MarkupTemplate('<root> ${var} $var</root>')
self.assertEqual('<root> 42 42</root>', str(tmpl.generate(var=42)))
def test_interpolate_leading_trailing_space(self):
tmpl = MarkupTemplate('<root>${ foo }</root>')
self.assertEqual('<root>bar</root>', str(tmpl.generate(foo='bar')))
def test_interpolate_multiline(self):
tmpl = MarkupTemplate("""<root>${dict(
bar = 'baz'
)[foo]}</root>""")
self.assertEqual('<root>baz</root>', str(tmpl.generate(foo='bar')))
def test_interpolate_non_string_attrs(self):
tmpl = MarkupTemplate('<root attr="${1}"/>')
self.assertEqual('<root attr="1"/>', str(tmpl.generate()))
def test_interpolate_list_result(self):
tmpl = MarkupTemplate('<root>$foo</root>')
self.assertEqual('<root>buzz</root>', str(tmpl.generate(foo=('buzz',))))
def test_empty_attr(self):
tmpl = MarkupTemplate('<root attr=""/>')
self.assertEqual('<root attr=""/>', str(tmpl.generate()))
def test_empty_attr_interpolated(self):
tmpl = MarkupTemplate('<root attr="$attr"/>')
self.assertEqual('<root attr=""/>', str(tmpl.generate(attr='')))
def test_bad_directive_error(self):
xml = '<p xmlns:py="http://genshi.edgewall.org/" py:do="nothing" />'
try:
tmpl = MarkupTemplate(xml, filename='test.html')
except BadDirectiveError, e:
self.assertEqual('test.html', e.filename)
self.assertEqual(1, e.lineno)
    def test_directive_value_syntax_error(self):
        """A Python syntax error inside a directive value reports the
        template's filename and line number."""
        xml = """<p xmlns:py="http://genshi.edgewall.org/" py:if="bar'" />"""
        try:
            tmpl = MarkupTemplate(xml, filename='test.html').generate()
            self.fail('Expected TemplateSyntaxError')
        except TemplateSyntaxError, e:
            self.assertEqual('test.html', e.filename)
            self.assertEqual(1, e.lineno)
    def test_expression_syntax_error(self):
        """A syntax error in an interpolated expression reports the line
        the expression appears on (line 2 here)."""
        xml = """<p>
          Foo <em>${bar"}</em>
        </p>"""
        try:
            tmpl = MarkupTemplate(xml, filename='test.html')
            self.fail('Expected TemplateSyntaxError')
        except TemplateSyntaxError, e:
            self.assertEqual('test.html', e.filename)
            self.assertEqual(2, e.lineno)
    def test_expression_syntax_error_multi_line(self):
        """Line numbers in syntax errors stay accurate when the broken
        expression is preceded by other markup lines (line 3 here)."""
        xml = """<p><em></em>
 ${bar"}
        </p>"""
        try:
            tmpl = MarkupTemplate(xml, filename='test.html')
            self.fail('Expected TemplateSyntaxError')
        except TemplateSyntaxError, e:
            self.assertEqual('test.html', e.filename)
            self.assertEqual(3, e.lineno)
    def test_markup_noescape(self):
        """
        Verify that outputting context data that is a `Markup` instance is not
        escaped.
        """
        tmpl = MarkupTemplate("""<div xmlns:py="http://genshi.edgewall.org/">
          $myvar
        </div>""")
        # The <b> tags must come through literally, not as &lt;b&gt;.
        self.assertEqual("""<div>
          <b>foo</b>
        </div>""", str(tmpl.generate(myvar=Markup('<b>foo</b>'))))
    def test_text_noescape_quotes(self):
        """
        Verify that outputting context data in text nodes doesn't escape
        quotes.
        """
        tmpl = MarkupTemplate("""<div xmlns:py="http://genshi.edgewall.org/">
          $myvar
        </div>""")
        # Double quotes in text content must not become &#34;.
        self.assertEqual("""<div>
          "foo"
        </div>""", str(tmpl.generate(myvar='"foo"')))
    def test_attr_escape_quotes(self):
        """
        Verify that outputting context data in attributes escapes quotes.
        """
        tmpl = MarkupTemplate("""<div xmlns:py="http://genshi.edgewall.org/">
          <elem class="$myvar"/>
        </div>""")
        # Inside an attribute value the quotes become &#34; entities.
        self.assertEqual("""<div>
          <elem class="&#34;foo&#34;"/>
        </div>""", str(tmpl.generate(myvar='"foo"')))
def test_directive_element(self):
tmpl = MarkupTemplate("""<div xmlns:py="http://genshi.edgewall.org/">
<py:if test="myvar">bar</py:if>
</div>""")
self.assertEqual("""<div>
bar
</div>""", str(tmpl.generate(myvar='"foo"')))
def test_normal_comment(self):
tmpl = MarkupTemplate("""<div xmlns:py="http://genshi.edgewall.org/">
<!-- foo bar -->
</div>""")
self.assertEqual("""<div>
<!-- foo bar -->
</div>""", str(tmpl.generate()))
    def test_template_comment(self):
        """Comments whose text starts with '!' are template-only comments
        and are stripped from the output entirely."""
        tmpl = MarkupTemplate("""<div xmlns:py="http://genshi.edgewall.org/">
          <!-- !foo -->
          <!--!bar-->
        </div>""")
        self.assertEqual("""<div>
        </div>""", str(tmpl.generate()))
    def test_parse_with_same_namespace_nested(self):
        """A redundant redeclaration of the py: namespace on a nested
        element parses cleanly and leaves no trace in the output."""
        tmpl = MarkupTemplate("""<div xmlns:py="http://genshi.edgewall.org/">
          <span xmlns:py="http://genshi.edgewall.org/">
          </span>
        </div>""")
        self.assertEqual("""<div>
          <span>
          </span>
        </div>""", str(tmpl.generate()))
    def test_latin1_encoded_with_xmldecl(self):
        """A latin-1 byte template with an XML declaration decodes via the
        explicit encoding= argument, and the declaration is preserved."""
        tmpl = MarkupTemplate(u"""<?xml version="1.0" encoding="iso-8859-1" ?>
        <div xmlns:py="http://genshi.edgewall.org/">
          \xf6
        </div>""".encode('iso-8859-1'), encoding='iso-8859-1')
        self.assertEqual(u"""<?xml version="1.0" encoding="iso-8859-1"?>\n<div>
          \xf6
        </div>""", unicode(tmpl.generate()))
    def test_latin1_encoded_explicit_encoding(self):
        """A latin-1 byte template without an XML declaration decodes
        correctly when encoding= is given explicitly."""
        tmpl = MarkupTemplate(u"""<div xmlns:py="http://genshi.edgewall.org/">
          \xf6
        </div>""".encode('iso-8859-1'), encoding='iso-8859-1')
        self.assertEqual(u"""<div>
          \xf6
        </div>""", unicode(tmpl.generate()))
    def test_exec_with_trailing_space(self):
        """
        Verify that a code block processing instruction with trailing space
        does not cause a syntax error (see ticket #127).
        """
        # Construction alone is the assertion: it must not raise.
        MarkupTemplate(u"""<foo>
          <?python
            bar = 42
          ?>
        </foo>""")
def test_exec_import(self):
tmpl = MarkupTemplate("""<?python from datetime import timedelta ?>
<div xmlns:py="http://genshi.edgewall.org/">
${timedelta(days=2)}
</div>""")
self.assertEqual("""<div>
2 days, 0:00:00
</div>""", str(tmpl.generate()))
    def test_exec_def(self):
        """Functions defined in a <?python ?> block are callable from
        template expressions."""
        tmpl = MarkupTemplate(u"""
        <?python
        def foo():
            return 42
        ?>
        <div xmlns:py="http://genshi.edgewall.org/">
          ${foo()}
        </div>""")
        self.assertEqual(u"""<div>
          42
        </div>""", str(tmpl.generate()))
    def test_namespace_on_removed_elem(self):
        """
        Verify that a namespace declaration on an element that is removed from
        the generated stream does not get pushed up to the next non-stripped
        element (see ticket #107).
        """
        # The xmlns:t declaration lives on the <Size> element, which py:if="0"
        # strips; it must not reappear on <Item/>.
        tmpl = MarkupTemplate("""<?xml version="1.0"?>
        <Test xmlns:py="http://genshi.edgewall.org/">
          <Size py:if="0" xmlns:t="test">Size</Size>
          <Item/>
        </Test>""")
        self.assertEqual("""<?xml version="1.0"?>\n<Test>
          <Item/>
        </Test>""", str(tmpl.generate()))
    def test_include_in_loop(self):
        """An <xi:include> carrying py:for is expanded once per iteration,
        with the loop variable visible inside the included template."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file1 = open(os.path.join(dirname, 'tmpl1.html'), 'w')
            try:
                file1.write("""<div>Included $idx</div>""")
            finally:
                file1.close()
            file2 = open(os.path.join(dirname, 'tmpl2.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
                                     xmlns:py="http://genshi.edgewall.org/">
                  <xi:include href="${name}.html" py:for="idx in range(3)" />
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname])
            tmpl = loader.load('tmpl2.html')
            self.assertEqual("""<html>
                  <div>Included 0</div><div>Included 1</div><div>Included 2</div>
                </html>""", tmpl.generate(name='tmpl1').render(encoding=None))
        finally:
            shutil.rmtree(dirname)
    def test_dynamic_include_href(self):
        """The href of an <xi:include> may contain an interpolated
        expression resolved at generation time."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file1 = open(os.path.join(dirname, 'tmpl1.html'), 'w')
            try:
                file1.write("""<div>Included</div>""")
            finally:
                file1.close()
            file2 = open(os.path.join(dirname, 'tmpl2.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
                                     xmlns:py="http://genshi.edgewall.org/">
                  <xi:include href="${name}.html" />
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname])
            tmpl = loader.load('tmpl2.html')
            self.assertEqual("""<html>
                  <div>Included</div>
                </html>""", tmpl.generate(name='tmpl1').render(encoding=None))
        finally:
            shutil.rmtree(dirname)
    def test_select_included_elements(self):
        """A py:match template can select() elements produced by included
        templates, not just locally-defined markup."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file1 = open(os.path.join(dirname, 'tmpl1.html'), 'w')
            try:
                file1.write("""<li>$item</li>""")
            finally:
                file1.close()
            file2 = open(os.path.join(dirname, 'tmpl2.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
                                     xmlns:py="http://genshi.edgewall.org/">
                  <ul py:match="ul">${select('li')}</ul>
                  <ul py:with="items=(1, 2, 3)">
                    <xi:include href="tmpl1.html" py:for="item in items" />
                  </ul>
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname])
            tmpl = loader.load('tmpl2.html')
            self.assertEqual("""<html>
                  <ul><li>1</li><li>2</li><li>3</li></ul>
                </html>""", tmpl.generate().render(encoding=None))
        finally:
            shutil.rmtree(dirname)
    def test_fallback_when_include_found(self):
        """When the included template exists, the <xi:fallback> content is
        ignored and the include is rendered."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file1 = open(os.path.join(dirname, 'tmpl1.html'), 'w')
            try:
                file1.write("""<div>Included</div>""")
            finally:
                file1.close()
            file2 = open(os.path.join(dirname, 'tmpl2.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude">
                  <xi:include href="tmpl1.html"><xi:fallback>
                    Missing</xi:fallback></xi:include>
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname])
            tmpl = loader.load('tmpl2.html')
            self.assertEqual("""<html>
                  <div>Included</div>
                </html>""", tmpl.generate().render(encoding=None))
        finally:
            shutil.rmtree(dirname)
    def test_error_when_include_not_found(self):
        """A missing include with no <xi:fallback> raises TemplateNotFound
        at render time."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file2 = open(os.path.join(dirname, 'tmpl2.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude">
                  <xi:include href="tmpl1.html"/>
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname], auto_reload=True)
            tmpl = loader.load('tmpl2.html')
            self.assertRaises(TemplateNotFound, tmpl.generate().render)
        finally:
            shutil.rmtree(dirname)
    def test_fallback_when_include_not_found(self):
        """When the included template is missing, the <xi:fallback> content
        is rendered instead of raising."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file2 = open(os.path.join(dirname, 'tmpl2.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude">
                  <xi:include href="tmpl1.html"><xi:fallback>
                    Missing</xi:fallback></xi:include>
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname])
            tmpl = loader.load('tmpl2.html')
            self.assertEqual("""<html>
                    Missing
                </html>""", tmpl.generate().render(encoding=None))
        finally:
            shutil.rmtree(dirname)
    def test_fallback_when_auto_reload_true(self):
        """Fallback content also renders for a missing include when the
        loader runs with auto_reload=True (separate code path)."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file2 = open(os.path.join(dirname, 'tmpl2.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude">
                  <xi:include href="tmpl1.html"><xi:fallback>
                    Missing</xi:fallback></xi:include>
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname], auto_reload=True)
            tmpl = loader.load('tmpl2.html')
            self.assertEqual("""<html>
                    Missing
                </html>""", tmpl.generate().render(encoding=None))
        finally:
            shutil.rmtree(dirname)
    def test_include_in_fallback(self):
        """A fallback may itself contain an <xi:include>; here the outer
        include is missing and the inner one resolves."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file1 = open(os.path.join(dirname, 'tmpl1.html'), 'w')
            try:
                file1.write("""<div>Included</div>""")
            finally:
                file1.close()
            file2 = open(os.path.join(dirname, 'tmpl3.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude">
                  <xi:include href="tmpl2.html">
                    <xi:fallback>
                      <xi:include href="tmpl1.html">
                        <xi:fallback>Missing</xi:fallback>
                      </xi:include>
                    </xi:fallback>
                  </xi:include>
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname])
            tmpl = loader.load('tmpl3.html')
            self.assertEqual("""<html>
                      <div>Included</div>
                </html>""", tmpl.generate().render(encoding=None))
        finally:
            shutil.rmtree(dirname)
    def test_nested_include_fallback(self):
        """When both the outer and the inner include are missing, the
        innermost fallback text is rendered."""
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            file2 = open(os.path.join(dirname, 'tmpl3.html'), 'w')
            try:
                file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude">
                  <xi:include href="tmpl2.html">
                    <xi:fallback>
                      <xi:include href="tmpl1.html">
                        <xi:fallback>Missing</xi:fallback>
                      </xi:include>
                    </xi:fallback>
                  </xi:include>
                </html>""")
            finally:
                file2.close()
            loader = TemplateLoader([dirname])
            tmpl = loader.load('tmpl3.html')
            self.assertEqual("""<html>
                        Missing
                </html>""", tmpl.generate().render(encoding=None))
        finally:
            shutil.rmtree(dirname)
def test_nested_include_in_fallback(self):
    # If the outer include target exists, the fallback (and the include
    # nested inside it) is skipped entirely.
    tmpdir = tempfile.mkdtemp(suffix='genshi_test')
    try:
        with open(os.path.join(tmpdir, 'tmpl2.html'), 'w') as stream:
            stream.write("""<div>Included</div>""")
        with open(os.path.join(tmpdir, 'tmpl3.html'), 'w') as stream:
            stream.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude">
<xi:include href="tmpl2.html">
<xi:fallback>
<xi:include href="tmpl1.html" />
</xi:fallback>
</xi:include>
</html>""")
        loader = TemplateLoader([tmpdir])
        rendered = loader.load('tmpl3.html').generate().render(encoding=None)
        self.assertEqual("""<html>
<div>Included</div>
</html>""", rendered)
    finally:
        shutil.rmtree(tmpdir)
def test_include_fallback_with_directive(self):
    # Template directives (py:if) inside an xi:fallback must still be
    # executed when the fallback is taken.
    tmpdir = tempfile.mkdtemp(suffix='genshi_test')
    try:
        with open(os.path.join(tmpdir, 'tmpl2.html'), 'w') as stream:
            stream.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/">
<xi:include href="tmpl1.html"><xi:fallback>
<py:if test="True">tmpl1.html not found</py:if>
</xi:fallback></xi:include>
</html>""")
        loader = TemplateLoader([tmpdir])
        rendered = loader.load('tmpl2.html').generate(debug=True).render(encoding=None)
        self.assertEqual("""<html>
tmpl1.html not found
</html>""", rendered)
    finally:
        shutil.rmtree(tmpdir)
def test_include_inlined(self):
    # With auto_reload disabled the included template is inlined into
    # the including template's event stream at load time.
    tmpdir = tempfile.mkdtemp(suffix='genshi_test')
    try:
        with open(os.path.join(tmpdir, 'tmpl1.html'), 'w') as stream:
            stream.write("""<div>Included</div>""")
        with open(os.path.join(tmpdir, 'tmpl2.html'), 'w') as stream:
            stream.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/">
<xi:include href="tmpl1.html" />
</html>""")
        loader = TemplateLoader([tmpdir], auto_reload=False)
        tmpl = loader.load('tmpl2.html')
        # if not inlined the following would be 5
        self.assertEqual(7, len(tmpl.stream))
        self.assertEqual("""<html>
<div>Included</div>
</html>""", tmpl.generate().render(encoding=None))
    finally:
        shutil.rmtree(tmpdir)
def test_include_inlined_in_loop(self):
    # An inlined include inside a py:for loop is re-evaluated per
    # iteration with the loop variable in scope.
    tmpdir = tempfile.mkdtemp(suffix='genshi_test')
    try:
        with open(os.path.join(tmpdir, 'tmpl1.html'), 'w') as stream:
            stream.write("""<div>Included $idx</div>""")
        with open(os.path.join(tmpdir, 'tmpl2.html'), 'w') as stream:
            stream.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/">
<xi:include href="tmpl1.html" py:for="idx in range(3)" />
</html>""")
        loader = TemplateLoader([tmpdir], auto_reload=False)
        rendered = loader.load('tmpl2.html').generate().render(encoding=None)
        self.assertEqual("""<html>
<div>Included 0</div><div>Included 1</div><div>Included 2</div>
</html>""", rendered)
    finally:
        shutil.rmtree(tmpdir)
def test_include_inline_recursive(self):
    # A template that includes itself must stop recursing once the
    # py:if guard on the include turns false.
    tmpdir = tempfile.mkdtemp(suffix='genshi_test')
    try:
        source = (
            '<div xmlns:xi="http://www.w3.org/2001/XInclude"'
            ' xmlns:py="http://genshi.edgewall.org/">'
            '$depth'
            '<py:with vars="depth = depth + 1">'
            '<xi:include href="tmpl1.html"'
            ' py:if="depth < 3"/>'
            '</py:with>'
            '</div>'
        )
        with open(os.path.join(tmpdir, 'tmpl1.html'), 'w') as stream:
            stream.write(source)
        loader = TemplateLoader([tmpdir], auto_reload=False)
        tmpl = loader.load(os.path.join(tmpdir, 'tmpl1.html'))
        self.assertEqual(
            "<div>0<div>1<div>2</div></div></div>",
            tmpl.generate(depth=0).render(encoding=None))
    finally:
        shutil.rmtree(tmpdir)
def test_allow_exec_false(self):
    # A <?python ?> processing instruction must be rejected when code
    # execution is disabled via allow_exec=False.
    xml = ("""<?python
title = "A Genshi Template"
?>
<html xmlns:py="http://genshi.edgewall.org/">
<head>
<title py:content="title">This is replaced.</title>
</head>
</html>""")
    try:
        tmpl = MarkupTemplate(xml, filename='test.html',
                              allow_exec=False)
        self.fail('Expected SyntaxError')
    # Fixed: the original used the Python-2-only form
    # `except TemplateSyntaxError, e:` and never used the binding.
    except TemplateSyntaxError:
        pass
def test_allow_exec_true(self):
    # The same <?python ?> block parses without error when exec
    # support is enabled.
    xml = ("""<?python
title = "A Genshi Template"
?>
<html xmlns:py="http://genshi.edgewall.org/">
<head>
<title py:content="title">This is replaced.</title>
</head>
</html>""")
    tmpl = MarkupTemplate(xml, filename='test.html', allow_exec=True)
def test_exec_in_match(self):
    # Code blocks inside a py:match body run each time the match
    # template is applied.
    xml = ("""<html xmlns:py="http://genshi.edgewall.org/">
<py:match path="body/p">
<?python title="wakka wakka wakka" ?>
${title}
</py:match>
<body><p>moot text</p></body>
</html>""")
    tmpl = MarkupTemplate(xml, filename='test.html', allow_exec=True)
    rendered = tmpl.generate().render(encoding=None)
    self.assertEqual("""<html>
<body>
wakka wakka wakka
</body>
</html>""", rendered)
def test_with_in_match(self):
    # Variables bound via py:with on matched content stay visible when
    # the match template re-selects that content.
    xml = ("""<html xmlns:py="http://genshi.edgewall.org/">
<py:match path="body/p">
<h1>${select('text()')}</h1>
${select('.')}
</py:match>
<body><p py:with="foo='bar'">${foo}</p></body>
</html>""")
    tmpl = MarkupTemplate(xml, filename='test.html')
    rendered = tmpl.generate().render(encoding=None)
    self.assertEqual("""<html>
<body>
<h1>bar</h1>
<p>bar</p>
</body>
</html>""", rendered)
def test_nested_include_matches(self):
    # See ticket #157
    tmpdir = tempfile.mkdtemp(suffix='genshi_test')
    try:
        with open(os.path.join(tmpdir, 'tmpl1.html'), 'w') as stream:
            stream.write("""<html xmlns:py="http://genshi.edgewall.org/" py:strip="">
<div class="target">Some content.</div>
</html>""")
        with open(os.path.join(tmpdir, 'tmpl2.html'), 'w') as stream:
            stream.write("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:xi="http://www.w3.org/2001/XInclude">
<body>
<h1>Some full html document that includes file1.html</h1>
<xi:include href="tmpl1.html" />
</body>
</html>""")
        with open(os.path.join(tmpdir, 'tmpl3.html'), 'w') as stream:
            stream.write("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:xi="http://www.w3.org/2001/XInclude" py:strip="">
<div py:match="div[@class='target']" py:attrs="select('@*')">
Some added stuff.
${select('*|text()')}
</div>
<xi:include href="tmpl2.html" />
</html>
""")
        loader = TemplateLoader([tmpdir])
        rendered = loader.load('tmpl3.html').generate().render(encoding=None)
        self.assertEqual("""
<html>
<body>
<h1>Some full html document that includes file1.html</h1>
<div class="target">
Some added stuff.
Some content.
</div>
</body>
</html>
""", rendered)
    finally:
        shutil.rmtree(tmpdir)
def test_nested_matches_without_buffering(self):
    # An unbuffered once-only body match must still let the inner span
    # match fire on the streamed content.
    xml = ("""<html xmlns:py="http://genshi.edgewall.org/">
<py:match path="body" once="true" buffer="false">
<body>
${select('*|text')}
And some other stuff...
</body>
</py:match>
<body>
<span py:match="span">Foo</span>
<span>Bar</span>
</body>
</html>""")
    tmpl = MarkupTemplate(xml, filename='test.html')
    rendered = tmpl.generate().render(encoding=None)
    self.assertEqual("""<html>
<body>
<span>Foo</span>
And some other stuff...
</body>
</html>""", rendered)
def test_match_without_select(self):
    # See <http://genshi.edgewall.org/ticket/243>
    xml = ("""<html xmlns:py="http://genshi.edgewall.org/">
<py:match path="body" buffer="false">
<body>
This replaces the other text.
</body>
</py:match>
<body>
This gets replaced.
</body>
</html>""")
    tmpl = MarkupTemplate(xml, filename='test.html')
    rendered = tmpl.generate().render(encoding=None)
    self.assertEqual("""<html>
<body>
This replaces the other text.
</body>
</html>""", rendered)
def test_match_tail_handling(self):
    # See <http://genshi.edgewall.org/ticket/399>
    xml = ("""<rhyme xmlns:py="http://genshi.edgewall.org/">
<py:match path="*[@type]">
${select('.')}
</py:match>
<lines>
<first type="one">fish</first>
<second type="two">fish</second>
<third type="red">fish</third>
<fourth type="blue">fish</fourth>
</lines>
</rhyme>""")
    tmpl = MarkupTemplate(xml, filename='test.html')
    rendered = tmpl.generate().render(encoding=None)
    self.assertEqual("""<rhyme>
<lines>
<first type="one">fish</first>
<second type="two">fish</second>
<third type="red">fish</third>
<fourth type="blue">fish</fourth>
</lines>
</rhyme>""", rendered)
def suite():
    """Build the test suite: module doctests plus the TestCase's tests."""
    # Fixed: the original local variable was named `suite`, shadowing
    # this function's own name.
    tests = unittest.TestSuite()
    tests.addTest(doctest.DocTestSuite(MarkupTemplate.__module__))
    tests.addTest(unittest.makeSuite(MarkupTemplateTestCase, 'test'))
    return tests
# Script entry point: run the suite defined above.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
{
"content_hash": "d6bc3c410b104818ef86f290375e3cfd",
"timestamp": "",
"source": "github",
"line_count": 780,
"max_line_length": 88,
"avg_line_length": 35.91025641025641,
"alnum_prop": 0.5056051410210639,
"repo_name": "hodgestar/genshi",
"id": "b6084f43ff4bb9f100962c55fd9423d9018537f3",
"size": "28507",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "genshi/template/tests/markup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "18893"
},
{
"name": "HTML",
"bytes": "550"
},
{
"name": "Python",
"bytes": "858169"
},
{
"name": "Shell",
"bytes": "728"
}
],
"symlink_target": ""
}
|
# Gruvbox dark palette (https://github.com/morhetz/gruvbox).
# Background shades come in three contrast variants.
bg0_hard = "#1d2021"
bg0_soft = "#32302f"  # quoting normalized for consistency with the rest
bg0_normal = "#282828"

# The background contrast variant used throughout; switch to
# bg0_hard / bg0_soft to taste.
bg0 = bg0_normal

# Progressively lighter backgrounds.
bg1 = "#3c3836"
bg2 = "#504945"
bg3 = "#665c54"
bg4 = "#7c6f64"

# Foregrounds, lightest (fg0) to darkest (fg4).
fg0 = "#fbf1c7"
fg1 = "#ebdbb2"
fg2 = "#d5c4a1"
fg3 = "#bdae93"
fg4 = "#a89984"

# Bright accent colors.
bright_red = "#fb4934"
bright_green = "#b8bb26"
bright_yellow = "#fabd2f"
bright_blue = "#83a598"
bright_purple = "#d3869b"
bright_aqua = "#8ec07c"
bright_gray = "#928374"
bright_orange = "#fe8019"

# Dark (faded) accent colors.
dark_red = "#cc241d"
dark_green = "#98971a"
dark_yellow = "#d79921"
dark_blue = "#458588"
dark_purple = "#b16286"
dark_aqua = "#689d6a"
dark_gray = "#a89984"  # same hex as fg4 in this palette
dark_orange = "#d65d0e"
# Gruvbox theming of every qutebrowser UI color. The `c` config object is
# provided by qutebrowser when this file is exec'd from config.py; each
# assignment below overrides one documented setting.
### Completion
# Text color of the completion widget. May be a single color to use for
# all columns or a list of three colors, one for each column.
c.colors.completion.fg = [fg1, bright_aqua, bright_yellow]
# Background color of the completion widget for odd rows.
c.colors.completion.odd.bg = bg0
# Background color of the completion widget for even rows.
c.colors.completion.even.bg = c.colors.completion.odd.bg
# Foreground color of completion widget category headers.
c.colors.completion.category.fg = bright_blue
# Background color of the completion widget category headers.
c.colors.completion.category.bg = bg1
# Top border color of the completion widget category headers.
c.colors.completion.category.border.top = c.colors.completion.category.bg
# Bottom border color of the completion widget category headers.
c.colors.completion.category.border.bottom = c.colors.completion.category.bg
# Foreground color of the selected completion item.
c.colors.completion.item.selected.fg = fg0
# Background color of the selected completion item.
c.colors.completion.item.selected.bg = bg4
# Top border color of the selected completion item.
c.colors.completion.item.selected.border.top = bg2
# Bottom border color of the selected completion item.
c.colors.completion.item.selected.border.bottom = c.colors.completion.item.selected.border.top
# Foreground color of the matched text in the selected completion item.
c.colors.completion.item.selected.match.fg = bright_orange
# Foreground color of the matched text in the completion.
c.colors.completion.match.fg = c.colors.completion.item.selected.match.fg
# Color of the scrollbar handle in the completion view.
c.colors.completion.scrollbar.fg = c.colors.completion.item.selected.fg
# Color of the scrollbar in the completion view.
c.colors.completion.scrollbar.bg = c.colors.completion.category.bg
### Context menu
# Background color of disabled items in the context menu.
c.colors.contextmenu.disabled.bg = bg3
# Foreground color of disabled items in the context menu.
c.colors.contextmenu.disabled.fg = fg3
# Background color of the context menu. If set to null, the Qt default is used.
c.colors.contextmenu.menu.bg = bg0
# Foreground color of the context menu. If set to null, the Qt default is used.
c.colors.contextmenu.menu.fg = fg2
# Background color of the context menu’s selected item. If set to null, the Qt default is used.
c.colors.contextmenu.selected.bg = bg2
# Foreground color of the context menu’s selected item. If set to null, the Qt default is used.
c.colors.contextmenu.selected.fg = c.colors.contextmenu.menu.fg
### Downloads
# Background color for the download bar.
c.colors.downloads.bar.bg = bg0
# Color gradient start for download text.
c.colors.downloads.start.fg = bg0
# Color gradient start for download backgrounds.
c.colors.downloads.start.bg = bright_blue
# Color gradient end for download text.
c.colors.downloads.stop.fg = c.colors.downloads.start.fg
# Color gradient stop for download backgrounds.
c.colors.downloads.stop.bg = bright_aqua
# Foreground color for downloads with errors.
c.colors.downloads.error.fg = bright_red
### Hints
# Font color for hints.
c.colors.hints.fg = bg0
# Background color for hints.
c.colors.hints.bg = 'rgba(250, 191, 47, 200)' # bright_yellow
# Font color for the matched part of hints.
c.colors.hints.match.fg = bg4
### Keyhint widget
# Text color for the keyhint widget.
c.colors.keyhint.fg = fg4
# Highlight color for keys to complete the current keychain.
c.colors.keyhint.suffix.fg = fg0
# Background color of the keyhint widget.
c.colors.keyhint.bg = bg0
### Messages
# Foreground color of an error message.
c.colors.messages.error.fg = bg0
# Background color of an error message.
c.colors.messages.error.bg = bright_red
# Border color of an error message.
c.colors.messages.error.border = c.colors.messages.error.bg
# Foreground color of a warning message.
c.colors.messages.warning.fg = bg0
# Background color of a warning message.
c.colors.messages.warning.bg = bright_purple
# Border color of a warning message.
c.colors.messages.warning.border = c.colors.messages.warning.bg
# Foreground color of an info message.
c.colors.messages.info.fg = fg2
# Background color of an info message.
c.colors.messages.info.bg = bg0
# Border color of an info message.
c.colors.messages.info.border = c.colors.messages.info.bg
### Prompts
# Foreground color for prompts.
c.colors.prompts.fg = fg2
# Border used around UI elements in prompts.
c.colors.prompts.border = f'1px solid {bg1}'
# Background color for prompts.
c.colors.prompts.bg = bg3
# Background color for the selected item in filename prompts.
c.colors.prompts.selected.bg = bg2
### Statusbar
# Foreground color of the statusbar.
c.colors.statusbar.normal.fg = fg2
# Background color of the statusbar.
c.colors.statusbar.normal.bg = bg0
# Foreground color of the statusbar in insert mode.
c.colors.statusbar.insert.fg = bg0
# Background color of the statusbar in insert mode.
c.colors.statusbar.insert.bg = dark_aqua
# Foreground color of the statusbar in passthrough mode.
c.colors.statusbar.passthrough.fg = bg0
# Background color of the statusbar in passthrough mode.
c.colors.statusbar.passthrough.bg = dark_blue
# Foreground color of the statusbar in private browsing mode.
c.colors.statusbar.private.fg = bright_purple
# Background color of the statusbar in private browsing mode.
c.colors.statusbar.private.bg = bg0
# Foreground color of the statusbar in command mode.
c.colors.statusbar.command.fg = fg3
# Background color of the statusbar in command mode.
c.colors.statusbar.command.bg = bg1
# Foreground color of the statusbar in private browsing + command mode.
c.colors.statusbar.command.private.fg = c.colors.statusbar.private.fg
# Background color of the statusbar in private browsing + command mode.
c.colors.statusbar.command.private.bg = c.colors.statusbar.command.bg
# Foreground color of the statusbar in caret mode.
c.colors.statusbar.caret.fg = bg0
# Background color of the statusbar in caret mode.
c.colors.statusbar.caret.bg = dark_purple
# Foreground color of the statusbar in caret mode with a selection.
c.colors.statusbar.caret.selection.fg = c.colors.statusbar.caret.fg
# Background color of the statusbar in caret mode with a selection.
c.colors.statusbar.caret.selection.bg = bright_purple
# Background color of the progress bar.
c.colors.statusbar.progress.bg = bright_blue
# Default foreground color of the URL in the statusbar.
c.colors.statusbar.url.fg = fg4
# Foreground color of the URL in the statusbar on error.
c.colors.statusbar.url.error.fg = dark_red
# Foreground color of the URL in the statusbar for hovered links.
c.colors.statusbar.url.hover.fg = bright_orange
# Foreground color of the URL in the statusbar on successful load
# (http).
# NOTE: plain-http success is deliberately shown in red here, unlike
# https below, so insecure pages stand out.
c.colors.statusbar.url.success.http.fg = bright_red
# Foreground color of the URL in the statusbar on successful load
# (https).
c.colors.statusbar.url.success.https.fg = fg0
# Foreground color of the URL in the statusbar when there's a warning.
c.colors.statusbar.url.warn.fg = bright_purple
### tabs
# Background color of the tab bar.
c.colors.tabs.bar.bg = bg0
# Color gradient start for the tab indicator.
c.colors.tabs.indicator.start = bright_blue
# Color gradient end for the tab indicator.
c.colors.tabs.indicator.stop = bright_aqua
# Color for the tab indicator on errors.
c.colors.tabs.indicator.error = bright_red
# Foreground color of unselected odd tabs.
c.colors.tabs.odd.fg = fg2
# Background color of unselected odd tabs.
c.colors.tabs.odd.bg = bg2
# Foreground color of unselected even tabs.
c.colors.tabs.even.fg = c.colors.tabs.odd.fg
# Background color of unselected even tabs.
c.colors.tabs.even.bg = bg3
# Foreground color of selected odd tabs.
c.colors.tabs.selected.odd.fg = fg2
# Background color of selected odd tabs.
c.colors.tabs.selected.odd.bg = bg0
# Foreground color of selected even tabs.
c.colors.tabs.selected.even.fg = c.colors.tabs.selected.odd.fg
# Background color of selected even tabs.
c.colors.tabs.selected.even.bg = bg0
# Background color of pinned unselected even tabs.
c.colors.tabs.pinned.even.bg = bright_green
# Foreground color of pinned unselected even tabs.
c.colors.tabs.pinned.even.fg = bg2
# Background color of pinned unselected odd tabs.
c.colors.tabs.pinned.odd.bg = bright_green
# Foreground color of pinned unselected odd tabs.
c.colors.tabs.pinned.odd.fg = c.colors.tabs.pinned.even.fg
# Background color of pinned selected even tabs.
c.colors.tabs.pinned.selected.even.bg = bg0
# Foreground color of pinned selected even tabs.
c.colors.tabs.pinned.selected.even.fg = c.colors.tabs.selected.odd.fg
# Background color of pinned selected odd tabs.
c.colors.tabs.pinned.selected.odd.bg = c.colors.tabs.pinned.selected.even.bg
# Foreground color of pinned selected odd tabs.
c.colors.tabs.pinned.selected.odd.fg = c.colors.tabs.selected.odd.fg
# Background color for webpages if unset (or empty to use the theme's
# color).
# c.colors.webpage.bg = bg4
|
{
"content_hash": "3e4be1e72af5fb7de6c657a2ccacece1",
"timestamp": "",
"source": "github",
"line_count": 325,
"max_line_length": 95,
"avg_line_length": 29.775384615384617,
"alnum_prop": 0.7628397230546657,
"repo_name": "The-Compiler/dotfiles",
"id": "7c96968c16ed5f9c71cd6e370740521b5f95d707",
"size": "10027",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qutebrowser/gruvbox.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Emacs Lisp",
"bytes": "49217"
},
{
"name": "Perl",
"bytes": "27"
},
{
"name": "Python",
"bytes": "20015"
},
{
"name": "Shell",
"bytes": "19362"
},
{
"name": "Vim Script",
"bytes": "636"
}
],
"symlink_target": ""
}
|
__author__ = 'Jachym Cepicky'
from pywps import Process, LiteralInput, ComplexOutput, ComplexInput, Format
from pywps.app.Common import Metadata
from pywps.validator.mode import MODE
from pywps.inout.formats import FORMATS
from pywps.response.status import WPS_STATUS
# -- Process interface ------------------------------------------------------
# Input: a vector layer delivered as GML; MODE.STRICT tells PyWPS to
# validate the payload against the declared format.
inpt_vector = ComplexInput(
'vector',
'Vector map',
supported_formats=[Format('application/gml+xml')],
mode=MODE.STRICT
)
# Input: buffer distance as a float (presumably in the layer's CRS
# units — TODO confirm against the data used).
inpt_size = LiteralInput('size', 'Buffer size', data_type='float')
# Output: the buffered features, again encoded as GML.
out_output = ComplexOutput(
'output',
'HelloWorld Output',
supported_formats=[Format('application/gml+xml')]
)
# Lists handed to the Process constructor below.
inputs = [inpt_vector, inpt_size]
outputs = [out_output]
class DemoBuffer(Process):
    """Demo WPS process that buffers every feature of an input vector layer.

    The real work happens in the module-level ``_handler`` function,
    which is registered as this process's handler.
    """

    def __init__(self):
        super(DemoBuffer, self).__init__(
            _handler,
            identifier='demobuffer',
            version='1.0.0',
            title='Buffer',
            abstract='This process demonstrates, how to create any process in PyWPS environment',
            metadata=[Metadata('process metadata 1', 'http://example.org/1'),
                      Metadata('process metadata 2', 'http://example.org/2')],
            inputs=inputs,
            outputs=outputs,
            store_supported=True,
            status_supported=True
        )


# Fixed: the original decorated this module-level function with
# @staticmethod, which makes it non-callable on Python < 3.10 and is only
# meaningful inside a class body. The bare `_handler` reference in
# DemoBuffer.__init__ requires a plain module-level callable.
def _handler(request, response):
    """Handler method - this method obtains request object and response
    object and creates the buffer
    """
    from osgeo import ogr

    # obtaining input with identifier 'vector' as file name
    input_file = request.inputs['vector'][0].file
    # obtaining input with identifier 'size' as data directly
    size = request.inputs['size'][0].data

    # open file the "gdal way"
    input_source = ogr.Open(input_file)
    input_layer = input_source.GetLayer()
    layer_name = input_layer.GetName()

    # create output file
    driver = ogr.GetDriverByName('GML')
    output_source = driver.CreateDataSource(
        layer_name,
        ["XSISCHEMAURI=http://schemas.opengis.net/gml/2.1.2/feature.xsd"])
    output_layer = output_source.CreateLayer(layer_name, None, ogr.wkbUnknown)

    # make buffer for each feature, reporting progress per feature
    count = input_layer.GetFeatureCount()
    index = 0
    while index < count:
        response._update_status(WPS_STATUS.STARTED,
                                'Buffering feature {}'.format(index),
                                float(index) / count)
        # get the geometry
        input_feature = input_layer.GetNextFeature()
        input_geometry = input_feature.GetGeometryRef()
        # make the buffer
        buffer_geometry = input_geometry.Buffer(float(size))
        # create output feature to the file
        output_feature = ogr.Feature(feature_def=output_layer.GetLayerDefn())
        output_feature.SetGeometryDirectly(buffer_geometry)
        output_layer.CreateFeature(output_feature)
        output_feature.Destroy()
        index += 1

    # set output format and point the output at the file just written
    response.outputs['output'].data_format = FORMATS.GML
    response.outputs['output'].file = layer_name
    return response
|
{
"content_hash": "ea78a0edd2a4438063f95a6373a8d3cd",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 111,
"avg_line_length": 30.26732673267327,
"alnum_prop": 0.6584887144259077,
"repo_name": "bird-house/PyWPS",
"id": "96de80a15c7d8bec04f731005b5d746c37d84fa0",
"size": "4379",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "docs/demobuffer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "220792"
}
],
"symlink_target": ""
}
|
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# '../' makes the f5lbaasdriver package importable for autodoc; './' allows
# local extensions next to this conf.py.
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
import f5lbaasdriver
import f5_sphinx_theme
# Single source of truth for the docs version: the package's own metadata.
VERSION = f5lbaasdriver.__version__
# -- General configuration ------------------------------------------------
# (Settings left commented out document the Sphinx defaults.)
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.4'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinxjp.themes.basicstrap',
'cloud_sptheme.ext.table_styling',
#'sphinx.ext.autosectionlabel',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'F5 Driver for OpenStack LBaaSv2'
copyright = u'2018 F5 Networks Inc.'
author = u'F5 Networks'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
# Both are taken verbatim from the package version imported above.
version = VERSION
release = VERSION
# OpenStack release
openstack_release = "Newton"
#rst_prolog = '''
#'''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build',
'Thumbs.db',
'.DS_Store',
'README.rst']
# Images referenced by absolute URL are expected; silence the warning.
suppress_warnings = ['image.nonlocal_uri']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Uses F5's own theme package imported at the top of this file.
html_theme = 'f5_sphinx_theme'
html_theme_path = f5_sphinx_theme.get_html_theme_path()
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
#'site_name': 'F5 OpenStack Docs Home',
'next_prev_link': False
}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'F5 Driver for OpenStack LBaaSv2'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {'**': ['searchbox.html', 'localtoc.html', 'globaltoc.html']}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'f5-openstack-lbaasv2-driver_doc'
# -- Options for LaTeX output ---------------------------------------------
# All LaTeX knobs are left at their Sphinx defaults.
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'f5-openstack-lbaasv2-driver.tex',
u'F5 Driver for OpenStack LBaaSv2 Documentation',
u'F5 Networks', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'f5-openstack-lbaasv2-driver',
u'F5 Driver for OpenStack LBaaSv2 Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'f5-openstack-lbaasv2-driver',
u'F5 Driver for OpenStack LBaaSv2 Documentation',
author, 'f5-openstack-lbaasv2-driver', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# intersphinx: refer to other F5 OpenStack documentation sets.
# Version of the neutron-lbaas shim tarball the docs should point at,
# and the release-asset URL built from it (substituted into rst_epilog).
f5_lbaasv2_driver_shim_version = '10.0.0'
f5_lbaasv2_driver_shim_url = (
    'https://github.com/F5Networks/neutron-lbaas/releases/download/'
    'v{0}/f5.tgz'.format(f5_lbaasv2_driver_shim_version)
)
#intersphinx_mapping = {'heat': (
#    'http://f5-openstack-heat.readthedocs.io/en/'+openstack_release.lower(), None),
#                       'heatplugins': (
#    'http://f5-openstack-heat-plugins.readthedocs.io/en/'+openstack_release.lower(), None),
#                       'lbaasv1': (
#    'http://f5-openstack-lbaasv1.readthedocs.io/en/'+openstack_release.lower()+'/', None),
#                       'agent': (
#    'http://f5-openstack-agent.readthedocs.io/en/'+openstack_release.lower()+'/', None),
#                       'docs': (
#    'http://f5-openstack-docs.readthedocs.io/en/'+openstack_release.lower()+'/', None),
#                       }
# Substitutions and hyperlink targets appended to every RST source file.
# The %-interpolation below fills in the release name, package version and
# base docs URL, so download links always match the built version.
rst_epilog = '''
.. |openstack| replace:: %(openstack_release)s
.. |community_tempest_lbaasv2_tests| raw:: html
<a href="https://github.com/openstack/neutron-lbaas/tree/stable/%(openstack_release_l)s">tests</a>
.. |f5_lbaasv2_driver_readme| raw:: html
<a href="https://github.com/F5Networks/f5-openstack-lbaasv2-driver/blob/%(openstack_release_l)s/README.rst">README</a>
.. |f5_agent_readme| raw:: html
<a href="https://github.com/F5Networks/f5-openstack-agent/blob/%(openstack_release_l)s/README.rst">README</a>
.. |f5_lbaasv2_driver_pip_url| replace:: git+https:\//github.com/F5Networks/f5-openstack-lbaasv2-driver@v%(version)s
.. |f5_lbaasv2_driver_pip_url_branch| replace:: git+https:\//github.com/F5Networks/f5-openstack-lbaasv2-driver@%(openstack_release_l)s
.. |f5_lbaasv2_driver_deb_url| replace:: \https://github.com/F5Networks/f5-openstack-lbaasv2-driver/releases/download/v%(version)s/python-f5-openstack-lbaasv2-driver_%(version)s-1_1404_all.deb
.. |f5_lbaasv2_driver_rpm_url| replace:: \https://github.com/F5Networks/f5-openstack-lbaasv2-driver/releases/download/v%(version)s/f5-openstack-lbaasv2-driver-%(version)s-1.el7.noarch.rpm
.. |f5_lbaasv2_driver_deb_package| replace:: python-f5-openstack-lbaasv2-driver_%(version)s-1_1404_all.deb
.. |f5_lbaasv2_driver_rpm_package| replace:: f5-openstack-lbaasv2-driver-%(version)s-1.el7.noarch.rpm
.. |f5_lbaasv2_driver_shim_url| replace:: %(f5_lbaasv2_driver_shim_url)s
.. |f5_agent_pip_url| replace:: git+\https://github.com/F5Networks/f5-openstack-agent@v%(version)s
.. |deb-download| raw:: html
<a class="btn btn-info" href="https://github.com/F5Networks/f5-openstack-lbaasv2-driver/releases/download/v%(version)s/python-f5-openstack-lbaasv2-driver_%(version)s-1_1404_all.deb">Debian package</a>
.. |rpm-download| raw:: html
<a class="btn btn-info" href="https://github.com/F5Networks/f5-openstack-lbaasv2-driver/releases/download/v%(version)s/f5-openstack-lbaasv2-driver-%(version)s-1.el7.noarch.rpm">RPM package</a>
.. |agent-long| replace:: F5 Agent for OpenStack Neutron
.. |agent| replace:: :code:`f5-openstack-agent`
.. |driver| replace:: :code:`f5-openstack-lbaasv2-driver`
.. |driver-long| replace:: F5 Driver for OpenStack LBaaSv2
.. |agent-url| raw:: html
<a target="_blank" href="%(base_url)s/products/openstack/agent/%(openstack_release)s">F5 Agent for OpenStack Neutron</a>
.. |driver-short| replace:: F5 driver
.. _OpenStack Neutron: https://docs.openstack.org/neutron/%(openstack_release_l)s/
.. _F5 Agent for OpenStack Neutron: %(base_url)s/products/openstack/agent/%(openstack_release_l)s
.. _user documentation: %(base_url)s/cloud/openstack/latest/lbaas
.. _Neutron LBaaS API: https://wiki.openstack.org/wiki/Neutron/LBaaS/API_2.0
.. _available F5 agent: %(base_url)s/products/openstack/agent/%(openstack_release_l)s
.. _F5 Service Provider Package: %(base_url)s/cloud/openstack/latest/lbaas-prep
.. _Download the latest debian package: |f5_lbaasv2_driver_deb_url|
.. _Download the latest rpm package: |f5_lbaasv2_driver_rpm_url|
.. _Partners: %(base_url)s/cloud/openstack/latest/support/partners.html
.. _Configure and start the F5 Agent: %(base_url)s/products/openstack/agent/%(openstack_release_l)s/#configure-the-f5-agent
.. _Capacity-based Scale out: %(base_url)s/cloud/openstack/v1/lbaas/capacity-based-scaleout.html
.. _Differentiated Service Environments: %(base_url)s/cloud/openstack/v1/lbaas/differentiated-service-environments.html
''' % {
'openstack_release': openstack_release,
'openstack_release_l': openstack_release.lower(),
'f5_lbaasv2_driver_shim_url': f5_lbaasv2_driver_shim_url,
'version': version,
'base_url': 'http://clouddocs.f5.com'
}
# Links to external sites (i.e., outside of clouddocs)
# Use: :issues:`287` would transform to "issue 287" and link to issue #287 in GitHub
# sphinx.ext.extlinks alias: :issues:`287` renders as "issue 287" and links
# to issue #287 on the project's GitHub tracker.
extlinks = {'issues': ('https://github.com/F5Networks/f5-openstack-lbaasv2-driver/issues/%s',
                       'issue ')}
|
{
"content_hash": "6ee9edb6ac0c8c1443f3c4fa71420330",
"timestamp": "",
"source": "github",
"line_count": 378,
"max_line_length": 204,
"avg_line_length": 38.42857142857143,
"alnum_prop": 0.7030152829409335,
"repo_name": "jlongstaf/f5-openstack-lbaasv2-driver",
"id": "41a373f65010b6f8f56d75ecbd3ce880c0894e5a",
"size": "14972",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "849"
},
{
"name": "Python",
"bytes": "280150"
},
{
"name": "Shell",
"bytes": "6261"
}
],
"symlink_target": ""
}
|
# Scrapy settings for the `tutorial` crawler project.
BOT_NAME = 'tutorial'
SPIDER_MODULES = ['tutorial.spiders']
NEWSPIDER_MODULE = 'tutorial.spiders'
# No item pipelines enabled; the JSON pipeline is kept here disabled.
ITEM_PIPELINES = {
    # 'tutorial.pipelines.JsonWithEncodingPipeline': 300,
}
# Crawl responsibly by identifying yourself (and your website) on the user-agent
# USER_AGENT = 'tutorial (+http://www.yourdomain.com)'
# Custom proxy and user-agent rotation middlewares (priority 400/401).
DOWNLOADER_MIDDLEWARES = {
    'tutorial.misc.middleware.CustomHttpProxyMiddleware': 400,
    'tutorial.misc.middleware.CustomUserAgentMiddleware': 401,
}
LOG_LEVEL = 'INFO'
|
{
"content_hash": "b43519b391160854ab019628ba9bf5a3",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 80,
"avg_line_length": 30.625,
"alnum_prop": 0.7448979591836735,
"repo_name": "openslack/openslack-crawler",
"id": "609038dd5b6a67330cde9d9147867fe6928b4ffb",
"size": "723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/tutorial/tutorial/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "286907"
},
{
"name": "Shell",
"bytes": "72"
},
{
"name": "Thrift",
"bytes": "1132"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import djgeojson.fields
import elecciones.models
class Migration(migrations.Migration):
    """Initial schema for the `elecciones` app.

    Creates the electoral hierarchy (Seccion -> Circuito -> LugarVotacion ->
    Mesa) plus Eleccion, Opcion and Partido.  Foreign keys between models
    created later in the list are attached afterwards with AddField.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Circuito',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('numero', models.CharField(max_length=10)),
                ('nombre', models.CharField(max_length=100)),
            ],
            options={
                'verbose_name': 'Circuito electoral',
                'verbose_name_plural': 'Circuitos electorales',
                'ordering': ('numero',),
            },
        ),
        migrations.CreateModel(
            name='Eleccion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(unique=True)),
                ('nombre', models.CharField(max_length=50)),
                ('fecha', models.DateTimeField(blank=True, null=True)),
            ],
            options={
                'verbose_name': 'Elección',
                'verbose_name_plural': 'Elecciones',
            },
        ),
        migrations.CreateModel(
            name='LugarVotacion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=100)),
                ('direccion', models.CharField(max_length=100)),
                ('barrio', models.CharField(blank=True, max_length=100)),
                ('ciudad', models.CharField(blank=True, max_length=100)),
                ('calidad', models.CharField(blank=True, editable=False, help_text='calidad de la geolocalizacion', max_length=20)),
                ('electores', models.PositiveIntegerField(blank=True, null=True)),
                ('geom', djgeojson.fields.PointField(null=True)),
                ('latitud', models.FloatField(editable=False, null=True)),
                ('longitud', models.FloatField(editable=False, null=True)),
                ('circuito', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='elecciones.Circuito')),
            ],
            options={
                'verbose_name': 'Lugar de votación',
                'verbose_name_plural': 'Lugares de votación',
            },
        ),
        migrations.CreateModel(
            name='Mesa',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('numero', models.PositiveIntegerField()),
                ('es_testigo', models.BooleanField(default=False)),
                ('url_datos_oficiales', models.URLField(blank=True, help_text='url al telegrama')),
                ('url_pdf_datos_oficiales', models.URLField(blank=True, help_text='url al pdf del telegrama')),
                ('foto_acta', models.ImageField(blank=True, null=True, upload_to=elecciones.models.path_foto_acta)),
                ('electores', models.PositiveIntegerField(blank=True, null=True)),
                ('eleccion', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='elecciones.Eleccion')),
                ('lugar_votacion', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='mesas', to='elecciones.LugarVotacion', verbose_name='Lugar de votacion')),
            ],
        ),
        migrations.CreateModel(
            name='Opcion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=100)),
                ('nombre_corto', models.CharField(default='', max_length=10)),
                ('orden', models.PositiveIntegerField(blank=True, help_text='Orden en la boleta', null=True)),
                ('obligatorio', models.BooleanField(default=False)),
                ('es_contable', models.BooleanField(default=True)),
                ('codigo_dne', models.PositiveIntegerField(blank=True, null=True)),
            ],
            options={
                'verbose_name': 'Opción',
                'verbose_name_plural': 'Opciones',
                'ordering': ['orden'],
            },
        ),
        migrations.CreateModel(
            name='Partido',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('orden', models.PositiveIntegerField(help_text='Orden opcion')),
                ('numero', models.PositiveIntegerField(blank=True, null=True)),
                ('nombre', models.CharField(max_length=100)),
                ('nombre_corto', models.CharField(blank=True, max_length=10)),
                ('color', models.CharField(blank=True, max_length=30)),
                ('obligatorio', models.BooleanField(default=False)),
                ('es_contable', models.BooleanField(default=True)),
                ('codigo_dne', models.PositiveIntegerField(blank=True, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Seccion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('numero', models.PositiveIntegerField()),
                ('nombre', models.CharField(max_length=100)),
            ],
            options={
                'verbose_name': 'Sección electoral',
                'verbose_name_plural': 'Secciones electorales',
            },
        ),
        # Cross-model FKs added after all tables exist.
        migrations.AddField(
            model_name='opcion',
            name='partido',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='elecciones.Partido'),
        ),
        migrations.AddField(
            model_name='eleccion',
            name='opciones',
            field=models.ManyToManyField(to='elecciones.Opcion'),
        ),
        migrations.AddField(
            model_name='circuito',
            name='seccion',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='elecciones.Seccion'),
        ),
    ]
|
{
"content_hash": "a908a12a2d3ce53186ff18ae4cdac721",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 197,
"avg_line_length": 47.71111111111111,
"alnum_prop": 0.5576773792889302,
"repo_name": "democraciaconcodigos/escrutinio-social",
"id": "1dbd333499749707307d213e71764c9b4852ad33",
"size": "6519",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "elecciones/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1284"
},
{
"name": "Dockerfile",
"bytes": "1000"
},
{
"name": "HTML",
"bytes": "34793"
},
{
"name": "JavaScript",
"bytes": "1565"
},
{
"name": "Jupyter Notebook",
"bytes": "4812"
},
{
"name": "Python",
"bytes": "69717"
},
{
"name": "Shell",
"bytes": "915"
}
],
"symlink_target": ""
}
|
from collections import namedtuple
from corehq.apps.app_manager.xpath_validator.config import get_xpath_validator_path
from corehq.apps.app_manager.xpath_validator.exceptions import XpathValidationError
from dimagi.utils.subprocess_manager import subprocess_context
# Result of validate_xpath(): is_valid flag plus the validator's message
# (None on success, validator stdout on failure).
XpathValidationResponse = namedtuple('XpathValidationResponse', ['is_valid', 'message'])
def validate_xpath(xpath, allow_case_hashtags=False):
    """
    Validate an xpath expression by piping it to the node.js validator script.

    Returns an XpathValidationResponse.  Exit code 0 means valid, 1 means
    invalid (the validator's stdout becomes the message); any other exit
    code raises XpathValidationError.
    """
    with subprocess_context() as subprocess:
        validator = get_xpath_validator_path()
        cmd = ['node', validator]
        if allow_case_hashtags:
            cmd.append('--allow-case-hashtags')
        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate(xpath.encode('utf-8'))
        status = proc.wait()
        if status == 0:
            return XpathValidationResponse(is_valid=True, message=None)
        if status == 1:
            return XpathValidationResponse(is_valid=False, message=out)
        raise XpathValidationError(
            "{path} failed with exit code {exit_code}:\n{stderr}"
            .format(path=validator, exit_code=status, stderr=err))
|
{
"content_hash": "64af587cbc8002485100f4df603fc982",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 88,
"avg_line_length": 46.666666666666664,
"alnum_prop": 0.6595238095238095,
"repo_name": "qedsoftware/commcare-hq",
"id": "df9e766dfee8333852581c910314b3b1ff843ac7",
"size": "1260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/app_manager/xpath_validator/wrapper.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
}
|
'''OpenGL extension EXT.texture_object
This module customises the behaviour of the
OpenGL.raw.GL.EXT.texture_object to provide a more
Python-friendly API
Overview (from the spec)
This extension introduces named texture objects. The only way to name
a texture in GL 1.0 is by defining it as a single display list. Because
display lists cannot be edited, these objects are static. Yet it is
important to be able to change the images and parameters of a texture.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/texture_object.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.EXT.texture_object import *
from OpenGL.raw.GL.EXT.texture_object import _EXTENSION_NAME
def glInitTextureObjectEXT():
    '''Return boolean indicating whether this extension is available'''
    from OpenGL import extensions
    available = extensions.hasGLExtension(_EXTENSION_NAME)
    return available
# Wrappers adapting the raw entry points to Python array arguments/outputs.
# This section is generated (see the END AUTOGENERATED marker) — edit with care.
# INPUT glAreTexturesResidentEXT.textures size not checked against n
glAreTexturesResidentEXT=wrapper.wrapper(glAreTexturesResidentEXT).setInputArraySize(
    'textures', None
).setOutput(
    'residences',size=lambda x:(x,),pnameArg='n',orPassIn=True
)
# INPUT glDeleteTexturesEXT.textures size not checked against n
glDeleteTexturesEXT=wrapper.wrapper(glDeleteTexturesEXT).setInputArraySize(
    'textures', None
)
# glGenTexturesEXT returns an output array sized by its 'n' parameter.
glGenTexturesEXT=wrapper.wrapper(glGenTexturesEXT).setOutput(
    'textures',size=lambda x:(x,),pnameArg='n',orPassIn=True
)
# INPUT glPrioritizeTexturesEXT.textures size not checked against n
# INPUT glPrioritizeTexturesEXT.priorities size not checked against n
glPrioritizeTexturesEXT=wrapper.wrapper(glPrioritizeTexturesEXT).setInputArraySize(
    'textures', None
).setInputArraySize(
    'priorities', None
)
### END AUTOGENERATED SECTION
|
{
"content_hash": "466947b9e3a1a972515fe66ff73d8dfd",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 85,
"avg_line_length": 38.816326530612244,
"alnum_prop": 0.7949526813880127,
"repo_name": "alexus37/AugmentedRealityChess",
"id": "a7ef9e7588bbbd0275ced58bc02f7f2f87309b2f",
"size": "1902",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/GL/EXT/texture_object.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "158062"
},
{
"name": "C++",
"bytes": "267993"
},
{
"name": "CMake",
"bytes": "11319"
},
{
"name": "Fortran",
"bytes": "3707"
},
{
"name": "Makefile",
"bytes": "14618"
},
{
"name": "Python",
"bytes": "12813086"
},
{
"name": "Roff",
"bytes": "3310"
},
{
"name": "Shell",
"bytes": "3855"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from django.test import TestCase
from .models import Article, IndexErrorArticle, Person
class EarliestOrLatestTests(TestCase):
    """Tests for the earliest() and latest() objects methods"""
    def tearDown(self):
        """Makes sure Article has a get_latest_by"""
        # Several tests null out Meta.get_latest_by on the shared model class;
        # restore the declared value so later tests start clean.
        if not Article._meta.get_latest_by:
            Article._meta.get_latest_by = 'pub_date'
    def test_earliest(self):
        # Because no Articles exist yet, earliest() raises ArticleDoesNotExist.
        with self.assertRaises(Article.DoesNotExist):
            Article.objects.earliest()
        a1 = Article.objects.create(
            headline="Article 1", pub_date=datetime(2005, 7, 26),
            expire_date=datetime(2005, 9, 1)
        )
        a2 = Article.objects.create(
            headline="Article 2", pub_date=datetime(2005, 7, 27),
            expire_date=datetime(2005, 7, 28)
        )
        Article.objects.create(
            headline="Article 3", pub_date=datetime(2005, 7, 28),
            expire_date=datetime(2005, 8, 27)
        )
        Article.objects.create(
            headline="Article 4", pub_date=datetime(2005, 7, 28),
            expire_date=datetime(2005, 7, 30)
        )
        # Get the earliest Article.
        self.assertEqual(Article.objects.earliest(), a1)
        # Get the earliest Article that matches certain filters.
        self.assertEqual(
            Article.objects.filter(pub_date__gt=datetime(2005, 7, 26)).earliest(),
            a2
        )
        # Pass a custom field name to earliest() to change the field that's used
        # to determine the earliest object.
        self.assertEqual(Article.objects.earliest('expire_date'), a2)
        self.assertEqual(Article.objects.filter(
            pub_date__gt=datetime(2005, 7, 26)).earliest('expire_date'), a2)
        # earliest() overrides any other ordering specified on the query.
        # Refs #11283.
        self.assertEqual(Article.objects.order_by('id').earliest(), a1)
        # Error is raised if the user forgot to add a get_latest_by
        # in the Model.Meta
        Article.objects.model._meta.get_latest_by = None
        with self.assertRaisesMessage(
            AssertionError,
            "earliest() and latest() require either a field_name parameter or "
            "'get_latest_by' in the model"
        ):
            Article.objects.earliest()
    def test_latest(self):
        # Because no Articles exist yet, latest() raises ArticleDoesNotExist.
        with self.assertRaises(Article.DoesNotExist):
            Article.objects.latest()
        a1 = Article.objects.create(
            headline="Article 1", pub_date=datetime(2005, 7, 26),
            expire_date=datetime(2005, 9, 1)
        )
        Article.objects.create(
            headline="Article 2", pub_date=datetime(2005, 7, 27),
            expire_date=datetime(2005, 7, 28)
        )
        a3 = Article.objects.create(
            headline="Article 3", pub_date=datetime(2005, 7, 27),
            expire_date=datetime(2005, 8, 27)
        )
        a4 = Article.objects.create(
            headline="Article 4", pub_date=datetime(2005, 7, 28),
            expire_date=datetime(2005, 7, 30)
        )
        # Get the latest Article.
        self.assertEqual(Article.objects.latest(), a4)
        # Get the latest Article that matches certain filters.
        self.assertEqual(
            Article.objects.filter(pub_date__lt=datetime(2005, 7, 27)).latest(),
            a1
        )
        # Pass a custom field name to latest() to change the field that's used
        # to determine the latest object.
        self.assertEqual(Article.objects.latest('expire_date'), a1)
        self.assertEqual(
            Article.objects.filter(pub_date__gt=datetime(2005, 7, 26)).latest('expire_date'),
            a3,
        )
        # latest() overrides any other ordering specified on the query (#11283).
        self.assertEqual(Article.objects.order_by('id').latest(), a4)
        # Error is raised if get_latest_by isn't in Model.Meta.
        Article.objects.model._meta.get_latest_by = None
        with self.assertRaisesMessage(
            AssertionError,
            "earliest() and latest() require either a field_name parameter or "
            "'get_latest_by' in the model"
        ):
            Article.objects.latest()
    def test_latest_manual(self):
        # You can still use latest() with a model that doesn't have
        # "get_latest_by" set -- just pass in the field name manually.
        Person.objects.create(name="Ralph", birthday=datetime(1950, 1, 1))
        p2 = Person.objects.create(name="Stephanie", birthday=datetime(1960, 2, 3))
        msg = (
            "earliest() and latest() require either a field_name parameter or "
            "'get_latest_by' in the model"
        )
        with self.assertRaisesMessage(AssertionError, msg):
            Person.objects.latest()
        self.assertEqual(Person.objects.latest("birthday"), p2)
class TestFirstLast(TestCase):
    """Tests for QuerySet.first() and .last(), including empty querysets."""
    def test_first(self):
        p1 = Person.objects.create(name="Bob", birthday=datetime(1950, 1, 1))
        p2 = Person.objects.create(name="Alice", birthday=datetime(1961, 2, 3))
        self.assertEqual(Person.objects.first(), p1)
        self.assertEqual(Person.objects.order_by('name').first(), p2)
        self.assertEqual(Person.objects.filter(birthday__lte=datetime(1955, 1, 1)).first(), p1)
        # first() on an empty queryset returns None instead of raising.
        self.assertIsNone(Person.objects.filter(birthday__lte=datetime(1940, 1, 1)).first())
    def test_last(self):
        p1 = Person.objects.create(name="Alice", birthday=datetime(1950, 1, 1))
        p2 = Person.objects.create(name="Bob", birthday=datetime(1960, 2, 3))
        # Note: by default PK ordering.
        self.assertEqual(Person.objects.last(), p2)
        self.assertEqual(Person.objects.order_by('-name').last(), p1)
        self.assertEqual(Person.objects.filter(birthday__lte=datetime(1955, 1, 1)).last(), p1)
        # last() on an empty queryset returns None instead of raising.
        self.assertIsNone(Person.objects.filter(birthday__lte=datetime(1940, 1, 1)).last())
    def test_index_error_not_suppressed(self):
        """
        #23555 -- Unexpected IndexError exceptions in QuerySet iteration
        shouldn't be suppressed.
        """
        def check():
            # We know that we've broken the __iter__ method, so the queryset
            # should always raise an exception.
            with self.assertRaises(IndexError):
                IndexErrorArticle.objects.all()[:10:2]
            with self.assertRaises(IndexError):
                IndexErrorArticle.objects.all().first()
            with self.assertRaises(IndexError):
                IndexErrorArticle.objects.all().last()
        check()
        # And it does not matter if there are any records in the DB.
        IndexErrorArticle.objects.create(
            headline="Article 1", pub_date=datetime(2005, 7, 26),
            expire_date=datetime(2005, 9, 1)
        )
        check()
|
{
"content_hash": "63b5a7571cefd15717fca77b524584e8",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 95,
"avg_line_length": 40.78362573099415,
"alnum_prop": 0.6098365357040436,
"repo_name": "darjeeling/django",
"id": "eaa318663fb7135dc7a454c5dd4d841583562d09",
"size": "6974",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/get_earliest_or_latest/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "55975"
},
{
"name": "HTML",
"bytes": "219390"
},
{
"name": "JavaScript",
"bytes": "253393"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "12053151"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
from elasticsearch import Elasticsearch
from pytest import raises
from elasticsearch_dsl import connections, serializer
def test_default_connection_is_returned_by_default():
    """With no alias given, get_connection() resolves the "default" alias."""
    registry = connections.Connections()
    primary, secondary = object(), object()
    registry.add_connection("default", primary)
    registry.add_connection("not-default", secondary)
    assert registry.get_connection() is primary
def test_get_connection_created_connection_if_needed():
    """Configured-but-uncreated aliases are instantiated lazily on first use."""
    registry = connections.Connections()
    registry.configure(default={"hosts": ["es.com"]}, local={"hosts": ["localhost"]})
    default_client = registry.get_connection()
    local_client = registry.get_connection("local")
    assert isinstance(default_client, Elasticsearch)
    assert isinstance(local_client, Elasticsearch)
    assert default_client.transport.hosts == [{"host": "es.com"}]
    assert local_client.transport.hosts == [{"host": "localhost"}]
def test_configure_preserves_unchanged_connections():
    """Re-configuring replaces only aliases whose settings actually changed."""
    registry = connections.Connections()
    registry.configure(default={"hosts": ["es.com"]}, local={"hosts": ["localhost"]})
    original_default = registry.get_connection()
    original_local = registry.get_connection("local")
    # Change "default" only; "local" keeps identical settings.
    registry.configure(default={"hosts": ["not-es.com"]}, local={"hosts": ["localhost"]})
    assert registry.get_connection("local") is original_local
    assert registry.get_connection() is not original_default
def test_remove_connection_removes_both_conn_and_conf():
    """remove_connection() drops both the live connection and its configuration."""
    c = connections.Connections()
    c.configure(default={"hosts": ["es.com"]}, local={"hosts": ["localhost"]})
    c.add_connection("local2", object())
    c.remove_connection("default")
    # Removing an unrelated alias leaves "local2" resolvable.
    c.get_connection("local2")
    c.remove_connection("local2")
    with raises(Exception):
        c.get_connection("local2")
    # BUG FIX: this final lookup was unguarded, so resolving the removed
    # "default" alias raised and errored the test instead of asserting.
    with raises(Exception):
        c.get_connection("default")
def test_create_connection_constructs_client():
    """create_connection() builds a client from kwargs and registers it."""
    registry = connections.Connections()
    registry.create_connection("testing", hosts=["es.com"])
    client = registry.get_connection("testing")
    assert client.transport.hosts == [{"host": "es.com"}]
def test_create_connection_adds_our_serializer():
    """Clients built via create_connection() use the package's own serializer."""
    registry = connections.Connections()
    registry.create_connection("testing", hosts=["es.com"])
    client = registry.get_connection("testing")
    assert client.transport.serializer is serializer.serializer
|
{
"content_hash": "fe79c7568866d4223b5d6ba7b32297b2",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 84,
"avg_line_length": 29.22972972972973,
"alnum_prop": 0.675913083680074,
"repo_name": "elastic/elasticsearch-dsl-py",
"id": "278760cc38f1be7422ed1601f23349b94a336177",
"size": "2951",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_connections.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "423092"
}
],
"symlink_target": ""
}
|
from nose.tools import assert_equals
from os.path import dirname, relpath, join
from diylisp.interpreter import interpret, interpret_file
from diylisp.types import Environment
# Shared environment with stdlib.diy pre-loaded; every test below evaluates
# expressions against this single Environment instance.
env = Environment()
path = join(dirname(relpath(__file__)), '..', 'stdlib.diy')
interpret_file(path, env)
"""
Consider these tests as suggestions for what a standard library for
your language could contain. Each test function tests the implementation
of one stdlib function.
Put the implementation in the file `stdlib.diy` at the root directory
of the repository. The first function, `not` is already defined for you.
It's your job to create the rest, or perhaps something completely different?
Anything you put in `stdlib.diy` is also available from the REPL, so feel
free to test things out there.
$ ./repl
→ (not #t)
#f
PS: Note that in these tests, `interpret` is used. In addition to parsing
and evaluating, it "unparses" the result, hence strings such as "#t" as the
expected result instead of `True`.
"""
def test_not():
    # `not` negates a single boolean value.
    for expected, arg in [('#t', '#f'), ('#f', '#t')]:
        assert_equals(expected, interpret('(not %s)' % arg, env))
def test_or():
    # `or`: true when at least one argument is true.
    for expected, a, b in [('#f', '#f', '#f'),
                           ('#t', '#t', '#f'),
                           ('#t', '#f', '#t'),
                           ('#t', '#t', '#t')]:
        assert_equals(expected, interpret('(or %s %s)' % (a, b), env))
def test_and():
    # `and`: true only when both arguments are true.
    for expected, a, b in [('#f', '#f', '#f'),
                           ('#f', '#t', '#f'),
                           ('#f', '#f', '#t'),
                           ('#t', '#t', '#t')]:
        assert_equals(expected, interpret('(and %s %s)' % (a, b), env))
def test_xor():
    # `xor`: true when exactly one argument is true.
    for expected, a, b in [('#f', '#f', '#f'),
                           ('#t', '#t', '#f'),
                           ('#t', '#f', '#t'),
                           ('#f', '#t', '#t')]:
        assert_equals(expected, interpret('(xor %s %s)' % (a, b), env))
def test_greater_or_equal():
    # `>=` over integers.
    for expected, expr in [('#f', '(>= 1 2)'),
                           ('#t', '(>= 2 2)'),
                           ('#t', '(>= 2 1)')]:
        assert_equals(expected, interpret(expr, env))
def test_less_or_equal():
    # `<=` over integers.
    for expected, expr in [('#t', '(<= 1 2)'),
                           ('#t', '(<= 2 2)'),
                           ('#f', '(<= 2 1)')]:
        assert_equals(expected, interpret(expr, env))
def test_less_than():
    # `<` over integers.
    for expected, expr in [('#t', '(< 1 2)'),
                           ('#f', '(< 2 2)'),
                           ('#f', '(< 2 1)')]:
        assert_equals(expected, interpret(expr, env))
def test_sum():
    # `sum` adds all numbers in a list; empty list sums to 0.
    for expected, expr in [('5', "(sum '(1 1 1 1 1))"),
                           ('10', "(sum '(1 2 3 4))"),
                           ('0', "(sum '())")]:
        assert_equals(expected, interpret(expr, env))
def test_length():
    # `length` counts top-level elements, regardless of their type.
    for expected, expr in [('5', "(length '(1 2 3 4 5))"),
                           ('3', "(length '(#t '(1 2 3) 'foo-bar))"),
                           ('0', "(length '())")]:
        assert_equals(expected, interpret(expr, env))
def test_append():
    # `append` concatenates two lists.
    for expected, expr in [('(1 2 3 4 5)', "(append '(1 2) '(3 4 5))"),
                           ("(#t #f 'maybe)", "(append '(#t) '(#f 'maybe))"),
                           ('()', "(append '() '())")]:
        assert_equals(expected, interpret(expr, env))
def test_filter():
    # `filter` keeps only the elements for which the predicate is true.
    # The predicate `even` is first defined in the shared environment.
    interpret("""
    (define even
        (lambda (x)
            (eq (mod x 2) 0)))
    """, env)
    assert_equals("(2 4 6)", interpret("(filter even '(1 2 3 4 5 6))", env))
def test_map():
    # `map` applies a function to every element of a list.
    # The function `inc` is first defined in the shared environment.
    interpret("""
    (define inc
        (lambda (x) (+ 1 x)))
    """, env)
    assert_equals("(2 3 4)", interpret("(map inc '(1 2 3))", env))
def test_reverse():
    # `reverse` flips the order of a list; empty list is unchanged.
    for expected, expr in [('(4 3 2 1)', "(reverse '(1 2 3 4))"),
                           ('()', "(reverse '())")]:
        assert_equals(expected, interpret(expr, env))
def test_range():
    # `range` builds an inclusive integer sequence; empty when start > stop.
    for expected, expr in [('(1 2 3 4 5)', '(range 1 5)'),
                           ('(1)', '(range 1 1)'),
                           ('()', '(range 2 1)')]:
        assert_equals(expected, interpret(expr, env))
def test_sort():
    # `sort` orders a list of numbers ascending.
    assert_equals("(1 2 3 4 5 6 7)",
                  interpret("(sort '(6 3 7 2 4 1 5))", env))
    # BUG FIX: the empty-list case previously evaluated "'()" — a bare quoted
    # empty list — and never called `sort`, so it passed vacuously.
    assert_equals("()", interpret("(sort '())", env))
|
{
"content_hash": "cbecdd5061ed8d72a527dea1748bd472",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 82,
"avg_line_length": 31.158730158730158,
"alnum_prop": 0.5858380030565461,
"repo_name": "kjbekkelund/kim-lisp",
"id": "57695c614d87d20fe0fc7e2b6f431eae00c46380",
"size": "3953",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_7_using_the_language.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "42789"
},
{
"name": "Shell",
"bytes": "944"
}
],
"symlink_target": ""
}
|
import sys, os, logging, itertools
from hmm.pssm import seq_to_numpy, numpy_to_seq
import hmm.pssm.logo as L
from biopsy.chow_liu.chow_liu import DependencyAnalyser
from gapped_pssms.parse_gapped_format import parse_models, build_hmm_from_semi_parsed
from cookbook.dicts import DictOf
from cookbook.pylab_utils import pylab_ioff
import pylab as P
import numpy
@pylab_ioff
def conditional_logo(joint):
    """
    Produces an image summarising the marginal, conditional and joint distributions of 2 bases.

    joint: indexable 4x4 array of joint base probabilities (indexed [x, y]);
    rows/columns are re-normalised to build the conditional logos.
    Returns the matplotlib figure.
    """
    import pylab as P, numpy as N, hmm.pssm.logo as L
    fig = P.figure(figsize=(6, 6), facecolor='white')
    dpi = fig.dpi
    def logo_from_dist(dist):
        # Render a (normalised) 4-vector distribution as a sequence-logo image.
        return L.dist_as_image(dist/dist.sum(), (dpi, dpi))
    def place_logo(logo, x, y):
        # Blit a logo into the dpi-sized grid cell (x, y) of the figure.
        P.figimage(N.asarray(logo, dtype=N.float32) / 255., x*dpi, y*dpi)
    # conditionals: one logo per conditioning base along each axis
    for x in xrange(4):
        cond = joint[x]
        place_logo(logo_from_dist(cond/cond.sum()), x+1, 0)
    for y in xrange(4):
        cond = joint[:,y]
        place_logo(logo_from_dist(cond/cond.sum()), 5, 4-y)
    # marginals: logo
    x_marg = joint.sum(axis=1)
    place_logo(logo_from_dist(x_marg/x_marg.sum()), 2.5, 5)
    y_marg = joint.sum(axis=0)
    place_logo(logo_from_dist(y_marg/y_marg.sum()), 0, 2.5)
    # joint distribution: heat map of centred white squares.
    # NOTE(review): edge_len scales linearly with the probability, so the
    # square's *side* (not its area) is proportional to joint[x,y] — confirm
    # this is intended; conditional_logo_2 uses area**.5 instead.
    # (Removed an unused local `Z = N.ones((dpi, dpi))` that was never read.)
    for x in xrange(4):
        for y in xrange(4):
            area = joint[x,y]
            edge_len = int(area * dpi)
            if edge_len:
                offset = (dpi-edge_len)/2.
                P.figimage(N.ones((edge_len, edge_len)), xo=dpi*(x+1)+offset, yo=dpi*(4-y)+offset)
    return fig
#logo = conditional_logo(joint)
#logo.savefig('dependencies/test.png', dpi=logo.dpi)
#raise ''
@pylab_ioff
def conditional_logo_2(joint, X=None, Y=None):
    """
    Produces an image summarising the marginal, conditional and joint distributions of 2 bases.

    joint: 2-D array of joint probabilities, indexed [x, y].
    X, Y: number of columns/rows to draw; default to joint.shape[0]/[1].
    Returns the matplotlib figure.
    """
    import pylab as P, numpy as N, hmm.pssm.logo as L
    from PIL import Image, ImageDraw
    # Idiom fix: compare against None with `is`, not `==` (was `None == X`).
    if X is None:
        X = joint.shape[0]
    if Y is None:
        Y = joint.shape[1]
    fig = P.figure(figsize=(X+2, Y+2), facecolor='white')
    dpi = fig.dpi
    def place_image(logo, x, y):
        # Blit an image into the dpi-sized grid cell (x, y).
        P.figimage(N.asarray(logo, dtype=N.float32) / 255., x*dpi, y*dpi)
    def place_square(area, x, y, colour=N.array([255,255,255])):
        # Draw a centred square whose *area* is proportional to `area`.
        edge_len = int((area**.5)*dpi)
        if edge_len:
            offset = (dpi-edge_len)/2.
            array = N.ones((edge_len, edge_len, 3))
            array[:,:] = colour
            P.figimage(array, xo=dpi*x+offset, yo=dpi*y+offset)
    # base labels; None draws an outline box instead of a letter
    bases = ['A', 'C', 'G', 'T', None]
    font = L.get_font(L._default_font, font_size=int(dpi/2))
    def image_for_base(b):
        # Render one base label (or an empty box for None) as a dpi x dpi image.
        # `colour` is resolved at call time from the enclosing scope.
        image = Image.new('RGB', (dpi, dpi), 'white')
        draw = ImageDraw.Draw(image)
        if b:
            textsize = draw.textsize(b, font=font)
            draw.text((dpi/2-textsize[0]/2,dpi/2-textsize[1]/2), b, font=font, fill=colour)
        else:
            draw.rectangle(((dpi/4,dpi/4), (3*dpi/4,3*dpi/4)), outline=colour)
        return image
    colour = 'black'
    for x, image in enumerate(map(image_for_base, bases[:X])):
        place_image(image, x+1, Y+1)
    colour = 'black'
    for y, image in enumerate(map(image_for_base, bases[:Y])):
        place_image(image, 0, Y-y)
    # joint distribution
    for x in xrange(X):
        for y in xrange(Y):
            place_square(joint[x,y], x+1, Y-y, colour=N.array([128,128,128]))
    # marginals
    x_marg = joint.sum(axis=1)
    for x in xrange(X):
        place_square(x_marg[x], x+1, 0)
    y_marg = joint.sum(axis=0)
    for y in xrange(Y):
        place_square(y_marg[y], X+1, Y-y)
    return fig
#logo = conditional_logo_2(joint, X=5, Y=4)
#logo.savefig('dependencies/test.png', dpi=logo.dpi)
#raise ''
def sequences_from_fasta(fasta):
    """Return an iterator over the sequences in a FASTA file.

    Each record is converted to a plain string with leading/trailing
    masked bases ('n'/'N') stripped.

    Fix: the previous implementation opened the file and never closed it;
    the handle is now closed as soon as iteration completes (or the
    generator is discarded).

    :param fasta: path to the FASTA file to read.
    """
    import corebio.seq_io.fasta_io
    # Open eagerly (as before) so a missing file raises at call time,
    # not on first iteration.
    handle = open(fasta, 'r')

    def _sequences():
        try:
            # iterseq parses lazily, so close only after iteration finishes.
            for seq in corebio.seq_io.fasta_io.iterseq(handle,
                                                       corebio.seq.dna_alphabet):
                yield str(seq).strip('nN')
        finally:
            handle.close()

    return _sequences()
def sites_from_states(states, sequence, background_states):
    """Yield (bases, state_labels) pairs, one per site.

    A site is a maximal run of positions whose state is NOT in
    *background_states*.  *states* and *sequence* must have equal length.
    """
    assert len(states) == len(sequence)
    bases, labels = [], []
    inside = False
    for state, base in zip(states, sequence):
        background = state in background_states
        if inside and background:
            # The current site just ended: emit it and start afresh.
            yield bases, labels
            bases, labels = [], []
        inside = not background
        if inside:
            bases.append(base)
            labels.append(state)
    if inside:
        # Flush a site that runs up to the end of the sequence.
        yield bases, labels
def uncomplement_base(base, state, traits):
    """Map a (base, state) pair onto the forward strand.

    If *state* appears in traits.reverse_complements, return the
    complementary base (3 - base, in the 0..3 base encoding) together with
    the corresponding forward-strand state; otherwise return the pair
    unchanged.
    """
    if state not in traits.reverse_complements:
        return base, state
    return 3 - base, traits.reverse_complements[state]
def uncomplement_site(site_seq, site_states, traits):
    """Return a forward-strand copy of a site.

    Each (base, state) pair is passed through uncomplement_base, so
    reverse-complement states are replaced by their forward equivalents.
    Returns a (bases, states) pair of lists.
    """
    mapped = [uncomplement_base(base, state, traits)
              for base, state in zip(site_seq, site_states)]
    seq = [base for base, _ in mapped]
    states = [state for _, state in mapped]
    return seq, states
def fisher_test(table, B=2000):
    """Run R's fisher.test on a contingency *table* via rpy2.

    The p-value is estimated by Monte-Carlo simulation with *B*
    replicates.  Returns the rpy2 result object.
    """
    # numpy2ri is imported for its side effect: it registers the
    # numpy <-> R conversion used when passing *table*.
    import rpy2.robjects.numpy2ri
    from rpy2.robjects import r
    # 'simulate.p.value' is not a valid Python identifier, so the R
    # arguments must be passed via a kwargs dict.
    kwargs = {
        'simulate.p.value': True,
        'B': B,
    }
    return r['fisher.test'](table, **kwargs)
#
# Initialise the logging
#
# Console logging at INFO plus a persistent log file with timestamps.
logging.basicConfig(level=logging.INFO)
file_handler = logging.FileHandler('position-dependencies.log')
file_handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))
logging.getLogger('').addHandler(file_handler)
# Default figure resolution for all plots.
P.rcParams['figure.dpi'] = 150
# Analysis parameters.
bases = ['a', 'c', 'g', 't']
p_binding_site = 0.001      # prior probability of a binding site in the HMM
min_mutual_info = 0.3       # threshold for reporting a pairwise dependency
ignore_edges_below = .1     # dependency-graph edges below this are dropped
image_size = (200,500)
# Command line: <model_dir> <sequence_dir>; no argument validation is done,
# so a missing argument raises IndexError here.
model_dir = sys.argv[1]
sequence_dir = sys.argv[2]
dependency_dir = os.path.join(model_dir, 'dependencies')
# Sequence files are named '<fragment>trimRM.fa'.
sequence_filename_fmt = '%strimRM.fa'
min_num_sites = 20          # skip models with fewer detected sites
def pssms():
    """Yield (fragment, seed) pairs naming every fitted model to analyse.

    Five fragment identifiers, ten seeds each; the seed is rendered as a
    zero-padded three-digit string ('000' .. '009').
    """
    for fragment in ('T99002', 'T99003', 'T99004', 'T99005', 'T99006'):
        for seed in range(10):
            yield fragment, '%03d' % seed
#def pssms():
# yield 'T99006', '009'
#pssms = [
# ('T99002', '000'),
# ('T99003', '000'),
# ('T99004', '000'),
# ('T99005', '000'),
# ('T99006', '000'),
#]
#model_dir = os.path.join('c:\\', 'Johns', 'Writing', 'GappedPssms', 'Single-Gap', 'results-2')
#model_dir = '/home/reid/Analysis/GappedPssms/apr-2009/single-gap'
#sequence_dir = '/home/reid/Data/GappedPssms/apr-2009/'
# Simulated Fisher-test p-values accumulated over every model analysed.
fisher_p_values = list()
for fragment, pssm in pssms():
    # Locate the sequence file and the fitted model for this fragment/seed.
    sequence_file = os.path.join(sequence_dir, sequence_filename_fmt % fragment)
    model_file = os.path.join(model_dir, '%s-%s.pssm' % (fragment, pssm))
    logging.info('Loading sequences: %s', sequence_file)
    sequences = list(sequences_from_fasta(sequence_file))
    numpy_seqs = map(seq_to_numpy, sequences)
    logging.info('Loaded %d sequences', len(sequences))
    logging.info('Parsing PSSMs: %s', model_file)
    # NOTE(review): this rebinds the module-level name `pssms` (the generator
    # function above).  The outer loop's iterator was created before the
    # rebinding so iteration continues correctly, but the function itself is
    # no longer reachable afterwards.  The open() handle is also never closed.
    pssms = list(parse_models(open(model_file)))
    logging.info('Building models')
    models = [
        build_hmm_from_semi_parsed(parsed, p_binding_site=p_binding_site)
        for parsed in pssms
    ]
    def nucleotide_dist():
        # Default per-feature base distribution: uniform pseudo-counts.
        return numpy.zeros(4) + .25
    base_dists = DictOf(nucleotide_dist)
    min_site_length = 20
    logging.info('Analysing sequences')
    for hmm, traits in models:
        # Collect every sufficiently long binding site found by Viterbi.
        sites = []
        for sequence in numpy_seqs:
            # analyse the sequence for its most likely state sequence
            LL, states = hmm.viterbi(sequence)
            # for each site
            for site_seq, site_states in sites_from_states(states, sequence, traits.background_states):
                # is it long enough?
                if len(site_seq) > min_site_length:
                    # store uncomplemented version of site
                    sites.append(uncomplement_site(site_seq, site_states, traits))
        logging.info('Found %d sites', len(sites))
        if len(sites) < min_num_sites:
            logging.info('Not enough sites')
            continue
        # work out which states are in the sites
        states_in_sites = set()
        gap_state = None
        for site_seq, site_states in sites:
            states_in_sites.update(site_states)
        # find the gap base, it is the one with an even state index
        # NOTE(review): assumes exactly one even-indexed (gap) state occurs;
        # if none is found, gap_state stays None and the %d log line below
        # would raise.  TODO confirm this invariant of the model.
        for state in states_in_sites:
            if 0 == state % 2:
                assert None == gap_state
                gap_state = state
        # Map each observed state to a dense feature index (sorted order).
        states_in_sites = list(states_in_sites)
        states_in_sites.sort()
        state_to_feature = dict((s, f) for f, s in enumerate(states_in_sites))
        num_features = len(state_to_feature)
        logging.info('Gap state=%d; feature index=%d', gap_state, state_to_feature[gap_state])
        # transform sites into format suitable for Chow-Liu analysis
        # (one row per site; value 4 marks a feature not present in the site)
        sites_as_features = []
        for site_seq, site_states in sites:
            site_features = [4] * num_features
            for base, state in zip(site_seq, site_states):
                site_features[state_to_feature[state]] = base
                if 4 != base:
                    base_dists[state_to_feature[state]][base] += 1
            sites_as_features.append(site_features)
            #print site_seq
            #print site_states
            #print 'Done sequence'
        # Pairwise mutual-information / dependency analysis over the features
        # (alphabet size 5: four bases plus the "absent" symbol).
        dependencies = DependencyAnalyser(
            sites_as_features,
            num_features,
            5,
            pseudo_count=0.0,
            min_mutual_info=min_mutual_info
        )
        dependencies.remove_edges_below(ignore_edges_below)
        dependencies.highlight_tree_edges()
        dependency_graph_name = os.path.join(dependency_dir, 'dependencies-%s-%s' % (fragment, pssm))
        logging.info('Saving dependency graph to %s', dependency_graph_name)
        dependencies.write_graph(dependency_graph_name)
        # Fisher-test every count table that involves the gap feature.
        gap_feature = state_to_feature[gap_state]
        for (i1, i2), counts in dependencies.counts.iteritems():
            if gap_feature in (i1, i2):
                if i1 == i2:
                    continue
                elif gap_feature == i1:
                    # Drop the "absent" column/row for the non-gap feature.
                    counts = counts[:,:4]
                elif gap_feature == i2:
                    counts = counts[:4,:]
                # Remove empty rows/columns before testing.
                counts = counts.take(numpy.where(counts.sum(axis=1) > 0)[0], axis=0)
                counts = counts.take(numpy.where(counts.sum(axis=0) > 0)[0], axis=1)
                if counts.shape[0] > 1 and counts.shape[1] > 1:
                    fisher_test_results = fisher_test(counts)
                    fisher_p_value = fisher_test_results[0][0]
                    fisher_p_values.append(fisher_p_value)
                    if fisher_p_value < 1e-4:
                        logging.info('%s-%s: Fisher test: %2d %2d %e', fragment, pssm, i1, i2, fisher_p_value)
        # Render a conditional logo for the strongest dependency and for any
        # sufficiently informative pair involving the gap feature.
        most_dependent_pair = dependencies.strongest_dependency()[0]
        #most_dependent_pair = None
        gap_feature = state_to_feature[gap_state]
        for pair, joint, mi in dependencies.mutual_infos:
            if mi > min_mutual_info and (pair == most_dependent_pair or gap_feature in pair):
                logging.info('%d-%d: mutual info=%.3f', pair[0], pair[1], mi)
                # The gap feature's axis gets a fifth symbol (the gap itself).
                cond_logo = conditional_logo_2(joint, X=4+int(pair[0]==gap_feature), Y=4+int(pair[1]==gap_feature))
                image_filename = os.path.join(dependency_dir, 'dependencies-%s-%s-%02dx%02d.png' % (fragment, pssm, pair[1], pair[0]))
                cond_logo.savefig(image_filename, dpi=cond_logo.dpi)
|
{
"content_hash": "c0d180bbb0441715435a5861a77b3136",
"timestamp": "",
"source": "github",
"line_count": 361,
"max_line_length": 134,
"avg_line_length": 33.24930747922438,
"alnum_prop": 0.5874364742147796,
"repo_name": "JohnReid/biopsy",
"id": "a18d7ebf4c542dc1f64a3a8d6e9ad182dce7c75e",
"size": "12040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python/gapped_pssms/analyse_position_dependencies.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "2639"
},
{
"name": "C",
"bytes": "392541"
},
{
"name": "C++",
"bytes": "3946426"
},
{
"name": "Gnuplot",
"bytes": "42000"
},
{
"name": "Python",
"bytes": "976684"
},
{
"name": "R",
"bytes": "2714"
},
{
"name": "Shell",
"bytes": "3356"
},
{
"name": "TeX",
"bytes": "4212"
}
],
"symlink_target": ""
}
|
import datetime
from argparse import ArgumentParser
from typing import Any, List
import pytz
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils.timezone import now as timezone_now
from zerver.models import Message, Realm, Recipient, Stream, \
Subscription, UserActivity, UserMessage, UserProfile, get_realm
MOBILE_CLIENT_LIST = ["Android", "ios"]
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]
human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)
class Command(BaseCommand):
    """Management command printing per-realm activity statistics to stdout.

    NOTE: the module-level queryset ``human_messages`` (messages sent by
    human clients) and the method ``human_messages`` below share a name;
    inside method bodies the bare name resolves to the module-level
    queryset, while ``self.human_messages(...)`` calls the method.
    """

    help = "Generate statistics on realm activity."

    def add_arguments(self, parser: ArgumentParser) -> None:
        # Zero or more realm string_ids; empty means "all realms".
        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
                            help="realm to generate statistics for")

    def active_users(self, realm: Realm) -> List[UserProfile]:
        # Has been active (on the website, for now) in the last 7 days.
        activity_cutoff = timezone_now() - datetime.timedelta(days=7)
        return [activity.user_profile for activity in (
            UserActivity.objects.filter(user_profile__realm=realm,
                                        user_profile__is_active=True,
                                        last_visit__gt=activity_cutoff,
                                        query="/json/users/me/pointer",
                                        client__name="website"))]

    def messages_sent_by(self, user: UserProfile, days_ago: int) -> int:
        # Human-client messages sent by one user in the window.
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender=user, pub_date__gt=sent_time_cutoff).count()

    def total_messages(self, realm: Realm, days_ago: int) -> int:
        # All messages in the realm, regardless of sending client.
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return Message.objects.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()

    def human_messages(self, realm: Realm, days_ago: int) -> int:
        # Messages sent via human clients (website/mobile) in the realm.
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()

    def api_messages(self, realm: Realm, days_ago: int) -> int:
        # Everything that is not from a human client counts as API traffic.
        return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))

    def stream_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff,
                                     recipient__type=Recipient.STREAM).count()

    def private_messages(self, realm: Realm, days_ago: int) -> int:
        # One-on-one messages: exclude streams and group (huddle) messages.
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
            recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()

    def group_private_messages(self, realm: Realm, days_ago: int) -> int:
        # Group messages: exclude streams and one-on-one (personal) messages.
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
            recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()

    def report_percentage(self, numerator: float, denominator: float, text: str) -> None:
        # Guard against a zero denominator (e.g. a realm with no messages).
        if not denominator:
            fraction = 0.0
        else:
            fraction = numerator / float(denominator)
        print("%.2f%% of" % (fraction * 100,), text)

    def handle(self, *args: Any, **options: Any) -> None:
        if options['realms']:
            try:
                realms = [get_realm(string_id) for string_id in options['realms']]
            except Realm.DoesNotExist as e:
                print(e)
                exit(1)
        else:
            realms = Realm.objects.all()
        for realm in realms:
            print(realm.string_id)
            user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
            active_users = self.active_users(realm)
            num_active = len(active_users)
            print("%d active users (%d total)" % (num_active, len(user_profiles)))
            # Streams with at least one active subscription; the raw SQL join
            # restricts to stream recipients (zerver_recipient.type = 2).
            streams = Stream.objects.filter(realm=realm).extra(
                tables=['zerver_subscription', 'zerver_recipient'],
                where=['zerver_subscription.recipient_id = zerver_recipient.id',
                       'zerver_recipient.type = 2',
                       'zerver_recipient.type_id = zerver_stream.id',
                       'zerver_subscription.active = true']).annotate(count=Count("name"))
            print("%d streams" % (streams.count(),))
            for days_ago in (1, 7, 30):
                print("In last %d days, users sent:" % (days_ago,))
                # Per-sender message counts, printed in descending order.
                sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
                for quantity in sorted(sender_quantities, reverse=True):
                    print(quantity, end=' ')
                print("")
                print("%d stream messages" % (self.stream_messages(realm, days_ago),))
                print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),))
                print("%d messages sent via the API" % (self.api_messages(realm, days_ago),))
                print("%d group private messages" % (self.group_private_messages(realm, days_ago),))
            num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
            self.report_percentage(num_notifications_enabled, num_active,
                                   "active users have desktop notifications enabled")
            num_enter_sends = len([x for x in active_users if x.enter_sends])
            self.report_percentage(num_enter_sends, num_active,
                                   "active users have enter-sends")
            all_message_count = human_messages.filter(sender__realm=realm).count()
            multi_paragraph_message_count = human_messages.filter(
                sender__realm=realm, content__contains="\n\n").count()
            self.report_percentage(multi_paragraph_message_count, all_message_count,
                                   "all messages are multi-paragraph")
            # Starred messages
            starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
                                                  flags=UserMessage.flags.starred).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users have starred %d messages" % (
                len(starrers), sum([elt["count"] for elt in starrers])))
            active_user_subs = Subscription.objects.filter(
                user_profile__in=user_profiles, active=True)
            # Streams not in home view
            non_home_view = active_user_subs.filter(in_home_view=False).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users have %d streams not in home view" % (
                len(non_home_view), sum([elt["count"] for elt in non_home_view])))
            # Code block markup
            markup_messages = human_messages.filter(
                sender__realm=realm, content__contains="~~~").values(
                "sender").annotate(count=Count("sender"))
            print("%d users have used code block markup on %s messages" % (
                len(markup_messages), sum([elt["count"] for elt in markup_messages])))
            # Notifications for stream messages
            notifications = active_user_subs.filter(desktop_notifications=True).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users receive desktop notifications for %d streams" % (
                len(notifications), sum([elt["count"] for elt in notifications])))
            print("")
|
{
"content_hash": "09634bce6d70faa36e721caabeae50ad",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 104,
"avg_line_length": 52.111111111111114,
"alnum_prop": 0.6026589740373761,
"repo_name": "jackrzhang/zulip",
"id": "f41ee54debd6a28f4a176d5978e8e02dd1dc6ffe",
"size": "7973",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "analytics/management/commands/realm_stats.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "428151"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "660198"
},
{
"name": "JavaScript",
"bytes": "2910049"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "398747"
},
{
"name": "Puppet",
"bytes": "90611"
},
{
"name": "Python",
"bytes": "6065880"
},
{
"name": "Ruby",
"bytes": "249744"
},
{
"name": "Shell",
"bytes": "112340"
},
{
"name": "TypeScript",
"bytes": "9543"
}
],
"symlink_target": ""
}
|
"""
This bot appends some text to all unused images and notifies uploaders.
Parameters:
-limit Specify number of pages to work on with "-limit:n" where
n is the maximum number of articles to work on.
If not used, all pages are used.
-always Don't be asked every time.
This script is a :py:obj:`ConfigParserBot <pywikibot.bot.ConfigParserBot>`.
The following options can be set within a settings file which is scripts.ini
by default::
-nouserwarning Do not warn uploader about orphaned file.
-filetemplate: Use a custom template on unused file pages.
-usertemplate: Use a custom template to warn the uploader.
"""
#
# (C) Pywikibot team, 2007-2022
#
# Distributed under the terms of the MIT license.
#
import re
import pywikibot
from pywikibot import i18n, pagegenerators
from pywikibot.bot import (
AutomaticTWSummaryBot,
ConfigParserBot,
ExistingPageBot,
SingleSiteBot,
)
from pywikibot.exceptions import Error, NoPageError, TranslationError
from pywikibot.flow import Board
# Per-language wikitext template appended to an unused file's page.
template_to_the_image = {
    'meta': '{{Orphan file}}',
    'test': '{{Orphan file}}',
    'ar': '{{صورة يتيمة}}',
    'arz': '{{صوره يتيمه}}',
    'en': '{{Orphan image}}',
    'fa': '{{تصاویر بدون استفاده}}',
    'id': '{{Berkas yatim}}',
    'it': '{{immagine orfana}}',
    'mk': '{{Слика сираче}}',
    'te': '{{Orphan image}}',
    'ur': '{{غیر مستعمل تصاویر}}',
    'uz': '{{Yetim tasvir}}',
    'vec': '{{Imaxine orfana}}',
    'vi': '{{Hình mồ côi}}',
}
# Per-language template substituted onto the uploader's talk page;
# %(title)s is filled with the file's title before expansion.
template_to_the_user = {
    'test': '{{User:Happy5214/Unused file notice (user)|%(title)s}}',
    'ar': '{{subst:تنبيه صورة يتيمة|%(title)s}}',
    'arz': '{{subst:تنبيه صوره يتيمه|%(title)s}}',
    'fa': '{{subst:اخطار به کاربر برای تصاویر بدون استفاده|%(title)s}}',
    'ur': '{{subst:اطلاع برائے غیر مستعمل تصاویر|%(title)s}}',
}
class UnusedFilesBot(SingleSiteBot,
                     AutomaticTWSummaryBot,
                     ConfigParserBot,
                     ExistingPageBot):

    """Unused files bot.

    Tags each unused (orphaned) file page with a template and, unless
    ``nouserwarning`` is set, notifies the uploader on their talk page.

    .. versionchanged:: 7.0
       UnusedFilesBot is a ConfigParserBot
    """

    summary_key = 'unusedfiles-comment'
    update_options = {
        'nouserwarning': False,  # do not warn uploader
        'filetemplate': '',      # custom template for the file page
        'usertemplate': '',      # custom template for the uploader's talk page
    }

    def __init__(self, **kwargs) -> None:
        """Initializer.

        Resolves the file/user templates: fall back to the per-language
        defaults, and wrap a bare template name in '{{...}}' braces.
        Raises TranslationError when no usable template is available for
        the site.
        """
        super().__init__(**kwargs)
        # handle the custom templates
        if not self.opt.filetemplate:
            self.opt.filetemplate = i18n.translate(self.site,
                                                   template_to_the_image)
        elif not re.fullmatch('{{.+}}', self.opt.filetemplate):
            self.opt.filetemplate = '{{%s}}' % self.opt.filetemplate
        if not self.opt.usertemplate:
            self.opt.usertemplate = i18n.translate(self.site,
                                                   template_to_the_user)
        elif not re.fullmatch('{{.+}}', self.opt.usertemplate):
            self.opt.usertemplate = '{{%s}}' % self.opt.usertemplate
        if not (self.opt.filetemplate
                and (self.opt.usertemplate or self.opt.nouserwarning)):
            # if no templates are given
            raise TranslationError(
                'This script is not localized for {} site;\n'
                'try using -filetemplate:<template name>.'.format(self.site))

    def treat(self, image) -> None:
        """Process one image page."""
        # Use get_file_url() and file_is_shared() to confirm it is local media
        # rather than a local page with the same name as shared media.
        # NOTE(review): the "'http://' not in image.text" guard presumably
        # skips pages embedding external links — confirm the intent.
        if (image.get_file_url() and not image.file_is_shared()
                and 'http://' not in image.text):
            if self.opt.filetemplate in image.text:
                # Already tagged on a previous run.
                pywikibot.output('{} done already'
                                 .format(image.title(as_link=True)))
                return
            self.append_text(image, '\n\n' + self.opt.filetemplate)
            if self.opt.nouserwarning:
                return
            # Notify the original uploader on their talk page (or Flow board).
            uploader = image.oldest_file_info.user
            user = pywikibot.User(image.site, uploader)
            usertalkpage = user.getUserTalkPage()
            template2uploader = self.opt.usertemplate \
                % {'title': image.title()}
            msg2uploader = self.site.expand_text(template2uploader)
            if usertalkpage.is_flow_page():
                self.post_to_flow_board(usertalkpage, msg2uploader)
            else:
                # ' ~~~~' appends the bot's signature on save.
                self.append_text(usertalkpage, '\n\n' + msg2uploader + ' ~~~~')

    def append_text(self, page, apptext):
        """Append apptext to the page.

        Follows redirects; a missing talk page is created, any other
        missing page raises NoPageError.
        """
        if page.isRedirectPage():
            page = page.getRedirectTarget()
        if page.exists():
            text = page.text
        else:
            if page.isTalkPage():
                text = ''
            else:
                raise NoPageError(page)
        text += apptext
        self.current_page = page
        self.put_current(text)

    def post_to_flow_board(self, page, post) -> None:
        """Post message as a Flow topic.

        The first line of *post* (stripped of '=' heading markup) becomes
        the topic title; the remainder becomes the topic body.
        """
        board = Board(page)
        header, rest = post.split('\n', 1)
        title = header.strip('=')
        content = rest.lstrip()
        board.new_topic(title, content)
def main(*args: str) -> None:
    """Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    :param args: command line arguments
    """
    options = {}
    total = None
    for argument in pywikibot.handle_args(args):
        name, _, value = argument.partition(':')
        if name == '-limit':
            total = value
        elif name in ('-filetemplate', '-usertemplate'):
            # Strip the leading '-' to get the bot option key.
            options[name[1:]] = value
        else:
            # Any other flag (e.g. -always, -nouserwarning) is boolean.
            options[name[1:]] = True
    site = pywikibot.Site()
    generator = pagegenerators.PreloadingGenerator(site.unusedfiles(total=total))
    bot = UnusedFilesBot(site=site, generator=generator, **options)
    try:
        bot.run()
    except Error as e:
        pywikibot.bot.suggest_help(exception=e)


if __name__ == '__main__':
    main()
|
{
"content_hash": "26760e5774f954c0b3ff7e7e662d0c03",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 79,
"avg_line_length": 32.416666666666664,
"alnum_prop": 0.5721401028277635,
"repo_name": "wikimedia/pywikibot-core",
"id": "c61bee54e80f8d1530bf4896fb0b76e2f0eec46e",
"size": "6394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/unusedfiles.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "97"
},
{
"name": "HTML",
"bytes": "1365"
},
{
"name": "Python",
"bytes": "4504123"
}
],
"symlink_target": ""
}
|
import simple
class Simple2:
    """Locally defined class; unlike Simple3 it does not touch the imported module."""

    def __init__(self):
        # Instance attribute mirrors the pattern of simple.Simple.info.
        self.info = "SimpleClass2"
class Simple3(simple.Simple):
    """Subclass of the imported simple.Simple; delegates initialisation to it,
    so instances get the same attributes (compared via .info below)."""

    def __init__(self):
        # Explicit base-class call (old-style idiom) instead of super().
        simple.Simple.__init__(self)
# Module-level attribute of the imported module matches a local copy.
text = "text in simple"
assert simple.text == text
# An imported class and a local subclass initialise identically.
_s = simple.Simple()
_s3 = Simple3()
assert _s.info == _s3.info
# Modules that import each other still resolve correctly.
import recursive_import
_s = recursive_import.myClass()
assert str(_s) == "success!"
# 'from ... import ...' inside a package: both submodules see the same value.
import from_import_test.b
assert from_import_test.b.v == 1
import from_import_test.c
assert from_import_test.c.v == 1
# test of keyword "global" in functions of an imported module
import global_in_imported
assert global_in_imported.X == 15
# Importing a single name from a module and instantiating it.
from delegator import Delegator
delegate = Delegator([])
print('passed all tests')
|
{
"content_hash": "f059003acd9483da17eaf7c51de033eb",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 61,
"avg_line_length": 18.5,
"alnum_prop": 0.701280227596017,
"repo_name": "firmlyjin/brython",
"id": "09f1a58fe2fee4ddf9f38c4d8a598992d3bbeb33",
"size": "703",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "www/tests/test_import.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "15902"
},
{
"name": "Groff",
"bytes": "21080"
},
{
"name": "HTML",
"bytes": "4916595"
},
{
"name": "JavaScript",
"bytes": "1144557"
},
{
"name": "Makefile",
"bytes": "61"
},
{
"name": "Python",
"bytes": "13245454"
},
{
"name": "R",
"bytes": "2918"
},
{
"name": "Shell",
"bytes": "58"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
}
|
from flask import redirect, render_template, render_template_string, session
from flask import current_app, flash, redirect, render_template, request, url_for
from flask_user import current_user, login_required
from flask_login import login_user, logout_user
from app.pages.date import *
from app.pages.encryption import decode
from flask_user import signals
from flask_user.translations import gettext as _
from app.app_and_db import app, db
from app.users.forms import UserProfileForm
import threading
import stripe
#
# User Profile form
#
def confirm_email(token):
    """ Verify email confirmation token and activate the user account.

    NOTE(review): `datetime` and `_endpoint_url` are not visibly imported in
    this module — presumably provided by `from app.pages.date import *`
    and/or copied alongside from flask_user.views.  TODO confirm.
    """
    # Verify token
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter
    is_valid, has_expired, object_id = user_manager.verify_token(
        token,
        user_manager.confirm_email_expiration)
    if has_expired:
        flash(_('Your confirmation token has expired.'), 'error')
        return redirect(url_for('user.login'))
    if not is_valid:
        flash(_('Invalid confirmation token.'), 'error')
        return redirect(url_for('user.login'))
    # Confirm email by setting User.active=True and User.confirmed_at=utcnow()
    if db_adapter.UserEmailClass:
        # Multi-email configuration: the token identifies a UserEmail row.
        user_email = user_manager.get_user_email_by_id(object_id)
        if user_email:
            user_email.confirmed_at = datetime.utcnow()
            user = user_email.user
        else:
            user = None
    else:
        # Single-email configuration: the token identifies the User itself.
        user_email = None
        user = user_manager.get_user_by_id(object_id)
        if user:
            user.confirmed_at = datetime.utcnow()
    if user:
        user.set_active(True)
        db_adapter.commit()
    else:  # pragma: no cover
        flash(_('Invalid confirmation token.'), 'error')
        return redirect(url_for('user.login'))
    # Send email_confirmed signal
    signals.user_confirmed_email.send(current_app._get_current_object(), user=user)
    # Prepare one-time system message
    flash(_('Your email has been confirmed.'), 'success')
    # Auto-login after confirm or redirect to login page
    next = request.args.get('next', _endpoint_url(user_manager.after_confirm_endpoint))
    if user_manager.auto_login_after_confirm:
        return _do_login_user(user, next)  # auto-login
    else:
        return redirect(url_for('user.login')+'?next='+next)  # redirect to login page
@login_required
def change_password():
    """ Prompt for old password and new password and change the user's password."""
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter
    # Initialize form
    form = user_manager.change_password_form(request.form)
    form.next.data = request.args.get('next', _endpoint_url(user_manager.after_change_password_endpoint))  # Place ?next query param in next form field
    # Process valid POST
    if request.method=='POST' and form.validate():
        # Hash password
        hashed_password = user_manager.hash_password(form.new_password.data)
        # Change password
        user_manager.update_password(current_user, hashed_password)
        # Send 'password_changed' email
        # NOTE(review): `emails` is not among this module's visible imports
        # (presumably flask_user.emails) — this branch would raise NameError
        # if it is not imported elsewhere.  TODO confirm.
        if user_manager.enable_email and user_manager.send_password_changed_email:
            emails.send_password_changed_email(current_user)
        # Send password_changed signal
        signals.user_changed_password.send(current_app._get_current_object(), user=current_user)
        # Prepare one-time system message
        flash(_('Your password has been changed successfully.'), 'success')
        # Redirect to 'next' URL
        return redirect(form.next.data)
    # Process GET or invalid POST
    return render_template(user_manager.change_password_template, form=form)
@login_required
def change_username():
    """ Prompt for new username and old password and change the user's username."""
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter
    # Initialize form
    form = user_manager.change_username_form(request.form)
    form.next.data = request.args.get('next', _endpoint_url(user_manager.after_change_username_endpoint))  # Place ?next query param in next form field
    # Process valid POST
    if request.method=='POST' and form.validate():
        new_username = form.new_username.data
        # Change username
        # The username may live on a separate UserAuth object, depending on
        # the data model configuration.
        user_auth = current_user.user_auth if db_adapter.UserAuthClass and hasattr(current_user, 'user_auth') else current_user
        db_adapter.update_object(user_auth, username=new_username)
        db_adapter.commit()
        # Send 'username_changed' email
        # NOTE(review): `emails` is not among this module's visible imports —
        # TODO confirm it is available when this setting is enabled.
        if user_manager.enable_email and user_manager.send_username_changed_email:
            emails.send_username_changed_email(current_user)
        # Send username_changed signal
        signals.user_changed_username.send(current_app._get_current_object(), user=current_user)
        # Prepare one-time system message
        flash(_("Your username has been changed to '%(username)s'.", username=new_username), 'success')
        # Redirect to 'next' URL
        return redirect(form.next.data)
    # Process GET or invalid POST
    return render_template(user_manager.change_username_template, form=form)
@login_required
def email_action(id, action):
    """ Perform action 'action' on UserEmail object 'id'

    Supported actions: 'delete', 'make-primary', 'confirm'.
    NOTE(review): `unauthorized` and `_send_confirm_email` are not visibly
    imported/defined in this module (presumably from flask_user.views) —
    TODO confirm.
    """
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter
    # Retrieve UserEmail by id
    user_email = db_adapter.find_first_object(db_adapter.UserEmailClass, id=id)
    # Users may only change their own UserEmails
    if not user_email or user_email.user_id != int(current_user.get_id()):
        return unauthorized()
    if action=='delete':
        # Primary UserEmail can not be deleted
        if user_email.is_primary:
            return unauthorized()
        # Delete UserEmail
        db_adapter.delete_object(user_email)
        db_adapter.commit()
    elif action=='make-primary':
        # Disable previously primary emails
        user_emails = db_adapter.find_all_objects(db_adapter.UserEmailClass, user_id=int(current_user.get_id()))
        for ue in user_emails:
            if ue.is_primary:
                ue.is_primary = False
        # Enable current primary email
        user_email.is_primary = True
        # Commit
        db_adapter.commit()
    elif action=='confirm':
        _send_confirm_email(user_email.user, user_email)
    else:
        # Unknown action string.
        return unauthorized()
    return redirect(url_for('user.manage_emails'))
def forgot_password():
    """Prompt for email and send reset password email.

    Always flashes the same success message whether or not the email
    matched a user (as written, the message does not reveal account
    existence).
    """
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter
    # Initialize form
    form = user_manager.forgot_password_form(request.form)
    # Process valid POST
    if request.method=='POST' and form.validate():
        email = form.email.data
        # Find user by email
        user, user_email = user_manager.find_user_by_email(email)
        if user:
            # Generate reset password link
            token = user_manager.generate_token(int(user.get_id()))
            reset_password_link = url_for('user.reset_password', token=token, _external=True)
            # Send forgot password email
            # NOTE(review): `emails` is not among this module's visible
            # imports — TODO confirm.
            emails.send_forgot_password_email(user, user_email, reset_password_link)
            # Store token
            if hasattr(user, 'reset_password_token'):
                db_adapter.update_object(user, reset_password_token=token)
                db_adapter.commit()
            # Send forgot_password signal
            signals.user_forgot_password.send(current_app._get_current_object(), user=user)
        # Prepare one-time system message
        flash(_("A reset password email has been sent to '%(email)s'. Open that email and follow the instructions to reset your password.", email=email), 'success')
        # Redirect to the login page
        return redirect(_endpoint_url(user_manager.after_forgot_password_endpoint))
    # Process GET or invalid POST
    return render_template(user_manager.forgot_password_template, form=form)
@app.route('/user/profile', methods=['GET', 'POST'])
@login_required
def user_profile_page():
    """Display and process the logged-in user's profile form."""
    # Initialize form
    form = UserProfileForm(request.form, current_user)
    # Process valid POST
    if request.method=='POST' and form.validate():
        # Copy form fields to user_profile fields
        form.populate_obj(current_user)
        # Save user_profile
        db.session.commit()
        # Redirect to home page
        return redirect(url_for('home_page'))
    # Process GET or invalid POST
    return render_template('users/user_profile_page.html',form=form)
@app.route('/user/sign-in', methods=['GET', 'POST'])
def login():
    """ Prompt for username/email and password and sign the user in."""
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter
    next = request.args.get('next', _endpoint_url(user_manager.after_login_endpoint))
    reg_next = request.args.get('reg_next', _endpoint_url(user_manager.after_register_endpoint))
    # Immediately redirect already logged in users
    if current_user.is_authenticated and user_manager.auto_login_at_login:
        return redirect(next)
    # Initialize form
    login_form = user_manager.login_form(request.form)  # for login.html
    register_form = user_manager.register_form()  # for login_or_register.html
    if request.method!='POST':
        # Carry the redirect targets through the forms' hidden fields.
        login_form.next.data = register_form.next.data = next
        login_form.reg_next.data = register_form.reg_next.data = reg_next
    # Process valid POST
    if request.method=='POST' and login_form.validate():
        # Retrieve User
        user = None
        user_email = None
        if user_manager.enable_username:
            # Find user record by username
            user = user_manager.find_user_by_username(login_form.username.data)
            user_email = None
            # Find primary user_email record
            if user and db_adapter.UserEmailClass:
                user_email = db_adapter.find_first_object(db_adapter.UserEmailClass,
                                                          user_id=int(user.get_id()),
                                                          is_primary=True,
                                                          )
            # Find user record by email (with form.username)
            if not user and user_manager.enable_email:
                user, user_email = user_manager.find_user_by_email(login_form.username.data)
        else:
            # Find user by email (with form.email)
            user, user_email = user_manager.find_user_by_email(login_form.email.data)
        if user:
            # Log user in
            return _do_login_user(user, login_form.next.data, login_form.remember_me.data)
    # Process GET or invalid POST
    return render_template(user_manager.login_template,
                           form=login_form,
                           login_form=login_form,
                           register_form=register_form)
@app.route('/user/sign-out', methods=['GET', 'POST'])
def logout():
    """ Sign the user out."""
    user_manager = current_app.user_manager
    # Send user_logged_out signal
    signals.user_logged_out.send(current_app._get_current_object(), user=current_user)
    # Use Flask-Login to sign out user
    logout_user()
    # Prepare one-time system message
    flash(_('You have signed out successfully.'), 'success')
    # Redirect to logout_next endpoint or '/'
    next = request.args.get('next', _endpoint_url(user_manager.after_logout_endpoint))  # Get 'next' query param
    return redirect(next)
def _do_login_user(user, next, remember_me=False):
    """ Sign `user` in after validating account state.

    Rejects missing, disabled, and (when required) email-unconfirmed
    accounts. On success, signs the user in via Flask-Login and either
    redirects to `next` (when a Stripe API key is on file) or sends the
    user to the 'getstarted' page to enter one.
    """
    # User must have been authenticated
    if not user: return unauthenticated()

    # Check if user account has been disabled
    if not user.is_active():
        flash(_('Your account has not been activated. Please check your email.'), 'error')
        return redirect(url_for('user.login'))

    # Check if user has a confirmed email address (unless login without
    # confirmation is explicitly enabled)
    user_manager = current_app.user_manager
    if user_manager.enable_email and user_manager.enable_confirm_email \
            and not current_app.user_manager.enable_login_without_confirm_email \
            and not user.has_confirmed_email():
        url = url_for('user.resend_confirm_email')
        flash(_('Your email address has not yet been confirmed. <a href="%(url)s">Re-send confirmation email</a>.', url=url), 'error')
        return redirect(url_for('user.login'))

    # Use Flask-Login to sign in user
    #print('login_user: remember_me=', remember_me)
    login_user(user, remember=remember_me)

    # Send user_logged_in signal
    signals.user_logged_in.send(current_app._get_current_object(), user=user)

    # Prepare one-time system message
    #flash(_('You have signed in successfully.'), 'success')

    # App-specific: credentials == 1 appears to mean "Stripe API key on
    # file" — TODO confirm against the UserAuth model.
    if current_user.user_auth.credentials == 1:
        # Decrypt the stored key and cache it in the session for later requests
        stripe.api_key = decode(current_user.user_auth.api_key)
        session['api_key'] = stripe.api_key
        # Redirect to 'next' URL
        return redirect(next)
    else:
        flash(_('Please enter your Stripe API key'), 'error')
        return redirect(url_for('getstarted'))
def register():
    """ Display registration form and create new User.

    On a valid POST, distributes the form fields over the configured
    User / UserEmail / UserAuth model classes, persists them, sends the
    'registered' email (rolling back the new User if sending fails),
    emits the user_registered signal, and finally either redirects for
    email confirmation, auto-logs the user in, or redirects to the
    login page.

    Fix: removed a dangling bare expression `user_auth_fi` (a truncated
    identifier) that raised NameError on every valid registration POST.
    """
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter

    # Redirect targets: explicit query params win over configured endpoints
    next = request.args.get('next', _endpoint_url(user_manager.after_login_endpoint))
    reg_next = request.args.get('reg_next', _endpoint_url(user_manager.after_register_endpoint))

    # Initialize form
    login_form = user_manager.login_form()                      # for login_or_register.html
    register_form = user_manager.register_form(request.form)    # for register.html
    if request.method!='POST':
        # On GET, seed the hidden next/reg_next fields of both forms
        login_form.next.data = register_form.next.data = next
        login_form.reg_next.data = register_form.reg_next.data = reg_next

    # Process valid POST
    if request.method=='POST' and register_form.validate():
        # Create a User object using Form fields that have a corresponding User field
        User = db_adapter.UserClass
        user_class_fields = User.__dict__
        user_fields = {}

        # Create a UserEmail object using Form fields that have a corresponding UserEmail field
        if db_adapter.UserEmailClass:
            UserEmail = db_adapter.UserEmailClass
            user_email_class_fields = UserEmail.__dict__
            user_email_fields = {}

        # Create a UserAuth object using Form fields that have a corresponding UserAuth field
        if db_adapter.UserAuthClass:
            UserAuth = db_adapter.UserAuthClass
            user_auth_class_fields = UserAuth.__dict__
            user_auth_fields = {}

        # Enable the new account, using whichever flag attribute the model defines.
        # NOTE(review): this branch tests UserProfileClass but writes to
        # user_auth_fields — looks intentional for this app's model split,
        # but verify against the adapter's class layout.
        if db_adapter.UserProfileClass:
            if hasattr(db_adapter.UserProfileClass, 'active'):
                user_auth_fields['active'] = True
            elif hasattr(db_adapter.UserProfileClass, 'is_enabled'):
                user_auth_fields['is_enabled'] = True
            else:
                user_auth_fields['is_active'] = True
        else:
            if hasattr(db_adapter.UserClass, 'active'):
                user_fields['active'] = True
            elif hasattr(db_adapter.UserClass, 'is_enabled'):
                user_fields['is_enabled'] = True
            else:
                user_fields['is_active'] = True

        # For all form fields
        for field_name, field_value in register_form.data.items():
            # Hash password field
            if field_name=='password':
                hashed_password = user_manager.hash_password(field_value)
                if db_adapter.UserAuthClass:
                    user_auth_fields['password'] = hashed_password
                else:
                    user_fields['password'] = hashed_password
            # Store corresponding Form fields into the User object and/or UserProfile object
            else:
                if field_name in user_class_fields:
                    user_fields[field_name] = field_value
                if db_adapter.UserEmailClass:
                    if field_name in user_email_class_fields:
                        user_email_fields[field_name] = field_value
                if db_adapter.UserAuthClass:
                    if field_name in user_auth_class_fields:
                        user_auth_fields[field_name] = field_value

        # Add User record using named arguments 'user_fields'
        user = db_adapter.add_object(User, **user_fields)
        if db_adapter.UserProfileClass:
            user_profile = user

        # Add UserEmail record using named arguments 'user_email_fields'
        if db_adapter.UserEmailClass:
            user_email = db_adapter.add_object(UserEmail,
                    user=user,
                    is_primary=True,
                    **user_email_fields)
        else:
            user_email = None

        # Add UserAuth record using named arguments 'user_auth_fields'
        if db_adapter.UserAuthClass:
            user_auth = db_adapter.add_object(UserAuth, **user_auth_fields)
            if db_adapter.UserProfileClass:
                user = user_auth
            else:
                user.user_auth = user_auth

        db_adapter.commit()

        # Send 'registered' email and delete new User object if send fails
        if user_manager.send_registered_email:
            try:
                # Send 'registered' email
                _send_registered_email(user, user_email)
            except Exception as e:
                # delete new User object if send fails
                db_adapter.delete_object(user)
                db_adapter.commit()
                raise e

        # Send user_registered signal
        signals.user_registered.send(current_app._get_current_object(), user=user)

        # Redirect if USER_ENABLE_CONFIRM_EMAIL is set
        if user_manager.enable_confirm_email:
            next = request.args.get('next', _endpoint_url(user_manager.after_register_endpoint))
            return redirect(next)

        # Auto-login after register or redirect to login page
        # (removed a dead re-assignment of `next` that was never used below)
        if user_manager.auto_login_after_register:
            return _do_login_user(user, reg_next)                       # auto-login
        else:
            return redirect(url_for('user.login')+'?next='+reg_next)   # redirect to login page

    # Process GET or invalid POST
    return render_template(user_manager.register_template,
            form=register_form,
            login_form=login_form,
            register_form=register_form)
def resend_confirm_email():
    """Prompt for an email address and re-send the email confirmation email."""
    user_manager = current_app.user_manager

    # Bind the submitted data (if any) to the form
    form = user_manager.resend_confirm_email_form(request.form)

    # GET, or a POST that failed validation: just show the form
    if request.method != 'POST' or not form.validate():
        return render_template(user_manager.resend_confirm_email_template, form=form)

    # Look up the account; unknown addresses are silently skipped so the
    # endpoint does not leak which emails are registered.
    user, user_email = user_manager.find_user_by_email(form.email.data)
    if user:
        _send_confirm_email(user, user_email)

    # Redirect to the login page
    return redirect(_endpoint_url(user_manager.after_resend_confirm_email_endpoint))
def reset_password(token):
    """ Verify the password reset token, Prompt for new password, and set the user's password.

    `token` comes from the reset-password email link. Expired or invalid
    tokens flash an error and redirect to the login page.
    """
    # Verify token
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter
    is_valid, has_expired, user_id = user_manager.verify_token(
            token,
            user_manager.reset_password_expiration)
    if has_expired:
        flash(_('Your reset password token has expired.'), 'error')
        return redirect(url_for('user.login'))
    if not is_valid:
        flash(_('Your reset password token is invalid.'), 'error')
        return redirect(url_for('user.login'))

    user = user_manager.get_user_by_id(user_id)
    if user:
        # Avoid re-using old tokens: the token must match the one stored
        # on the user (when the model stores one at all).
        if hasattr(user, 'reset_password_token'):
            verified = user.reset_password_token == token
        else:
            verified = True
    # NOTE: `verified` is only bound when `user` is truthy; the short-circuit
    # below relies on `not user` being checked first.
    if not user or not verified:
        flash(_('Your reset password token is invalid.'), 'error')
        return redirect(_endpoint_url(user_manager.login_endpoint))

    # Initialize form
    form = user_manager.reset_password_form(request.form)

    # Process valid POST
    if request.method=='POST' and form.validate():
        # Invalidate the token by clearing the stored token
        if hasattr(user, 'reset_password_token'):
            db_adapter.update_object(user, reset_password_token='')

        # Change password: hash it and store it on UserAuth when that
        # model is in use, otherwise directly on the User.
        hashed_password = user_manager.hash_password(form.new_password.data)
        user_auth = user.user_auth if db_adapter.UserAuthClass and hasattr(user, 'user_auth') else user
        db_adapter.update_object(user_auth, password=hashed_password)
        db_adapter.commit()

        # Send 'password_changed' email
        if user_manager.enable_email and user_manager.send_password_changed_email:
            emails.send_password_changed_email(user)

        # Prepare one-time system message
        flash(_("Your password has been reset successfully. Please sign in with your new password"), 'success')

        # Auto-login after reset password or redirect to login page
        next = request.args.get('next', _endpoint_url(user_manager.after_reset_password_endpoint))
        if user_manager.auto_login_after_reset_password:
            return _do_login_user(user, next)                       # auto-login
        else:
            return redirect(url_for('user.login')+'?next='+next)    # redirect to login page

    # Process GET or invalid POST
    return render_template(user_manager.reset_password_template, form=form)
def unconfirmed():
    """Flash an 'email not confirmed' message and redirect to USER_UNCONFIRMED_EMAIL_ENDPOINT."""
    # Tell the user which URL required a confirmed email address
    requested = request.script_root + request.path
    flash(_("You must confirm your email to access '%(url)s'.", url=requested), 'error')
    # Send them to the configured unconfirmed-email endpoint
    return redirect(_endpoint_url(current_app.user_manager.unconfirmed_email_endpoint))
def unauthenticated():
    """Flash a 'sign in required' message and redirect to USER_UNAUTHENTICATED_ENDPOINT,
    carrying the originally requested URL in the ?next= query parameter."""
    full_url = request.url
    flash(_("You must be signed in to access '%(url)s'.", url=full_url), 'error')
    # URL-quote the fully qualified URL so it survives as a query parameter
    target = _endpoint_url(current_app.user_manager.unauthenticated_endpoint)
    return redirect('{0}?next={1}'.format(target, quote(full_url)))
def unauthorized():
    """Flash a 'permission denied' message and redirect to USER_UNAUTHORIZED_ENDPOINT."""
    # Name the URL the user was denied access to
    attempted = request.script_root + request.path
    flash(_("You do not have permission to access '%(url)s'.", url=attempted), 'error')
    # Send them to the configured unauthorized endpoint
    return redirect(_endpoint_url(current_app.user_manager.unauthorized_endpoint))
def _endpoint_url(endpoint):
url = '/'
if endpoint:
url = url_for(endpoint)
return url
|
{
"content_hash": "069f9593bbcfd1363e7a7e4dd59a5327",
"timestamp": "",
"source": "github",
"line_count": 603,
"max_line_length": 164,
"avg_line_length": 38.892205638474294,
"alnum_prop": 0.6412672693160498,
"repo_name": "rcharp/Simple",
"id": "27f46b85bd2197e0bdd0fa68b1b013595316a846",
"size": "23553",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/users/views.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "976329"
},
{
"name": "HTML",
"bytes": "2635185"
},
{
"name": "JavaScript",
"bytes": "118180"
},
{
"name": "Python",
"bytes": "87072"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.