hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7251f33ee91ecf48c4a7be3c6944bab432a6275 | 1,011 | py | Python | main_app/urls.py | m-code12/Rescue | 24ece6ac97aeb177435ec7cc3d822d17e75724c8 | [
"MIT"
] | null | null | null | main_app/urls.py | m-code12/Rescue | 24ece6ac97aeb177435ec7cc3d822d17e75724c8 | [
"MIT"
] | 1 | 2021-02-19T17:09:40.000Z | 2021-02-19T17:09:40.000Z | main_app/urls.py | m-code12/Rescue | 24ece6ac97aeb177435ec7cc3d822d17e75724c8 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
app_name = "main_app"
urlpatterns = [
path('', views.home, name="home"),
path('home/', views.home, name="home"),
path('register/', views.register, name="register"),
path('logout/', views.logout_request, name="logout"),
path('login/', views.login_request, name="login"),
path('emergency_contact/', views.emergency_contact, name="emergency_contact"),
path("create_contact/", views.create_contact , name="create_contact"),
path("update_contact/<str:pk>/", views.update_contact, name="update_contact"),
path("delete_contact/<str:pk>/", views.delete_contact, name="delete_contact"),
path("emergency/", views.emergency, name="emergency"),
path("helpline_numbers/", views.helpline_numbers, name="helpline_numbers"),
path("women_laws/", views.women_laws, name="women_laws"),
path('women_rights/', views.women_rights, name='women_rights'),
path("developers/", views.developers, name="developers")
] | 48.142857 | 82 | 0.691395 | from django.urls import path
from . import views
app_name = "main_app"
urlpatterns = [
path('', views.home, name="home"),
path('home/', views.home, name="home"),
path('register/', views.register, name="register"),
path('logout/', views.logout_request, name="logout"),
path('login/', views.login_request, name="login"),
path('emergency_contact/', views.emergency_contact, name="emergency_contact"),
path("create_contact/", views.create_contact , name="create_contact"),
path("update_contact/<str:pk>/", views.update_contact, name="update_contact"),
path("delete_contact/<str:pk>/", views.delete_contact, name="delete_contact"),
path("emergency/", views.emergency, name="emergency"),
path("helpline_numbers/", views.helpline_numbers, name="helpline_numbers"),
path("women_laws/", views.women_laws, name="women_laws"),
path('women_rights/', views.women_rights, name='women_rights'),
path("developers/", views.developers, name="developers")
] | true | true |
f7251f49ee1a7989325fce02781fdfdd20216e2b | 40,049 | py | Python | chain/core/resources.py | ielm/chain-api | 8fba4b8ebdedbe1de65fe2bde0e0a6f330177c91 | [
"MIT"
] | 23 | 2015-08-14T02:23:51.000Z | 2021-04-16T14:59:59.000Z | chain/core/resources.py | ResEnv/chain-api | 8fba4b8ebdedbe1de65fe2bde0e0a6f330177c91 | [
"MIT"
] | 27 | 2015-05-26T22:29:57.000Z | 2020-06-05T16:40:51.000Z | chain/core/resources.py | ielm/chain-api | 8fba4b8ebdedbe1de65fe2bde0e0a6f330177c91 | [
"MIT"
] | 10 | 2015-07-05T07:15:46.000Z | 2020-06-30T18:28:08.000Z | from chain.core.api import Resource, ResourceField, CollectionField, \
MetadataCollectionField
from chain.core.api import full_reverse, render_error
from chain.core.api import CHAIN_CURIES
from chain.core.api import BadRequestException, HTTP_STATUS_BAD_REQUEST
from chain.core.api import register_resource
from chain.core.models import Site, Device, ScalarSensor, \
PresenceSensor, PresenceData, Person, Metadata
from django.conf.urls import include, patterns, url
from django.utils import timezone
from datetime import timedelta, datetime
import calendar
from chain.localsettings import INFLUX_HOST, INFLUX_PORT, INFLUX_DATABASE, INFLUX_MEASUREMENT
from chain.influx_client import InfluxClient
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import cache_control
from django.utils.dateparse import parse_datetime
import json
# Module-level InfluxDB client shared by all resources for time-series
# reads/writes (scalar sensor data lives in Influx, not in the Django ORM).
influx_client = InfluxClient(INFLUX_HOST, INFLUX_PORT, INFLUX_DATABASE, INFLUX_MEASUREMENT)
class MetadataResource(Resource):
    '''Immutable key/value metadata attached to other resources.

    Each (key, timestamp) row is a revision; list queries surface only the
    newest value per key. Edits are rejected outright -- a "change" is a new
    row with a later timestamp.
    '''
    model = Metadata
    display_field = 'timestamp'
    resource_name = 'metadata'
    resource_type = 'metadata'
    required_fields = ['key', 'value']
    model_fields = ['timestamp', 'key', 'value']
    queryset = Metadata.objects
    def get_queryset(self):
        # Latest revision per key: sort key asc, timestamp desc, then
        # DISTINCT ON (key) keeps the first (newest) row of each key.
        # NOTE(review): distinct(*fields) requires PostgreSQL.
        queryset = self._queryset.filter(**self._filters).order_by('key', '-timestamp').distinct('key')
        return queryset[self._offset:self._offset + self._limit]
    def get_total_count(self):
        # Counts distinct keys (not rows); memoized on the instance.
        try:
            return self._total_count
        except AttributeError:
            pass
        qs = self._queryset.filter(**self._filters).order_by('key').distinct('key')
        self._total_count = qs.count()
        return self._total_count
    def serialize_list(self, embed, cache):
        '''Render the metadata collection as a single HAL resource with an
        inline `data` array of key/value pairs.'''
        if not embed:
            return super(MetadataResource, self).serialize_list(embed, cache)
        href = self.get_list_href()
        serialized_data = {
            '_links': {
                'self': {'href': href},
                'curies': CHAIN_CURIES,
                'createForm': {
                    'href': self.get_create_href(),
                    'title': 'Add Metadata'
                }
            },
            'totalCount': self.get_total_count()
        }
        objs = self.get_queryset()
        serialized_data['data'] = [{
            'key': obj.key,
            'value': obj.value}
            for obj in objs]
        serialized_data = self.add_page_links(serialized_data, href)
        return serialized_data
    def serialize_single(self, embed=True, cache=None, rels=True, *args, **kwargs):
        # Force edit=False so no edit form/link is rendered -- metadata rows
        # are immutable by design.
        return super(
            MetadataResource,
            self).serialize_single(
            embed,
            cache,
            rels,
            *args,
            **dict(kwargs, edit=False))
    @classmethod
    @csrf_exempt
    def edit_view(cls, request, id):
        # Explicitly reject edits (see class docstring).
        return render_error(HTTP_STATUS_BAD_REQUEST,
                            "Metadata are immutable",
                            request)
class SensorDataResource(Resource):
    '''Shared base for time-series data resources.

    Provides unix-timestamp formatting, previous/self/next paging links over
    a time window, and the default window size per aggregation level.
    '''
    def __init__(self, *args, **kwargs):
        super(SensorDataResource, self).__init__(*args, **kwargs)
    def format_time(self, timestamp):
        # datetime -> unix seconds (timegm treats the tuple as UTC).
        return calendar.timegm(timestamp.timetuple())
    def add_page_links(self, data, href, page_start, page_end):
        '''Attach previous/self/next links covering equal-width windows
        adjacent to [page_start, page_end).'''
        span = page_end - page_start
        def window(start, end):
            return self.update_href(href,
                                    timestamp__gte=self.format_time(start),
                                    timestamp__lt=self.format_time(end))
        links = data['_links']
        links['previous'] = {
            'href': window(page_start - span, page_start),
            'title': '%s to %s' % (page_start - span, page_start),
        }
        links['self'] = {'href': window(page_start, page_end)}
        links['next'] = {
            'href': window(page_end, page_end + span),
            'title': '%s to %s' % (page_end, page_end + span),
        }
        return data
    # shoot to return about 500 values per page
    def default_timespan(self):
        agg = self._filters.get('aggtime', None)
        if agg is None:
            # raw (unaggregated) data defaults to a 6 hour window
            return timedelta(hours=6)
        windows = {
            '1h': timedelta(hours=500),
            '1d': timedelta(days=500),
            '1w': timedelta(weeks=500),
        }
        try:
            return windows[agg]
        except KeyError:
            raise BadRequestException('Invalid argument for aggtime. Must be 1h, 1d, or 1w')
class ScalarSensorDataResource(SensorDataResource):
    '''A single scalar (float) sensor reading, persisted to InfluxDB.

    A "list" of these is rendered as one resource containing an array of
    timestamp/value pairs over a time window, with paging links.
    '''
    display_field = 'timestamp'
    resource_name = 'scalar_data'
    resource_type = 'scalar_data'
    model_fields = ['timestamp', 'value']
    schema_type = {'timestamp': ('string', 'date-time'),
                   'value': ('number', None)}
    required_fields = ['value']
    def __init__(self, *args, **kwargs):
        super(ScalarSensorDataResource, self).__init__(*args, **kwargs)
        if self._state == 'data':
            # deserialize incoming data into attributes
            self.sensor_id = self._filters.get('sensor_id')
            self.value = self.sanitize_field_value('value', self._data.get('value'))
            self.timestamp = self.sanitize_field_value('timestamp', self._data.get('timestamp'))
            # add ids up the hierarchy so Influx rows are fully tagged
            sensor = ScalarSensor.objects.select_related('device', 'metric').get(id=self.sensor_id)
            self.device_id = sensor.device.id
            self.metric = sensor.metric
            self.site_id = sensor.device.site_id
            # treat sensor data like an object from here on
            self._state = 'object'
        if 'queryset' in kwargs:
            # we want to default to the last page, not the first page
            pass
    def serialize_single(self, embed=True, cache=None, rels=True):
        # Flat dict of the model fields only; no HAL links for a data point.
        data = {}
        for field_name in self.model_fields:
            data[field_name] = self.serialize_field(getattr(self, field_name))
        return data
    @classmethod
    def sanitize_field_value(cls, field_name, value):
        '''Coerce an incoming field to its canonical type: 'value' to float,
        'timestamp' to an aware datetime (defaults to now when omitted).
        Returns None for unknown field names.'''
        if field_name == 'value':
            return float(value)
        if field_name == 'timestamp':
            # Fixed: identity comparison with None; also removed an unused
            # in-function `from django.db import models` import.
            if value is None:
                return timezone.now()
            timestamp = parse_datetime(value)
            if timezone.is_aware(timestamp):
                return timestamp
            # naive timestamps are interpreted in the server's timezone
            return timezone.make_aware(timestamp, timezone.get_current_timezone())
    # we store the metric as a tag in Influx for convenience of querying
    # for clients that are using influx directly. It's not a real field that's
    # handled by Chain
    def save(self):
        response = influx_client.post_data(self.site_id, self.device_id, self.sensor_id,
                                           self.metric, self.value, self.timestamp)
        return response
    def serialize_list(self, embed, cache):
        '''a "list" of SensorData resources is actually represented
        as a single resource with a list of data points'''
        if not embed:
            return super(
                ScalarSensorDataResource,
                self).serialize_list(
                embed,
                cache)
        href = self.get_list_href()
        serialized_data = {
            '_links': {
                'curies': CHAIN_CURIES,
                'createForm': {
                    'href': self.get_create_href(),
                    'title': 'Add Data'
                }
            },
            'dataType': 'float'
        }
        request_time = timezone.now()
        # if the time filters aren't given then use the most recent timespan,
        # if they are given, then we need to convert them from unix time to
        # aware UTC datetimes for the queryset filter
        if 'timestamp__gte' in self._filters:
            try:
                page_start = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__gte'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for lower bound of date range.")
        else:
            page_start = request_time - self.default_timespan()
        if 'timestamp__lt' in self._filters:
            try:
                page_end = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__lt'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for upper bound of date range.")
        else:
            page_end = request_time
        self._filters['timestamp__gte'] = page_start
        self._filters['timestamp__lt'] = page_end
        objs = influx_client.get_sensor_data(self._filters)
        serialized_data = self.add_page_links(serialized_data, href,
                                              page_start, page_end)
        serialized_data['data'] = [{
            'value': obj['value'],
            'timestamp': obj['time']}
            for obj in objs]
        return serialized_data
    def get_cache_key(self):
        return self.sensor_id, self.timestamp
    def serialize_stream(self):
        '''Serialize this resource for a stream'''
        data = self.serialize_single(rels=False)
        data['_links'] = {
            'ch:sensor': {'href': full_reverse(
                'scalar_sensors-single', self._request,
                args=(self._filters['sensor_id'],))}
        }
        return data
    def get_single_href(self):
        return full_reverse(self.resource_name + '-single',
                            self._request, args=(self.sensor_id, self.timestamp))
    def get_tags(self):
        '''Cache-invalidation tags covering the full sensor hierarchy.'''
        if not self.sensor_id:
            raise ValueError(
                'Tried to called get_tags on a resource without an id')
        db_sensor = ScalarSensor.objects.select_related('device').get(
            id=self.sensor_id)
        return ['sensor-%d' % db_sensor.id,
                'device-%d' % db_sensor.device_id,
                'site-%d' % db_sensor.device.site_id]
    @classmethod
    def get_field_schema_type(cls, field_name):
        if field_name in cls.model_fields:
            return cls.schema_type[field_name]
        else:
            raise NotImplementedError(
                "tried to look up field %s but didn't know where" % field_name)
    @classmethod
    def model_has_field(cls, field_name):
        # idiomatic membership test (was an explicit if/return True/False)
        return field_name in cls.model_fields
class AggregateScalarSensorDataResource(SensorDataResource):
    '''Aggregated (max/min/mean/count) scalar sensor data from InfluxDB,
    bucketed by the `aggtime` query parameter (1h, 1d, or 1w).'''
    resource_name = 'aggregate_data'
    resource_type = 'aggregate_data'
    model_fields = ['timestamp', 'max', 'min', 'mean', 'count']
    def __init__(self, *args, **kwargs):
        super(AggregateScalarSensorDataResource, self).__init__(*args, **kwargs)
    def get_list_href(self, embed=False):
        # Advertise the aggtime URI-template parameter on the link form.
        href = super(AggregateScalarSensorDataResource, self).get_list_href()
        if not embed:
            href += '{&aggtime}'
        return href
    def serialize_list(self, embed, cache):
        '''Render the aggregate series as a single resource with a `data`
        array of per-bucket statistics. Requires the aggtime filter.'''
        if not embed:
            return super(
                AggregateScalarSensorDataResource,
                self).serialize_list(
                embed,
                cache)
        if 'aggtime' not in self._filters:
            # Fixed typo in the error message ("arguement" -> "argument")
            raise BadRequestException(
                "Missing aggtime argument")
        href = self.get_list_href(True)
        serialized_data = {
            '_links': {
                'curies': CHAIN_CURIES
            },
            'dataType': 'float'
        }
        request_time = timezone.now()
        # Convert unix-time bounds to aware UTC datetimes, or default to the
        # most recent window for the requested aggregation level.
        if 'timestamp__gte' in self._filters:
            try:
                page_start = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__gte'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for lower bound of date range.")
        else:
            page_start = request_time - self.default_timespan()
        if 'timestamp__lt' in self._filters:
            try:
                page_end = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__lt'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for upper bound of date range.")
        else:
            page_end = request_time
        self._filters['timestamp__gte'] = page_start
        self._filters['timestamp__lt'] = page_end
        objs = influx_client.get_sensor_data(self._filters)
        serialized_data = self.add_page_links(serialized_data, href,
                                              page_start, page_end)
        serialized_data['data'] = [{
            'max': obj['max'],
            'min': obj['min'],
            'mean': obj['mean'],
            'count': obj['count'],
            'timestamp': obj['time']}
            for obj in objs]
        return serialized_data
    @classmethod
    def urls(cls):
        # Aggregate data is read-only: expose only the list view.
        base_name = cls.resource_name
        return patterns('',
                        url(r'^$',
                            cls.list_view, name=base_name + '-list'))
class ScalarSensorResource(Resource):
    '''A sensor producing scalar (float) readings. Embeds its latest value
    (fetched from InfluxDB) when serialized with embed=True.'''
    model = ScalarSensor
    display_field = 'metric'
    resource_name = 'scalar_sensors'
    resource_type = 'scalar_sensor'
    required_fields = ['metric', 'unit']
    model_fields = ['active']
    # for now, name is hardcoded as the only attribute of metric and unit
    stub_fields = {'metric': 'name', 'unit': 'name'}
    queryset = ScalarSensor.objects
    related_fields = {
        'ch:dataHistory': CollectionField(ScalarSensorDataResource,
                                          reverse_name='sensor'),
        'ch:aggregateData': CollectionField(AggregateScalarSensorDataResource,
                                            reverse_name='sensor'),
        'ch:device': ResourceField('chain.core.resources.DeviceResource',
                                   'device'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    def serialize_single(self, embed, cache, *args, **kwargs):
        '''Serialize the sensor; when embedded, attach the most recent
        value/updated pair unless the caller passes include_data=False.'''
        data = super(
            ScalarSensorResource,
            self).serialize_single(
            embed,
            cache,
            *args,
            **kwargs)
        data['sensor-type'] = "scalar"
        if embed:
            data['dataType'] = 'float'
            # this is hammering the influx server, we should switch it
            # over to doing a single bulk query. For now disabling the
            # data to get things up and running
            if not kwargs.get('include_data', True):
                return data
            else:
                # one Influx round-trip per sensor -- see note above
                last_data = influx_client.get_last_sensor_data(self._obj.id)
                if last_data:
                    # column name returned by last() selector is last
                    data['value'] = last_data[0]['last']
                    data['updated'] = last_data[0]['time']
        return data
    def get_tags(self):
        # Cache-invalidation tags up the sensor->device->site hierarchy.
        return ['sensor-%s' % self._obj.id,
                'scalar_sensor-%s' % self._obj.id,
                'device-%s' % self._obj.device_id,
                'site-%s' % self._obj.device.site_id]
class PresenceDataResource(SensorDataResource):
    '''A presence event: a person arriving at or leaving a presence sensor.
    Stored in the relational DB (unlike scalar data, which lives in Influx).
    '''
    model = PresenceData
    display_field = 'timestamp'
    resource_name = 'presencedata'
    resource_type = 'presencedata'
    model_fields = ['timestamp', 'present', 'person', 'sensor']
    required_fields = ['person', 'sensor', 'present']
    queryset = PresenceData.objects
    def __init__(self, *args, **kwargs):
        super(PresenceDataResource, self).__init__(*args, **kwargs)
        if 'queryset' in kwargs:
            # we want to default to the last page, not the first page
            pass
    def serialize_single(self, embed, cache):
        # Replace the raw person/sensor fields with titled links.
        serialized_data = super(
            PresenceDataResource,
            self).serialize_single(
            embed,
            cache)
        if 'person' in serialized_data:
            del serialized_data['person']
        if 'sensor' in serialized_data:
            del serialized_data['sensor']
        if '_links' not in serialized_data:
            serialized_data['_links'] = {}
        serialized_data['_links'].update(self.get_additional_links())
        return serialized_data
    def get_additional_links(self):
        '''Links to the person and the sensor involved in this event.'''
        return {
            'person': {
                'href': self.get_person_url(
                    self._obj.person), 'title': "%s, %s" %
                (self._obj.person.last_name, self._obj.person.first_name)}, 'sensor': {
                'href': self.get_sensor_url(
                    self._obj.sensor), 'title': "%s->%s" %
                (self._obj.sensor.device.name, self._obj.sensor.metric)}}
    def serialize_list(self, embed, cache):
        '''a "list" of SensorData resources is actually represented
        as a single resource with a list of data points'''
        if not embed:
            return super(
                PresenceDataResource,
                self).serialize_list(
                embed,
                cache)
        href = self.get_list_href()
        items = []
        serialized_data = {
            '_links': {
                'curies': CHAIN_CURIES,
                'createForm': {
                    'href': self.get_create_href(),
                    'title': 'Add Data'
                },
                'items': items
            },
            'dataType': 'presence'
        }
        request_time = timezone.now()
        # if the time filters aren't given then use the most recent timespan,
        # if they are given, then we need to convert them from unix time to use
        # in the queryset filter.
        # Fixed: utcfromtimestamp returns a *naive* datetime; make it aware
        # UTC (as ScalarSensorDataResource does) so the ORM filter doesn't
        # misinterpret it in the server's local timezone.
        if 'timestamp__gte' in self._filters:
            try:
                page_start = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__gte'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for lower bound of date range.")
        else:
            page_start = request_time - self.default_timespan()
        if 'timestamp__lt' in self._filters:
            try:
                page_end = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__lt'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for upper bound of date range.")
        else:
            page_end = request_time
        self._filters['timestamp__gte'] = page_start
        self._filters['timestamp__lt'] = page_end
        objs = self._queryset.filter(**self._filters).order_by('timestamp')
        serialized_data = self.add_page_links(serialized_data, href,
                                              page_start, page_end)
        # Make links:
        for obj in objs:
            presence_data_resource = PresenceDataResource(
                obj=obj,
                request=self._request)
            items.append(
                {
                    'href': presence_data_resource.get_single_href(),
                    'title': "%s %s %s at time %s" %
                    (obj.person.last_name,
                     "at" if obj.present else "left",
                     obj.sensor.device,
                     obj.timestamp.isoformat())})
        return serialized_data
    def get_person_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the person's ID
            return obj.id
        person_resource = PersonResource(obj=obj, request=self._request)
        return person_resource.get_single_href()
    def get_sensor_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the sensor's ID
            return obj.id
        psensor_resource = PresenceSensorResource(
            obj=obj,
            request=self._request)
        return psensor_resource.get_single_href()
    def serialize_stream(self):
        '''Serialize this resource for a stream'''
        data = self.serialize_single(False, None)  # (rels=False)
        # TODO: Make useful
        data['_links'] = {
            'href': self.get_single_href(),
            # 'person':
        }
        data['_links'].update(self.get_additional_links())
        return data
    def get_tags(self):
        '''Cache-invalidation tags across person/sensor/device/site.'''
        if not self._obj:
            raise ValueError(
                'Tried to called get_tags on a resource without an object')
        db_sensor = PresenceSensor.objects.select_related('device').get(
            id=self._obj.sensor_id)
        return ['person-%d' % self._obj.person_id,
                'sensor-%d' % db_sensor.id,
                'device-%d' % db_sensor.device_id,
                'site-%d' % db_sensor.device.site_id]
    def get_filled_schema(self):
        schema = super(PresenceDataResource, self).get_filled_schema()
        # we need to replace the sensor and/or person links with just
        # the URL instead of the full object
        props = schema['properties']
        if 'person' in props:
            person_default = props['person']['default']
            props['person']['default'] = self.get_person_url(person_default)
        if 'sensor' in props:
            sensor_default = props['sensor']['default']
            props['sensor']['default'] = self.get_sensor_url(sensor_default)
        return schema
class PresenceSensorResource(Resource):
    '''A sensor recording presence events (people arriving/leaving).
    Embeds a link to and a copy of the most recent visit when serialized.'''
    model = PresenceSensor
    display_field = 'metric'
    resource_name = 'presence_sensors'
    resource_type = 'presence_sensor'
    required_fields = ['metric']
    # for now, name is hardcoded as the only attribute of metric and unit
    stub_fields = {'metric': 'name'}
    queryset = PresenceSensor.objects
    related_fields = {
        'ch:dataHistory': CollectionField(PresenceDataResource,
                                          reverse_name='sensor'),
        'ch:device': ResourceField('chain.core.resources.DeviceResource',
                                   'device'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    def serialize_single(self, embed, cache, *args, **kwargs):
        data = super(
            PresenceSensorResource,
            self).serialize_single(
            embed,
            cache,
            *args,
            **kwargs)
        data['sensor-type'] = "presence"
        data['dataType'] = "presence"
        if embed:
            if '_embedded' not in data:
                data['_embedded'] = {}
            data['_embedded'].update(self.get_additional_embedded())
            if '_links' not in data:
                data['_links'] = {}
            data['_links'].update(self.get_additional_links())
        return data
    def get_additional_links(self):
        # Link to the most recent presence event on this sensor, if any.
        links = {}
        last_data = self._obj.presence_data.order_by(
            'timestamp').reverse()[:1]
        if last_data:
            links['last-visit'] = {
                'href': self.get_presense_data_url(
                    last_data[0]), 'title': "%s at %s" %
                (last_data[0].person, last_data[0].timestamp.isoformat())}
        return links
    def get_additional_embedded(self):
        # Embed the most recent presence event inline (no rels).
        embedded = {}
        last_data = self._obj.presence_data.order_by(
            'timestamp').reverse()[:1]
        if last_data:
            embedded['last-visit'] = PresenceDataResource(obj=last_data[0], request=self._request)\
                .serialize_single(False, {})
        return embedded
    def get_person_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the person's ID
            return obj.id
        person_resource = PersonResource(obj=obj, request=self._request)
        return person_resource.get_single_href()
    # NOTE: "presense" misspelling is long-standing; kept for consistency
    # with existing callers.
    def get_presense_data_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the data point's ID
            return obj.id
        pdata_resource = PresenceDataResource(obj=obj, request=self._request)
        return pdata_resource.get_single_href()
    def get_sensor_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the sensor's ID
            return obj.id
        psensor_resource = PresenceSensorResource(
            obj=obj,
            request=self._request)
        return psensor_resource.get_single_href()
    def get_tags(self):
        # Cache-invalidation tags. The 'presense_sensor' misspelling is a
        # runtime string other code may match on, so it is preserved.
        return ['sensor-%s' % self._obj.id,
                'presense_sensor-%s' % self._obj.id,
                'device-%s' % self._obj.device_id,
                'site-%s' % self._obj.device.site_id]
class PersonResource(Resource):
    '''A person tracked by presence sensors at a site.'''
    model = Person
    display_field = 'last_name'
    resource_name = 'people'
    resource_type = 'person'
    required_fields = ['first_name', 'last_name']
    model_fields = ['first_name', 'last_name', 'twitter_handle', 'rfid']
    related_fields = {
        'ch:presence-data': CollectionField(PresenceDataResource,
                                            reverse_name='person'),
        'ch:site': ResourceField('chain.core.resources.SiteResource', 'site'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    queryset = Person.objects
    def serialize_single(self, embed, cache, *args, **kwargs):
        data = super(
            PersonResource,
            self).serialize_single(
            embed,
            cache,
            *args,
            **kwargs)
        if embed:
            if '_embedded' not in data:
                data['_embedded'] = {}
            data['_embedded'].update(self.get_additional_embedded())
            if '_links' in data:
                data['_links'].update(self.get_additional_links())
        return data
    def get_presence_data(self):
        '''Return (at most) the single most recent presence event for this
        person. Fixed: previously ordered ascending, which returned the
        OLDEST event while being labeled "last-visit"; now newest-first,
        matching PresenceSensorResource.'''
        filters = {
            'person': self._obj
        }
        return PresenceData.objects.filter(
            **filters).order_by('timestamp').reverse()[:1]
    def get_additional_links(self):
        # Link to the most recent visit and the external picture URL.
        links = {}
        last_data = self.get_presence_data()
        if last_data:
            links['last-visit'] = {
                'href': self.get_presense_data_url(
                    last_data[0]),
                'title': "at %s->%s at time %s" %
                (last_data[0].sensor.device,
                 last_data[0].sensor.metric,
                 last_data[0].timestamp.isoformat())}
        if self._obj.picture_url:
            links['picture'] = {
                'href': self._obj.picture_url,
                'title': 'Picture URL (external)'
            }
        return links
    def get_additional_embedded(self):
        # Embed the most recent visit inline (no rels).
        embedded = {}
        last_data = self.get_presence_data()
        if last_data:
            embedded['last-visit'] = PresenceDataResource(obj=last_data[0], request=self._request)\
                .serialize_single(False, {})
        return embedded
    def get_presense_data_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the data point's ID
            return obj.id
        pdata_resource = PresenceDataResource(obj=obj, request=self._request)
        return pdata_resource.get_single_href()
    def get_tags(self):
        # sometimes the site_id field is unicode? weird
        return ['person-%d' % self._obj.id,
                'site-%s' % self._obj.site_id]
def json_merge(obj1, obj2):
    '''Merge two "JSON" style dictionary/list objects recursively.

    Designed for merging schemas from multiple sensor objects:
    - lists: elements of obj2 not already present in obj1 are appended,
      preserving order
    - dicts: merged key-wise, recursing on shared keys
    - anything else (or mismatched types): the version from obj1 wins

    Fixed: the original built `set(obj1)` for the membership test, which
    raised TypeError when the list contained unhashable elements (e.g. the
    dicts that appear inside JSON schemas). We now fall back to a linear
    membership test in that case; semantics for hashable elements are
    unchanged (membership is only checked against obj1, as before).
    '''
    if isinstance(obj1, list):
        # Merge array:
        merged = obj1[:]
        try:
            existing = set(obj1)
        except TypeError:
            # unhashable elements -- O(n) membership against obj1 instead
            existing = obj1
        for el in obj2:
            if el not in existing:
                merged.append(el)
        return merged
    elif isinstance(obj1, dict):
        # Merge object:
        new_obj = {}
        for key in obj1:
            if key in obj2:
                new_obj[key] = json_merge(obj1[key], obj2[key])
            else:
                new_obj[key] = obj1[key]
        for key in obj2:
            if key not in new_obj:
                new_obj[key] = obj2[key]
        return new_obj
    else:
        # Could not merge. Select the version from
        # the first object:
        return obj1
class MixedSensorResource(Resource):
    '''Polymorphic sensor collection: dispatches creation and listing across
    the registered sensor types (currently only 'scalar'; 'presence' is
    disabled). Schemas from all types are merged via json_merge.'''
    model = ScalarSensor
    display_field = 'metric'
    resource_name = 'sensors'
    resource_type = 'sensor'
    # for now, name is hardcoded as the only attribute of metric and unit
    stub_fields = {'metric': 'name'}
    queryset = ScalarSensor.objects
    # Registry of sensor-type name -> {model, resource} used for dispatch.
    available_sensor_types = {
        'scalar': {
            'model': ScalarSensor,
            'resource': ScalarSensorResource
        },
        # 'presence': {
        #     'model': PresenceSensor,
        #     'resource': PresenceSensorResource
        # }
    }
    related_fields = {
        'ch:device': ResourceField('chain.core.resources.DeviceResource',
                                   'device')
    }
    @classmethod
    def get_schema(cls, filters=None):
        '''Build a create-form schema: a sensor-type enum merged with the
        schema of every registered sensor type.'''
        schema = {
            'required': ['sensor-type'],
            'type': 'object',
            'properties': {
                'sensor-type': {
                    'type': 'string',
                    'title': 'sensor-type',
                    'enum': cls.available_sensor_types.keys()
                }
            },
            'title': 'Create Sensor'
        }
        for sensor_type in cls.available_sensor_types:
            sub_schema = cls.available_sensor_types[
                sensor_type]['resource'].get_schema(filters)
            schema = json_merge(schema, sub_schema)
        return schema
    @classmethod
    def create_list(cls, data, req):
        raise Exception("Not yet implemented.")
    @classmethod
    def create_single(cls, data, req):
        '''Dispatch creation to the resource class matching 'sensor-type'.'''
        if u'sensor-type' not in data:
            # raise Exception("'type' property not found")
            # For temporary back-compatability, assume it
            # is a ScalarSensor:
            return ScalarSensorResource.create_single(data, req)
        for sensor_type in cls.available_sensor_types:
            if data['sensor-type'] == sensor_type:
                del data['sensor-type']
                return cls.available_sensor_types[sensor_type][
                    'resource'].create_single(data, req)
        # TODO: Return 400 rather than raising an exception
        raise Exception("Unrecognized sensor type.")
    def serialize_single(self, embed, cache, *args, **kwargs):
        data = super(
            MixedSensorResource,
            self).serialize_single(
            embed,
            cache,
            *args,
            **kwargs)
        if embed:
            pass
        if '_links' in data:
            # Replace items with links across ALL sensor types.
            data['_links'].update(self.get_links())
            data['totalCount'] = len(data['_links']['items'])
        return data
    def serialize_list(self, embed, cache, *args, **kwargs):
        data = super(
            MixedSensorResource,
            self).serialize_list(
            embed=embed,
            cache=cache,
            *args,
            **kwargs)
        if embed:
            pass
        if '_links' in data:
            # Replace items with links across ALL sensor types.
            data['_links'].update(self.get_links())
            data['totalCount'] = len(data['_links']['items'])
        return data
    def get_links(self):
        '''Build link items for every sensor of every registered type that
        matches the current filters.'''
        mapped_model_to_res = self.map_model_to_resource()
        sensors = self.query_models()
        items = []
        for sensor in sensors:
            items.append(
                {
                    'href': (
                        mapped_model_to_res[
                            type(sensor)](
                            obj=sensor,
                            request=self._request)).get_single_href(),
                    'title': "%s" %
                    sensor})
        return {'items': items}
    def map_model_to_resource(self):
        # model class -> resource class, derived from the registry
        mapped = {}
        for sensor_type in self.available_sensor_types:
            sensor_details = self.available_sensor_types[sensor_type]
            mapped[sensor_details['model']] = sensor_details['resource']
        return mapped
    def query_models(self):
        # One DB query per registered sensor type, concatenated.
        results = []
        for sensor_type in self.available_sensor_types:
            modelResults = self.available_sensor_types[sensor_type][
                'model'].objects.filter(**self._filters)
            results.extend(modelResults)
        return results
    def get_tags(self):
        return ['sensor-%s' % self._obj.id,
                'device-%s' % self._obj.device_id,
                'site-%s' % self._obj.device.site_id]
class DeviceResource(Resource):
    '''A physical device installed at a site; groups the sensors attached
    to it and carries location fields (building/floor/room).'''
    model = Device
    display_field = 'name'
    resource_name = 'devices'
    resource_type = 'device'
    required_fields = ['name']
    model_fields = ['name', 'description', 'building', 'floor', 'room', 'active']
    # (Removed a dead no-op string literal that listed the old per-type
    # 'ch:sensors' CollectionFields; MixedSensorResource covers all types.)
    related_fields = {
        'ch:sensors': CollectionField(MixedSensorResource,
                                      reverse_name='device'),
        'ch:site': ResourceField('chain.core.resources.SiteResource', 'site'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    queryset = Device.objects
    def get_tags(self):
        '''Cache-invalidation tags for this device and its site.'''
        # sometimes the site_id field is unicode? weird
        return ['device-%d' % self._obj.id,
                'site-%s' % self._obj.site_id]
class SiteResource(Resource):
    '''A deployment site: the root of the device/sensor hierarchy. Also
    exposes an aggregated /summary view and an optional raw ZMQ stream URL.'''
    model = Site
    # TODO _href should be the external URL if present
    resource_name = 'sites'
    resource_type = 'site'
    display_field = 'name'
    model_fields = ['name']
    required_fields = ['name']
    related_fields = {
        'ch:devices': CollectionField(DeviceResource, reverse_name='site'),
        # 'ch:people': CollectionField(PersonResource, reverse_name='site'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    queryset = Site.objects
    def serialize_single(self, embed, cache):
        data = super(SiteResource, self).serialize_single(embed, cache)
        if embed:
            # rawZMQStream is stored on the model but rendered as a link,
            # not as a plain field
            stream = self._obj.raw_zmq_stream
            if stream:
                data['_links']['rawZMQStream'] = {
                    'href': stream,
                    'title': 'Raw ZMQ Stream'}
            data['_links']['ch:siteSummary'] = {
                'title': 'Summary',
                'href': full_reverse('site-summary', self._request,
                                     args=(self._obj.id,))
            }
        return data
    def get_filled_schema(self):
        schema = super(SiteResource, self).get_filled_schema()
        schema['properties']['rawZMQStream']['default'] = \
            self._obj.raw_zmq_stream
        return schema
    def deserialize(self):
        # rawZMQStream is not in model_fields, so handle it manually
        super(SiteResource, self).deserialize()
        if 'rawZMQStream' in self._data:
            self._obj.raw_zmq_stream = self._data['rawZMQStream']
        return self._obj
    def update(self, data):
        super(SiteResource, self).update(data)
        if 'rawZMQStream' in data:
            self._obj.raw_zmq_stream = data['rawZMQStream']
        self._obj.save()
    def get_tags(self):
        return ['site-%d' % self._obj.id]
    @classmethod
    def get_schema(cls, filters=None):
        # extend the base schema with the manually-handled rawZMQStream field
        schema = super(SiteResource, cls).get_schema(filters)
        schema['properties']['rawZMQStream'] = {
            'type': 'string',
            'format': 'uri',
            'title': 'rawZMQStream'
        }
        return schema
    # cache for 1hr
    @classmethod
    @cache_control(max_age=3600)
    def site_summary_view(cls, request, id):
        '''Aggregated view: every device at the site with its sensors and
        their latest values, using one bulk Influx query for the values.
        NOTE(review): select_related on the reverse 'sensors' relation looks
        wrong (prefetch_related territory) -- confirm against the Django
        version in use; older Django silently ignored it.'''
        # filters = request.GET.dict()
        devices = Device.objects.filter(site_id=id).select_related(
            'sensors',
            'sensors__metric',
            'sensors__unit'
        )
        response = {
            '_links': {
                'self': {'href': full_reverse('site-summary', request,
                                              args=(id,))},
            },
            'devices': []
        }
        sensor_data_list = influx_client.get_last_data_from_all_sensors(id)
        # sensor_id -> (last value, last timestamp)
        sensor_data_dict = {}
        for data_point in sensor_data_list:
            sensor_data_dict[int(data_point['sensor_id'])] = (data_point['last_value'], data_point['time'])
        for device in devices:
            dev_resource = DeviceResource(obj=device, request=request)
            dev_data = dev_resource.serialize(rels=False)
            dev_data['href'] = dev_resource.get_single_href()
            response['devices'].append(dev_data)
            dev_data['sensors'] = []
            for sensor in device.sensors.all():
                sensor_resource = ScalarSensorResource(
                    obj=sensor,
                    request=request)
                # include_data=False: values come from the bulk query above
                sensor_data = sensor_resource.serialize(rels=False, include_data=False)
                try:
                    sensor_data['value'] = sensor_data_dict[sensor.id][0]
                    sensor_data['updated'] = sensor_data_dict[sensor.id][1]
                except KeyError:
                    # looks like we don't have any data for this sensor
                    pass
                sensor_data['href'] = sensor_resource.get_single_href()
                dev_data['sensors'].append(sensor_data)
                sensor_data['data'] = []
        return cls.render_response(response, request)
    @classmethod
    def urls(cls):
        # add the /<id>/summary endpoint to the standard resource URLs
        base_patterns = super(SiteResource, cls).urls()
        base_patterns.append(
            url(r'^(\d+)/summary$', cls.site_summary_view,
                name='site-summary'))
        return base_patterns
class ApiRootResource(Resource):
def __init__(self, request):
self._request = request
def serialize(self):
data = {
'_links': {
'self': {'href': full_reverse('api-root', self._request)},
'curies': CHAIN_CURIES,
'ch:sites': {
'title': 'Sites',
'href': full_reverse('sites-list', self._request)
}
}
}
return data
@classmethod
def single_view(cls, request):
resource = cls(request=request)
response_data = resource.serialize()
return cls.render_response(response_data, request)
# URL Setup:
urls = patterns(
'',
url(r'^/?$', ApiRootResource.single_view, name='api-root')
)
# add additional URLS to account for the rename of sensor to scalarsensor.
# unfortunately we can't use redirects in case clients are POSTing to outdated
# URLs. If we WERE redirecting, we would use RedirectView.as_view()
#
# put these first so they are overridden by the later ones, particularly when
# doing URL reverse lookup.
urls += patterns('',
url("^sensordata/", include(ScalarSensorDataResource.urls())),
url("^sensor/", include(ScalarSensorResource.urls())),
)
resources = [
MetadataResource,
ScalarSensorDataResource,
AggregateScalarSensorDataResource,
ScalarSensorResource,
# Disable all the person/presence stuff, which isn't being used anymore
# PresenceDataResource,
# PresenceSensorResource,
# PersonResource,
MixedSensorResource,
DeviceResource,
SiteResource]
for resource in resources:
new_url = url("^%s/" % resource.resource_name, include(resource.urls()))
urls += patterns('', new_url)
register_resource(resource)
| 35.254401 | 107 | 0.571375 | from chain.core.api import Resource, ResourceField, CollectionField, \
MetadataCollectionField
from chain.core.api import full_reverse, render_error
from chain.core.api import CHAIN_CURIES
from chain.core.api import BadRequestException, HTTP_STATUS_BAD_REQUEST
from chain.core.api import register_resource
from chain.core.models import Site, Device, ScalarSensor, \
PresenceSensor, PresenceData, Person, Metadata
from django.conf.urls import include, patterns, url
from django.utils import timezone
from datetime import timedelta, datetime
import calendar
from chain.localsettings import INFLUX_HOST, INFLUX_PORT, INFLUX_DATABASE, INFLUX_MEASUREMENT
from chain.influx_client import InfluxClient
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import cache_control
from django.utils.dateparse import parse_datetime
import json
influx_client = InfluxClient(INFLUX_HOST, INFLUX_PORT, INFLUX_DATABASE, INFLUX_MEASUREMENT)
class MetadataResource(Resource):
model = Metadata
display_field = 'timestamp'
resource_name = 'metadata'
resource_type = 'metadata'
required_fields = ['key', 'value']
model_fields = ['timestamp', 'key', 'value']
queryset = Metadata.objects
def get_queryset(self):
queryset = self._queryset.filter(**self._filters).order_by('key', '-timestamp').distinct('key')
return queryset[self._offset:self._offset + self._limit]
def get_total_count(self):
try:
return self._total_count
except AttributeError:
pass
qs = self._queryset.filter(**self._filters).order_by('key').distinct('key')
self._total_count = qs.count()
return self._total_count
def serialize_list(self, embed, cache):
if not embed:
return super(MetadataResource, self).serialize_list(embed, cache)
href = self.get_list_href()
serialized_data = {
'_links': {
'self': {'href': href},
'curies': CHAIN_CURIES,
'createForm': {
'href': self.get_create_href(),
'title': 'Add Metadata'
}
},
'totalCount': self.get_total_count()
}
objs = self.get_queryset()
serialized_data['data'] = [{
'key': obj.key,
'value': obj.value}
for obj in objs]
serialized_data = self.add_page_links(serialized_data, href)
return serialized_data
def serialize_single(self, embed=True, cache=None, rels=True, *args, **kwargs):
return super(
MetadataResource,
self).serialize_single(
embed,
cache,
rels,
*args,
**dict(kwargs, edit=False))
@classmethod
@csrf_exempt
def edit_view(cls, request, id):
return render_error(HTTP_STATUS_BAD_REQUEST,
"Metadata are immutable",
request)
class SensorDataResource(Resource):
def __init__(self, *args, **kwargs):
super(SensorDataResource, self).__init__(*args, **kwargs)
def format_time(self, timestamp):
return calendar.timegm(timestamp.timetuple())
def add_page_links(self, data, href, page_start, page_end):
timespan = page_end - page_start
data['_links']['previous'] = {
'href': self.update_href(
href, timestamp__gte=self.format_time(page_start - timespan),
timestamp__lt=self.format_time(page_start)),
'title': '%s to %s' % (page_start - timespan, page_start),
}
data['_links']['self'] = {
'href': self.update_href(
href, timestamp__gte=self.format_time(page_start),
timestamp__lt=self.format_time(page_end)),
}
data['_links']['next'] = {
'href': self.update_href(
href, timestamp__gte=self.format_time(page_end),
timestamp__lt=self.format_time(page_end + timespan)),
'title': '%s to %s' % (page_end, page_end + timespan),
}
return data
def default_timespan(self):
aggtime = self._filters.get('aggtime', None)
if aggtime is None:
return timedelta(hours=6)
elif aggtime == '1h':
return timedelta(hours=500)
elif aggtime == '1d':
return timedelta(days=500)
elif aggtime == '1w':
return timedelta(weeks=500)
else:
raise BadRequestException('Invalid argument for aggtime. Must be 1h, 1d, or 1w')
class ScalarSensorDataResource(SensorDataResource):
display_field = 'timestamp'
resource_name = 'scalar_data'
resource_type = 'scalar_data'
model_fields = ['timestamp', 'value']
schema_type = {'timestamp': ('string', 'date-time'),
'value': ('number', None)}
required_fields = ['value']
def __init__(self, *args, **kwargs):
super(ScalarSensorDataResource, self).__init__(*args, **kwargs)
if self._state == 'data':
self.sensor_id = self._filters.get('sensor_id')
self.value = self.sanitize_field_value('value', self._data.get('value'))
self.timestamp = self.sanitize_field_value('timestamp', self._data.get('timestamp'))
sensor = ScalarSensor.objects.select_related('device', 'metric').get(id=self.sensor_id)
self.device_id = sensor.device.id
self.metric = sensor.metric
self.site_id = sensor.device.site_id
self._state = 'object'
if 'queryset' in kwargs:
pass
def serialize_single(self, embed=True, cache=None, rels=True):
data = {}
for field_name in self.model_fields:
data[field_name] = self.serialize_field(getattr(self, field_name))
return data
@classmethod
def sanitize_field_value(cls, field_name, value):
if field_name == 'value':
return float(value)
if field_name == 'timestamp':
from django.db import models
if value == None:
return timezone.now()
timestamp = parse_datetime(value)
if timezone.is_aware(timestamp):
return timestamp
return timezone.make_aware(timestamp, timezone.get_current_timezone())
def save(self):
response = influx_client.post_data(self.site_id, self.device_id, self.sensor_id,
self.metric, self.value, self.timestamp)
return response
def serialize_list(self, embed, cache):
if not embed:
return super(
ScalarSensorDataResource,
self).serialize_list(
embed,
cache)
href = self.get_list_href()
serialized_data = {
'_links': {
'curies': CHAIN_CURIES,
'createForm': {
'href': self.get_create_href(),
'title': 'Add Data'
}
},
'dataType': 'float'
}
request_time = timezone.now()
# if they are given, then we need to convert them from unix time to use
# in the queryset filter
if 'timestamp__gte' in self._filters:
try:
page_start = datetime.utcfromtimestamp(
float(self._filters['timestamp__gte'])).replace(
tzinfo=timezone.utc)
except ValueError:
raise BadRequestException(
"Invalid timestamp format for lower bound of date range.")
else:
page_start = request_time - self.default_timespan()
if 'timestamp__lt' in self._filters:
try:
page_end = datetime.utcfromtimestamp(
float(self._filters['timestamp__lt'])).replace(
tzinfo=timezone.utc)
except ValueError:
raise BadRequestException(
"Invalid timestamp format for upper bound of date range.")
else:
page_end = request_time
self._filters['timestamp__gte'] = page_start
self._filters['timestamp__lt'] = page_end
objs = influx_client.get_sensor_data(self._filters)
serialized_data = self.add_page_links(serialized_data, href,
page_start, page_end)
serialized_data['data'] = [{
'value': obj['value'],
'timestamp': obj['time']}
for obj in objs]
return serialized_data
def get_cache_key(self):
return self.sensor_id, self.timestamp
def serialize_stream(self):
data = self.serialize_single(rels=False)
data['_links'] = {
'ch:sensor': {'href': full_reverse(
'scalar_sensors-single', self._request,
args=(self._filters['sensor_id'],))}
}
return data
def get_single_href(self):
return full_reverse(self.resource_name + '-single',
self._request, args=(self.sensor_id,self.timestamp))
def get_tags(self):
if not self.sensor_id:
raise ValueError(
'Tried to called get_tags on a resource without an id')
db_sensor = ScalarSensor.objects.select_related('device').get(
id=self.sensor_id)
return ['sensor-%d' % db_sensor.id,
'device-%d' % db_sensor.device_id,
'site-%d' % db_sensor.device.site_id]
@classmethod
def get_field_schema_type(cls, field_name):
if field_name in cls.model_fields:
return cls.schema_type[field_name]
else:
raise NotImplementedError(
"tried to look up field %s but didn't know where" % field_name)
@classmethod
def model_has_field(cls, field_name):
if field_name in cls.model_fields:
return True
return False
class AggregateScalarSensorDataResource(SensorDataResource):
resource_name = 'aggregate_data'
resource_type = 'aggregate_data'
model_fields = ['timestamp', 'max', 'min', 'mean', 'count']
def __init__(self, *args, **kwargs):
super(AggregateScalarSensorDataResource, self).__init__(*args, **kwargs)
def get_list_href(self, embed=False):
href = super(AggregateScalarSensorDataResource, self).get_list_href()
if not embed:
href += '{&aggtime}'
return href
def serialize_list(self, embed, cache):
if not embed:
return super(
AggregateScalarSensorDataResource,
self).serialize_list(
embed,
cache)
if 'aggtime' not in self._filters:
raise BadRequestException(
"Missing aggtime arguement")
href = self.get_list_href(True)
serialized_data = {
'_links': {
'curies': CHAIN_CURIES
},
'dataType': 'float'
}
request_time = timezone.now()
if 'timestamp__gte' in self._filters:
try:
page_start = datetime.utcfromtimestamp(
float(self._filters['timestamp__gte'])).replace(
tzinfo=timezone.utc)
except ValueError:
raise BadRequestException(
"Invalid timestamp format for lower bound of date range.")
else:
page_start = request_time - self.default_timespan()
if 'timestamp__lt' in self._filters:
try:
page_end = datetime.utcfromtimestamp(
float(self._filters['timestamp__lt'])).replace(
tzinfo=timezone.utc)
except ValueError:
raise BadRequestException(
"Invalid timestamp format for upper bound of date range.")
else:
page_end = request_time
self._filters['timestamp__gte'] = page_start
self._filters['timestamp__lt'] = page_end
objs = influx_client.get_sensor_data(self._filters)
serialized_data = self.add_page_links(serialized_data, href,
page_start, page_end)
serialized_data['data'] = [{
'max': obj['max'],
'min': obj['min'],
'mean': obj['mean'],
'count': obj['count'],
'timestamp': obj['time']}
for obj in objs]
return serialized_data
@classmethod
def urls(cls):
base_name = cls.resource_name
return patterns('',
url(r'^$',
cls.list_view, name=base_name + '-list'))
class ScalarSensorResource(Resource):
model = ScalarSensor
display_field = 'metric'
resource_name = 'scalar_sensors'
resource_type = 'scalar_sensor'
required_fields = ['metric', 'unit']
model_fields = ['active']
stub_fields = {'metric': 'name', 'unit': 'name'}
queryset = ScalarSensor.objects
related_fields = {
'ch:dataHistory': CollectionField(ScalarSensorDataResource,
reverse_name='sensor'),
'ch:aggregateData': CollectionField(AggregateScalarSensorDataResource,
reverse_name='sensor'),
'ch:device': ResourceField('chain.core.resources.DeviceResource',
'device'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
def serialize_single(self, embed, cache, *args, **kwargs):
data = super(
ScalarSensorResource,
self).serialize_single(
embed,
cache,
*args,
**kwargs)
data['sensor-type'] = "scalar"
if embed:
data['dataType'] = 'float'
if not kwargs.get('include_data', True):
return data
else:
last_data = influx_client.get_last_sensor_data(self._obj.id)
if last_data:
data['value'] = last_data[0]['last']
data['updated'] = last_data[0]['time']
return data
def get_tags(self):
return ['sensor-%s' % self._obj.id,
'scalar_sensor-%s' % self._obj.id,
'device-%s' % self._obj.device_id,
'site-%s' % self._obj.device.site_id]
class PresenceDataResource(SensorDataResource):
model = PresenceData
display_field = 'timestamp'
resource_name = 'presencedata'
resource_type = 'presencedata'
model_fields = ['timestamp', 'present', 'person', 'sensor']
required_fields = ['person', 'sensor', 'present']
queryset = PresenceData.objects
def __init__(self, *args, **kwargs):
super(PresenceDataResource, self).__init__(*args, **kwargs)
if 'queryset' in kwargs:
pass
def serialize_single(self, embed, cache):
serialized_data = super(
PresenceDataResource,
self).serialize_single(
embed,
cache)
if 'person' in serialized_data:
del serialized_data['person']
if 'sensor' in serialized_data:
del serialized_data['sensor']
if '_links' not in serialized_data:
serialized_data['_links'] = {}
serialized_data['_links'].update(self.get_additional_links())
return serialized_data
def get_additional_links(self):
return {
'person': {
'href': self.get_person_url(
self._obj.person), 'title': "%s, %s" %
(self._obj.person.last_name, self._obj.person.first_name)}, 'sensor': {
'href': self.get_sensor_url(
self._obj.sensor), 'title': "%s->%s" %
(self._obj.sensor.device.name, self._obj.sensor.metric)}}
def serialize_list(self, embed, cache):
if not embed:
return super(
PresenceDataResource,
self).serialize_list(
embed,
cache)
href = self.get_list_href()
items = []
serialized_data = {
'_links': {
'curies': CHAIN_CURIES,
'createForm': {
'href': self.get_create_href(),
'title': 'Add Data'
},
'items': items
},
'dataType': 'presence'
}
request_time = timezone.now()
# if they are given, then we need to convert them from unix time to use
# in the queryset filter
if 'timestamp__gte' in self._filters:
try:
page_start = datetime.utcfromtimestamp(
float(self._filters['timestamp__gte']))
except ValueError:
raise BadRequestException(
"Invalid timestamp format for lower bound of date range.")
else:
page_start = request_time - self.default_timespan()
if 'timestamp__lt' in self._filters:
try:
page_end = datetime.utcfromtimestamp(
float(self._filters['timestamp__lt']))
except ValueError:
raise BadRequestException(
"Invalid timestamp format for upper bound of date range.")
else:
page_end = request_time
self._filters['timestamp__gte'] = page_start
self._filters['timestamp__lt'] = page_end
objs = self._queryset.filter(**self._filters).order_by('timestamp')
serialized_data = self.add_page_links(serialized_data, href,
page_start, page_end)
# Make links:
for obj in objs:
presence_data_resource = PresenceDataResource(
obj=obj,
request=self._request)
items.append(
{
'href': presence_data_resource.get_single_href(),
'title': "%s %s %s at time %s" %
(obj.person.last_name,
"at" if obj.present else "left",
obj.sensor.device,
obj.timestamp.isoformat())})
return serialized_data
def get_person_url(self, obj):
if self._request is None:
# No way to form URL, just return the person's ID
return obj.id
person_resource = PersonResource(obj=obj, request=self._request)
return person_resource.get_single_href()
def get_sensor_url(self, obj):
if self._request is None:
return obj.id
psensor_resource = PresenceSensorResource(
obj=obj,
request=self._request)
return psensor_resource.get_single_href()
def serialize_stream(self):
data = self.serialize_single(False, None) # (rels=False)
# TODO: Make useful
data['_links'] = {
'href': self.get_single_href(),
#'person':
}
data['_links'].update(self.get_additional_links())
return data
def get_tags(self):
if not self._obj:
raise ValueError(
'Tried to called get_tags on a resource without an object')
db_sensor = PresenceSensor.objects.select_related('device').get(
id=self._obj.sensor_id)
return ['person-%d' % self._obj.person_id,
'sensor-%d' % db_sensor.id,
'device-%d' % db_sensor.device_id,
'site-%d' % db_sensor.device.site_id]
def get_filled_schema(self):
schema = super(PresenceDataResource, self).get_filled_schema()
# we need to replace the sensor and/or person links with just
# the URL instead of the full object
props = schema['properties']
if 'person' in props:
person_default = props['person']['default']
props['person']['default'] = self.get_person_url(person_default)
if 'sensor' in props:
sensor_default = props['sensor']['default']
props['sensor']['default'] = self.get_sensor_url(sensor_default)
return schema
class PresenceSensorResource(Resource):
model = PresenceSensor
display_field = 'metric'
resource_name = 'presence_sensors'
resource_type = 'presence_sensor'
required_fields = ['metric']
# for now, name is hardcoded as the only attribute of metric and unit
stub_fields = {'metric': 'name'}
queryset = PresenceSensor.objects
related_fields = {
'ch:dataHistory': CollectionField(PresenceDataResource,
reverse_name='sensor'),
'ch:device': ResourceField('chain.core.resources.DeviceResource',
'device'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
def serialize_single(self, embed, cache, *args, **kwargs):
data = super(
PresenceSensorResource,
self).serialize_single(
embed,
cache,
*args,
**kwargs)
data['sensor-type'] = "presence"
data['dataType'] = "presence"
if embed:
if '_embedded' not in data:
data['_embedded'] = {}
data['_embedded'].update(self.get_additional_embedded())
if '_links' not in data:
data['_links'] = {}
data['_links'].update(self.get_additional_links())
return data
def get_additional_links(self):
links = {}
last_data = self._obj.presence_data.order_by(
'timestamp').reverse()[:1]
if last_data:
links['last-visit'] = {
'href': self.get_presense_data_url(
last_data[0]), 'title': "%s at %s" %
(last_data[0].person, last_data[0].timestamp.isoformat())}
return links
def get_additional_embedded(self):
embedded = {}
last_data = self._obj.presence_data.order_by(
'timestamp').reverse()[:1]
if last_data:
embedded['last-visit'] = PresenceDataResource(obj=last_data[0], request=self._request)\
.serialize_single(False, {})
return embedded
def get_person_url(self, obj):
if self._request is None:
# No way to form URL, just return the person's ID
return obj.id
person_resource = PersonResource(obj=obj, request=self._request)
return person_resource.get_single_href()
def get_presense_data_url(self, obj):
if self._request is None:
return obj.id
pdata_resource = PresenceDataResource(obj=obj, request=self._request)
return pdata_resource.get_single_href()
def get_sensor_url(self, obj):
if self._request is None:
# No way to form URL, just return the person's ID
return obj.id
psensor_resource = PresenceSensorResource(
obj=obj,
request=self._request)
return psensor_resource.get_single_href()
def get_tags(self):
return ['sensor-%s' % self._obj.id,
'presense_sensor-%s' % self._obj.id,
'device-%s' % self._obj.device_id,
'site-%s' % self._obj.device.site_id]
class PersonResource(Resource):
model = Person
display_field = 'last_name'
resource_name = 'people'
resource_type = 'person'
required_fields = ['first_name', 'last_name']
model_fields = ['first_name', 'last_name', 'twitter_handle', 'rfid']
related_fields = {
'ch:presence-data': CollectionField(PresenceDataResource,
reverse_name='person'),
'ch:site': ResourceField('chain.core.resources.SiteResource', 'site'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
queryset = Person.objects
def serialize_single(self, embed, cache, *args, **kwargs):
data = super(
PersonResource,
self).serialize_single(
embed,
cache,
*args,
**kwargs)
if embed:
if '_embedded' not in data:
data['_embedded'] = {}
data['_embedded'].update(self.get_additional_embedded())
if '_links' in data:
data['_links'].update(self.get_additional_links())
return data
def get_presence_data(self):
filters = {
'person': self._obj
}
return PresenceData.objects.filter(**filters).order_by('timestamp')[:1]
def get_additional_links(self):
links = {}
last_data = self.get_presence_data()
if last_data:
links['last-visit'] = {
'href': self.get_presense_data_url(
last_data[0]),
'title': "at %s->%s at time %s" %
(last_data[0].sensor.device,
last_data[0].sensor.metric,
last_data[0].timestamp.isoformat())}
if self._obj.picture_url:
links['picture'] = {
'href': self._obj.picture_url,
'title': 'Picture URL (external)'
}
return links
def get_additional_embedded(self):
embedded = {}
last_data = self.get_presence_data()
if last_data:
embedded['last-visit'] = PresenceDataResource(obj=last_data[0], request=self._request)\
.serialize_single(False, {})
return embedded
def get_presense_data_url(self, obj):
if self._request is None:
return obj.id
pdata_resource = PresenceDataResource(obj=obj, request=self._request)
return pdata_resource.get_single_href()
def get_tags(self):
# sometimes the site_id field is unicode? weird
return ['person-%d' % self._obj.id,
'site-%s' % self._obj.site_id]
def json_merge(obj1, obj2):
if isinstance(obj1, list):
# Merge array:
set_used = set(obj1)
new_arr = obj1[:]
for el in obj2:
if el not in set_used:
new_arr.append(el)
return new_arr
elif isinstance(obj1, dict):
# Merge object:
new_obj = {}
for key in obj1:
if key in obj2:
new_obj[key] = json_merge(obj1[key], obj2[key])
else:
new_obj[key] = obj1[key]
for key in obj2:
if key not in new_obj:
new_obj[key] = obj2[key]
return new_obj
else:
# Could not merge. Select the version from
# the first object:
return obj1
class MixedSensorResource(Resource):
model = ScalarSensor
display_field = 'metric'
resource_name = 'sensors'
resource_type = 'sensor'
# for now, name is hardcoded as the only attribute of metric and unit
stub_fields = {'metric': 'name'}
queryset = ScalarSensor.objects
available_sensor_types = {
'scalar': {
'model': ScalarSensor,
'resource': ScalarSensorResource
},
# 'presence': {
# 'model': PresenceSensor,
# 'resource': PresenceSensorResource
# }
}
related_fields = {
'ch:device': ResourceField('chain.core.resources.DeviceResource',
'device')
}
@classmethod
def get_schema(cls, filters=None):
schema = {
'required': ['sensor-type'],
'type': 'object',
'properties': {
'sensor-type': {
'type': 'string',
'title': 'sensor-type',
'enum': cls.available_sensor_types.keys()
}
},
'title': 'Create Sensor'
}
for sensor_type in cls.available_sensor_types:
sub_schema = cls.available_sensor_types[
sensor_type]['resource'].get_schema(filters)
schema = json_merge(schema, sub_schema)
return schema
@classmethod
def create_list(cls, data, req):
raise Exception("Not yet implemented.")
@classmethod
def create_single(cls, data, req):
if u'sensor-type' not in data:
# raise Exception("'type' property not found")
# For temporary back-compatability, assume it
# is a ScalarSensor:
return ScalarSensorResource.create_single(data, req)
for sensor_type in cls.available_sensor_types:
if data['sensor-type'] == sensor_type:
del data['sensor-type']
return cls.available_sensor_types[sensor_type][
'resource'].create_single(data, req)
# TODO: Return 400 rather than raising an exception
raise Exception("Unrecognized sensor type.")
def serialize_single(self, embed, cache, *args, **kwargs):
data = super(
MixedSensorResource,
self).serialize_single(
embed,
cache,
*args,
**kwargs)
if embed:
pass
if '_links' in data:
data['_links'].update(self.get_links())
data['totalCount'] = len(data['_links']['items'])
return data
def serialize_list(self, embed, cache, *args, **kwargs):
data = super(
MixedSensorResource,
self).serialize_list(
embed=embed,
cache=cache,
*args,
**kwargs)
if embed:
pass
if '_links' in data:
data['_links'].update(self.get_links())
data['totalCount'] = len(data['_links']['items'])
return data
def get_links(self):
mapped_model_to_res = self.map_model_to_resource()
sensors = self.query_models()
items = []
for sensor in sensors:
items.append(
{
'href': (
mapped_model_to_res[
type(sensor)](
obj=sensor,
request=self._request)).get_single_href(),
'title': "%s" %
sensor})
return {'items': items}
def map_model_to_resource(self):
mapped = {}
for sensor_type in self.available_sensor_types:
sensor_details = self.available_sensor_types[sensor_type]
mapped[sensor_details['model']] = sensor_details['resource']
return mapped
def query_models(self):
results = []
for sensor_type in self.available_sensor_types:
modelResults = self.available_sensor_types[sensor_type][
'model'].objects.filter(**self._filters)
results.extend(modelResults)
return results
def get_tags(self):
return ['sensor-%s' % self._obj.id,
'device-%s' % self._obj.device_id,
'site-%s' % self._obj.device.site_id]
class DeviceResource(Resource):
model = Device
display_field = 'name'
resource_name = 'devices'
resource_type = 'device'
required_fields = ['name']
model_fields = ['name', 'description', 'building', 'floor', 'room', 'active']
related_fields = {
'ch:sensors': CollectionField(MixedSensorResource,
reverse_name='device'),
'ch:site': ResourceField('chain.core.resources.SiteResource', 'site'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
queryset = Device.objects
def get_tags(self):
# sometimes the site_id field is unicode? weird
return ['device-%d' % self._obj.id,
'site-%s' % self._obj.site_id]
class SiteResource(Resource):
model = Site
# TODO _href should be the external URL if present
resource_name = 'sites'
resource_type = 'site'
display_field = 'name'
model_fields = ['name']
required_fields = ['name']
related_fields = {
'ch:devices': CollectionField(DeviceResource, reverse_name='site'),
# 'ch:people': CollectionField(PersonResource, reverse_name='site'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
queryset = Site.objects
def serialize_single(self, embed, cache):
data = super(SiteResource, self).serialize_single(embed, cache)
if embed:
stream = self._obj.raw_zmq_stream
if stream:
data['_links']['rawZMQStream'] = {
'href': stream,
'title': 'Raw ZMQ Stream'}
data['_links']['ch:siteSummary'] = {
'title': 'Summary',
'href': full_reverse('site-summary', self._request,
args=(self._obj.id,))
}
return data
def get_filled_schema(self):
schema = super(SiteResource, self).get_filled_schema()
schema['properties']['rawZMQStream']['default'] = \
self._obj.raw_zmq_stream
return schema
def deserialize(self):
super(SiteResource, self).deserialize()
if 'rawZMQStream' in self._data:
self._obj.raw_zmq_stream = self._data['rawZMQStream']
return self._obj
def update(self, data):
super(SiteResource, self).update(data)
if 'rawZMQStream' in data:
self._obj.raw_zmq_stream = data['rawZMQStream']
self._obj.save()
def get_tags(self):
return ['site-%d' % self._obj.id]
@classmethod
def get_schema(cls, filters=None):
schema = super(SiteResource, cls).get_schema(filters)
schema['properties']['rawZMQStream'] = {
'type': 'string',
'format': 'uri',
'title': 'rawZMQStream'
}
return schema
# cache for 1hr
@classmethod
@cache_control(max_age=3600)
def site_summary_view(cls, request, id):
#filters = request.GET.dict()
devices = Device.objects.filter(site_id=id).select_related(
'sensors',
'sensors__metric',
'sensors__unit'
)
response = {
'_links': {
'self': {'href': full_reverse('site-summary', request,
args=(id,))},
},
'devices': []
}
sensor_data_list = influx_client.get_last_data_from_all_sensors(id)
sensor_data_dict = {}
for data_point in sensor_data_list:
sensor_data_dict[int(data_point['sensor_id'])] = (data_point['last_value'], data_point['time'])
for device in devices:
dev_resource = DeviceResource(obj=device, request=request)
dev_data = dev_resource.serialize(rels=False)
dev_data['href'] = dev_resource.get_single_href()
response['devices'].append(dev_data)
dev_data['sensors'] = []
for sensor in device.sensors.all():
sensor_resource = ScalarSensorResource(
obj=sensor,
request=request)
sensor_data = sensor_resource.serialize(rels=False, include_data=False)
try:
sensor_data['value'] = sensor_data_dict[sensor.id][0]
sensor_data['updated'] = sensor_data_dict[sensor.id][1]
except KeyError:
# looks like we don't have any data for this sensor
pass
sensor_data['href'] = sensor_resource.get_single_href()
dev_data['sensors'].append(sensor_data)
sensor_data['data'] = []
return cls.render_response(response, request)
@classmethod
def urls(cls):
base_patterns = super(SiteResource, cls).urls()
base_patterns.append(
url(r'^(\d+)/summary$', cls.site_summary_view,
name='site-summary'))
return base_patterns
class ApiRootResource(Resource):
def __init__(self, request):
self._request = request
def serialize(self):
data = {
'_links': {
'self': {'href': full_reverse('api-root', self._request)},
'curies': CHAIN_CURIES,
'ch:sites': {
'title': 'Sites',
'href': full_reverse('sites-list', self._request)
}
}
}
return data
@classmethod
def single_view(cls, request):
resource = cls(request=request)
response_data = resource.serialize()
return cls.render_response(response_data, request)
urls = patterns(
'',
url(r'^/?$', ApiRootResource.single_view, name='api-root')
)
# URLs. If we WERE redirecting, we would use RedirectView.as_view()
#
# put these first so they are overridden by the later ones, particularly when
# doing URL reverse lookup.
urls += patterns('',
url("^sensordata/", include(ScalarSensorDataResource.urls())),
url("^sensor/", include(ScalarSensorResource.urls())),
)
resources = [
MetadataResource,
ScalarSensorDataResource,
AggregateScalarSensorDataResource,
ScalarSensorResource,
# Disable all the person/presence stuff, which isn't being used anymore
MixedSensorResource,
DeviceResource,
SiteResource]
for resource in resources:
new_url = url("^%s/" % resource.resource_name, include(resource.urls()))
urls += patterns('', new_url)
register_resource(resource)
| true | true |
f7251f653992e182b50932f276e0b927de32b712 | 8,687 | py | Python | tests/losses/test_fastap_loss.py | cwkeam/pytorch-metric-learning | 63e4ecb781c5735ad714f61a3eecc55f72496905 | [
"MIT"
] | 4,357 | 2020-01-15T23:42:35.000Z | 2022-03-31T08:11:48.000Z | tests/losses/test_fastap_loss.py | cwkeam/pytorch-metric-learning | 63e4ecb781c5735ad714f61a3eecc55f72496905 | [
"MIT"
] | 386 | 2020-01-16T02:06:37.000Z | 2022-03-30T07:59:47.000Z | tests/losses/test_fastap_loss.py | cwkeam/pytorch-metric-learning | 63e4ecb781c5735ad714f61a3eecc55f72496905 | [
"MIT"
] | 568 | 2020-01-16T01:08:23.000Z | 2022-03-30T09:18:48.000Z | ######################################
#######ORIGINAL IMPLEMENTATION########
######################################
# FROM https://github.com/kunhe/FastAP-metric-learning/blob/master/pytorch/FastAP_loss.py
# This code is copied directly from the official implementation
# so that we can make sure our implementation returns the same result.
# It's copied under the MIT license.
import torch
from torch.autograd import Variable
def softBinning(D, mid, Delta):
y = 1 - torch.abs(D - mid) / Delta
return torch.max(torch.tensor([0], dtype=D.dtype).to(D.device), y)
def dSoftBinning(D, mid, Delta):
side1 = (D > (mid - Delta)).type(D.dtype)
side2 = (D <= mid).type(D.dtype)
ind1 = side1 * side2 # .type(torch.uint8)
side1 = (D > mid).type(D.dtype)
side2 = (D <= (mid + Delta)).type(D.dtype)
ind2 = side1 * side2 # .type(torch.uint8)
return (ind1 - ind2) / Delta
######################################
#######ORIGINAL IMPLEMENTATION########
######################################
# FROM https://github.com/kunhe/FastAP-metric-learning/blob/master/pytorch/FastAP_loss.py
# This code is copied directly from the official implementation
# so that we can make sure our implementation returns the same result.
# It's copied under the MIT license.
class OriginalImplementationFastAP(torch.autograd.Function):
"""
FastAP - autograd function definition
This class implements the FastAP loss from the following paper:
"Deep Metric Learning to Rank",
F. Cakir, K. He, X. Xia, B. Kulis, S. Sclaroff. CVPR 2019
NOTE:
Given a input batch, FastAP does not sample triplets from it as it's not
a triplet-based method. Therefore, FastAP does not take a Sampler as input.
Rather, we specify how the input batch is selected.
"""
@staticmethod
def forward(ctx, input, target, num_bins):
"""
Args:
input: torch.Tensor(N x embed_dim), embedding matrix
target: torch.Tensor(N x 1), class labels
num_bins: int, number of bins in distance histogram
"""
N = target.size()[0]
assert input.size()[0] == N, "Batch size donesn't match!"
# 1. get affinity matrix
Y = target.unsqueeze(1)
Aff = 2 * (Y == Y.t()).type(input.dtype) - 1
Aff.masked_fill_(
torch.eye(N, N).bool().to(input.device), 0
) # set diagonal to 0
I_pos = (Aff > 0).type(input.dtype).to(input.device)
I_neg = (Aff < 0).type(input.dtype).to(input.device)
N_pos = torch.sum(I_pos, 1)
# 2. compute distances from embeddings
# squared Euclidean distance with range [0,4]
dist2 = 2 - 2 * torch.mm(input, input.t())
# 3. estimate discrete histograms
Delta = torch.tensor(4.0 / num_bins).to(input.device)
Z = torch.linspace(0.0, 4.0, steps=num_bins + 1).to(input.device)
L = Z.size()[0]
h_pos = torch.zeros((N, L), dtype=input.dtype).to(input.device)
h_neg = torch.zeros((N, L), dtype=input.dtype).to(input.device)
for l in range(L):
pulse = softBinning(dist2, Z[l], Delta)
h_pos[:, l] = torch.sum(pulse * I_pos, 1)
h_neg[:, l] = torch.sum(pulse * I_neg, 1)
H_pos = torch.cumsum(h_pos, 1)
h = h_pos + h_neg
H = torch.cumsum(h, 1)
# 4. compate FastAP
FastAP = h_pos * H_pos / H
FastAP[torch.isnan(FastAP) | torch.isinf(FastAP)] = 0
FastAP = torch.sum(FastAP, 1) / N_pos
FastAP = FastAP[~torch.isnan(FastAP)]
loss = 1 - torch.mean(FastAP)
# 6. save for backward
ctx.save_for_backward(input, target)
ctx.Z = Z
ctx.Delta = Delta
ctx.dist2 = dist2
ctx.I_pos = I_pos
ctx.I_neg = I_neg
ctx.h_pos = h_pos
ctx.h_neg = h_neg
ctx.H_pos = H_pos
ctx.N_pos = N_pos
ctx.h = h
ctx.H = H
ctx.L = torch.tensor(L)
return loss
@staticmethod
def backward(ctx, grad_output):
input, target = ctx.saved_tensors
Z = Variable(ctx.Z, requires_grad=False)
Delta = Variable(ctx.Delta, requires_grad=False)
dist2 = Variable(ctx.dist2, requires_grad=False)
I_pos = Variable(ctx.I_pos, requires_grad=False)
I_neg = Variable(ctx.I_neg, requires_grad=False)
h = Variable(ctx.h, requires_grad=False)
H = Variable(ctx.H, requires_grad=False)
h_pos = Variable(ctx.h_pos, requires_grad=False)
h_neg = Variable(ctx.h_neg, requires_grad=False)
H_pos = Variable(ctx.H_pos, requires_grad=False)
N_pos = Variable(ctx.N_pos, requires_grad=False)
L = Z.size()[0]
H2 = torch.pow(H, 2)
H_neg = H - H_pos
# 1. d(FastAP)/d(h+)
LTM1 = torch.tril(torch.ones(L, L), -1) # lower traingular matrix
tmp1 = h_pos * H_neg / H2
tmp1[torch.isnan(tmp1)] = 0
d_AP_h_pos = (H_pos * H + h_pos * H_neg) / H2
d_AP_h_pos = d_AP_h_pos + torch.mm(tmp1, LTM1.cuda())
d_AP_h_pos = d_AP_h_pos / N_pos.repeat(L, 1).t()
d_AP_h_pos[torch.isnan(d_AP_h_pos) | torch.isinf(d_AP_h_pos)] = 0
# 2. d(FastAP)/d(h-)
LTM0 = torch.tril(torch.ones(L, L), 0) # lower triangular matrix
tmp2 = -h_pos * H_pos / H2
tmp2[torch.isnan(tmp2)] = 0
d_AP_h_neg = torch.mm(tmp2, LTM0.cuda())
d_AP_h_neg = d_AP_h_neg / N_pos.repeat(L, 1).t()
d_AP_h_neg[torch.isnan(d_AP_h_neg) | torch.isinf(d_AP_h_neg)] = 0
# 3. d(FastAP)/d(embedding)
d_AP_x = 0
for l in range(L):
dpulse = dSoftBinning(dist2, Z[l], Delta)
dpulse[torch.isnan(dpulse) | torch.isinf(dpulse)] = 0
ddp = dpulse * I_pos
ddn = dpulse * I_neg
alpha_p = torch.diag(d_AP_h_pos[:, l]) # N*N
alpha_n = torch.diag(d_AP_h_neg[:, l])
Ap = torch.mm(ddp, alpha_p) + torch.mm(alpha_p, ddp)
An = torch.mm(ddn, alpha_n) + torch.mm(alpha_n, ddn)
# accumulate gradient
d_AP_x = d_AP_x - torch.mm(input.t(), (Ap + An))
grad_input = -d_AP_x
return grad_input.t(), None, None
######################################
#######ORIGINAL IMPLEMENTATION########
######################################
# FROM https://github.com/kunhe/FastAP-metric-learning/blob/master/pytorch/FastAP_loss.py
# This code is copied directly from the official implementation
# so that we can make sure our implementation returns the same result.
# It's copied under the MIT license.
class OriginalImplementationFastAPLoss(torch.nn.Module):
"""
FastAP - loss layer definition
This class implements the FastAP loss from the following paper:
"Deep Metric Learning to Rank",
F. Cakir, K. He, X. Xia, B. Kulis, S. Sclaroff. CVPR 2019
"""
def __init__(self, num_bins=10):
super(OriginalImplementationFastAPLoss, self).__init__()
self.num_bins = num_bins
def forward(self, batch, labels):
return OriginalImplementationFastAP.apply(batch, labels, self.num_bins)
### Testing this library's implementation ###
import unittest
from pytorch_metric_learning.losses import FastAPLoss
from .. import TEST_DEVICE, TEST_DTYPES
from ..zzz_testing_utils.testing_utils import angle_to_coord
class TestFastAPLoss(unittest.TestCase):
def test_fast_ap_loss(self):
num_bins = 5
loss_func = FastAPLoss(num_bins)
original_loss_func = OriginalImplementationFastAPLoss(num_bins)
ref_emb = torch.randn(32, 32)
ref_labels = torch.randint(0, 10, (32,))
for dtype in TEST_DTYPES:
embedding_angles = torch.arange(0, 180)
embeddings = torch.tensor(
[angle_to_coord(a) for a in embedding_angles],
requires_grad=True,
dtype=dtype,
).to(
TEST_DEVICE
) # 2D embeddings
labels = torch.randint(low=0, high=10, size=(180,)).to(TEST_DEVICE)
loss = loss_func(embeddings, labels)
loss.backward()
original_loss = original_loss_func(
torch.nn.functional.normalize(embeddings), labels
)
rtol = 1e-2 if dtype == torch.float16 else 1e-5
self.assertTrue(torch.isclose(loss, original_loss, rtol=rtol))
# fastap doesn't support ref_emb
self.assertRaises(
ValueError,
lambda: loss_func(
embeddings, labels, ref_emb=ref_emb, ref_labels=ref_labels
),
)
| 36.045643 | 89 | 0.590998 | torch.mm(input, input.t())
# 3. estimate discrete histograms
Delta = torch.tensor(4.0 / num_bins).to(input.device)
Z = torch.linspace(0.0, 4.0, steps=num_bins + 1).to(input.device)
L = Z.size()[0]
h_pos = torch.zeros((N, L), dtype=input.dtype).to(input.device)
h_neg = torch.zeros((N, L), dtype=input.dtype).to(input.device)
for l in range(L):
pulse = softBinning(dist2, Z[l], Delta)
h_pos[:, l] = torch.sum(pulse * I_pos, 1)
h_neg[:, l] = torch.sum(pulse * I_neg, 1)
H_pos = torch.cumsum(h_pos, 1)
h = h_pos + h_neg
H = torch.cumsum(h, 1)
# 4. compate FastAP
FastAP = h_pos * H_pos / H
FastAP[torch.isnan(FastAP) | torch.isinf(FastAP)] = 0
FastAP = torch.sum(FastAP, 1) / N_pos
FastAP = FastAP[~torch.isnan(FastAP)]
loss = 1 - torch.mean(FastAP)
# 6. save for backward
ctx.save_for_backward(input, target)
ctx.Z = Z
ctx.Delta = Delta
ctx.dist2 = dist2
ctx.I_pos = I_pos
ctx.I_neg = I_neg
ctx.h_pos = h_pos
ctx.h_neg = h_neg
ctx.H_pos = H_pos
ctx.N_pos = N_pos
ctx.h = h
ctx.H = H
ctx.L = torch.tensor(L)
return loss
@staticmethod
def backward(ctx, grad_output):
input, target = ctx.saved_tensors
Z = Variable(ctx.Z, requires_grad=False)
Delta = Variable(ctx.Delta, requires_grad=False)
dist2 = Variable(ctx.dist2, requires_grad=False)
I_pos = Variable(ctx.I_pos, requires_grad=False)
I_neg = Variable(ctx.I_neg, requires_grad=False)
h = Variable(ctx.h, requires_grad=False)
H = Variable(ctx.H, requires_grad=False)
h_pos = Variable(ctx.h_pos, requires_grad=False)
h_neg = Variable(ctx.h_neg, requires_grad=False)
H_pos = Variable(ctx.H_pos, requires_grad=False)
N_pos = Variable(ctx.N_pos, requires_grad=False)
L = Z.size()[0]
H2 = torch.pow(H, 2)
H_neg = H - H_pos
# 1. d(FastAP)/d(h+)
LTM1 = torch.tril(torch.ones(L, L), -1) # lower traingular matrix
tmp1 = h_pos * H_neg / H2
tmp1[torch.isnan(tmp1)] = 0
d_AP_h_pos = (H_pos * H + h_pos * H_neg) / H2
d_AP_h_pos = d_AP_h_pos + torch.mm(tmp1, LTM1.cuda())
d_AP_h_pos = d_AP_h_pos / N_pos.repeat(L, 1).t()
d_AP_h_pos[torch.isnan(d_AP_h_pos) | torch.isinf(d_AP_h_pos)] = 0
# 2. d(FastAP)/d(h-)
LTM0 = torch.tril(torch.ones(L, L), 0) # lower triangular matrix
tmp2 = -h_pos * H_pos / H2
tmp2[torch.isnan(tmp2)] = 0
d_AP_h_neg = torch.mm(tmp2, LTM0.cuda())
d_AP_h_neg = d_AP_h_neg / N_pos.repeat(L, 1).t()
d_AP_h_neg[torch.isnan(d_AP_h_neg) | torch.isinf(d_AP_h_neg)] = 0
# 3. d(FastAP)/d(embedding)
d_AP_x = 0
for l in range(L):
dpulse = dSoftBinning(dist2, Z[l], Delta)
dpulse[torch.isnan(dpulse) | torch.isinf(dpulse)] = 0
ddp = dpulse * I_pos
ddn = dpulse * I_neg
alpha_p = torch.diag(d_AP_h_pos[:, l]) # N*N
alpha_n = torch.diag(d_AP_h_neg[:, l])
Ap = torch.mm(ddp, alpha_p) + torch.mm(alpha_p, ddp)
An = torch.mm(ddn, alpha_n) + torch.mm(alpha_n, ddn)
# accumulate gradient
d_AP_x = d_AP_x - torch.mm(input.t(), (Ap + An))
grad_input = -d_AP_x
return grad_input.t(), None, None
######################################
#######ORIGINAL IMPLEMENTATION########
######################################
# FROM https://github.com/kunhe/FastAP-metric-learning/blob/master/pytorch/FastAP_loss.py
# This code is copied directly from the official implementation
# so that we can make sure our implementation returns the same result.
# It's copied under the MIT license.
class OriginalImplementationFastAPLoss(torch.nn.Module):
def __init__(self, num_bins=10):
super(OriginalImplementationFastAPLoss, self).__init__()
self.num_bins = num_bins
def forward(self, batch, labels):
return OriginalImplementationFastAP.apply(batch, labels, self.num_bins)
TEST_DEVICE, TEST_DTYPES
from ..zzz_testing_utils.testing_utils import angle_to_coord
class TestFastAPLoss(unittest.TestCase):
def test_fast_ap_loss(self):
num_bins = 5
loss_func = FastAPLoss(num_bins)
original_loss_func = OriginalImplementationFastAPLoss(num_bins)
ref_emb = torch.randn(32, 32)
ref_labels = torch.randint(0, 10, (32,))
for dtype in TEST_DTYPES:
embedding_angles = torch.arange(0, 180)
embeddings = torch.tensor(
[angle_to_coord(a) for a in embedding_angles],
requires_grad=True,
dtype=dtype,
).to(
TEST_DEVICE
) # 2D embeddings
labels = torch.randint(low=0, high=10, size=(180,)).to(TEST_DEVICE)
loss = loss_func(embeddings, labels)
loss.backward()
original_loss = original_loss_func(
torch.nn.functional.normalize(embeddings), labels
)
rtol = 1e-2 if dtype == torch.float16 else 1e-5
self.assertTrue(torch.isclose(loss, original_loss, rtol=rtol))
# fastap doesn't support ref_emb
self.assertRaises(
ValueError,
lambda: loss_func(
embeddings, labels, ref_emb=ref_emb, ref_labels=ref_labels
),
)
| true | true |
f72520d5e3e11a9dcb64026d7e62f1b47d0d54fa | 1,554 | py | Python | setup.py | pgjones/flake8-flask | e7099ed941086c92b6f752d4cf2e95c027f7605d | [
"MIT"
] | null | null | null | setup.py | pgjones/flake8-flask | e7099ed941086c92b6f752d4cf2e95c027f7605d | [
"MIT"
] | null | null | null | setup.py | pgjones/flake8-flask | e7099ed941086c92b6f752d4cf2e95c027f7605d | [
"MIT"
] | null | null | null | import os
from setuptools import setup
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(PROJECT_ROOT, 'flake8_flask.py')) as file_:
version_line = [line for line in file_ if line.startswith('__version__')][0]
__version__ = version_line.split('=')[1].strip().strip("'").strip('"')
with open(os.path.join(PROJECT_ROOT, 'README.md')) as file_:
long_description = file_.read()
setup(
name='flake8_flask',
version=__version__,
description='Flake8 plugin that checks Flask code against opinionated style rules',
long_description=long_description,
url='https://github.com/pgjones/flake8-flask',
author='P G Jones',
author_email='philip.graham.jones@googlemail.com',
keywords=[
'flake8',
'plugin',
'flask',
],
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Framework :: Flake8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Quality Assurance',
],
py_modules=['flake8_flask'],
install_requires=[
'flake8',
'setuptools',
],
entry_points={
'flake8.extension': [
'F4 = flake8_flask:Linter',
],
},
zip_safe=False,
)
| 29.320755 | 87 | 0.622909 | import os
from setuptools import setup
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(PROJECT_ROOT, 'flake8_flask.py')) as file_:
version_line = [line for line in file_ if line.startswith('__version__')][0]
__version__ = version_line.split('=')[1].strip().strip("'").strip('"')
with open(os.path.join(PROJECT_ROOT, 'README.md')) as file_:
long_description = file_.read()
setup(
name='flake8_flask',
version=__version__,
description='Flake8 plugin that checks Flask code against opinionated style rules',
long_description=long_description,
url='https://github.com/pgjones/flake8-flask',
author='P G Jones',
author_email='philip.graham.jones@googlemail.com',
keywords=[
'flake8',
'plugin',
'flask',
],
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Framework :: Flake8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Quality Assurance',
],
py_modules=['flake8_flask'],
install_requires=[
'flake8',
'setuptools',
],
entry_points={
'flake8.extension': [
'F4 = flake8_flask:Linter',
],
},
zip_safe=False,
)
| true | true |
f72520ddf2e5e6b82e03983937f6030c0042fce3 | 4,902 | py | Python | testplan/exporters/testing/pdf/renderers/entries/base.py | ymn1k/testplan | b1bde8495c449d75a74a7fe4e7c6501b0476f833 | [
"Apache-2.0"
] | null | null | null | testplan/exporters/testing/pdf/renderers/entries/base.py | ymn1k/testplan | b1bde8495c449d75a74a7fe4e7c6501b0476f833 | [
"Apache-2.0"
] | null | null | null | testplan/exporters/testing/pdf/renderers/entries/base.py | ymn1k/testplan | b1bde8495c449d75a74a7fe4e7c6501b0476f833 | [
"Apache-2.0"
] | 1 | 2019-09-11T09:13:18.000Z | 2019-09-11T09:13:18.000Z | from reportlab.lib import colors
from reportlab.lib.units import inch
from reportlab.platypus import Image
from testplan.common.exporters.pdf import RowStyle, create_table
from testplan.common.exporters.pdf import format_table_style
from testplan.common.utils.registry import Registry
from testplan.testing.multitest.entries import base
from .. import constants
from ..base import BaseRowRenderer, RowData
class SerializedEntryRegistry(Registry):
"""
Registry that is used for binding assertion classes to PDF renderers.
Keep in mind that we pass around serialized version of assertion objects
(generated via `multitest.entries.schemas`) meaning that lookup
arguments will be dictionary representation instead of assertion object
instances, hence the need to use class names instead of class objects
for `data` keys.
"""
def get_record_key(self, obj):
return obj.__name__
def get_lookup_key(self, obj):
return obj['type']
def get_category(self, obj):
return obj['meta_type']
registry = SerializedEntryRegistry()
@registry.bind_default()
class SerializedEntryRenderer(BaseRowRenderer):
"""Default fallback for all serialized entries."""
def get_header(self, source, depth, row_idx):
"""Display the description or type as the header."""
styles = [RowStyle(font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * depth)]
header = source['description'] or source['type']
return RowData(content=[header, '', '', '' ],
style=styles,
start=row_idx)
def get_row_content(self, source):
"""
All entries will either have a description or type,
we display whatever is available.
"""
return [source['description'] or source['type'], '', '', '']
def get_row_data(self, source, depth, row_idx):
"""
Most entries will be rendered as single rows, so we use
`get_row_content` and `get_row_style` for simplicity.
"""
result = RowData(
content=self.get_row_content(source),
style=self.get_row_style(source, depth),
start=row_idx
)
return result
def get_row_style(self, source, depth, **kwargs):
"""Default styling for all serialized entries, with small font size."""
return RowStyle(
font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * depth,
**kwargs
)
def get_style(self, source):
if 'passed' in source and source['passed'] is False:
return self.style.failing
return self.style.passing
def should_display(self, source):
return self.get_style(source).display_assertion
@registry.bind(base.MatPlot)
class MatPlotRenderer(SerializedEntryRenderer):
"""Render a Matplotlib assertion from a serialized entry."""
def get_row_data(self, source, depth, row_idx):
"""
Load the Matplotlib graph from the saved image, set its height and width
and add it to the row.
"""
header = self.get_header(source, depth, row_idx)
styles = [RowStyle(font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * (depth + 1),
text_color=colors.black)]
img = Image(source['image_file_path'])
img.drawWidth = source['width'] * inch
img.drawHeight = source['height'] * inch
return header + RowData(content=[img, '', '', ''],
start=header.end,
style=styles)
@registry.bind(base.TableLog)
class TableLogRenderer(SerializedEntryRenderer):
"""Render a Table from a serialized entry."""
def get_row_data(self, source, depth, row_idx):
"""
Reformat the rows from the serialized data into a format ReportLab
accepts. Create a header and a ReportLab table and add it to the row.
"""
header = self.get_header(source, depth, row_idx)
row_style = [RowStyle(left_padding=constants.INDENT * (depth + 1))]
table_style = format_table_style(constants.DISPLAYED_TABLE_STYLE)
max_width = constants.PAGE_WIDTH - (depth * constants.INDENT)
table = create_table(table=source['table'],
columns=source['columns'],
row_indices=source['indices'],
display_index=source['display_index'],
max_width=max_width,
style=table_style)
return header + RowData(content=table,
start=header.end,
style=row_style)
| 36.311111 | 80 | 0.621991 | from reportlab.lib import colors
from reportlab.lib.units import inch
from reportlab.platypus import Image
from testplan.common.exporters.pdf import RowStyle, create_table
from testplan.common.exporters.pdf import format_table_style
from testplan.common.utils.registry import Registry
from testplan.testing.multitest.entries import base
from .. import constants
from ..base import BaseRowRenderer, RowData
class SerializedEntryRegistry(Registry):
def get_record_key(self, obj):
return obj.__name__
def get_lookup_key(self, obj):
return obj['type']
def get_category(self, obj):
return obj['meta_type']
registry = SerializedEntryRegistry()
@registry.bind_default()
class SerializedEntryRenderer(BaseRowRenderer):
def get_header(self, source, depth, row_idx):
styles = [RowStyle(font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * depth)]
header = source['description'] or source['type']
return RowData(content=[header, '', '', '' ],
style=styles,
start=row_idx)
def get_row_content(self, source):
return [source['description'] or source['type'], '', '', '']
def get_row_data(self, source, depth, row_idx):
result = RowData(
content=self.get_row_content(source),
style=self.get_row_style(source, depth),
start=row_idx
)
return result
def get_row_style(self, source, depth, **kwargs):
return RowStyle(
font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * depth,
**kwargs
)
def get_style(self, source):
if 'passed' in source and source['passed'] is False:
return self.style.failing
return self.style.passing
def should_display(self, source):
return self.get_style(source).display_assertion
@registry.bind(base.MatPlot)
class MatPlotRenderer(SerializedEntryRenderer):
def get_row_data(self, source, depth, row_idx):
header = self.get_header(source, depth, row_idx)
styles = [RowStyle(font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * (depth + 1),
text_color=colors.black)]
img = Image(source['image_file_path'])
img.drawWidth = source['width'] * inch
img.drawHeight = source['height'] * inch
return header + RowData(content=[img, '', '', ''],
start=header.end,
style=styles)
@registry.bind(base.TableLog)
class TableLogRenderer(SerializedEntryRenderer):
def get_row_data(self, source, depth, row_idx):
header = self.get_header(source, depth, row_idx)
row_style = [RowStyle(left_padding=constants.INDENT * (depth + 1))]
table_style = format_table_style(constants.DISPLAYED_TABLE_STYLE)
max_width = constants.PAGE_WIDTH - (depth * constants.INDENT)
table = create_table(table=source['table'],
columns=source['columns'],
row_indices=source['indices'],
display_index=source['display_index'],
max_width=max_width,
style=table_style)
return header + RowData(content=table,
start=header.end,
style=row_style)
| true | true |
f72521063380e9fb0c546147201e0347312a3209 | 560 | py | Python | python/0011_finding_the_percentage.py | basoares/hackerrank | 3d294ba6da38efff0da496b16085ca557ce35985 | [
"MIT"
] | null | null | null | python/0011_finding_the_percentage.py | basoares/hackerrank | 3d294ba6da38efff0da496b16085ca557ce35985 | [
"MIT"
] | null | null | null | python/0011_finding_the_percentage.py | basoares/hackerrank | 3d294ba6da38efff0da496b16085ca557ce35985 | [
"MIT"
] | null | null | null | '''
The provided code stub will read in a dictionary containing key/value pairs of
name:[marks] for a list of students. Print the average of the marks array for
the student name provided, showing 2 places after the decimal.
'''
if __name__ == '__main__':
n = int(input())
student_marks = {}
for _ in range(n):
name, *line = input().split()
scores = list(map(float, line))
student_marks[name] = scores
query_name = input()
marks = student_marks[query_name]
print(f'{(sum(marks)/float(len(marks))):.2f}')
| 29.473684 | 78 | 0.648214 | if __name__ == '__main__':
n = int(input())
student_marks = {}
for _ in range(n):
name, *line = input().split()
scores = list(map(float, line))
student_marks[name] = scores
query_name = input()
marks = student_marks[query_name]
print(f'{(sum(marks)/float(len(marks))):.2f}')
| true | true |
f72522049a195e8802f79fdd1006b47629498830 | 287 | py | Python | readH5.py | ChutianShen/pointnet_kitti | 6ebd2c7c203c4fcc8172f306c85e55ea06429ba5 | [
"MIT"
] | null | null | null | readH5.py | ChutianShen/pointnet_kitti | 6ebd2c7c203c4fcc8172f306c85e55ea06429ba5 | [
"MIT"
] | null | null | null | readH5.py | ChutianShen/pointnet_kitti | 6ebd2c7c203c4fcc8172f306c85e55ea06429ba5 | [
"MIT"
] | null | null | null | import h5py
filename = './sem_seg/indoor3d_sem_seg_hdf5_data/ply_data_all_0.h5'
#filename = './sem_seg/converted_KITTI/frame_10.h5'
f = h5py.File(filename, 'r')
data_file = f['data'][:]
label_file = f['label'][:]
print (data_file.shape, label_file.shape)
print (type(label_file[0])) | 22.076923 | 67 | 0.728223 | import h5py
filename = './sem_seg/indoor3d_sem_seg_hdf5_data/ply_data_all_0.h5'
f = h5py.File(filename, 'r')
data_file = f['data'][:]
label_file = f['label'][:]
print (data_file.shape, label_file.shape)
print (type(label_file[0])) | true | true |
f725220f95e7ed6a18489ee1563dd48ce5f224d6 | 2,985 | py | Python | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | 3 | 2021-12-22T17:44:39.000Z | 2022-01-14T17:18:15.000Z | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | null | null | null | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | null | null | null | import re
from itertools import combinations
from utils.solution_base import SolutionBase
class Solution(SolutionBase):
def solve(self, part_num: int):
self.test_runner(part_num)
func = getattr(self, f"part{part_num}")
result = func(self.data)
return result
def test_runner(self, part_num):
test_inputs = self.get_test_input()
test_results = self.get_test_result(part_num)
test_counter = 1
func = getattr(self, f"part{part_num}")
for i, r in zip(test_inputs, test_results):
if len(r):
if func(i) == int(r[0]):
print(f"test {test_counter} passed")
else:
print(func(i))
print(r[0])
print(f"test {test_counter} NOT passed")
test_counter += 1
print()
def part1(self, data):
addition = data[0]
for i in data[1:]:
addition = f"[{addition},{i}]"
while (t := self.reduction(addition)) != addition:
addition = t
return self.calc_magnitude(addition)
def reduction(self, s: str):
# explode
depth = 0
for i, v in enumerate(s):
if v.isnumeric() and depth > 4:
pair_close_pos = s[i:].index("]")
before_pair, pair, after_pair = s[: i - 1], s[i : i + pair_close_pos], s[i + pair_close_pos + 1 :]
pair = [*map(int, pair.split(","))]
before_pair = self.add_exploded_pair(before_pair, pair, 0)
after_pair = self.add_exploded_pair(after_pair, pair, 1)
return before_pair + "0" + after_pair
else:
depth += [1, -1]["[]".index(v)] if v in "[]" else 0
# split
large_regulars = [i for i in re.findall(r"\d+", s) if int(i) > 9]
if len(large_regulars):
reg = large_regulars[0]
reg_pos = s.index(reg)
before_reg, after_reg = s[:reg_pos], s[reg_pos + len(reg) :]
reg = int(reg)
elem_left = reg // 2
elem_right = reg - elem_left
s = before_reg + f"[{elem_left},{elem_right}]" + after_reg
return s
def add_exploded_pair(self, line, pair, pair_index):
all_regulars = re.findall(r"\d+", line)
if len(all_regulars):
reg = all_regulars[pair_index - 1]
reg_pos = [line.rindex, line.index][pair_index](reg)
line = line[:reg_pos] + str(int(reg) + pair[pair_index]) + line[reg_pos + len(reg) :]
return line
def calc_magnitude(self, s: str):
while s.count("["):
pairs = re.findall(r"\[(\d+),(\d+)\]", s)
for a, b in pairs:
s = s.replace(f"[{a},{b}]", str(int(a) * 3 + int(b) * 2))
return int(s)
def part2(self, data):
return max(max(self.part1(i), self.part1(i[::-1])) for i in combinations(data, 2))
| 34.310345 | 114 | 0.524958 | import re
from itertools import combinations
from utils.solution_base import SolutionBase
class Solution(SolutionBase):
def solve(self, part_num: int):
self.test_runner(part_num)
func = getattr(self, f"part{part_num}")
result = func(self.data)
return result
def test_runner(self, part_num):
test_inputs = self.get_test_input()
test_results = self.get_test_result(part_num)
test_counter = 1
func = getattr(self, f"part{part_num}")
for i, r in zip(test_inputs, test_results):
if len(r):
if func(i) == int(r[0]):
print(f"test {test_counter} passed")
else:
print(func(i))
print(r[0])
print(f"test {test_counter} NOT passed")
test_counter += 1
print()
def part1(self, data):
addition = data[0]
for i in data[1:]:
addition = f"[{addition},{i}]"
while (t := self.reduction(addition)) != addition:
addition = t
return self.calc_magnitude(addition)
def reduction(self, s: str):
depth = 0
for i, v in enumerate(s):
if v.isnumeric() and depth > 4:
pair_close_pos = s[i:].index("]")
before_pair, pair, after_pair = s[: i - 1], s[i : i + pair_close_pos], s[i + pair_close_pos + 1 :]
pair = [*map(int, pair.split(","))]
before_pair = self.add_exploded_pair(before_pair, pair, 0)
after_pair = self.add_exploded_pair(after_pair, pair, 1)
return before_pair + "0" + after_pair
else:
depth += [1, -1]["[]".index(v)] if v in "[]" else 0
large_regulars = [i for i in re.findall(r"\d+", s) if int(i) > 9]
if len(large_regulars):
reg = large_regulars[0]
reg_pos = s.index(reg)
before_reg, after_reg = s[:reg_pos], s[reg_pos + len(reg) :]
reg = int(reg)
elem_left = reg // 2
elem_right = reg - elem_left
s = before_reg + f"[{elem_left},{elem_right}]" + after_reg
return s
def add_exploded_pair(self, line, pair, pair_index):
all_regulars = re.findall(r"\d+", line)
if len(all_regulars):
reg = all_regulars[pair_index - 1]
reg_pos = [line.rindex, line.index][pair_index](reg)
line = line[:reg_pos] + str(int(reg) + pair[pair_index]) + line[reg_pos + len(reg) :]
return line
def calc_magnitude(self, s: str):
while s.count("["):
pairs = re.findall(r"\[(\d+),(\d+)\]", s)
for a, b in pairs:
s = s.replace(f"[{a},{b}]", str(int(a) * 3 + int(b) * 2))
return int(s)
def part2(self, data):
return max(max(self.part1(i), self.part1(i[::-1])) for i in combinations(data, 2))
| true | true |
f725222b5c43f17680f783bce12dae0b4c033b90 | 1,813 | py | Python | sdk/purview/azure-purview-administration/tests/testcase.py | praveenkuttappan/azure-sdk-for-python | 4b79413667b7539750a6c7dde15737013a3d4bd5 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/purview/azure-purview-administration/tests/testcase.py | v-xuto/azure-sdk-for-python | 9c6296d22094c5ede410bc83749e8df8694ccacc | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/purview/azure-purview-administration/tests/testcase.py | v-xuto/azure-sdk-for-python | 9c6296d22094c5ede410bc83749e8df8694ccacc | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import functools
from devtools_testutils import AzureTestCase, PowerShellPreparer
from azure.purview.administration.account import PurviewAccountClient
from azure.purview.administration.metadatapolicies import PurviewMetadataPoliciesClient
class PurviewAccountTest(AzureTestCase):
def __init__(self, method_name, **kwargs):
super(PurviewAccountTest, self).__init__(method_name, **kwargs)
def create_client(self, endpoint):
credential = self.get_credential(PurviewAccountClient)
return self.create_client_from_credential(
PurviewAccountClient,
credential=credential,
endpoint=endpoint,
)
PurviewAccountPowerShellPreparer = functools.partial(
PowerShellPreparer,
"purviewaccount",
purviewaccount_endpoint="https://fake_account.account.purview.azure.com"
)
class PurviewMetaPolicyTest(AzureTestCase):
def __init__(self, method_name, **kwargs):
super(PurviewMetaPolicyTest, self).__init__(method_name, **kwargs)
def create_client(self, endpoint):
credential = self.get_credential(PurviewMetadataPoliciesClient)
return self.create_client_from_credential(
PurviewMetadataPoliciesClient,
credential=credential,
endpoint=endpoint,
)
PurviewMetaPolicyPowerShellPreparer = functools.partial(
PowerShellPreparer,
"purviewmetapolicy",
purviewmetapolicy_endpoint="https://fake_account.account.purview.azure.com"
)
| 35.54902 | 87 | 0.689465 |
import functools
from devtools_testutils import AzureTestCase, PowerShellPreparer
from azure.purview.administration.account import PurviewAccountClient
from azure.purview.administration.metadatapolicies import PurviewMetadataPoliciesClient
class PurviewAccountTest(AzureTestCase):
def __init__(self, method_name, **kwargs):
super(PurviewAccountTest, self).__init__(method_name, **kwargs)
def create_client(self, endpoint):
credential = self.get_credential(PurviewAccountClient)
return self.create_client_from_credential(
PurviewAccountClient,
credential=credential,
endpoint=endpoint,
)
PurviewAccountPowerShellPreparer = functools.partial(
PowerShellPreparer,
"purviewaccount",
purviewaccount_endpoint="https://fake_account.account.purview.azure.com"
)
class PurviewMetaPolicyTest(AzureTestCase):
def __init__(self, method_name, **kwargs):
super(PurviewMetaPolicyTest, self).__init__(method_name, **kwargs)
def create_client(self, endpoint):
credential = self.get_credential(PurviewMetadataPoliciesClient)
return self.create_client_from_credential(
PurviewMetadataPoliciesClient,
credential=credential,
endpoint=endpoint,
)
PurviewMetaPolicyPowerShellPreparer = functools.partial(
PowerShellPreparer,
"purviewmetapolicy",
purviewmetapolicy_endpoint="https://fake_account.account.purview.azure.com"
)
| true | true |
f72523871d8c380b139a74c4120ab78813f97178 | 206 | py | Python | scripts/portal/NextStg.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | 9 | 2021-04-26T11:59:29.000Z | 2021-12-20T13:15:27.000Z | scripts/portal/NextStg.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | null | null | null | scripts/portal/NextStg.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | 6 | 2021-07-14T06:32:05.000Z | 2022-02-06T02:32:56.000Z | fieldID = sm.getFieldID()
if fieldID == 811000500:
sm.warpInstanceOut(811000008)
elif not sm.hasMobsInField():
sm.warp(fieldID + 100)
else:
sm.chat("The portal is not opened yet.")
sm.dispose()
| 22.888889 | 44 | 0.699029 | fieldID = sm.getFieldID()
if fieldID == 811000500:
sm.warpInstanceOut(811000008)
elif not sm.hasMobsInField():
sm.warp(fieldID + 100)
else:
sm.chat("The portal is not opened yet.")
sm.dispose()
| true | true |
f725258ae5d9a6973d2e1d634a663f91c4a30abf | 2,281 | py | Python | userbot/plugins/antiflood.py | Solivagantt/userbot | 9c2bb02ad24998e2739209381fcb66dc27daff32 | [
"MIT"
] | null | null | null | userbot/plugins/antiflood.py | Solivagantt/userbot | 9c2bb02ad24998e2739209381fcb66dc27daff32 | [
"MIT"
] | null | null | null | userbot/plugins/antiflood.py | Solivagantt/userbot | 9c2bb02ad24998e2739209381fcb66dc27daff32 | [
"MIT"
] | 2 | 2020-04-19T13:24:12.000Z | 2021-02-14T14:01:31.000Z | import asyncio
from telethon import events
from telethon.tl.functions.channels import EditBannedRequest
from telethon.tl.types import ChatBannedRights
from userbot.utils import admin_cmd
import userbot.plugins.sql_helper.antiflood_sql as sql
import userbot.utils
from userbot.utils import humanbytes, progress, time_formatter
# Per-chat flood settings, loaded once at import time from the SQL helper.
# Keys are chat ids as strings (see the membership test in the handler below).
CHAT_FLOOD = sql.__load_flood_settings()
# Warn mode for anti flood: indefinitely block the offender from sending
# messages (send_messages=True means the right is *banned* in Telethon).
ANTI_FLOOD_WARN_MODE = ChatBannedRights(
    until_date=None,
    view_messages=None,
    send_messages=True
)
@borg.on(admin_cmd(incoming=True))
async def _(event):
    """Watch incoming messages and auto-restrict users who exceed the
    configured flood limit for the chat (see CHAT_FLOOD / sql.update_flood).
    """
    # logger.info(CHAT_FLOOD)
    # Skip chats with no flood settings configured.
    if not CHAT_FLOOD:
        return
    if not (str(event.chat_id) in CHAT_FLOOD):
        return
    # TODO: exempt admins from this
    # sql.update_flood counts this message and reports whether the limit is hit.
    should_ban = sql.update_flood(event.chat_id, event.message.from_id)
    if not should_ban:
        return
    try:
        # Mute the flooder via the chat's banned-rights API.
        await event.client(EditBannedRequest(
            event.chat_id,
            event.message.from_id,
            ANTI_FLOOD_WARN_MODE
        ))
    except Exception as e: # pylint:disable=C0103,W0703
        # Bot lacks ban rights (or similar): ping the chat admins instead,
        # then replace the notice after 10s to avoid leaving noise behind.
        no_admin_privilege_message = await event.client.send_message(
            entity=event.chat_id,
            message="""**Automatic AntiFlooder**
@admin [User](tg://user?id={}) is flooding this chat.
`{}`""".format(event.message.from_id, str(e)),
            reply_to=event.message.id
        )
        await asyncio.sleep(10)
        await no_admin_privilege_message.edit(
            "https://t.me/keralagram/724970",
            link_preview=False
        )
    else:
        # Restriction succeeded: announce it in the chat.
        await event.client.send_message(
            entity=event.chat_id,
            message="""**Automatic AntiFlooder**
[User](tg://user?id={}) has been automatically restricted
because he reached the defined flood limit.""".format(event.message.from_id),
            reply_to=event.message.id
        )
@borg.on(admin_cmd(pattern="setflood (.*)"))
async def _(event):
if event.fwd_from:
return
input_str = event.pattern_match.group(1)
try:
sql.set_flood(event.chat_id, input_str)
CHAT_FLOOD = sql.__load_flood_settings()
await event.edit("Antiflood updated to {} in the current chat".format(input_str))
except Exception as e: # pylint:disable=C0103,W0703
await event.edit(str(e))
| 31.680556 | 89 | 0.676019 | import asyncio
from telethon import events
from telethon.tl.functions.channels import EditBannedRequest
from telethon.tl.types import ChatBannedRights
from userbot.utils import admin_cmd
import userbot.plugins.sql_helper.antiflood_sql as sql
import userbot.utils
from userbot.utils import humanbytes, progress, time_formatter
CHAT_FLOOD = sql.__load_flood_settings()
ANTI_FLOOD_WARN_MODE = ChatBannedRights(
until_date=None,
view_messages=None,
send_messages=True
)
@borg.on(admin_cmd(incoming=True))
async def _(event):
if not CHAT_FLOOD:
return
if not (str(event.chat_id) in CHAT_FLOOD):
return
should_ban = sql.update_flood(event.chat_id, event.message.from_id)
if not should_ban:
return
try:
await event.client(EditBannedRequest(
event.chat_id,
event.message.from_id,
ANTI_FLOOD_WARN_MODE
))
except Exception as e:
no_admin_privilege_message = await event.client.send_message(
entity=event.chat_id,
message="""**Automatic AntiFlooder**
@admin [User](tg://user?id={}) is flooding this chat.
`{}`""".format(event.message.from_id, str(e)),
reply_to=event.message.id
)
await asyncio.sleep(10)
await no_admin_privilege_message.edit(
"https://t.me/keralagram/724970",
link_preview=False
)
else:
await event.client.send_message(
entity=event.chat_id,
message="""**Automatic AntiFlooder**
[User](tg://user?id={}) has been automatically restricted
because he reached the defined flood limit.""".format(event.message.from_id),
reply_to=event.message.id
)
@borg.on(admin_cmd(pattern="setflood (.*)"))
async def _(event):
if event.fwd_from:
return
input_str = event.pattern_match.group(1)
try:
sql.set_flood(event.chat_id, input_str)
CHAT_FLOOD = sql.__load_flood_settings()
await event.edit("Antiflood updated to {} in the current chat".format(input_str))
except Exception as e:
await event.edit(str(e))
| true | true |
f725258bf01873662ce2f235301796c73aa1e811 | 72,727 | py | Python | sql/engines/tests.py | bosen365/Archery | 424479fcb77646de0c5ef2d68a6fae9d3a685f00 | [
"Apache-2.0"
] | 1 | 2019-06-03T09:39:40.000Z | 2019-06-03T09:39:40.000Z | sql/engines/tests.py | bosen365/Archery | 424479fcb77646de0c5ef2d68a6fae9d3a685f00 | [
"Apache-2.0"
] | 4 | 2021-06-05T00:00:45.000Z | 2022-02-10T12:17:14.000Z | sql/engines/tests.py | bosen365/Archery | 424479fcb77646de0c5ef2d68a6fae9d3a685f00 | [
"Apache-2.0"
] | 1 | 2021-12-01T05:36:59.000Z | 2021-12-01T05:36:59.000Z | import MySQLdb
import json
from datetime import timedelta, datetime
from unittest.mock import patch, Mock, ANY
import sqlparse
from django.contrib.auth import get_user_model
from django.test import TestCase
from common.config import SysConfig
from sql.engines import EngineBase
from sql.engines.goinception import GoInceptionEngine
from sql.engines.models import ResultSet, ReviewSet, ReviewResult
from sql.engines.mssql import MssqlEngine
from sql.engines.mysql import MysqlEngine
from sql.engines.redis import RedisEngine
from sql.engines.pgsql import PgSQLEngine
from sql.engines.oracle import OracleEngine
from sql.engines.mongo import MongoEngine
from sql.engines.inception import InceptionEngine, _repair_json_str
from sql.models import Instance, SqlWorkflow, SqlWorkflowContent
User = get_user_model()
class TestReviewSet(TestCase):
    """Tests for the ReviewSet result container."""
    def test_review_set(self):
        """json() output must contain data from the assigned rows."""
        review_set = ReviewSet()
        review_set.rows = [{'id': '1679123'}]
        serialized = review_set.json()
        self.assertIn('1679123', serialized)
class TestEngineBase(TestCase):
    """Tests for the EngineBase constructor against DB-backed fixtures."""
    @classmethod
    def setUpClass(cls):
        # Fixtures: a user, an instance, and a finished workflow with content.
        cls.u1 = User(username='some_user', display='用户1')
        cls.u1.save()
        cls.ins1 = Instance(instance_name='some_ins', type='master', db_type='mssql', host='some_host',
                            port=1366, user='ins_user', password='some_str')
        cls.ins1.save()
        cls.wf1 = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer=cls.u1.username,
            engineer_display=cls.u1.display,
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=cls.ins1,
            db_name='some_db',
            syntax_type=1
        )
        cls.wfc1 = SqlWorkflowContent.objects.create(
            workflow=cls.wf1,
            sql_content='some_sql',
            execute_result=json.dumps([{
                'id': 1,
                'sql': 'some_content'
            }]))
    @classmethod
    def tearDownClass(cls):
        # Delete in dependency order: content -> workflow -> instance -> user.
        cls.wfc1.delete()
        cls.wf1.delete()
        cls.ins1.delete()
        cls.u1.delete()
    def test_init_with_ins(self):
        """EngineBase should copy connection attributes from the Instance."""
        engine = EngineBase(instance=self.ins1)
        self.assertEqual(self.ins1.instance_name, engine.instance_name)
        self.assertEqual(self.ins1.user, engine.user)
class TestMssql(TestCase):
    """Tests for MssqlEngine with pyodbc mocked out."""
    @classmethod
    def setUpClass(cls):
        cls.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mssql', host='some_host',
                            port=1366, user='ins_user', password='some_str')
        cls.ins1.save()
        cls.engine = MssqlEngine(instance=cls.ins1)
        cls.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=cls.ins1,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=cls.wf, sql_content='insert into some_tb values (1)')
    @classmethod
    def tearDownClass(cls):
        cls.ins1.delete()
        cls.wf.delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('sql.engines.mssql.pyodbc.connect')
    def testGetConnection(self, connect):
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.get_connection()
        connect.assert_called_once()
    @patch('sql.engines.mssql.pyodbc.connect')
    def testQuery(self, connect):
        # Fake cursor: one row, two described columns.
        cur = Mock()
        connect.return_value.cursor = cur
        cur.return_value.execute = Mock()
        cur.return_value.fetchmany.return_value = (('v1', 'v2'),)
        cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des'))
        new_engine = MssqlEngine(instance=self.ins1)
        query_result = new_engine.query(sql='some_str', limit_num=100)
        cur.return_value.execute.assert_called()
        # limit_num should be forwarded to fetchmany.
        cur.return_value.fetchmany.assert_called_once_with(100)
        connect.return_value.close.assert_called_once()
        self.assertIsInstance(query_result, ResultSet)
    @patch.object(MssqlEngine, 'query')
    def testAllDb(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('db_1',), ('db_2',)]
        mock_query.return_value = db_result
        new_engine = MssqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_databases()
        self.assertEqual(dbs.rows, ['db_1', 'db_2'])
    @patch.object(MssqlEngine, 'query')
    def testAllTables(self, mock_query):
        table_result = ResultSet()
        table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')]
        mock_query.return_value = table_result
        new_engine = MssqlEngine(instance=self.ins1)
        tables = new_engine.get_all_tables('some_db')
        mock_query.assert_called_once_with(db_name='some_db', sql=ANY)
        self.assertEqual(tables.rows, ['tb_1', 'tb_2'])
    @patch.object(MssqlEngine, 'query')
    def testAllColumns(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('col_1', 'type'), ('col_2', 'type2')]
        mock_query.return_value = db_result
        new_engine = MssqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb')
        self.assertEqual(dbs.rows, ['col_1', 'col_2'])
    @patch.object(MssqlEngine, 'query')
    def testDescribe(self, mock_query):
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.describe_table('some_db', 'some_db')
        mock_query.assert_called_once()
    def testQueryCheck(self):
        new_engine = MssqlEngine(instance=self.ins1)
        # Spot-check a single banned function (concat).
        banned_sql = 'select concat(phone,1) from user_table'
        check_result = new_engine.query_check(db_name='some_db', sql=banned_sql)
        self.assertTrue(check_result.get('bad_query'))
        banned_sql = 'select phone from user_table where phone=concat(phone,1)'
        check_result = new_engine.query_check(db_name='some_db', sql=banned_sql)
        self.assertTrue(check_result.get('bad_query'))
        # Stored-procedure helper calls must pass through unmodified.
        sp_sql = "sp_helptext '[SomeName].[SomeAction]'"
        check_result = new_engine.query_check(db_name='some_db', sql=sp_sql)
        self.assertFalse(check_result.get('bad_query'))
        self.assertEqual(check_result.get('filtered_sql'), sp_sql)
    def test_filter_sql(self):
        new_engine = MssqlEngine(instance=self.ins1)
        # Spot-check: MSSQL uses TOP n rather than LIMIT.
        banned_sql = 'select user from user_table'
        check_result = new_engine.filter_sql(sql=banned_sql, limit_num=10)
        self.assertEqual(check_result, "select top 10 user from user_table")
    def test_execute_check(self):
        new_engine = MssqlEngine(instance=self.ins1)
        # Batches are split on GO separators (case-insensitive).
        test_sql = 'use database\ngo\nsome sql1\nGO\nsome sql2\n\r\nGo\nsome sql3\n\r\ngO\n'
        check_result = new_engine.execute_check(db_name=None, sql=test_sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[1].__dict__['sql'], "use database\n")
        self.assertEqual(check_result.rows[2].__dict__['sql'], "\nsome sql1\n")
        self.assertEqual(check_result.rows[4].__dict__['sql'], "\nsome sql3\n\r\n")
    @patch('sql.engines.mssql.MssqlEngine.execute')
    def test_execute_workflow(self, mock_execute):
        mock_execute.return_value.error = None
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.execute_workflow(self.wf)
        # execute() is called once per backup table plus once for the
        # actual statement; with no backups this means exactly one call.
        mock_execute.assert_called()
        self.assertEqual(1, mock_execute.call_count)
    @patch('sql.engines.mssql.MssqlEngine.get_connection')
    def test_execute(self, mock_connect):
        mock_cursor = Mock()
        mock_connect.return_value.cursor = mock_cursor
        new_engine = MssqlEngine(instance=self.ins1)
        execute_result = new_engine.execute('some_db', 'some_sql')
        # Success path: no error, statement committed.
        self.assertIsNone(execute_result.error)
        self.assertEqual('some_sql', execute_result.full_sql)
        self.assertEqual(2, len(execute_result.rows))
        mock_cursor.return_value.execute.assert_called()
        mock_cursor.return_value.commit.assert_called()
        mock_cursor.reset_mock()
        # Failure path: exception text captured, rollback instead of commit.
        mock_cursor.return_value.execute.side_effect = Exception('Boom! some exception!')
        execute_result = new_engine.execute('some_db', 'some_sql')
        self.assertIn('Boom! some exception!', execute_result.error)
        self.assertEqual('some_sql', execute_result.full_sql)
        self.assertEqual(2, len(execute_result.rows))
        mock_cursor.return_value.commit.assert_not_called()
        mock_cursor.return_value.rollback.assert_called()
class TestMysql(TestCase):
    """Tests for MysqlEngine with MySQLdb and the Inception engines mocked."""
    def setUp(self):
        self.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mysql', host='some_host',
                             port=1366, user='ins_user', password='some_str')
        self.ins1.save()
        self.sys_config = SysConfig()
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins1,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
    def tearDown(self):
        self.ins1.delete()
        self.sys_config.purge()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('MySQLdb.connect')
    def test_engine_base_info(self, _conn):
        new_engine = MysqlEngine(instance=self.ins1)
        self.assertEqual(new_engine.name, 'MySQL')
        self.assertEqual(new_engine.info, 'MySQL engine')
    @patch('MySQLdb.connect')
    def testGetConnection(self, connect):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_connection()
        connect.assert_called_once()
    @patch('MySQLdb.connect')
    def testQuery(self, connect):
        # Fake cursor: one row, two described columns.
        cur = Mock()
        connect.return_value.cursor = cur
        cur.return_value.execute = Mock()
        cur.return_value.fetchmany.return_value = (('v1', 'v2'),)
        cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des'))
        new_engine = MysqlEngine(instance=self.ins1)
        query_result = new_engine.query(sql='some_str', limit_num=100)
        cur.return_value.execute.assert_called()
        # Unlike the MSSQL engine, limit_num is passed as keyword `size`.
        cur.return_value.fetchmany.assert_called_once_with(size=100)
        connect.return_value.close.assert_called_once()
        self.assertIsInstance(query_result, ResultSet)
    @patch.object(MysqlEngine, 'query')
    def testAllDb(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('db_1',), ('db_2',)]
        mock_query.return_value = db_result
        new_engine = MysqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_databases()
        self.assertEqual(dbs.rows, ['db_1', 'db_2'])
    @patch.object(MysqlEngine, 'query')
    def testAllTables(self, mock_query):
        table_result = ResultSet()
        table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')]
        mock_query.return_value = table_result
        new_engine = MysqlEngine(instance=self.ins1)
        tables = new_engine.get_all_tables('some_db')
        mock_query.assert_called_once_with(db_name='some_db', sql=ANY)
        self.assertEqual(tables.rows, ['tb_1', 'tb_2'])
    @patch.object(MysqlEngine, 'query')
    def testAllColumns(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('col_1', 'type'), ('col_2', 'type2')]
        mock_query.return_value = db_result
        new_engine = MysqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb')
        self.assertEqual(dbs.rows, ['col_1', 'col_2'])
    @patch.object(MysqlEngine, 'query')
    def testDescribe(self, mock_query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.describe_table('some_db', 'some_db')
        mock_query.assert_called_once()
    def testQueryCheck(self):
        new_engine = MysqlEngine(instance=self.ins1)
        # Leading SQL comments should be stripped from the filtered SQL.
        sql_without_limit = '-- 测试\n select user from usertable'
        check_result = new_engine.query_check(db_name='some_db', sql=sql_without_limit)
        self.assertEqual(check_result['filtered_sql'], 'select user from usertable')
    def test_query_check_wrong_sql(self):
        new_engine = MysqlEngine(instance=self.ins1)
        # A comment-only "statement" is rejected as an unsupported query type.
        wrong_sql = '-- 测试'
        check_result = new_engine.query_check(db_name='some_db', sql=wrong_sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': '-- 测试', 'has_star': False})
    def test_query_check_update_sql(self):
        new_engine = MysqlEngine(instance=self.ins1)
        # DML is not allowed through the query interface.
        update_sql = 'update user set id=0'
        check_result = new_engine.query_check(db_name='some_db', sql=update_sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': 'update user set id=0',
                              'has_star': False})
    def test_filter_sql_with_delimiter(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable;'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 100;')
    def test_filter_sql_without_delimiter(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 100;')
    def test_filter_sql_with_limit(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')
    def test_filter_sql_with_limit_min(self):
        # The smaller of the existing LIMIT and limit_num wins.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 10;')
    def test_filter_sql_with_limit_offset(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10 offset 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')
    def test_filter_sql_with_limit_nn(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10, 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')
    def test_filter_sql_upper(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'SELECT USER FROM usertable LIMIT 10, 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'SELECT USER FROM usertable limit 1;')
    def test_filter_sql_not_select(self):
        # Non-SELECT statements pass through without a LIMIT clause.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'show create table usertable;'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'show create table usertable;')
    @patch('sql.engines.mysql.data_masking', return_value=ResultSet())
    def test_query_masking(self, _data_masking):
        query_result = ResultSet()
        new_engine = MysqlEngine(instance=self.ins1)
        masking_result = new_engine.query_masking(db_name='archery', sql='select 1', resultset=query_result)
        self.assertIsInstance(masking_result, ResultSet)
    @patch('sql.engines.mysql.data_masking', return_value=ResultSet())
    def test_query_masking_not_select(self, _data_masking):
        # Non-SELECT statements skip masking and return the input set as-is.
        query_result = ResultSet()
        new_engine = MysqlEngine(instance=self.ins1)
        masking_result = new_engine.query_masking(db_name='archery', sql='explain select 1', resultset=query_result)
        self.assertEqual(masking_result, query_result)
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_select_sql(self, _inception_engine):
        # SELECT submitted as a change ticket is rejected even if Inception passes it.
        self.sys_config.set('inception', 'true')
        sql = 'select * from user'
        inc_row = ReviewResult(id=1,
                               errlevel=0,
                               stagestatus='Audit completed',
                               errormessage='None',
                               sql=sql,
                               affected_rows=0,
                               execute_time=0, )
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sql)
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_critical_sql(self, _inception_engine):
        # Statements matching critical_ddl_regex are rejected before Inception's verdict.
        self.sys_config.set('inception', 'true')
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        inc_row = ReviewResult(id=1,
                               errlevel=0,
                               stagestatus='Audit completed',
                               errormessage='None',
                               sql=sql,
                               affected_rows=0,
                               execute_time=0, )
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sql)
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_normal_sql(self, _inception_engine):
        # Clean DML: Inception's review row is passed through unchanged.
        self.sys_config.set('inception', 'true')
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_normal_sql_with_Exception(self, _inception_engine):
        # Inception failures propagate to the caller.
        sql = 'update user set id=1'
        _inception_engine.return_value.execute_check.side_effect = RuntimeError()
        new_engine = MysqlEngine(instance=self.ins1)
        with self.assertRaises(RuntimeError):
            new_engine.execute_check(db_name=0, sql=sql)
    @patch.object(MysqlEngine, 'query')
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_workflow(self, _inception_engine, _query):
        self.sys_config.set('inception', 'true')
        sql = 'update user set id=1'
        _inception_engine.return_value.execute.return_value = ReviewSet(full_sql=sql)
        _query.return_value.rows = (('0',),)
        new_engine = MysqlEngine(instance=self.ins1)
        execute_result = new_engine.execute_workflow(self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    # NOTE(review): patching attributes *on* the MySQLdb.connect function
    # (connect.cursor / connect.cursor.execute) only works because mock
    # creates them; and execute() is given a workflow where the engine's
    # signature presumably expects (db_name, sql) — verify intent.
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_execute(self, _connect, _cursor, _execute):
        new_engine = MysqlEngine(instance=self.ins1)
        execute_result = new_engine.execute(self.wf)
        self.assertIsInstance(execute_result, ResultSet)
    @patch('MySQLdb.connect')
    def test_server_version(self, _connect):
        # Version string suffixes like "-16log" must be ignored.
        _connect.return_value.get_server_info.return_value = '5.7.20-16log'
        new_engine = MysqlEngine(instance=self.ins1)
        server_version = new_engine.server_version
        self.assertTupleEqual(server_version, (5, 7, 20))
    @patch.object(MysqlEngine, 'query')
    def test_get_variables_not_filter(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_variables()
        _query.assert_called_once()
    @patch('MySQLdb.connect')
    @patch.object(MysqlEngine, 'query')
    def test_get_variables_filter(self, _query, _connect):
        _connect.return_value.get_server_info.return_value = '5.7.20-16log'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_variables(variables=['binlog_format'])
        _query.assert_called()
    @patch.object(MysqlEngine, 'query')
    def test_set_variable(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.set_variable('binlog_format', 'ROW')
        _query.assert_called_once_with(sql="set global binlog_format=ROW;")
    @patch('sql.engines.mysql.GoInceptionEngine')
    def test_osc_go_inception(self, _inception_engine):
        # With inception disabled, OSC control is delegated to GoInception.
        self.sys_config.set('inception', 'false')
        _inception_engine.return_value.osc_control.return_value = ReviewSet()
        command = 'get'
        sqlsha1 = 'xxxxx'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
    @patch('sql.engines.mysql.InceptionEngine')
    def test_osc_inception(self, _inception_engine):
        # With inception enabled, OSC control is delegated to Inception.
        self.sys_config.set('inception', 'true')
        _inception_engine.return_value.osc_control.return_value = ReviewSet()
        command = 'get'
        sqlsha1 = 'xxxxx'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
    @patch.object(MysqlEngine, 'query')
    def test_kill_connection(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.kill_connection(100)
        _query.assert_called_once_with(sql="kill 100")
    @patch.object(MysqlEngine, 'query')
    def test_seconds_behind_master(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.seconds_behind_master
        _query.assert_called_once_with(sql="show slave status", close_conn=False,
                                       cursorclass=MySQLdb.cursors.DictCursor)
class TestRedis(TestCase):
    """Tests for RedisEngine with the redis client mocked."""
    @classmethod
    def setUpClass(cls):
        cls.ins = Instance(instance_name='some_ins', type='slave', db_type='redis', host='some_host',
                           port=1366, user='ins_user', password='some_str')
        cls.ins.save()
    @classmethod
    def tearDownClass(cls):
        cls.ins.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('redis.Redis')
    def test_engine_base_info(self, _conn):
        new_engine = RedisEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'Redis')
        self.assertEqual(new_engine.info, 'Redis engine')
    @patch('redis.Redis')
    def test_get_connection(self, _conn):
        new_engine = RedisEngine(instance=self.ins)
        new_engine.get_connection()
        _conn.assert_called_once()
    @patch('redis.Redis.execute_command', return_value=[1, 2, 3])
    def test_query_return_list(self, _execute_command):
        # A list reply becomes one result row per element.
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, ([1], [2], [3]))
    @patch('redis.Redis.execute_command', return_value='text')
    def test_query_return_str(self, _execute_command):
        # A scalar reply becomes a single one-column row.
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, (['text'],))
    @patch('redis.Redis.execute_command', return_value='text')
    def test_query_execute(self, _execute_command):
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, (['text'],))
    @patch('redis.Redis.config_get', return_value={"databases": 4})
    def test_get_all_databases(self, _config_get):
        # Database list is derived from the server's `databases` config value.
        new_engine = RedisEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['0', '1', '2', '3'])
    # NOTE(review): despite the name, this case expects "keys 1*" to be
    # *rejected* too — presumably KEYS is banned outright; confirm naming.
    def test_query_check_safe_cmd(self):
        safe_cmd = "keys 1*"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name=0, sql=safe_cmd)
        self.assertDictEqual(check_result,
                             {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False})
    def test_query_check_danger_cmd(self):
        safe_cmd = "keys *"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name=0, sql=safe_cmd)
        self.assertDictEqual(check_result,
                             {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False})
    def test_filter_sql(self):
        # Redis commands are passed through unchanged; limit_num is ignored.
        safe_cmd = "keys 1*"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=safe_cmd, limit_num=100)
        self.assertEqual(check_result, 'keys 1*')
    def test_query_masking(self):
        # Redis has no masking: the input result set is returned untouched.
        query_result = ResultSet()
        new_engine = RedisEngine(instance=self.ins)
        masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result)
        self.assertEqual(masking_result, query_result)
    def test_execute_check(self):
        sql = 'set 1 1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('redis.Redis.execute_command', return_value='text')
    def test_execute_workflow_success(self, _execute_command):
        sql = 'set 1 1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Execute Successfully',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        new_engine = RedisEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        # Only the result *shape* is asserted; execute_time etc. vary per run.
        self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class TestPgSQL(TestCase):
    """Unit tests for PgSQLEngine; psycopg2 is mocked throughout, so no
    real PostgreSQL server is required."""
    @classmethod
    def setUpClass(cls):
        # One shared pgsql Instance row for every test in this class.
        cls.ins = Instance(instance_name='some_ins', type='slave', db_type='pgsql', host='some_host',
                           port=1366, user='ins_user', password='some_str')
        cls.ins.save()
        cls.sys_config = SysConfig()
    @classmethod
    def tearDownClass(cls):
        cls.ins.delete()
        cls.sys_config.purge()
    @patch('psycopg2.connect')
    def test_engine_base_info(self, _conn):
        # Engine exposes a human-readable name/info pair.
        new_engine = PgSQLEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'PgSQL')
        self.assertEqual(new_engine.info, 'PgSQL engine')
    @patch('psycopg2.connect')
    def test_get_connection(self, _conn):
        new_engine = PgSQLEngine(instance=self.ins)
        new_engine.get_connection("some_dbname")
        _conn.assert_called_once()
    # NOTE(review): targets such as 'psycopg2.connect.cursor' patch attributes
    # on the connect callable itself; the extra mocks are unused placeholders.
    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect')
    def test_query(self, _conn, _cursor, _execute):
        # limit_num > 0 -> the fetchmany path is exercised.
        _conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)]
        new_engine = PgSQLEngine(instance=self.ins)
        query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=100, schema_name="some_schema")
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])
    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        # limit_num == 0 -> the fetchall path is exercised.
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = PgSQLEngine(instance=self.ins)
        query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=0, schema_name="some_schema")
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])
    @patch('sql.engines.pgsql.PgSQLEngine.query',
           return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)]))
    def test_get_all_databases(self, query):
        # System databases (postgres/template*) are expected to be filtered out.
        new_engine = PgSQLEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['archery'])
    @patch('sql.engines.pgsql.PgSQLEngine.query',
           return_value=ResultSet(rows=[('information_schema',), ('archery',), ('pg_catalog',)]))
    def test_get_all_schemas(self, _query):
        # System schemas (information_schema/pg_catalog) are filtered out.
        new_engine = PgSQLEngine(instance=self.ins)
        schemas = new_engine.get_all_schemas(db_name='archery')
        self.assertListEqual(schemas.rows, ['archery'])
    @patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)]))
    def test_get_all_tables(self, _query):
        # The table named 'test' is filtered from the listing.
        new_engine = PgSQLEngine(instance=self.ins)
        tables = new_engine.get_all_tables(db_name='archery', schema_name='archery')
        self.assertListEqual(tables.rows, ['test2'])
    @patch('sql.engines.pgsql.PgSQLEngine.query',
           return_value=ResultSet(rows=[('id',), ('name',)]))
    def test_get_all_columns_by_tb(self, _query):
        new_engine = PgSQLEngine(instance=self.ins)
        columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2', schema_name='archery')
        self.assertListEqual(columns.rows, ['id', 'name'])
    @patch('sql.engines.pgsql.PgSQLEngine.query',
           return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)]))
    def test_describe_table(self, _query):
        new_engine = PgSQLEngine(instance=self.ins)
        describe = new_engine.describe_table(db_name='archery', schema_name='archery', tb_name='text')
        self.assertIsInstance(describe, ResultSet)
    def test_query_check_disable_sql(self):
        # Non-SELECT statements must be rejected by query_check.
        sql = "update xxx set a=1 "
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False})
    def test_query_check_star_sql(self):
        # 'select *' is allowed but flagged via has_star.
        sql = "select * from xx "
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': 'SQL语句中含有 * ', 'bad_query': False, 'filtered_sql': sql.strip(), 'has_star': True})
    def test_filter_sql_with_delimiter(self):
        # A trailing ';' is normalized and a LIMIT clause is appended.
        sql = "select * from xx;"
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select * from xx limit 100;")
    def test_filter_sql_without_delimiter(self):
        sql = "select * from xx"
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select * from xx limit 100;")
    def test_filter_sql_with_limit(self):
        # An existing LIMIT clause is kept as-is rather than tightened.
        sql = "select * from xx limit 10"
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=1)
        self.assertEqual(check_result, "select * from xx limit 10;")
    def test_query_masking(self):
        # PgSQL masking is a pass-through: the result set comes back unchanged.
        query_result = ResultSet()
        new_engine = PgSQLEngine(instance=self.ins)
        masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result)
        self.assertEqual(masking_result, query_result)
    def test_execute_check_select_sql(self):
        # SELECT is rejected on the execute (workflow) path.
        sql = 'select * from user;'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sql)
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    def test_execute_check_critical_sql(self):
        # Statements matching critical_ddl_regex are rejected as high-risk.
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sql)
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    def test_execute_check_normal_sql(self):
        # With no critical regex configured, DDL passes the audit.
        self.sys_config.purge()
        sql = 'alter table tb set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect')
    def test_execute_workflow_success(self, _conn, _cursor, _execute):
        sql = 'update user set id=1'
        # Only the row structure (dict keys) is asserted, not the values.
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Execute Successfully',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        new_engine = PgSQLEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect', return_value=RuntimeError)
    def test_execute_workflow_exception(self, _conn, _cursor, _execute):
        # connect returns the RuntimeError class (not an instance), so the
        # engine fails with AttributeError when it uses the "connection".
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=2,
                           stagestatus='Execute Failed',
                           errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        with self.assertRaises(AttributeError):
            new_engine = PgSQLEngine(instance=self.ins)
            execute_result = new_engine.execute_workflow(workflow=wf)
            self.assertIsInstance(execute_result, ReviewSet)
            self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class TestModel(TestCase):
    """Guards the result models against the shared-mutable-default pitfall."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_result_set_rows_shadow(self):
        # A class-level default of [] combined with in-place (+=) updates would
        # leak accumulated rows into every later instance. Mutate one object's
        # rows in place, then verify a freshly constructed object still starts
        # with an empty list.
        polluted_result = ResultSet()
        for item in range(10):
            polluted_result.rows += [item]
        self.assertEqual(ResultSet().rows, [])
        polluted_review = ReviewSet()
        for item in range(10):
            polluted_review.rows += [item]
        self.assertEqual(ReviewSet().rows, [])
class TestInception(TestCase):
    """Unit tests for InceptionEngine; MySQLdb connections and the engine's
    own query() are mocked, so no Inception server is required."""
    def setUp(self):
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql', host='some_host',
                                           port=3306, user='ins_user', password='some_str')
        self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='inception',
                                               host='some_host', port=6669)
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
    def tearDown(self):
        self.ins.delete()
        self.ins_inc.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('MySQLdb.connect')
    def test_get_connection(self, _connect):
        new_engine = InceptionEngine()
        new_engine.get_connection()
        _connect.assert_called_once()
    @patch('MySQLdb.connect')
    def test_get_backup_connection(self, _connect):
        new_engine = InceptionEngine()
        new_engine.get_backup_connection()
        _connect.assert_called_once()
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_check_normal_sql(self, _query):
        sql = 'update user set id=100'
        # Simulated inception review row; see column_list in the tests below
        # for the field layout.
        row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        _query.return_value = ResultSet(full_sql=sql, rows=[row])
        new_engine = InceptionEngine()
        check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_exception(self, _query):
        # errlevel=1 simulates a failed execution reported by inception.
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence',
                       'backup_dbname', 'execute_time', 'sqlsha1']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_finish(self, _query):
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence',
                       'backup_dbname', 'execute_time', 'sqlsha1']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    # NOTE(review): targets such as 'MySQLdb.connect.cursor' patch attributes
    # on the connect callable itself; the extra mocks are unused placeholders.
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = InceptionEngine()
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = InceptionEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_query_print(self, _query):
        sql = 'update user set id=100'
        # row[3] is the query_tree JSON produced by inception's print mode;
        # query_print must parse it after _repair_json_str normalization.
        row = [1,
               'select * from sql_instance limit 100',
               0,
               '{"command":"select","select_list":[{"type":"FIELD_ITEM","field":"*"}],"table_ref":[{"db":"archery","table":"sql_instance"}],"limit":{"limit":[{"type":"INT_ITEM","value":"100"}]}}',
               'None']
        column_list = ['ID', 'statement', 'errlevel', 'query_tree', 'errmsg']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        print_result = new_engine.query_print(self.ins, db_name=None, sql=sql)
        self.assertDictEqual(print_result, json.loads(_repair_json_str(row[3])))
    @patch('MySQLdb.connect')
    def test_get_rollback_list(self, _connect):
        # Canned execute_result JSON, as stored by a finished workflow.
        self.wf.sqlworkflowcontent.execute_result = """[{
        "id": 1,
        "stage": "RERUN",
        "errlevel": 0,
        "stagestatus": "Execute Successfully",
        "errormessage": "None",
        "sql": "use archer_test",
        "affected_rows": 0,
        "sequence": "'1554135032_13038_0'",
        "backup_dbname": "None",
        "execute_time": "0.000",
        "sqlsha1": "",
        "actual_affected_rows": 0
    }, {
        "id": 2,
        "stage": "EXECUTED",
        "errlevel": 0,
        "stagestatus": "Execute Successfully Backup successfully",
        "errormessage": "None",
        "sql": "insert into tt1 (user_name)values('A'),('B'),('C')",
        "affected_rows": 3,
        "sequence": "'1554135032_13038_1'",
        "backup_dbname": "mysql_3306_archer_test",
        "execute_time": "0.000",
        "sqlsha1": "",
        "actual_affected_rows": 3
    }]"""
        self.wf.sqlworkflowcontent.save()
        new_engine = InceptionEngine()
        new_engine.get_rollback(self.wf)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_get(self, _query):
        new_engine = InceptionEngine()
        command = 'get'
        sqlsha1 = 'xxxxx'
        sql = f"inception get osc_percent '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_kill(self, _query):
        new_engine = InceptionEngine()
        command = 'kill'
        sqlsha1 = 'xxxxx'
        sql = f"inception stop alter '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_not_support(self, _query):
        # 'stop' is not a supported osc_control command for inception; it must
        # raise ValueError with the pt-osc message below.
        new_engine = InceptionEngine()
        command = 'stop'
        sqlsha1 = 'xxxxx'
        sql = f"inception stop alter '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        with self.assertRaisesMessage(ValueError, 'pt-osc不支持暂停和恢复,需要停止执行请使用终止按钮!'):
            new_engine.osc_control(sqlsha1=sqlsha1, command=command)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_get_variables(self, _query):
        new_engine = InceptionEngine(instance=self.ins_inc)
        new_engine.get_variables()
        sql = f"inception get variables;"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_get_variables_filter(self, _query):
        new_engine = InceptionEngine(instance=self.ins_inc)
        new_engine.get_variables(variables=['inception_osc_on'])
        sql = f"inception get variables 'inception_osc_on';"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_set_variable(self, _query):
        new_engine = InceptionEngine(instance=self.ins)
        new_engine.set_variable('inception_osc_on', 'on')
        _query.assert_called_once_with(sql="inception set inception_osc_on=on;")
class TestGoInception(TestCase):
    """Unit tests for GoInceptionEngine; MySQLdb connections and the engine's
    own query() are mocked. Mirrors TestInception, plus goinception's extra
    pause/resume osc commands and its wider result-row layout."""
    def setUp(self):
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql',
                                           host='some_host',
                                           port=3306, user='ins_user', password='some_str')
        self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='goinception',
                                               host='some_host', port=4000)
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
    def tearDown(self):
        self.ins.delete()
        self.ins_inc.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('MySQLdb.connect')
    def test_get_connection(self, _connect):
        new_engine = GoInceptionEngine()
        new_engine.get_connection()
        _connect.assert_called_once()
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_check_normal_sql(self, _query):
        sql = 'update user set id=100'
        # Simulated goinception review row; one more field (backup_time) than
        # the classic inception layout — see column_list in the tests below.
        row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        _query.return_value = ResultSet(full_sql=sql, rows=[row])
        new_engine = GoInceptionEngine()
        check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_exception(self, _query):
        # error_level=1 simulates a failed execution reported by goinception.
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql',
                       'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = GoInceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_finish(self, _query):
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql',
                       'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = GoInceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    # NOTE(review): targets such as 'MySQLdb.connect.cursor' patch attributes
    # on the connect callable itself; the extra mocks are unused placeholders.
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = GoInceptionEngine()
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = GoInceptionEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_get(self, _query):
        new_engine = GoInceptionEngine()
        command = 'get'
        sqlsha1 = 'xxxxx'
        sql = f"inception get osc_percent '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_pause(self, _query):
        # pause/resume are supported by goinception (unlike inception).
        new_engine = GoInceptionEngine()
        command = 'pause'
        sqlsha1 = 'xxxxx'
        sql = f"inception {command} osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_resume(self, _query):
        new_engine = GoInceptionEngine()
        command = 'resume'
        sqlsha1 = 'xxxxx'
        sql = f"inception {command} osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_kill(self, _query):
        new_engine = GoInceptionEngine()
        command = 'kill'
        sqlsha1 = 'xxxxx'
        sql = f"inception kill osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_get_variables(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins_inc)
        new_engine.get_variables()
        sql = f"inception get variables;"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_get_variables_filter(self, _query):
        # goinception uses 'like' in the filtered variables statement.
        new_engine = GoInceptionEngine(instance=self.ins_inc)
        new_engine.get_variables(variables=['inception_osc_on'])
        sql = f"inception get variables like 'inception_osc_on';"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_set_variable(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins)
        new_engine.set_variable('inception_osc_on', 'on')
        _query.assert_called_once_with(sql="inception set inception_osc_on=on;")
class TestOracle(TestCase):
    """Unit tests for OracleEngine; cx_Oracle is mocked throughout, so no
    Oracle server is required."""
    def setUp(self):
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='oracle',
                                           host='some_host', port=3306, user='ins_user', password='some_str',
                                           sid='some_id')
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
        self.sys_config = SysConfig()
    def tearDown(self):
        self.ins.delete()
        self.sys_config.purge()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('cx_Oracle.makedsn')
    @patch('cx_Oracle.connect')
    def test_get_connection(self, _connect, _makedsn):
        # Case 1: sid is set.
        new_engine = OracleEngine(self.ins)
        new_engine.get_connection()
        _connect.assert_called_once()
        _makedsn.assert_called_once()
        # Case 2: service_name set, sid empty.
        _connect.reset_mock()
        _makedsn.reset_mock()
        self.ins.service_name = 'some_service'
        self.ins.sid = ''
        self.ins.save()
        new_engine = OracleEngine(self.ins)
        new_engine.get_connection()
        _connect.assert_called_once()
        _makedsn.assert_called_once()
        # Case 3: neither set -> ValueError expected.
        _connect.reset_mock()
        _makedsn.reset_mock()
        self.ins.service_name = ''
        self.ins.sid = ''
        self.ins.save()
        new_engine = OracleEngine(self.ins)
        with self.assertRaises(ValueError):
            new_engine.get_connection()
    @patch('cx_Oracle.connect')
    def test_engine_base_info(self, _conn):
        # name/info pair plus server_version parsed from the connect version.
        new_engine = OracleEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'Oracle')
        self.assertEqual(new_engine.info, 'Oracle engine')
        _conn.return_value.version = '12.1.0.2.0'
        self.assertTupleEqual(new_engine.server_version, ('12', '1', '0'))
    # NOTE(review): targets such as 'cx_Oracle.connect.cursor' patch attributes
    # on the connect callable itself; the extra mocks are unused placeholders.
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_query(self, _conn, _cursor, _execute):
        # limit_num > 0 -> the fetchmany path is exercised.
        _conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)]
        new_engine = OracleEngine(instance=self.ins)
        query_result = new_engine.query(db_name='archery', sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        # limit_num == 0 -> the fetchall path is exercised.
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = OracleEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)]))
    def test_get_all_databases(self, _query):
        # System accounts (AUD_SYS/ANONYMOUS) are filtered out.
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)]))
    def test__get_all_databases(self, _query):
        # The private variant returns the full, unfiltered list.
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine._get_all_databases()
        self.assertListEqual(dbs.rows, ['AUD_SYS', 'archery', 'ANONYMOUS'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('archery',)]))
    def test__get_all_instances(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine._get_all_instances()
        self.assertListEqual(dbs.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('ANONYMOUS',), ('archery',), ('SYSTEM',)]))
    def test_get_all_schemas(self, _query):
        # System schemas (ANONYMOUS/SYSTEM) are filtered out.
        new_engine = OracleEngine(instance=self.ins)
        schemas = new_engine._get_all_schemas()
        self.assertListEqual(schemas.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)]))
    def test_get_all_tables(self, _query):
        # The table named 'test' is filtered from the listing.
        new_engine = OracleEngine(instance=self.ins)
        tables = new_engine.get_all_tables(db_name='archery')
        self.assertListEqual(tables.rows, ['test2'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('id',), ('name',)]))
    def test_get_all_columns_by_tb(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2')
        self.assertListEqual(columns.rows, ['id', 'name'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('archery',), ('template1',), ('template0',)]))
    def test_describe_table(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        describe = new_engine.describe_table(db_name='archery', tb_name='text')
        self.assertIsInstance(describe, ResultSet)
    def test_query_check_disable_sql(self):
        # Non-SELECT statements must be rejected by query_check.
        sql = "update xxx set a=1;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持语法!', 'bad_query': True, 'filtered_sql': sql.strip(';'),
                              'has_star': False})
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    def test_query_check_star_sql(self, _explain_check):
        # 'select *' is allowed but flagged via has_star.
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '禁止使用 * 关键词\n', 'bad_query': False, 'filtered_sql': sql.strip(';'),
                              'has_star': True})
    def test_query_check_IndexError(self):
        # An empty statement must not crash query_check with IndexError.
        sql = ""
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '没有有效的SQL语句', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False})
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    def test_query_check_plus(self, _explain_check):
        # The '+' keyword (old-style Oracle join syntax) is a hard reject.
        sql = "select 100+1 from tb;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '禁止使用 + 关键词\n', 'bad_query': True, 'filtered_sql': sql.strip(';'),
                              'has_star': False})
    def test_filter_sql_with_delimiter(self):
        # Oracle limits rows by wrapping the query in a rownum subselect.
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100")
    def test_filter_sql_with_delimiter_and_where(self):
        sql = "select * from xx where id>1;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result,
                         "select sql_audit.* from (select * from xx where id>1) sql_audit where rownum <= 100")
    def test_filter_sql_without_delimiter(self):
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100")
    def test_filter_sql_with_limit(self):
        # 'limit' is not Oracle syntax, so it is kept inside the subquery.
        sql = "select * from xx limit 10;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=1)
        self.assertEqual(check_result,
                         "select sql_audit.* from (select * from xx limit 10) sql_audit where rownum <= 1")
    def test_query_masking(self):
        # Oracle masking is a pass-through: the result set comes back unchanged.
        query_result = ResultSet()
        new_engine = OracleEngine(instance=self.ins)
        masking_result = new_engine.query_masking(schema_name='', sql='select 1', resultset=query_result)
        self.assertEqual(masking_result, query_result)
    def test_execute_check_select_sql(self):
        # SELECT is rejected on the execute (workflow) path.
        sql = 'select * from user;'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'))
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    def test_execute_check_critical_sql(self):
        # Statements matching critical_ddl_regex are rejected as high-risk.
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'))
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    # NOTE(review): @patch decorators are applied bottom-up, so these mock
    # parameter names are actually in reverse order relative to the decorator
    # stack; harmless here because none of the mocks are used in the body.
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    @patch('sql.engines.oracle.OracleEngine.get_sql_first_object_name', return_value='tb')
    @patch('sql.engines.oracle.OracleEngine.object_name_check', return_value=True)
    def test_execute_check_normal_sql(self, _explain_check, _get_sql_first_object_name, _object_name_check):
        self.sys_config.purge()
        sql = 'alter table tb set id=1'
        row = ReviewResult(id=1,
                           errlevel=1,
                           stagestatus='当前平台,此语法不支持审核!',
                           errormessage='当前平台,此语法不支持审核!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'),
                           affected_rows=0,
                           execute_time=0,
                           stmt_type='SQL',
                           object_owner='',
                           object_type='',
                           object_name='',
                           )
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_execute_workflow_success(self, _conn, _cursor, _execute):
        sql = 'update user set id=1'
        # review_row is what execute_check stored; execute_row carries the
        # slimmer shape produced by execute_workflow — only keys are compared.
        review_row = ReviewResult(id=1,
                                  errlevel=0,
                                  stagestatus='Execute Successfully',
                                  errormessage='None',
                                  sql=sql,
                                  affected_rows=0,
                                  execute_time=0,
                                  stmt_type='SQL',
                                  object_owner='',
                                  object_type='',
                                  object_name='', )
        execute_row = ReviewResult(id=1,
                                   errlevel=0,
                                   stagestatus='Execute Successfully',
                                   errormessage='None',
                                   sql=sql,
                                   affected_rows=0,
                                   execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql,
                                          review_content=ReviewSet(rows=[review_row]).json())
        new_engine = OracleEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        self.assertEqual(execute_result.rows[0].__dict__.keys(), execute_row.__dict__.keys())
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect', return_value=RuntimeError)
    def test_execute_workflow_exception(self, _conn, _cursor, _execute):
        # connect returns the RuntimeError class (not an instance), so the
        # engine fails with AttributeError when it uses the "connection".
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=2,
                           stagestatus='Execute Failed',
                           errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0,
                           stmt_type='SQL',
                           object_owner='',
                           object_type='',
                           object_name='',
                           )
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql, review_content=ReviewSet(rows=[row]).json())
        with self.assertRaises(AttributeError):
            new_engine = OracleEngine(instance=self.ins)
            execute_result = new_engine.execute_workflow(workflow=wf)
            self.assertIsInstance(execute_result, ReviewSet)
            self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class MongoTest(TestCase):
    """Unit tests for MongoEngine; pymongo access is mocked, so no MongoDB
    server is required."""
    def setUp(self) -> None:
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mongo',
                                           host='some_host', port=3306, user='ins_user')
        self.engine = MongoEngine(instance=self.ins)
    def tearDown(self) -> None:
        self.ins.delete()
    @patch('sql.engines.mongo.pymongo')
    def test_get_connection(self, mock_pymongo):
        _ = self.engine.get_connection()
        mock_pymongo.MongoClient.assert_called_once()
    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_query(self, mock_get_connection):
        # TODO the normal (non-count) query path is not covered yet.
        test_sql = """{"collection": "job","count": true}"""
        self.assertIsInstance(self.engine.query('archery', test_sql), ResultSet)
    def test_query_check(self):
        # A well-formed JSON query document must not be flagged as bad.
        test_sql = """{"collection": "job","count": true}"""
        check_result = self.engine.query_check(sql=test_sql)
        self.assertEqual(False, check_result.get('bad_query'))
    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_get_all_databases(self, mock_get_connection):
        db_list = self.engine.get_all_databases()
        self.assertIsInstance(db_list, ResultSet)
        # mock_get_connection.return_value.list_database_names.assert_called_once()
    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_get_all_tables(self, mock_get_connection):
        mock_db = Mock()
        # Sample return value for list_collection_names on the mocked db.
        mock_db.list_collection_names.return_value = ['u', 'v', 'w']
        mock_get_connection.return_value = {'some_db': mock_db}
        table_list = self.engine.get_all_tables('some_db')
        mock_db.list_collection_names.assert_called_once()
        self.assertEqual(table_list.rows, ['u', 'v', 'w'])
| 45.425984 | 196 | 0.636545 | import MySQLdb
import json
from datetime import timedelta, datetime
from unittest.mock import patch, Mock, ANY
import sqlparse
from django.contrib.auth import get_user_model
from django.test import TestCase
from common.config import SysConfig
from sql.engines import EngineBase
from sql.engines.goinception import GoInceptionEngine
from sql.engines.models import ResultSet, ReviewSet, ReviewResult
from sql.engines.mssql import MssqlEngine
from sql.engines.mysql import MysqlEngine
from sql.engines.redis import RedisEngine
from sql.engines.pgsql import PgSQLEngine
from sql.engines.oracle import OracleEngine
from sql.engines.mongo import MongoEngine
from sql.engines.inception import InceptionEngine, _repair_json_str
from sql.models import Instance, SqlWorkflow, SqlWorkflowContent
# Resolve the active Django user model once for all test cases in this module.
User = get_user_model()
class TestReviewSet(TestCase):
    """Sanity check for ReviewSet JSON serialization."""

    def test_review_set(self):
        # A row assigned to .rows must appear in the JSON output.
        review_set = ReviewSet()
        review_set.rows = [{'id': '1679123'}]
        serialized = review_set.json()
        self.assertIn('1679123', serialized)
class TestEngineBase(TestCase):
    """Tests for the EngineBase constructor against a saved Instance."""

    @classmethod
    def setUpClass(cls):
        # One user / instance / workflow fixture shared by every test here.
        cls.u1 = User(username='some_user', display='用户1')
        cls.u1.save()
        cls.ins1 = Instance(instance_name='some_ins', type='master', db_type='mssql', host='some_host',
                            port=1366, user='ins_user', password='some_str')
        cls.ins1.save()
        cls.wf1 = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer=cls.u1.username,
            engineer_display=cls.u1.display,
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=cls.ins1,
            db_name='some_db',
            syntax_type=1
        )
        cls.wfc1 = SqlWorkflowContent.objects.create(
            workflow=cls.wf1,
            sql_content='some_sql',
            execute_result=json.dumps([{
                'id': 1,
                'sql': 'some_content'
            }]))

    @classmethod
    def tearDownClass(cls):
        # Delete fixtures in reverse dependency order (content -> workflow -> instance -> user).
        cls.wfc1.delete()
        cls.wf1.delete()
        cls.ins1.delete()
        cls.u1.delete()

    def test_init_with_ins(self):
        # EngineBase should copy connection attributes off the Instance.
        engine = EngineBase(instance=self.ins1)
        self.assertEqual(self.ins1.instance_name, engine.instance_name)
        self.assertEqual(self.ins1.user, engine.user)
class TestMssql(TestCase):
    """Tests for MssqlEngine with pyodbc mocked out."""

    @classmethod
    def setUpClass(cls):
        cls.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mssql', host='some_host',
                            port=1366, user='ins_user', password='some_str')
        cls.ins1.save()
        cls.engine = MssqlEngine(instance=cls.ins1)
        cls.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=cls.ins1,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=cls.wf, sql_content='insert into some_tb values (1)')

    @classmethod
    def tearDownClass(cls):
        cls.ins1.delete()
        cls.wf.delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('sql.engines.mssql.pyodbc.connect')
    def testGetConnection(self, connect):
        # get_connection should open exactly one pyodbc connection.
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.get_connection()
        connect.assert_called_once()

    @patch('sql.engines.mssql.pyodbc.connect')
    def testQuery(self, connect):
        # query() must execute, fetch limit_num rows, then close the connection.
        cur = Mock()
        connect.return_value.cursor = cur
        cur.return_value.execute = Mock()
        cur.return_value.fetchmany.return_value = (('v1', 'v2'),)
        cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des'))
        new_engine = MssqlEngine(instance=self.ins1)
        query_result = new_engine.query(sql='some_str', limit_num=100)
        cur.return_value.execute.assert_called()
        cur.return_value.fetchmany.assert_called_once_with(100)
        connect.return_value.close.assert_called_once()
        self.assertIsInstance(query_result, ResultSet)

    @patch.object(MssqlEngine, 'query')
    def testAllDb(self, mock_query):
        # Database rows are flattened from 1-tuples to plain names.
        db_result = ResultSet()
        db_result.rows = [('db_1',), ('db_2',)]
        mock_query.return_value = db_result
        new_engine = MssqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_databases()
        self.assertEqual(dbs.rows, ['db_1', 'db_2'])

    @patch.object(MssqlEngine, 'query')
    def testAllTables(self, mock_query):
        # Only the table-name column is kept from the query result.
        table_result = ResultSet()
        table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')]
        mock_query.return_value = table_result
        new_engine = MssqlEngine(instance=self.ins1)
        tables = new_engine.get_all_tables('some_db')
        mock_query.assert_called_once_with(db_name='some_db', sql=ANY)
        self.assertEqual(tables.rows, ['tb_1', 'tb_2'])

    @patch.object(MssqlEngine, 'query')
    def testAllColumns(self, mock_query):
        # Only the column-name column is kept from the query result.
        db_result = ResultSet()
        db_result.rows = [('col_1', 'type'), ('col_2', 'type2')]
        mock_query.return_value = db_result
        new_engine = MssqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb')
        self.assertEqual(dbs.rows, ['col_1', 'col_2'])

    @patch.object(MssqlEngine, 'query')
    def testDescribe(self, mock_query):
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.describe_table('some_db', 'some_db')
        mock_query.assert_called_once()

    def testQueryCheck(self):
        # concat() is a banned function; stored-procedure helpers pass through.
        new_engine = MssqlEngine(instance=self.ins1)
        banned_sql = 'select concat(phone,1) from user_table'
        check_result = new_engine.query_check(db_name='some_db', sql=banned_sql)
        self.assertTrue(check_result.get('bad_query'))
        banned_sql = 'select phone from user_table where phone=concat(phone,1)'
        check_result = new_engine.query_check(db_name='some_db', sql=banned_sql)
        self.assertTrue(check_result.get('bad_query'))
        sp_sql = "sp_helptext '[SomeName].[SomeAction]'"
        check_result = new_engine.query_check(db_name='some_db', sql=sp_sql)
        self.assertFalse(check_result.get('bad_query'))
        self.assertEqual(check_result.get('filtered_sql'), sp_sql)

    def test_filter_sql(self):
        # MSSQL row limiting is expressed as "select top N ...".
        new_engine = MssqlEngine(instance=self.ins1)
        banned_sql = 'select user from user_table'
        check_result = new_engine.filter_sql(sql=banned_sql, limit_num=10)
        self.assertEqual(check_result, "select top 10 user from user_table")

    def test_execute_check(self):
        # Statements are split on GO batch separators (any case, any line ending).
        new_engine = MssqlEngine(instance=self.ins1)
        test_sql = 'use database\ngo\nsome sql1\nGO\nsome sql2\n\r\nGo\nsome sql3\n\r\ngO\n'
        check_result = new_engine.execute_check(db_name=None, sql=test_sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[1].__dict__['sql'], "use database\n")
        self.assertEqual(check_result.rows[2].__dict__['sql'], "\nsome sql1\n")
        self.assertEqual(check_result.rows[4].__dict__['sql'], "\nsome sql3\n\r\n")

    @patch('sql.engines.mssql.MssqlEngine.execute')
    def test_execute_workflow(self, mock_execute):
        # The single-statement workflow should map to exactly one execute() call.
        mock_execute.return_value.error = None
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.execute_workflow(self.wf)
        mock_execute.assert_called()
        self.assertEqual(1, mock_execute.call_count)

    @patch('sql.engines.mssql.MssqlEngine.get_connection')
    def test_execute(self, mock_connect):
        # Success path commits; failure path rolls back and records the error.
        mock_cursor = Mock()
        mock_connect.return_value.cursor = mock_cursor
        new_engine = MssqlEngine(instance=self.ins1)
        execute_result = new_engine.execute('some_db', 'some_sql')
        self.assertIsNone(execute_result.error)
        self.assertEqual('some_sql', execute_result.full_sql)
        self.assertEqual(2, len(execute_result.rows))
        mock_cursor.return_value.execute.assert_called()
        mock_cursor.return_value.commit.assert_called()
        mock_cursor.reset_mock()
        mock_cursor.return_value.execute.side_effect = Exception('Boom! some exception!')
        execute_result = new_engine.execute('some_db', 'some_sql')
        self.assertIn('Boom! some exception!', execute_result.error)
        self.assertEqual('some_sql', execute_result.full_sql)
        self.assertEqual(2, len(execute_result.rows))
        mock_cursor.return_value.commit.assert_not_called()
        mock_cursor.return_value.rollback.assert_called()
class TestMysql(TestCase):
    """Tests for MysqlEngine: query/check/filter helpers plus the
    Inception-backed execute path, all with MySQLdb mocked."""

    def setUp(self):
        self.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mysql', host='some_host',
                             port=1366, user='ins_user', password='some_str')
        self.ins1.save()
        self.sys_config = SysConfig()
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins1,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)

    def tearDown(self):
        self.ins1.delete()
        # Reset SysConfig so per-test settings don't leak between tests.
        self.sys_config.purge()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('MySQLdb.connect')
    def test_engine_base_info(self, _conn):
        new_engine = MysqlEngine(instance=self.ins1)
        self.assertEqual(new_engine.name, 'MySQL')
        self.assertEqual(new_engine.info, 'MySQL engine')

    @patch('MySQLdb.connect')
    def testGetConnection(self, connect):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_connection()
        connect.assert_called_once()

    @patch('MySQLdb.connect')
    def testQuery(self, connect):
        # query() must execute, fetch limit_num rows (keyword arg), then close.
        cur = Mock()
        connect.return_value.cursor = cur
        cur.return_value.execute = Mock()
        cur.return_value.fetchmany.return_value = (('v1', 'v2'),)
        cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des'))
        new_engine = MysqlEngine(instance=self.ins1)
        query_result = new_engine.query(sql='some_str', limit_num=100)
        cur.return_value.execute.assert_called()
        cur.return_value.fetchmany.assert_called_once_with(size=100)
        connect.return_value.close.assert_called_once()
        self.assertIsInstance(query_result, ResultSet)

    @patch.object(MysqlEngine, 'query')
    def testAllDb(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('db_1',), ('db_2',)]
        mock_query.return_value = db_result
        new_engine = MysqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_databases()
        self.assertEqual(dbs.rows, ['db_1', 'db_2'])

    @patch.object(MysqlEngine, 'query')
    def testAllTables(self, mock_query):
        table_result = ResultSet()
        table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')]
        mock_query.return_value = table_result
        new_engine = MysqlEngine(instance=self.ins1)
        tables = new_engine.get_all_tables('some_db')
        mock_query.assert_called_once_with(db_name='some_db', sql=ANY)
        self.assertEqual(tables.rows, ['tb_1', 'tb_2'])

    @patch.object(MysqlEngine, 'query')
    def testAllColumns(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('col_1', 'type'), ('col_2', 'type2')]
        mock_query.return_value = db_result
        new_engine = MysqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb')
        self.assertEqual(dbs.rows, ['col_1', 'col_2'])

    @patch.object(MysqlEngine, 'query')
    def testDescribe(self, mock_query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.describe_table('some_db', 'some_db')
        mock_query.assert_called_once()

    def testQueryCheck(self):
        # Leading comment lines are stripped from the filtered SQL.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = '-- 测试\n select user from usertable'
        check_result = new_engine.query_check(db_name='some_db', sql=sql_without_limit)
        self.assertEqual(check_result['filtered_sql'], 'select user from usertable')

    def test_query_check_wrong_sql(self):
        # A comment-only string is not a supported query type.
        new_engine = MysqlEngine(instance=self.ins1)
        wrong_sql = '-- 测试'
        check_result = new_engine.query_check(db_name='some_db', sql=wrong_sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': '-- 测试', 'has_star': False})

    def test_query_check_update_sql(self):
        # DML is rejected by the read-only query checker.
        new_engine = MysqlEngine(instance=self.ins1)
        update_sql = 'update user set id=0'
        check_result = new_engine.query_check(db_name='some_db', sql=update_sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': 'update user set id=0',
                              'has_star': False})

    def test_filter_sql_with_delimiter(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable;'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 100;')

    def test_filter_sql_without_delimiter(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 100;')

    def test_filter_sql_with_limit(self):
        # An existing larger LIMIT is tightened to limit_num.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_with_limit_min(self):
        # The smaller of the two limits wins: existing 10 < requested 100.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 10;')

    def test_filter_sql_with_limit_offset(self):
        # LIMIT ... OFFSET ... is collapsed to a plain LIMIT.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10 offset 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_with_limit_nn(self):
        # "limit M, N" form is also collapsed to a plain LIMIT.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10, 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_upper(self):
        # Rewriting is case-insensitive; the rest of the statement is untouched.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'SELECT USER FROM usertable LIMIT 10, 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'SELECT USER FROM usertable limit 1;')

    def test_filter_sql_not_select(self):
        # Non-SELECT statements are passed through unchanged.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'show create table usertable;'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'show create table usertable;')

    @patch('sql.engines.mysql.data_masking', return_value=ResultSet())
    def test_query_masking(self, _data_masking):
        query_result = ResultSet()
        new_engine = MysqlEngine(instance=self.ins1)
        masking_result = new_engine.query_masking(db_name='archery', sql='select 1', resultset=query_result)
        self.assertIsInstance(masking_result, ResultSet)

    @patch('sql.engines.mysql.data_masking', return_value=ResultSet())
    def test_query_masking_not_select(self, _data_masking):
        # Non-SELECT statements skip masking and return the input resultset.
        query_result = ResultSet()
        new_engine = MysqlEngine(instance=self.ins1)
        masking_result = new_engine.query_masking(db_name='archery', sql='explain select 1', resultset=query_result)
        self.assertEqual(masking_result, query_result)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_select_sql(self, _inception_engine):
        # SELECT submitted as a workflow is rejected before reaching Inception's verdict.
        self.sys_config.set('inception', 'true')
        sql = 'select * from user'
        inc_row = ReviewResult(id=1,
                               errlevel=0,
                               stagestatus='Audit completed',
                               errormessage='None',
                               sql=sql,
                               affected_rows=0,
                               execute_time=0, )
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sql)
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_critical_sql(self, _inception_engine):
        # Statements matching critical_ddl_regex are rejected outright.
        self.sys_config.set('inception', 'true')
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        inc_row = ReviewResult(id=1,
                               errlevel=0,
                               stagestatus='Audit completed',
                               errormessage='None',
                               sql=sql,
                               affected_rows=0,
                               execute_time=0, )
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sql)
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_normal_sql(self, _inception_engine):
        # With no veto rules, Inception's own review row is returned as-is.
        self.sys_config.set('inception', 'true')
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_normal_sql_with_Exception(self, _inception_engine):
        # Inception failures propagate instead of being swallowed.
        sql = 'update user set id=1'
        _inception_engine.return_value.execute_check.side_effect = RuntimeError()
        new_engine = MysqlEngine(instance=self.ins1)
        with self.assertRaises(RuntimeError):
            new_engine.execute_check(db_name=0, sql=sql)

    @patch.object(MysqlEngine, 'query')
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_workflow(self, _inception_engine, _query):
        self.sys_config.set('inception', 'true')
        sql = 'update user set id=1'
        _inception_engine.return_value.execute.return_value = ReviewSet(full_sql=sql)
        _query.return_value.rows = (('0',),)
        new_engine = MysqlEngine(instance=self.ins1)
        execute_result = new_engine.execute_workflow(self.wf)
        self.assertIsInstance(execute_result, ReviewSet)

    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_execute(self, _connect, _cursor, _execute):
        new_engine = MysqlEngine(instance=self.ins1)
        execute_result = new_engine.execute(self.wf)
        self.assertIsInstance(execute_result, ResultSet)

    @patch('MySQLdb.connect')
    def test_server_version(self, _connect):
        # Version string suffix ("-16log") must be stripped when parsing.
        _connect.return_value.get_server_info.return_value = '5.7.20-16log'
        new_engine = MysqlEngine(instance=self.ins1)
        server_version = new_engine.server_version
        self.assertTupleEqual(server_version, (5, 7, 20))

    @patch.object(MysqlEngine, 'query')
    def test_get_variables_not_filter(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_variables()
        _query.assert_called_once()

    @patch('MySQLdb.connect')
    @patch.object(MysqlEngine, 'query')
    def test_get_variables_filter(self, _query, _connect):
        _connect.return_value.get_server_info.return_value = '5.7.20-16log'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_variables(variables=['binlog_format'])
        _query.assert_called()

    @patch.object(MysqlEngine, 'query')
    def test_set_variable(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.set_variable('binlog_format', 'ROW')
        _query.assert_called_once_with(sql="set global binlog_format=ROW;")

    @patch('sql.engines.mysql.GoInceptionEngine')
    def test_osc_go_inception(self, _inception_engine):
        # With inception disabled, OSC control is delegated to GoInception.
        self.sys_config.set('inception', 'false')
        _inception_engine.return_value.osc_control.return_value = ReviewSet()
        command = 'get'
        sqlsha1 = 'xxxxx'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_osc_inception(self, _inception_engine):
        # With inception enabled, OSC control uses the classic Inception engine.
        self.sys_config.set('inception', 'true')
        _inception_engine.return_value.osc_control.return_value = ReviewSet()
        command = 'get'
        sqlsha1 = 'xxxxx'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)

    @patch.object(MysqlEngine, 'query')
    def test_kill_connection(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.kill_connection(100)
        _query.assert_called_once_with(sql="kill 100")

    @patch.object(MysqlEngine, 'query')
    def test_seconds_behind_master(self, _query):
        # Replication lag is read from "show slave status" via a DictCursor.
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.seconds_behind_master
        _query.assert_called_once_with(sql="show slave status", close_conn=False,
                                       cursorclass=MySQLdb.cursors.DictCursor)
class TestRedis(TestCase):
    """Tests for RedisEngine with the redis client mocked."""

    @classmethod
    def setUpClass(cls):
        cls.ins = Instance(instance_name='some_ins', type='slave', db_type='redis', host='some_host',
                           port=1366, user='ins_user', password='some_str')
        cls.ins.save()

    @classmethod
    def tearDownClass(cls):
        cls.ins.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('redis.Redis')
    def test_engine_base_info(self, _conn):
        new_engine = RedisEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'Redis')
        self.assertEqual(new_engine.info, 'Redis engine')

    @patch('redis.Redis')
    def test_get_connection(self, _conn):
        new_engine = RedisEngine(instance=self.ins)
        new_engine.get_connection()
        _conn.assert_called_once()

    @patch('redis.Redis.execute_command', return_value=[1, 2, 3])
    def test_query_return_list(self, _execute_command):
        # A list reply is wrapped one element per row.
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, ([1], [2], [3]))

    @patch('redis.Redis.execute_command', return_value='text')
    def test_query_return_str(self, _execute_command):
        # A scalar reply becomes a single one-column row.
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, (['text'],))

    @patch('redis.Redis.execute_command', return_value='text')
    def test_query_execute(self, _execute_command):
        # NOTE(review): duplicates test_query_return_str — possibly a placeholder.
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, (['text'],))

    @patch('redis.Redis.config_get', return_value={"databases": 4})
    def test_get_all_databases(self, _config_get):
        # Database list is derived from the configured database count.
        new_engine = RedisEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['0', '1', '2', '3'])

    def test_query_check_safe_cmd(self):
        # NOTE(review): despite the name, "keys 1*" is still expected to be
        # rejected — the checker appears to ban the command itself. Confirm intent.
        safe_cmd = "keys 1*"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name=0, sql=safe_cmd)
        self.assertDictEqual(check_result,
                             {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False})

    def test_query_check_danger_cmd(self):
        safe_cmd = "keys *"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name=0, sql=safe_cmd)
        self.assertDictEqual(check_result,
                             {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False})

    def test_filter_sql(self):
        # Redis commands are passed through untouched (no LIMIT concept).
        safe_cmd = "keys 1*"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=safe_cmd, limit_num=100)
        self.assertEqual(check_result, 'keys 1*')

    def test_query_masking(self):
        # Masking is a no-op for redis; the resultset is returned unchanged.
        query_result = ResultSet()
        new_engine = RedisEngine(instance=self.ins)
        masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result)
        self.assertEqual(masking_result, query_result)

    def test_execute_check(self):
        sql = 'set 1 1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('redis.Redis.execute_command', return_value='text')
    def test_execute_workflow_success(self, _execute_command):
        # Only the row layout (attribute keys) is asserted, not the values.
        sql = 'set 1 1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Execute Successfully',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        new_engine = RedisEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class TestPgSQL(TestCase):
@classmethod
def setUpClass(cls):
cls.ins = Instance(instance_name='some_ins', type='slave', db_type='pgsql', host='some_host',
port=1366, user='ins_user', password='some_str')
cls.ins.save()
cls.sys_config = SysConfig()
@classmethod
def tearDownClass(cls):
cls.ins.delete()
cls.sys_config.purge()
@patch('psycopg2.connect')
def test_engine_base_info(self, _conn):
new_engine = PgSQLEngine(instance=self.ins)
self.assertEqual(new_engine.name, 'PgSQL')
self.assertEqual(new_engine.info, 'PgSQL engine')
@patch('psycopg2.connect')
def test_get_connection(self, _conn):
new_engine = PgSQLEngine(instance=self.ins)
new_engine.get_connection("some_dbname")
_conn.assert_called_once()
@patch('psycopg2.connect.cursor.execute')
@patch('psycopg2.connect.cursor')
@patch('psycopg2.connect')
def test_query(self, _conn, _cursor, _execute):
_conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)]
new_engine = PgSQLEngine(instance=self.ins)
query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=100, schema_name="some_schema")
self.assertIsInstance(query_result, ResultSet)
self.assertListEqual(query_result.rows, [(1,)])
@patch('psycopg2.connect.cursor.execute')
@patch('psycopg2.connect.cursor')
@patch('psycopg2.connect')
def test_query_not_limit(self, _conn, _cursor, _execute):
_conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
new_engine = PgSQLEngine(instance=self.ins)
query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=0, schema_name="some_schema")
self.assertIsInstance(query_result, ResultSet)
self.assertListEqual(query_result.rows, [(1,)])
@patch('sql.engines.pgsql.PgSQLEngine.query',
return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)]))
def test_get_all_databases(self, query):
new_engine = PgSQLEngine(instance=self.ins)
dbs = new_engine.get_all_databases()
self.assertListEqual(dbs.rows, ['archery'])
@patch('sql.engines.pgsql.PgSQLEngine.query',
return_value=ResultSet(rows=[('information_schema',), ('archery',), ('pg_catalog',)]))
def test_get_all_schemas(self, _query):
new_engine = PgSQLEngine(instance=self.ins)
schemas = new_engine.get_all_schemas(db_name='archery')
self.assertListEqual(schemas.rows, ['archery'])
@patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)]))
def test_get_all_tables(self, _query):
new_engine = PgSQLEngine(instance=self.ins)
tables = new_engine.get_all_tables(db_name='archery', schema_name='archery')
self.assertListEqual(tables.rows, ['test2'])
@patch('sql.engines.pgsql.PgSQLEngine.query',
return_value=ResultSet(rows=[('id',), ('name',)]))
def test_get_all_columns_by_tb(self, _query):
new_engine = PgSQLEngine(instance=self.ins)
columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2', schema_name='archery')
self.assertListEqual(columns.rows, ['id', 'name'])
@patch('sql.engines.pgsql.PgSQLEngine.query',
return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)]))
def test_describe_table(self, _query):
new_engine = PgSQLEngine(instance=self.ins)
describe = new_engine.describe_table(db_name='archery', schema_name='archery', tb_name='text')
self.assertIsInstance(describe, ResultSet)
def test_query_check_disable_sql(self):
sql = "update xxx set a=1 "
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.query_check(db_name='archery', sql=sql)
self.assertDictEqual(check_result,
{'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False})
def test_query_check_star_sql(self):
sql = "select * from xx "
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.query_check(db_name='archery', sql=sql)
self.assertDictEqual(check_result,
{'msg': 'SQL语句中含有 * ', 'bad_query': False, 'filtered_sql': sql.strip(), 'has_star': True})
def test_filter_sql_with_delimiter(self):
sql = "select * from xx;"
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.filter_sql(sql=sql, limit_num=100)
self.assertEqual(check_result, "select * from xx limit 100;")
def test_filter_sql_without_delimiter(self):
sql = "select * from xx"
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.filter_sql(sql=sql, limit_num=100)
self.assertEqual(check_result, "select * from xx limit 100;")
def test_filter_sql_with_limit(self):
sql = "select * from xx limit 10"
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.filter_sql(sql=sql, limit_num=1)
self.assertEqual(check_result, "select * from xx limit 10;")
def test_query_masking(self):
query_result = ResultSet()
new_engine = PgSQLEngine(instance=self.ins)
masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result)
self.assertEqual(masking_result, query_result)
def test_execute_check_select_sql(self):
sql = 'select * from user;'
row = ReviewResult(id=1, errlevel=2,
stagestatus='驳回不支持语句',
errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
sql=sql)
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.execute_check(db_name='archery', sql=sql)
self.assertIsInstance(check_result, ReviewSet)
self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
def test_execute_check_critical_sql(self):
self.sys_config.set('critical_ddl_regex', '^|update')
self.sys_config.get_all_config()
sql = 'update user set id=1'
row = ReviewResult(id=1, errlevel=2,
stagestatus='驳回高危SQL',
errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
sql=sql)
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.execute_check(db_name='archery', sql=sql)
self.assertIsInstance(check_result, ReviewSet)
self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
def test_execute_check_normal_sql(self):
self.sys_config.purge()
sql = 'alter table tb set id=1'
row = ReviewResult(id=1,
errlevel=0,
stagestatus='Audit completed',
errormessage='None',
sql=sql,
affected_rows=0,
execute_time=0, )
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.execute_check(db_name='archery', sql=sql)
self.assertIsInstance(check_result, ReviewSet)
self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
@patch('psycopg2.connect.cursor.execute')
@patch('psycopg2.connect.cursor')
@patch('psycopg2.connect')
def test_execute_workflow_success(self, _conn, _cursor, _execute):
sql = 'update user set id=1'
row = ReviewResult(id=1,
errlevel=0,
stagestatus='Execute Successfully',
errormessage='None',
sql=sql,
affected_rows=0,
execute_time=0)
wf = SqlWorkflow.objects.create(
workflow_name='some_name',
group_id=1,
group_name='g1',
engineer_display='',
audit_auth_groups='some_group',
create_time=datetime.now() - timedelta(days=1),
status='workflow_finish',
is_backup=True,
instance=self.ins,
db_name='some_db',
syntax_type=1
)
SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
new_engine = PgSQLEngine(instance=self.ins)
execute_result = new_engine.execute_workflow(workflow=wf)
self.assertIsInstance(execute_result, ReviewSet)
self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    # return_value=RuntimeError makes connect() return the exception *class*
    # itself, so downstream attribute access on the "connection" breaks.
    @patch('psycopg2.connect', return_value=RuntimeError)
    def test_execute_workflow_exception(self, _conn, _cursor, _execute):
        """A broken connection must surface as AttributeError from execute_workflow."""
        sql = 'update user set id=1'
        # NOTE(review): the message text mentions Oracle inside a PgSQL test —
        # looks copy-pasted; only dict keys would be compared below anyway.
        row = ReviewResult(id=1,
                           errlevel=2,
                           stagestatus='Execute Failed',
                           errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        with self.assertRaises(AttributeError):
            new_engine = PgSQLEngine(instance=self.ins)
            execute_result = new_engine.execute_workflow(workflow=wf)
            # NOTE(review): the two assertions below are unreachable — the
            # context exits as soon as execute_workflow raises AttributeError.
            self.assertIsInstance(execute_result, ReviewSet)
            self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class TestModel(TestCase):
    """Regression tests for the result-set models (mutable-default sharing)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_result_set_rows_shadow(self):
        """Mutating one instance's ``rows`` must not leak into freshly built instances."""
        for model_cls in (ResultSet, ReviewSet):
            mutated = model_cls()
            # Grow rows via += exactly as callers would, to ten elements.
            mutated.rows += list(range(10))
            # A brand-new instance must still start from an empty list.
            self.assertEqual(model_cls().rows, [])
class TestInception(TestCase):
    """Tests for the (legacy) Inception audit engine wrapper.

    The engine speaks the MySQL protocol, so MySQLdb.connect is mocked; most
    tests short-circuit at InceptionEngine.query and only assert result types
    or the exact SQL string sent to the Inception server.
    """
    def setUp(self):
        # A MySQL target instance plus the Inception audit instance itself.
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql', host='some_host',
                                           port=3306, user='ins_user', password='some_str')
        self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='inception',
                                               host='some_host', port=6669)
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
    def tearDown(self):
        self.ins.delete()
        self.ins_inc.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('MySQLdb.connect')
    def test_get_connection(self, _connect):
        new_engine = InceptionEngine()
        new_engine.get_connection()
        _connect.assert_called_once()
    @patch('MySQLdb.connect')
    def test_get_backup_connection(self, _connect):
        new_engine = InceptionEngine()
        new_engine.get_backup_connection()
        _connect.assert_called_once()
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_check_normal_sql(self, _query):
        sql = 'update user set id=100'
        # One raw Inception result row in positional column order.
        row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        _query.return_value = ResultSet(full_sql=sql, rows=[row])
        new_engine = InceptionEngine()
        check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_exception(self, _query):
        sql = 'update user set id=100'
        # errlevel 1 marks a failed execution row.
        row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence',
                       'backup_dbname', 'execute_time', 'sqlsha1']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_finish(self, _query):
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence',
                       'backup_dbname', 'execute_time', 'sqlsha1']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    # NOTE: patch decorators apply bottom-up; the dotted targets attach to the
    # already-mocked MySQLdb.connect object's auto-created attributes.
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = InceptionEngine()
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        # limit_num=0 means "no limit" for the engine's query path.
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = InceptionEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_query_print(self, _query):
        sql = 'update user set id=100'
        # row[3] is Inception's query_tree JSON; query_print should return it
        # parsed (after _repair_json_str fixes the server's quoting quirks).
        row = [1,
               'select * from sql_instance limit 100',
               0,
               '{"command":"select","select_list":[{"type":"FIELD_ITEM","field":"*"}],"table_ref":[{"db":"archery","table":"sql_instance"}],"limit":{"limit":[{"type":"INT_ITEM","value":"100"}]}}',
               'None']
        column_list = ['ID', 'statement', 'errlevel', 'query_tree', 'errmsg']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        print_result = new_engine.query_print(self.ins, db_name=None, sql=sql)
        self.assertDictEqual(print_result, json.loads(_repair_json_str(row[3])))
    @patch('MySQLdb.connect')
    def test_get_rollback_list(self, _connect):
        # Fixture: a stored execute_result JSON dump with one RERUN row and
        # one EXECUTED row, as produced by a previous execution.
        self.wf.sqlworkflowcontent.execute_result = """[{
            "id": 1,
            "stage": "RERUN",
            "errlevel": 0,
            "stagestatus": "Execute Successfully",
            "errormessage": "None",
            "sql": "use archer_test",
            "affected_rows": 0,
            "sequence": "'1554135032_13038_0'",
            "backup_dbname": "None",
            "execute_time": "0.000",
            "sqlsha1": "",
            "actual_affected_rows": 0
        }, {
            "id": 2,
            "stage": "EXECUTED",
            "errlevel": 0,
            "stagestatus": "Execute Successfully Backup successfully",
            "errormessage": "None",
            "sql": "insert into tt1 (user_name)values('A'),('B'),('C')",
            "affected_rows": 3,
            "sequence": "'1554135032_13038_1'",
            "backup_dbname": "mysql_3306_archer_test",
            "execute_time": "0.000",
            "sqlsha1": "",
            "actual_affected_rows": 3
        }]"""
        self.wf.sqlworkflowcontent.save()
        new_engine = InceptionEngine()
        # Smoke test only: no assertion, just "must not raise".
        new_engine.get_rollback(self.wf)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_get(self, _query):
        new_engine = InceptionEngine()
        command = 'get'
        sqlsha1 = 'xxxxx'
        sql = f"inception get osc_percent '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        # The exact Inception command string is asserted.
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_kill(self, _query):
        new_engine = InceptionEngine()
        command = 'kill'
        sqlsha1 = 'xxxxx'
        sql = f"inception stop alter '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_not_support(self, _query):
        # pt-osc cannot pause/resume; 'stop' here must raise a ValueError.
        new_engine = InceptionEngine()
        command = 'stop'
        sqlsha1 = 'xxxxx'
        sql = f"inception stop alter '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        with self.assertRaisesMessage(ValueError, 'pt-osc不支持暂停和恢复,需要停止执行请使用终止按钮!'):
            new_engine.osc_control(sqlsha1=sqlsha1, command=command)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_get_variables(self, _query):
        new_engine = InceptionEngine(instance=self.ins_inc)
        new_engine.get_variables()
        sql = f"inception get variables;"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_get_variables_filter(self, _query):
        new_engine = InceptionEngine(instance=self.ins_inc)
        new_engine.get_variables(variables=['inception_osc_on'])
        sql = f"inception get variables 'inception_osc_on';"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_set_variable(self, _query):
        new_engine = InceptionEngine(instance=self.ins)
        new_engine.set_variable('inception_osc_on', 'on')
        _query.assert_called_once_with(sql="inception set inception_osc_on=on;")
class TestGoInception(TestCase):
    """Tests for the goInception audit engine wrapper.

    Mirrors TestInception but for goInception (port 4000, extra backup_time
    column, and pause/resume OSC support).
    """
    def setUp(self):
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql',
                                           host='some_host',
                                           port=3306, user='ins_user', password='some_str')
        self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='goinception',
                                               host='some_host', port=4000)
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
    def tearDown(self):
        self.ins.delete()
        self.ins_inc.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('MySQLdb.connect')
    def test_get_connection(self, _connect):
        new_engine = GoInceptionEngine()
        new_engine.get_connection()
        _connect.assert_called_once()
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_check_normal_sql(self, _query):
        sql = 'update user set id=100'
        # Raw goInception result row (note the trailing backup_time column).
        row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        _query.return_value = ResultSet(full_sql=sql, rows=[row])
        new_engine = GoInceptionEngine()
        check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_exception(self, _query):
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql',
                       'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = GoInceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_finish(self, _query):
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql',
                       'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = GoInceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    # NOTE: patch decorators apply bottom-up; the dotted targets attach to the
    # already-mocked MySQLdb.connect object's auto-created attributes.
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = GoInceptionEngine()
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        # limit_num=0 means "no limit".
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = GoInceptionEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_get(self, _query):
        new_engine = GoInceptionEngine()
        command = 'get'
        sqlsha1 = 'xxxxx'
        sql = f"inception get osc_percent '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        # The exact goInception command string is asserted.
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_pause(self, _query):
        new_engine = GoInceptionEngine()
        command = 'pause'
        sqlsha1 = 'xxxxx'
        sql = f"inception {command} osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_resume(self, _query):
        new_engine = GoInceptionEngine()
        command = 'resume'
        sqlsha1 = 'xxxxx'
        sql = f"inception {command} osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_kill(self, _query):
        new_engine = GoInceptionEngine()
        command = 'kill'
        sqlsha1 = 'xxxxx'
        sql = f"inception kill osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_get_variables(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins_inc)
        new_engine.get_variables()
        sql = f"inception get variables;"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_get_variables_filter(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins_inc)
        new_engine.get_variables(variables=['inception_osc_on'])
        sql = f"inception get variables like 'inception_osc_on';"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_set_variable(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins)
        new_engine.set_variable('inception_osc_on', 'on')
        _query.assert_called_once_with(sql="inception set inception_osc_on=on;")
class TestOracle(TestCase):
    """Tests for the Oracle engine wrapper (cx_Oracle is always mocked)."""
    def setUp(self):
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='oracle',
                                           host='some_host', port=3306, user='ins_user', password='some_str',
                                           sid='some_id')
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
        self.sys_config = SysConfig()
    def tearDown(self):
        self.ins.delete()
        self.sys_config.purge()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('cx_Oracle.makedsn')
    @patch('cx_Oracle.connect')
    def test_get_connection(self, _connect, _makedsn):
        """Three connect scenarios: via SID, via service_name, and neither (error)."""
        # 1) SID is set (from setUp) -> connect + makedsn each called once.
        new_engine = OracleEngine(self.ins)
        new_engine.get_connection()
        _connect.assert_called_once()
        _makedsn.assert_called_once()
        _connect.reset_mock()
        _makedsn.reset_mock()
        # 2) service_name set, SID cleared -> same call pattern.
        self.ins.service_name = 'some_service'
        self.ins.sid = ''
        self.ins.save()
        new_engine = OracleEngine(self.ins)
        new_engine.get_connection()
        _connect.assert_called_once()
        _makedsn.assert_called_once()
        _connect.reset_mock()
        _makedsn.reset_mock()
        # 3) neither SID nor service_name -> ValueError.
        self.ins.service_name = ''
        self.ins.sid = ''
        self.ins.save()
        new_engine = OracleEngine(self.ins)
        with self.assertRaises(ValueError):
            new_engine.get_connection()
    @patch('cx_Oracle.connect')
    def test_engine_base_info(self, _conn):
        new_engine = OracleEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'Oracle')
        self.assertEqual(new_engine.info, 'Oracle engine')
        # server_version is derived from the connection's version string.
        _conn.return_value.version = '12.1.0.2.0'
        self.assertTupleEqual(new_engine.server_version, ('12', '1', '0'))
    # NOTE: patch decorators apply bottom-up; the dotted targets attach to the
    # already-mocked cx_Oracle.connect object's auto-created attributes.
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_query(self, _conn, _cursor, _execute):
        # Limited queries read via fetchmany.
        _conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)]
        new_engine = OracleEngine(instance=self.ins)
        query_result = new_engine.query(db_name='archery', sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        # limit_num=0 means "no limit": the engine reads via fetchall.
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = OracleEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)]))
    def test_get_all_databases(self, _query):
        # Public variant filters out system schemas.
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)]))
    def test__get_all_databases(self, _query):
        # Private variant returns the unfiltered list.
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine._get_all_databases()
        self.assertListEqual(dbs.rows, ['AUD_SYS', 'archery', 'ANONYMOUS'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('archery',)]))
    def test__get_all_instances(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine._get_all_instances()
        self.assertListEqual(dbs.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('ANONYMOUS',), ('archery',), ('SYSTEM',)]))
    def test_get_all_schemas(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        schemas = new_engine._get_all_schemas()
        self.assertListEqual(schemas.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)]))
    def test_get_all_tables(self, _query):
        # NOTE(review): only 'test2' survives — get_all_tables apparently drops
        # 'test'; the filtering rule lives in the engine, not in this test.
        new_engine = OracleEngine(instance=self.ins)
        tables = new_engine.get_all_tables(db_name='archery')
        self.assertListEqual(tables.rows, ['test2'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('id',), ('name',)]))
    def test_get_all_columns_by_tb(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2')
        self.assertListEqual(columns.rows, ['id', 'name'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('archery',), ('template1',), ('template0',)]))
    def test_describe_table(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        describe = new_engine.describe_table(db_name='archery', tb_name='text')
        self.assertIsInstance(describe, ResultSet)
    def test_query_check_disable_sql(self):
        # Non-SELECT statements must be rejected by the read-only query check.
        sql = "update xxx set a=1;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持语法!', 'bad_query': True, 'filtered_sql': sql.strip(';'),
                              'has_star': False})
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    def test_query_check_star_sql(self, _explain_check):
        # SELECT * is flagged (has_star) but still allowed (bad_query False).
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '禁止使用 * 关键词\n', 'bad_query': False, 'filtered_sql': sql.strip(';'),
                              'has_star': True})
    def test_query_check_IndexError(self):
        # Empty input has no parseable statement.
        sql = ""
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '没有有效的SQL语句', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False})
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    def test_query_check_plus(self, _explain_check):
        sql = "select 100+1 from tb;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '禁止使用 + 关键词\n', 'bad_query': True, 'filtered_sql': sql.strip(';'),
                              'has_star': False})
    def test_filter_sql_with_delimiter(self):
        # filter_sql wraps the query in a rownum-limited subselect.
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100")
    def test_filter_sql_with_delimiter_and_where(self):
        sql = "select * from xx where id>1;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result,
                         "select sql_audit.* from (select * from xx where id>1) sql_audit where rownum <= 100")
    def test_filter_sql_without_delimiter(self):
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100")
    def test_filter_sql_with_limit(self):
        # MySQL-style LIMIT is not rewritten; the rownum wrapper is added anyway.
        sql = "select * from xx limit 10;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=1)
        self.assertEqual(check_result,
                         "select sql_audit.* from (select * from xx limit 10) sql_audit where rownum <= 1")
    def test_query_masking(self):
        # Oracle masking is a pass-through: the same result set comes back.
        query_result = ResultSet()
        new_engine = OracleEngine(instance=self.ins)
        masking_result = new_engine.query_masking(schema_name='', sql='select 1', resultset=query_result)
        self.assertEqual(masking_result, query_result)
    def test_execute_check_select_sql(self):
        # SELECT through the execution path is rejected (query feature instead).
        sql = 'select * from user;'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'))
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    def test_execute_check_critical_sql(self):
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'))
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    @patch('sql.engines.oracle.OracleEngine.get_sql_first_object_name', return_value='tb')
    @patch('sql.engines.oracle.OracleEngine.object_name_check', return_value=True)
    def test_execute_check_normal_sql(self, _explain_check, _get_sql_first_object_name, _object_name_check):
        self.sys_config.purge()
        sql = 'alter table tb set id=1'
        # errlevel 1: statement type is not auditable on this platform.
        row = ReviewResult(id=1,
                           errlevel=1,
                           stagestatus='当前平台,此语法不支持审核!',
                           errormessage='当前平台,此语法不支持审核!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'),
                           affected_rows=0,
                           execute_time=0,
                           stmt_type='SQL',
                           object_owner='',
                           object_type='',
                           object_name='',
                           )
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_execute_workflow_success(self, _conn, _cursor, _execute):
        sql = 'update user set id=1'
        # Stored review row (full Oracle attribute set) ...
        review_row = ReviewResult(id=1,
                                  errlevel=0,
                                  stagestatus='Execute Successfully',
                                  errormessage='None',
                                  sql=sql,
                                  affected_rows=0,
                                  execute_time=0,
                                  stmt_type='SQL',
                                  object_owner='',
                                  object_type='',
                                  object_name='', )
        # ... versus the slimmer row shape expected from execute_workflow.
        execute_row = ReviewResult(id=1,
                                   errlevel=0,
                                   stagestatus='Execute Successfully',
                                   errormessage='None',
                                   sql=sql,
                                   affected_rows=0,
                                   execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql,
                                          review_content=ReviewSet(rows=[review_row]).json())
        new_engine = OracleEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        # Keys-only comparison: the row schema is checked, values are not.
        self.assertEqual(execute_result.rows[0].__dict__.keys(), execute_row.__dict__.keys())
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    # return_value=RuntimeError makes connect() return the exception *class*,
    # so downstream attribute access on the "connection" breaks.
    @patch('cx_Oracle.connect', return_value=RuntimeError)
    def test_execute_workflow_exception(self, _conn, _cursor, _execute):
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=2,
                           stagestatus='Execute Failed',
                           errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0,
                           stmt_type='SQL',
                           object_owner='',
                           object_type='',
                           object_name='',
                           )
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql, review_content=ReviewSet(rows=[row]).json())
        with self.assertRaises(AttributeError):
            new_engine = OracleEngine(instance=self.ins)
            execute_result = new_engine.execute_workflow(workflow=wf)
            # NOTE(review): the two assertions below are unreachable — the
            # context exits as soon as execute_workflow raises AttributeError.
            self.assertIsInstance(execute_result, ReviewSet)
            self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class MongoTest(TestCase):
    """Smoke tests for the MongoDB engine wrapper (pymongo is always mocked)."""

    def setUp(self) -> None:
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mongo',
                                           host='some_host', port=3306, user='ins_user')
        self.engine = MongoEngine(instance=self.ins)

    def tearDown(self) -> None:
        self.ins.delete()

    @patch('sql.engines.mongo.pymongo')
    def test_get_connection(self, fake_pymongo):
        """get_connection must construct exactly one MongoClient."""
        self.engine.get_connection()
        fake_pymongo.MongoClient.assert_called_once()

    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_query(self, fake_get_connection):
        test_sql = """{"collection": "job","count": true}"""
        query_result = self.engine.query('archery', test_sql)
        self.assertIsInstance(query_result, ResultSet)

    def test_query_check(self):
        """A well-formed JSON query document is not flagged as a bad query."""
        test_sql = """{"collection": "job","count": true}"""
        verdict = self.engine.query_check(sql=test_sql)
        self.assertEqual(verdict.get('bad_query'), False)

    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_get_all_databases(self, fake_get_connection):
        self.assertIsInstance(self.engine.get_all_databases(), ResultSet)

    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_get_all_tables(self, fake_get_connection):
        """Collection names from the driver must come back as the rows list."""
        collections = ['u', 'v', 'w']
        fake_db = Mock()
        fake_db.list_collection_names.return_value = collections
        fake_get_connection.return_value = {'some_db': fake_db}
        tables = self.engine.get_all_tables('some_db')
        fake_db.list_collection_names.assert_called_once()
        self.assertEqual(tables.rows, collections)
| true | true |
f72526b6ad715d5466f1db2a8ab71d035fe309e3 | 567 | py | Python | Python/Xiaoxiang_Python/lec04/BMR_1.0.py | kevindeng123/Programming | a06e9f7773fc083bcb153af21e6e9942a4114b4a | [
"MIT"
] | null | null | null | Python/Xiaoxiang_Python/lec04/BMR_1.0.py | kevindeng123/Programming | a06e9f7773fc083bcb153af21e6e9942a4114b4a | [
"MIT"
] | null | null | null | Python/Xiaoxiang_Python/lec04/BMR_1.0.py | kevindeng123/Programming | a06e9f7773fc083bcb153af21e6e9942a4114b4a | [
"MIT"
] | null | null | null | """
作者:邓经纬
功能:BMR 计算器
版本:1.0
日期:26/10/2018
"""
def compute_bmr(gender, weight, height, age):
    """Return the basal metabolic rate in kcal/day (Harris-Benedict formula).

    Args:
        gender: '男' (male) or '女' (female); any other value is unsupported.
        weight: body weight in kilograms.
        height: body height in centimetres.
        age: age in years.

    Returns:
        The BMR in kilocalories per day, or -1 if the gender is unsupported.
    """
    if gender == '男':
        # Harris-Benedict equation, male coefficients.
        return (13.7 * weight) + (5.0 * height) - (6.8 * age) + 66
    if gender == '女':
        # Harris-Benedict equation, female coefficients.
        return (9.6 * weight) + (1.8 * height) - (4.7 * age) + 655
    # -1 is the sentinel the caller checks for "unsupported gender".
    return -1


def main():
    """
    Compute and print the BMR for a fixed sample profile.
    """
    # gender
    gender = '男'
    # weight (kg)
    weight = 70
    # height (cm)
    height = 175
    # age (years)
    age = 25
    bmr = compute_bmr(gender, weight, height, age)
    if bmr != -1:
        print('基础代谢率(大卡)', bmr)
    else:
        print('暂时不支持该性别。')
if __name__ == '__main__':
main() | 14.538462 | 65 | 0.391534 |
def main():
gender = '男'
weight = 70
height = 175
age = 25
if gender == '男':
bmr = (13.7 * weight) + (5.0 * height) - (6.8 * age) + 66
elif gender == '女':
bmr = (9.6 * weight) + (1.8 * height) - (4.7 * age) +655
else:
bmr = -1
if bmr != -1:
print('基础代谢率(大卡)', bmr)
else:
print('暂时不支持该性别。')
if __name__ == '__main__':
main() | true | true |
f725288fe158ce24e1007dc29528d22db5be4be5 | 1,248 | py | Python | tests/config/test_bucket_data_path_config.py | ranbb/justmltools | 06fa74d04eb915afbe32617eb24bcb70396289d7 | [
"MIT"
] | null | null | null | tests/config/test_bucket_data_path_config.py | ranbb/justmltools | 06fa74d04eb915afbe32617eb24bcb70396289d7 | [
"MIT"
] | 3 | 2021-09-17T06:42:03.000Z | 2021-09-17T06:42:04.000Z | tests/config/test_bucket_data_path_config.py | ranbb/justmltools | 06fa74d04eb915afbe32617eb24bcb70396289d7 | [
"MIT"
] | 1 | 2020-06-24T15:17:59.000Z | 2020-06-24T15:17:59.000Z | from unittest import TestCase
from justmltools.config.bucket_data_path_config import BucketDataPathConfig
PREFIX = "my_bucket_key_prefix"
class TestBucketDataPathConfig(TestCase):
    """Unit tests for the bucket-key path helper."""

    def setUp(self) -> None:
        self.sut: BucketDataPathConfig = BucketDataPathConfig(prefix=PREFIX)

    def test_get_prefix(self):
        self.assertEqual(self.sut.get_prefix(), PREFIX)

    def test_get_input_config_path(self):
        self.assertEqual(self.sut.get_input_config_path(), PREFIX + "/input/config")

    def test_get_input_data_path(self):
        self.assertEqual(self.sut.get_input_data_path(), PREFIX + "/input/data")

    def test_get_model_path(self):
        self.assertEqual(self.sut.get_model_path(), PREFIX + "/model")

    def test_get_output_path(self):
        self.assertEqual(self.sut.get_output_path(), PREFIX + "/output")

    def test_join_one_more_level(self):
        """join() accepts a single path component."""
        joined = self.sut.join(self.sut.get_output_path(), "my_file")
        self.assertEqual(joined, PREFIX + "/output/my_file")

    def test_join_two_more_levels(self):
        """join() also accepts a list of components."""
        joined = self.sut.join(self.sut.get_output_path(), ["my_sub_dir", "my_file"])
        self.assertEqual(joined, PREFIX + "/output/my_sub_dir/my_file")
| 35.657143 | 95 | 0.729167 | from unittest import TestCase
from justmltools.config.bucket_data_path_config import BucketDataPathConfig
PREFIX = "my_bucket_key_prefix"
class TestBucketDataPathConfig(TestCase):
def setUp(self) -> None:
self.sut: BucketDataPathConfig = BucketDataPathConfig(prefix=PREFIX)
def test_get_prefix(self):
self.assertEqual(PREFIX, self.sut.get_prefix())
def test_get_input_config_path(self):
self.assertEqual(f"{PREFIX}/input/config", self.sut.get_input_config_path())
def test_get_input_data_path(self):
self.assertEqual(f"{PREFIX}/input/data", self.sut.get_input_data_path())
def test_get_model_path(self):
self.assertEqual(f"{PREFIX}/model", self.sut.get_model_path())
def test_get_output_path(self):
self.assertEqual(f"{PREFIX}/output", self.sut.get_output_path())
def test_join_one_more_level(self):
joined_path: str = self.sut.join(self.sut.get_output_path(), "my_file")
self.assertEqual(f"{PREFIX}/output/my_file", joined_path)
def test_join_two_more_levels(self):
joined_path: str = self.sut.join(self.sut.get_output_path(), ["my_sub_dir", "my_file"])
self.assertEqual(f"{PREFIX}/output/my_sub_dir/my_file", joined_path)
| true | true |
f72528dd8a067e6d5e70862cf8082bf8332d66e0 | 572 | py | Python | revelation/app/hook/hooks/movieform.py | uyamazak/oceanus | 6158cdc313a381f8228562605d33713ad4e776f1 | [
"MIT"
] | 8 | 2017-02-10T07:24:43.000Z | 2019-06-03T07:45:29.000Z | revelation/app/hook/hooks/movieform.py | uyamazak/oceanus | 6158cdc313a381f8228562605d33713ad4e776f1 | [
"MIT"
] | 3 | 2017-02-20T10:24:20.000Z | 2017-08-15T04:54:36.000Z | revelation/app/hook/hooks/movieform.py | uyamazak/oceanus | 6158cdc313a381f8228562605d33713ad4e776f1 | [
"MIT"
] | null | null | null | from hook.base import BaseHook
from task.gspread.tasks import send2ws
class MovieformHook(BaseHook):
    """Forwards items from the "movieform" channel to a spreadsheet via send2ws."""

    def main(self) -> int:
        """Process one item; return the number of rows forwarded (0 or 1)."""
        # Items from any other channel are not our concern.
        if self.item.get("channel") != "movieform":
            return 0
        payload = self.item.get("data")
        timestamp = self.item.get("dt")
        row = (
            timestamp,
            payload.get("cname"),
            payload.get("uid"),
            payload.get("url"),
        )
        # Hand off asynchronously to the gspread task queue.
        send2ws.delay(data=row, title_prefix="movie_")
        return 1
from task.gspread.tasks import send2ws
class MovieformHook(BaseHook):
def main(self) -> int:
channel = self.item.get("channel")
if channel != "movieform":
return 0
data = self.item.get("data")
dt = self.item.get("dt")
count = 1
values = (dt,
data.get("cname"),
data.get("uid"),
data.get("url"),
)
send2ws.delay(data=values,
title_prefix="movie_")
return count
| true | true |
f72528ea404b5476c1151c85d0a3295f31a0e405 | 6,337 | py | Python | main.py | kb1p/json-inspector | 388596bf2d2eb014ab070e9fbc8a5e5d90eb00df | [
"MIT"
] | null | null | null | main.py | kb1p/json-inspector | 388596bf2d2eb014ab070e9fbc8a5e5d90eb00df | [
"MIT"
] | null | null | null | main.py | kb1p/json-inspector | 388596bf2d2eb014ab070e9fbc8a5e5d90eb00df | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Jan 28 19:58:07 2020
@author: kb1p
"""
import sys
import PyQt5.QtCore as Core
import PyQt5.QtWidgets as Gui
import PyQt5.QtGui as GuiMisc
import data_models
import dialogs
import json
class MainWindow(Gui.QMainWindow):
    """Top-level window of the JSON inspector.

    Left pane: tree view of the JSON document structure.
    Right pane: table of the selected element's properties.
    Window geometry, state and splitter layout are persisted via QSettings.
    """
    __slots__ = "tvStructure", "tblProps", "mdlStructure", "mdlProps", "currentFile", "config", \
                "splitter", "editorDlg"
    def __init__(self, p = None):
        Gui.QMainWindow.__init__(self, parent = p)
        # Work area
        self.tvStructure = Gui.QTreeView(self)
        self.tvStructure.setHeaderHidden(True)
        self.tvStructure.setSelectionMode(Gui.QAbstractItemView.SingleSelection)
        self.tblProps = Gui.QTableView(self)
        self.splitter = Gui.QSplitter(self)
        self.splitter.addWidget(self.tvStructure)
        self.splitter.addWidget(self.tblProps)
        self.setCentralWidget(self.splitter)
        # Menu
        mnuBar = Gui.QMenuBar(self)
        mnuFile = mnuBar.addMenu("File")
        mnuFile.addAction("Open", self.openScene, GuiMisc.QKeySequence("Ctrl+O"))
        mnuFile.addAction("Save as...", self.saveSceneAs, GuiMisc.QKeySequence("Ctrl+S"))
        mnuFile.addSeparator()
        mnuFile.addAction("Exit", self.close)
        mnuElem = mnuBar.addMenu("Element")
        # TODO: "Add sub-element" action is not implemented yet.
        # mnuElem.addAction("Add sub-element", self.addElement, GuiMisc.QKeySequence("Ctrl+A"))
        mnuElem.addAction("Edit JSON code", self.editElement, GuiMisc.QKeySequence("Ctrl+E"))
        mnuElem.addAction("Remove", self.removeElement, GuiMisc.QKeySequence("Ctrl+R"))
        self.setMenuBar(mnuBar)
        self.mdlStructure = data_models.JSONTreeModel(self)
        self.tvStructure.setModel(self.mdlStructure)
        self.mdlProps = data_models.JSONPropertiesModel(self)
        self.tblProps.setModel(self.mdlProps)
        self.tvStructure.selectionModel().currentChanged.connect(self.showElement)
        self.setCurrentFile(None)
        self.statusBar().showMessage("No selection")
        self.resize(500, 450)
        # Restore persisted window geometry / layout, if any.
        # (Fixed PEP 8 E711: compare against None with "is"/"is not".)
        self.config = Core.QSettings("kb1p", "json-inspector")
        k = self.config.value("main/geometry")
        if k is not None:
            self.restoreGeometry(k)
        k = self.config.value("main/state")
        if k is not None:
            self.restoreState(k)
        k = self.config.value("splitter/state")
        if k is not None:
            self.splitter.restoreState(k)
        self.editorDlg = dialogs.EditorDialog(self, self.config)
    def showElement(self, index, prevIndex):
        """Slot for tree selection changes: display the element's properties
        and its full path in the status bar."""
        self.mdlProps.displayElement(index)
        assert self.mdlProps.selection is not None
        self.statusBar().showMessage(self.mdlProps.selection.fullPath())
    def editElement(self):
        """Open the selected element's subtree as JSON text in the editor
        dialog and, if edited, rebuild the subtree from the result."""
        idx = self.tvStructure.selectionModel().currentIndex()
        try:
            if not idx.isValid():
                raise RuntimeError("Element is not selected")
            elm = idx.internalPointer()
            jsIn = data_models.serializeTree(elm)
            strIn = json.dumps(jsIn, indent = 4, separators = (",", ": "), sort_keys = True)
            strOut = self.editorDlg.requestText(elm.fullPath(), strIn)
            # Only rebuild when the user actually changed the text.
            if strOut != strIn:
                jsOut = json.loads(strOut)
                self.mdlStructure.layoutAboutToBeChanged.emit()
                data_models.rebuildTree(jsOut, elm)
                self.mdlStructure.layoutChanged.emit()
                self.mdlProps.displayElement(idx)
        except json.JSONDecodeError as err:
            # Point the user at the offending line of their edited text.
            line = err.doc.splitlines()[err.lineno - 1]
            Gui.QMessageBox.critical(self, \
                                     "JSON syntax error", \
                                     "Illegal JSON syntax: %s.\nMalformed line:\n%s" % \
                                     (err.msg, line))
        except RuntimeError as err:
            Gui.QMessageBox.critical(self, "Error", str(err))
    def removeElement(self):
        """Remove the selected element (after confirmation); the root
        element cannot be removed."""
        idx = self.tvStructure.selectionModel().currentIndex()
        try:
            if not idx.isValid():
                raise RuntimeError("Illegal element selected")
            if not idx.parent().isValid():
                raise RuntimeError("Cannot remove root element")
            name = str(idx.data())
            if Gui.QMessageBox.question(self, \
                                        "Confirmation required", \
                                        "Are you sure want to remove element %s?" % name) == Gui.QMessageBox.Yes:
                parIdx = idx.parent()
                self.mdlStructure.removeRow(idx.row(), parIdx)
                # Move the selection to the parent of the removed element.
                self.tvStructure.selectionModel().setCurrentIndex(parIdx, Core.QItemSelectionModel.Current)
        except RuntimeError as err:
            Gui.QMessageBox.critical(self, "Error", str(err))
    def setCurrentFile(self, fn):
        """Remember the current file path (or None) and reflect it in the
        window title."""
        self.currentFile = fn
        t = self.currentFile if self.currentFile is not None else "<no data>"
        self.window().setWindowTitle("JSON inspector: %s" % t)
    def openScene(self):
        """Ask for a JSON/glTF file, load it into the tree model."""
        fn, _ = Gui.QFileDialog.getOpenFileName(self, "Select input file", filter = "JSON files (*.json *.gltf)")
        if len(fn) > 0:
            with open(fn, "r") as fin:
                d = json.load(fin)
                self.mdlStructure.loadData(d)
                self.mdlProps.displayElement(None)
                self.setCurrentFile(fn)
    def saveSceneAs(self):
        """Ask for a target path and serialize the tree model as JSON."""
        if self.currentFile is None:
            Gui.QMessageBox.warning(self, "Warning", "No data was loaded - nothing to save")
            return
        fn, _ = Gui.QFileDialog.getSaveFileName(self, "Select output file", filter = "JSON files (*.json *.gltf)")
        if len(fn) > 0:
            with open(fn, "w") as fout:
                d = self.mdlStructure.getData()
                json.dump(d, fout, indent = 4, separators = (",", ": "), sort_keys = True)
                self.setCurrentFile(fn)
    def closeEvent(self, evt):
        """Persist window geometry, state and splitter layout on close."""
        self.config.setValue("main/geometry", self.saveGeometry())
        self.config.setValue("main/state", self.saveState())
        self.config.setValue("splitter/state", self.splitter.saveState())
        Gui.QMainWindow.closeEvent(self, evt)
def main(args):
    """Application entry point; returns the Qt event loop's exit code.

    ``args`` is the argv list passed to QApplication (the module guard
    passes ``sys.argv``). Previously the parameter was accepted but
    ignored in favor of re-reading ``sys.argv``; using it makes the
    function callable with a custom argv while behaving identically at
    the existing call site.
    """
    app = Gui.QApplication(args)
    win = MainWindow()
    win.show()
    return app.exec_()
if __name__ == "__main__":
    # Script entry point: run the app and propagate the Qt event loop's
    # exit status to the shell.
    sys.exit(main(sys.argv))
| 40.107595 | 114 | 0.603125 |
import sys
import PyQt5.QtCore as Core
import PyQt5.QtWidgets as Gui
import PyQt5.QtGui as GuiMisc
import data_models
import dialogs
import json
class MainWindow(Gui.QMainWindow):
__slots__ = "tvStructure", "tblProps", "mdlStructure", "mdlProps", "currentFile", "config", \
"splitter", "editorDlg"
def __init__(self, p = None):
Gui.QMainWindow.__init__(self, parent = p)
self.tvStructure = Gui.QTreeView(self)
self.tvStructure.setHeaderHidden(True)
self.tvStructure.setSelectionMode(Gui.QAbstractItemView.SingleSelection)
self.tblProps = Gui.QTableView(self)
self.splitter = Gui.QSplitter(self)
self.splitter.addWidget(self.tvStructure)
self.splitter.addWidget(self.tblProps)
self.setCentralWidget(self.splitter)
mnuBar = Gui.QMenuBar(self)
mnuFile = mnuBar.addMenu("File")
mnuFile.addAction("Open", self.openScene, GuiMisc.QKeySequence("Ctrl+O"))
mnuFile.addAction("Save as...", self.saveSceneAs, GuiMisc.QKeySequence("Ctrl+S"))
mnuFile.addSeparator()
mnuFile.addAction("Exit", self.close)
mnuElem = mnuBar.addMenu("Element")
mnuElem.addAction("Edit JSON code", self.editElement, GuiMisc.QKeySequence("Ctrl+E"))
mnuElem.addAction("Remove", self.removeElement, GuiMisc.QKeySequence("Ctrl+R"))
self.setMenuBar(mnuBar)
self.mdlStructure = data_models.JSONTreeModel(self)
self.tvStructure.setModel(self.mdlStructure)
self.mdlProps = data_models.JSONPropertiesModel(self)
self.tblProps.setModel(self.mdlProps)
self.tvStructure.selectionModel().currentChanged.connect(self.showElement)
self.setCurrentFile(None)
self.statusBar().showMessage("No selection")
self.resize(500, 450)
self.config = Core.QSettings("kb1p", "json-inspector")
k = self.config.value("main/geometry")
if k != None:
self.restoreGeometry(k)
k = self.config.value("main/state")
if k != None:
self.restoreState(k)
k = self.config.value("splitter/state")
if k != None:
self.splitter.restoreState(k)
self.editorDlg = dialogs.EditorDialog(self, self.config)
def showElement(self, index, prevIndex):
self.mdlProps.displayElement(index)
assert self.mdlProps.selection != None
self.statusBar().showMessage(self.mdlProps.selection.fullPath())
def editElement(self):
idx = self.tvStructure.selectionModel().currentIndex()
try:
if not idx.isValid():
raise RuntimeError("Element is not selected")
elm = idx.internalPointer()
jsIn = data_models.serializeTree(elm)
strIn = json.dumps(jsIn, indent = 4, separators = (",", ": "), sort_keys = True)
strOut = self.editorDlg.requestText(elm.fullPath(), strIn)
if strOut != strIn:
jsOut = json.loads(strOut)
self.mdlStructure.layoutAboutToBeChanged.emit()
data_models.rebuildTree(jsOut, elm)
self.mdlStructure.layoutChanged.emit()
self.mdlProps.displayElement(idx)
except json.JSONDecodeError as err:
line = err.doc.splitlines()[err.lineno - 1]
Gui.QMessageBox.critical(self, \
"JSON syntax error", \
"Illegal JSON syntax: %s.\nMalformed line:\n%s" % \
(err.msg, line))
except RuntimeError as err:
Gui.QMessageBox.critical(self, "Error", str(err))
def removeElement(self):
idx = self.tvStructure.selectionModel().currentIndex()
try:
if not idx.isValid():
raise RuntimeError("Illegal element selected")
if not idx.parent().isValid():
raise RuntimeError("Cannot remove root element")
name = str(idx.data())
if Gui.QMessageBox.question(self, \
"Confirmation required", \
"Are you sure want to remove element %s?" % name) == Gui.QMessageBox.Yes:
parIdx = idx.parent()
self.mdlStructure.removeRow(idx.row(), parIdx)
self.tvStructure.selectionModel().setCurrentIndex(parIdx, Core.QItemSelectionModel.Current)
except RuntimeError as err:
Gui.QMessageBox.critical(self, "Error", str(err))
def setCurrentFile(self, fn):
self.currentFile = fn
t = self.currentFile if self.currentFile != None else "<no data>"
self.window().setWindowTitle("JSON inspector: %s" % t)
def openScene(self):
fn, _ = Gui.QFileDialog.getOpenFileName(self, "Select input file", filter = "JSON files (*.json *.gltf)")
if len(fn) > 0:
with open(fn, "r") as fin:
d = json.load(fin)
self.mdlStructure.loadData(d)
self.mdlProps.displayElement(None)
self.setCurrentFile(fn)
def saveSceneAs(self):
if self.currentFile == None:
Gui.QMessageBox.warning(self, "Warning", "No data was loaded - nothing to save")
return
fn, _ = Gui.QFileDialog.getSaveFileName(self, "Select output file", filter = "JSON files (*.json *.gltf)")
if len(fn) > 0:
with open(fn, "w") as fout:
d = self.mdlStructure.getData()
json.dump(d, fout, indent = 4, separators = (",", ": "), sort_keys = True)
self.setCurrentFile(fn)
def closeEvent(self, evt):
self.config.setValue("main/geometry", self.saveGeometry())
self.config.setValue("main/state", self.saveState())
self.config.setValue("splitter/state", self.splitter.saveState())
Gui.QMainWindow.closeEvent(self, evt)
def main(args):
app = Gui.QApplication(sys.argv)
win = MainWindow()
win.show()
return app.exec_()
if __name__ == "__main__":
sys.exit(main(sys.argv))
| true | true |
f7252950efe183c9396feb149b40fe1dc37cdd64 | 10,665 | py | Python | Thesis@3.9.1/Lib/site-packages/django/db/models/fields/reverse_related.py | nverbois/TFE21-232 | 7113837b5263b5c508bfc6903cb6982b48aa7ee4 | [
"MIT"
] | null | null | null | Thesis@3.9.1/Lib/site-packages/django/db/models/fields/reverse_related.py | nverbois/TFE21-232 | 7113837b5263b5c508bfc6903cb6982b48aa7ee4 | [
"MIT"
] | null | null | null | Thesis@3.9.1/Lib/site-packages/django/db/models/fields/reverse_related.py | nverbois/TFE21-232 | 7113837b5263b5c508bfc6903cb6982b48aa7ee4 | [
"MIT"
] | null | null | null | """
"Rel objects" for related fields.
"Rel objects" (for lack of a better name) carry information about the relation
modeled by a related field and provide some utility functions. They're stored
in the ``remote_field`` attribute of the field.
They also act as reverse fields for the purposes of the Meta API because
they're the closest concept currently available.
"""
from django.core import exceptions
from django.utils.functional import cached_property
from . import BLANK_CHOICE_DASH
from .mixins import FieldCacheMixin
class ForeignObjectRel(FieldCacheMixin):
    """
    Used by ForeignObject to store information about the relation.
    ``_meta.get_fields()`` returns this class to provide access to the field
    flags for the reverse relation.
    """
    # Field flags
    auto_created = True
    concrete = False
    editable = False
    is_relation = True
    # Reverse relations are always nullable (Django can't enforce that a
    # foreign key on the related model points to this model).
    null = True
    empty_strings_allowed = False
    def __init__(
        self,
        field,
        to,
        related_name=None,
        related_query_name=None,
        limit_choices_to=None,
        parent_link=False,
        on_delete=None,
    ):
        self.field = field
        self.model = to
        self.related_name = related_name
        self.related_query_name = related_query_name
        self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
        self.parent_link = parent_link
        self.on_delete = on_delete
        # Subclasses override these (e.g. ManyToManyRel sets symmetrical,
        # OneToOneRel sets multiple=False).
        self.symmetrical = False
        self.multiple = True
    # Some of the following cached_properties can't be initialized in
    # __init__ as the field doesn't have its model yet. Calling these methods
    # before field.contribute_to_class() has been called will result in
    # AttributeError
    @cached_property
    def hidden(self):
        # Cached form of is_hidden(): accessor suppressed via related_name="...+".
        return self.is_hidden()
    @cached_property
    def name(self):
        # The reverse relation is addressed by the forward field's query name.
        return self.field.related_query_name()
    @property
    def remote_field(self):
        # For a reverse relation, the "remote" side is the forward field itself.
        return self.field
    @property
    def target_field(self):
        """
        When filtering against this relation, return the field on the remote
        model against which the filtering should happen.
        """
        target_fields = self.get_path_info()[-1].target_fields
        if len(target_fields) > 1:
            raise exceptions.FieldError(
                "Can't use target_field for multicolumn relations."
            )
        return target_fields[0]
    @cached_property
    def related_model(self):
        if not self.field.model:
            raise AttributeError(
                "This property can't be accessed before self.field.contribute_to_class has been called."
            )
        return self.field.model
    # Cardinality flags: many_to_one / one_to_many are deliberately swapped
    # relative to the forward field because this object describes the
    # reverse direction of the relation.
    @cached_property
    def many_to_many(self):
        return self.field.many_to_many
    @cached_property
    def many_to_one(self):
        return self.field.one_to_many
    @cached_property
    def one_to_many(self):
        return self.field.many_to_one
    @cached_property
    def one_to_one(self):
        return self.field.one_to_one
    def get_lookup(self, lookup_name):
        # Delegate lookup resolution to the forward field.
        return self.field.get_lookup(lookup_name)
    def get_internal_type(self):
        return self.field.get_internal_type()
    @property
    def db_type(self):
        return self.field.db_type
    def __repr__(self):
        return "<%s: %s.%s>" % (
            type(self).__name__,
            self.related_model._meta.app_label,
            self.related_model._meta.model_name,
        )
    def get_choices(
        self,
        include_blank=True,
        blank_choice=BLANK_CHOICE_DASH,
        limit_choices_to=None,
        ordering=(),
    ):
        """
        Return choices with a default blank choices included, for use
        as <select> choices for this field.
        Analog of django.db.models.fields.Field.get_choices(), provided
        initially for utilization by RelatedFieldListFilter.
        """
        limit_choices_to = limit_choices_to or self.limit_choices_to
        qs = self.related_model._default_manager.complex_filter(limit_choices_to)
        if ordering:
            qs = qs.order_by(*ordering)
        return (blank_choice if include_blank else []) + [(x.pk, str(x)) for x in qs]
    def is_hidden(self):
        """Should the related object be hidden?"""
        # Hidden when related_name is set and ends with "+".
        return bool(self.related_name) and self.related_name[-1] == "+"
    def get_joining_columns(self):
        # Reverse of the forward field's joining columns.
        return self.field.get_reverse_joining_columns()
    def get_extra_restriction(self, where_class, alias, related_alias):
        # Note the swapped aliases: the restriction is mirrored for the
        # reverse direction of the join.
        return self.field.get_extra_restriction(where_class, related_alias, alias)
    def set_field_name(self):
        """
        Set the related field's name, this is not available until later stages
        of app loading, so set_field_name is called from
        set_attributes_from_rel()
        """
        # By default foreign object doesn't relate to any remote field (for
        # example custom multicolumn joins currently have no remote field).
        self.field_name = None
    def get_accessor_name(self, model=None):
        # This method encapsulates the logic that decides what name to give an
        # accessor descriptor that retrieves related many-to-one or
        # many-to-many objects. It uses the lowercased object_name + "_set",
        # but this can be overridden with the "related_name" option. Due to
        # backwards compatibility ModelForms need to be able to provide an
        # alternate model. See BaseInlineFormSet.get_default_prefix().
        opts = model._meta if model else self.related_model._meta
        model = model or self.related_model
        if self.multiple:
            # If this is a symmetrical m2m relation on self, there is no reverse accessor.
            if self.symmetrical and model == self.model:
                return None
        if self.related_name:
            return self.related_name
        return opts.model_name + ("_set" if self.multiple else "")
    def get_path_info(self, filtered_relation=None):
        return self.field.get_reverse_path_info(filtered_relation)
    def get_cache_name(self):
        """
        Return the name of the cache key to use for storing an instance of the
        forward model on the reverse model.
        """
        return self.get_accessor_name()
class ManyToOneRel(ForeignObjectRel):
    """
    Used by the ForeignKey field to store information about the relation.
    ``_meta.get_fields()`` returns this class to provide access to the field
    flags for the reverse relation.
    Note: Because we somewhat abuse the Rel objects by using them as reverse
    fields we get the funny situation where
    ``ManyToOneRel.many_to_one == False`` and
    ``ManyToOneRel.one_to_many == True``. This is unfortunate but the actual
    ManyToOneRel class is a private API and there is work underway to turn
    reverse relations into actual fields.
    """
    def __init__(
        self,
        field,
        to,
        field_name,
        related_name=None,
        related_query_name=None,
        limit_choices_to=None,
        parent_link=False,
        on_delete=None,
    ):
        super().__init__(
            field,
            to,
            related_name=related_name,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
            parent_link=parent_link,
            on_delete=on_delete,
        )
        # Name of the field on the remote model that this relation targets.
        self.field_name = field_name
    def __getstate__(self):
        state = self.__dict__.copy()
        # ``related_model`` is a cached_property (stored in __dict__ once
        # evaluated); drop it so pickling doesn't embed the model class.
        state.pop("related_model", None)
        return state
    def get_related_field(self):
        """
        Return the Field in the 'to' object to which this relationship is tied.
        """
        field = self.model._meta.get_field(self.field_name)
        if not field.concrete:
            # Reject non-concrete fields as relation targets.
            raise exceptions.FieldDoesNotExist(
                "No related field named '%s'" % self.field_name
            )
        return field
    def set_field_name(self):
        # Fall back to the remote model's primary key when no explicit
        # field name was supplied.
        self.field_name = self.field_name or self.model._meta.pk.name
class OneToOneRel(ManyToOneRel):
    """
    Used by OneToOneField to store information about the relation.
    ``_meta.get_fields()`` returns this class to provide access to the field
    flags for the reverse relation.
    """
    def __init__(
        self,
        field,
        to,
        field_name,
        related_name=None,
        related_query_name=None,
        limit_choices_to=None,
        parent_link=False,
        on_delete=None,
    ):
        super().__init__(
            field,
            to,
            field_name,
            related_name=related_name,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
            parent_link=parent_link,
            on_delete=on_delete,
        )
        # At most one related object exists; get_accessor_name() checks this
        # flag to skip the "_set" suffix for one-to-one accessors.
        self.multiple = False
class ManyToManyRel(ForeignObjectRel):
    """
    Used by ManyToManyField to store information about the relation.
    ``_meta.get_fields()`` returns this class to provide access to the field
    flags for the reverse relation.
    """
    def __init__(
        self,
        field,
        to,
        related_name=None,
        related_query_name=None,
        limit_choices_to=None,
        symmetrical=True,
        through=None,
        through_fields=None,
        db_constraint=True,
    ):
        super().__init__(
            field,
            to,
            related_name=related_name,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
        )
        # Option validation: these combinations are rejected outright.
        if through and not db_constraint:
            raise ValueError("Can't supply a through model and db_constraint=False")
        self.through = through
        if through_fields and not through:
            raise ValueError("Cannot specify through_fields without a through model")
        self.through_fields = through_fields
        self.symmetrical = symmetrical
        self.db_constraint = db_constraint
    def get_related_field(self):
        """
        Return the field in the 'to' object to which this relationship is tied.
        Provided for symmetry with ManyToOneRel.
        """
        opts = self.through._meta
        if self.through_fields:
            # Explicit through_fields: the first entry names the source field.
            field = opts.get_field(self.through_fields[0])
        else:
            # Otherwise pick the first field on the through model whose
            # remote_field points back at self.model.
            for field in opts.fields:
                rel = getattr(field, "remote_field", None)
                if rel and rel.model == self.model:
                    break
        return field.foreign_related_fields[0]
| 31.27566 | 104 | 0.644069 |
from django.core import exceptions
from django.utils.functional import cached_property
from . import BLANK_CHOICE_DASH
from .mixins import FieldCacheMixin
class ForeignObjectRel(FieldCacheMixin):
auto_created = True
concrete = False
editable = False
is_relation = True
# foreign key on the related model points to this model).
null = True
empty_strings_allowed = False
def __init__(
self,
field,
to,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
self.field = field
self.model = to
self.related_name = related_name
self.related_query_name = related_query_name
self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
self.parent_link = parent_link
self.on_delete = on_delete
self.symmetrical = False
self.multiple = True
# Some of the following cached_properties can't be initialized in
# before field.contribute_to_class() has been called will result in
# AttributeError
@cached_property
def hidden(self):
return self.is_hidden()
@cached_property
def name(self):
return self.field.related_query_name()
@property
def remote_field(self):
return self.field
@property
def target_field(self):
target_fields = self.get_path_info()[-1].target_fields
if len(target_fields) > 1:
raise exceptions.FieldError(
"Can't use target_field for multicolumn relations."
)
return target_fields[0]
@cached_property
def related_model(self):
if not self.field.model:
raise AttributeError(
"This property can't be accessed before self.field.contribute_to_class has been called."
)
return self.field.model
@cached_property
def many_to_many(self):
return self.field.many_to_many
@cached_property
def many_to_one(self):
return self.field.one_to_many
@cached_property
def one_to_many(self):
return self.field.many_to_one
@cached_property
def one_to_one(self):
return self.field.one_to_one
def get_lookup(self, lookup_name):
return self.field.get_lookup(lookup_name)
def get_internal_type(self):
return self.field.get_internal_type()
@property
def db_type(self):
return self.field.db_type
def __repr__(self):
return "<%s: %s.%s>" % (
type(self).__name__,
self.related_model._meta.app_label,
self.related_model._meta.model_name,
)
def get_choices(
self,
include_blank=True,
blank_choice=BLANK_CHOICE_DASH,
limit_choices_to=None,
ordering=(),
):
limit_choices_to = limit_choices_to or self.limit_choices_to
qs = self.related_model._default_manager.complex_filter(limit_choices_to)
if ordering:
qs = qs.order_by(*ordering)
return (blank_choice if include_blank else []) + [(x.pk, str(x)) for x in qs]
def is_hidden(self):
return bool(self.related_name) and self.related_name[-1] == "+"
def get_joining_columns(self):
return self.field.get_reverse_joining_columns()
def get_extra_restriction(self, where_class, alias, related_alias):
return self.field.get_extra_restriction(where_class, related_alias, alias)
def set_field_name(self):
# By default foreign object doesn't relate to any remote field (for
self.field_name = None
def get_accessor_name(self, model=None):
opts = model._meta if model else self.related_model._meta
model = model or self.related_model
if self.multiple:
if self.symmetrical and model == self.model:
return None
if self.related_name:
return self.related_name
return opts.model_name + ("_set" if self.multiple else "")
def get_path_info(self, filtered_relation=None):
return self.field.get_reverse_path_info(filtered_relation)
def get_cache_name(self):
return self.get_accessor_name()
class ManyToOneRel(ForeignObjectRel):
def __init__(
self,
field,
to,
field_name,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
super().__init__(
field,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
self.field_name = field_name
def __getstate__(self):
state = self.__dict__.copy()
state.pop("related_model", None)
return state
def get_related_field(self):
field = self.model._meta.get_field(self.field_name)
if not field.concrete:
raise exceptions.FieldDoesNotExist(
"No related field named '%s'" % self.field_name
)
return field
def set_field_name(self):
self.field_name = self.field_name or self.model._meta.pk.name
class OneToOneRel(ManyToOneRel):
def __init__(
self,
field,
to,
field_name,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
super().__init__(
field,
to,
field_name,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
self.multiple = False
class ManyToManyRel(ForeignObjectRel):
def __init__(
self,
field,
to,
related_name=None,
related_query_name=None,
limit_choices_to=None,
symmetrical=True,
through=None,
through_fields=None,
db_constraint=True,
):
super().__init__(
field,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
)
if through and not db_constraint:
raise ValueError("Can't supply a through model and db_constraint=False")
self.through = through
if through_fields and not through:
raise ValueError("Cannot specify through_fields without a through model")
self.through_fields = through_fields
self.symmetrical = symmetrical
self.db_constraint = db_constraint
def get_related_field(self):
opts = self.through._meta
if self.through_fields:
field = opts.get_field(self.through_fields[0])
else:
for field in opts.fields:
rel = getattr(field, "remote_field", None)
if rel and rel.model == self.model:
break
return field.foreign_related_fields[0]
| true | true |
f7252ab67d2d7b09f44add6e1853b7582a5b415f | 1,646 | py | Python | networks/resample2d_package/resample2d.py | ryannggy/fn-colab | 2aefe81bc50ec0e52c0eb7c5bc94178fa66d6ec8 | [
"Apache-2.0"
] | null | null | null | networks/resample2d_package/resample2d.py | ryannggy/fn-colab | 2aefe81bc50ec0e52c0eb7c5bc94178fa66d6ec8 | [
"Apache-2.0"
] | null | null | null | networks/resample2d_package/resample2d.py | ryannggy/fn-colab | 2aefe81bc50ec0e52c0eb7c5bc94178fa66d6ec8 | [
"Apache-2.0"
] | null | null | null | from torch.nn.modules.module import Module
from torch.autograd import Function, Variable
import resample2d_cuda
class Resample2dFunction(Function):
    """Autograd wrapper around the ``resample2d_cuda`` extension kernels.

    Resamples ``input1`` at per-pixel offsets given by ``input2``
    (presumably an optical-flow / warp field -- confirm against the CUDA
    kernel sources). ``kernel_size`` and ``bilinear`` are forwarded to the
    kernels unchanged and receive no gradients.
    """
    @staticmethod
    def forward(ctx, input1, input2, kernel_size=1, bilinear= True):
        # The CUDA kernels index raw memory, so both inputs must be contiguous.
        assert input1.is_contiguous()
        assert input2.is_contiguous()
        # Both inputs are needed again in backward().
        ctx.save_for_backward(input1, input2)
        ctx.kernel_size = kernel_size
        ctx.bilinear = bilinear
        _, d, _, _ = input1.size()
        b, _, h, w = input2.size()
        # Output takes its channel count from input1 and its batch/spatial
        # dimensions from input2; zero-initialized before the kernel runs.
        output = input1.new(b, d, h, w).zero_()
        resample2d_cuda.forward(input1, input2, output, kernel_size, bilinear)
        return output
    @staticmethod
    def backward(ctx, grad_output):
        grad_output = grad_output.contiguous()
        assert grad_output.is_contiguous()
        input1, input2 = ctx.saved_tensors
        # Zero-initialized gradient buffers, filled in-place by the kernel.
        # NOTE(review): ``Variable`` is a legacy (pre-0.4) PyTorch wrapper;
        # plain tensors would suffice on modern PyTorch.
        grad_input1 = Variable(input1.new(input1.size()).zero_())
        grad_input2 = Variable(input1.new(input2.size()).zero_())
        resample2d_cuda.backward(input1, input2, grad_output.data,
                                 grad_input1.data, grad_input2.data,
                                 ctx.kernel_size, ctx.bilinear)
        # No gradients for the kernel_size / bilinear arguments.
        return grad_input1, grad_input2, None, None
class Resample2d(Module):
    """nn.Module front-end for :class:`Resample2dFunction`.

    Stores the kernel size and interpolation mode once, then applies the
    autograd function on every forward pass.
    """
    def __init__(self, kernel_size=1, bilinear = True):
        super().__init__()
        self.kernel_size = kernel_size
        self.bilinear = bilinear
    def forward(self, input1, input2):
        # The CUDA op requires a contiguous first input.
        return Resample2dFunction.apply(
            input1.contiguous(), input2, self.kernel_size, self.bilinear
        )
| 32.92 | 91 | 0.634265 | from torch.nn.modules.module import Module
from torch.autograd import Function, Variable
import resample2d_cuda
class Resample2dFunction(Function):
@staticmethod
def forward(ctx, input1, input2, kernel_size=1, bilinear= True):
assert input1.is_contiguous()
assert input2.is_contiguous()
ctx.save_for_backward(input1, input2)
ctx.kernel_size = kernel_size
ctx.bilinear = bilinear
_, d, _, _ = input1.size()
b, _, h, w = input2.size()
output = input1.new(b, d, h, w).zero_()
resample2d_cuda.forward(input1, input2, output, kernel_size, bilinear)
return output
@staticmethod
def backward(ctx, grad_output):
grad_output = grad_output.contiguous()
assert grad_output.is_contiguous()
input1, input2 = ctx.saved_tensors
grad_input1 = Variable(input1.new(input1.size()).zero_())
grad_input2 = Variable(input1.new(input2.size()).zero_())
resample2d_cuda.backward(input1, input2, grad_output.data,
grad_input1.data, grad_input2.data,
ctx.kernel_size, ctx.bilinear)
return grad_input1, grad_input2, None, None
class Resample2d(Module):
def __init__(self, kernel_size=1, bilinear = True):
super(Resample2d, self).__init__()
self.kernel_size = kernel_size
self.bilinear = bilinear
def forward(self, input1, input2):
input1_c = input1.contiguous()
return Resample2dFunction.apply(input1_c, input2, self.kernel_size, self.bilinear)
| true | true |
f7252c82c5f17709b13320a16f6f349e51df4253 | 6,134 | py | Python | cogs/permissions.py | cephox/bettermod | 13750ed01095976d594a96cf12b92bed340a4a8e | [
"MIT"
] | 1 | 2021-01-24T17:42:42.000Z | 2021-01-24T17:42:42.000Z | cogs/permissions.py | cephox/bettermod | 13750ed01095976d594a96cf12b92bed340a4a8e | [
"MIT"
] | null | null | null | cogs/permissions.py | cephox/bettermod | 13750ed01095976d594a96cf12b92bed340a4a8e | [
"MIT"
] | null | null | null | from datetime import datetime
from typing import Optional, Union
from discord.embeds import Embed
from discord.ext.commands import Cog, Context, group, has_permissions
from discord.member import Member
from discord.role import Role
from colors import Colors
from log import log
from permission import update_user_permission, list_user_permissions, get_user_permissions, has_own_permission, \
get_role_permissions, update_role_permission, list_role_permissions
from translation import get_user_language
class Permissions(Cog):
    # Admin-only commands for inspecting and editing the bot's own
    # permission flags for members and roles (persisted by the
    # ``permission`` module -- distinct from Discord's native permissions).
    #
    # NOTE: comments are used instead of docstrings on purpose: discord.py
    # surfaces Cog/command docstrings in the generated help output.
    def __init__(self, bot):
        self.bot = bot
    @group(aliases=["permission"])
    @has_permissions(administrator=True)
    async def permissions(self, ctx: Context, mention: Union[Member, Role], permission: Optional[str] = "",
                          enabled: Optional[int] = -1):
        # Entry point: dispatch on the type of the mention. ``enabled``
        # uses -1 as a sentinel meaning "query only, don't change".
        if isinstance(mention, Member):
            await self.member(ctx, mention, permission, enabled)
        elif isinstance(mention, Role):
            await self.role(ctx, mention, permission, enabled)
    async def member(self, ctx: Context, member: Member, permission: Optional[str] = "", enabled: Optional[int] = -1):
        # Three modes:
        #   no permission given        -> list all permissions of the member
        #   permission, enabled == -1  -> show that permission's state
        #   permission, enabled != -1  -> set it (enabled > 0 enables) and log
        lang = get_user_language(ctx.author.id)
        if not permission:
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permissions_for(str(member)),
                            value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
                                             list_user_permissions(member)]) if list_user_permissions(
                                member) else lang.none)
            embed.set_thumbnail(url=member.avatar_url)
            await ctx.send(embed=embed)
            return
        if permission and enabled == -1:
            perm = lang.yes if has_own_permission(permission, get_user_permissions(member)) else lang.no
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permission_for(permission, str(member)),
                            value=lang.enabled + f": `{perm}`")
            embed.set_thumbnail(url=member.avatar_url)
            await ctx.send(embed=embed)
            return
        if permission and enabled != -1:
            # Capture the previous state for the audit embed.
            before = lang.yes if has_own_permission(permission, get_user_permissions(member)) else lang.no
            update_user_permission(member, permission, enabled > 0)
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permission_set_for(str(member)),
                            value="`" + permission.title().replace("_",
                                                                   " ") + "` » `" + (
                                      lang.yes if enabled > 0 else lang.no) + "`",
                            inline=False)
            embed.add_field(name=lang.permissions_permission_before, value=f"`{before}`", inline=False)
            embed.add_field(name=lang.permissions_permission_set_by, value=ctx.author.mention, inline=False)
            embed.add_field(name=lang.permissions_permission_total,
                            value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
                                             list_user_permissions(member)]) if list_user_permissions(
                                member) else lang.none)
            embed.set_thumbnail(url=member.avatar_url)
            embed.set_footer(text=lang.member_id + ": " + str(member.id))
            await ctx.send(embed=embed)
            # Mirror the change into the log channel.
            await log(ctx, embed=embed)
    async def role(self, ctx: Context, role: Role, permission: Optional[str] = "", enabled: Optional[int] = -1):
        # Same three modes as ``member`` but for a role; role embeds carry
        # no thumbnail (roles have no avatar).
        lang = get_user_language(ctx.author.id)
        if not permission:
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permissions_for("@" + str(role)),
                            value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
                                             list_role_permissions(role)]) if list_role_permissions(
                                role) else lang.none)
            await ctx.send(embed=embed)
            return
        if permission and enabled == -1:
            perm = lang.yes if has_own_permission(permission, get_role_permissions(role)) else lang.no
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permission_for(permission, "@" + str(role)),
                            value=lang.enabled + f": `{perm}`")
            await ctx.send(embed=embed)
            return
        if permission and enabled != -1:
            # Capture the previous state for the audit embed.
            before = lang.yes if has_own_permission(permission, get_role_permissions(role)) else lang.no
            update_role_permission(role, permission, enabled > 0)
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permission_set_for(str(role)),
                            value="`" + permission.title().replace("_",
                                                                   " ") + "` » `" + (
                                      lang.yes if enabled > 0 else lang.no) + "`",
                            inline=False)
            embed.add_field(name=lang.permissions_permission_before, value=f"`{before}`", inline=False)
            embed.add_field(name=lang.permissions_permission_set_by, value=ctx.author.mention, inline=False)
            embed.add_field(name=lang.permissions_permission_total,
                            value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
                                             list_role_permissions(role)]) if list_role_permissions(
                                role) else lang.none)
            embed.set_footer(text=lang.role_id + ": " + str(role.id))
            await ctx.send(embed=embed)
            await log(ctx, embed=embed)
def setup(bot):
bot.add_cog(Permissions(bot))
| 51.546218 | 118 | 0.584773 | from datetime import datetime
from typing import Optional, Union
from discord.embeds import Embed
from discord.ext.commands import Cog, Context, group, has_permissions
from discord.member import Member
from discord.role import Role
from colors import Colors
from log import log
from permission import update_user_permission, list_user_permissions, get_user_permissions, has_own_permission, \
get_role_permissions, update_role_permission, list_role_permissions
from translation import get_user_language
class Permissions(Cog):
def __init__(self, bot):
self.bot = bot
@group(aliases=["permission"])
@has_permissions(administrator=True)
async def permissions(self, ctx: Context, mention: Union[Member, Role], permission: Optional[str] = "",
enabled: Optional[int] = -1):
if isinstance(mention, Member):
await self.member(ctx, mention, permission, enabled)
elif isinstance(mention, Role):
await self.role(ctx, mention, permission, enabled)
async def member(self, ctx: Context, member: Member, permission: Optional[str] = "", enabled: Optional[int] = -1):
lang = get_user_language(ctx.author.id)
if not permission:
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permissions_for(str(member)),
value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
list_user_permissions(member)]) if list_user_permissions(
member) else lang.none)
embed.set_thumbnail(url=member.avatar_url)
await ctx.send(embed=embed)
return
if permission and enabled == -1:
perm = lang.yes if has_own_permission(permission, get_user_permissions(member)) else lang.no
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permission_for(permission, str(member)),
value=lang.enabled + f": `{perm}`")
embed.set_thumbnail(url=member.avatar_url)
await ctx.send(embed=embed)
return
if permission and enabled != -1:
before = lang.yes if has_own_permission(permission, get_user_permissions(member)) else lang.no
update_user_permission(member, permission, enabled > 0)
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permission_set_for(str(member)),
value="`" + permission.title().replace("_",
" ") + "` » `" + (
lang.yes if enabled > 0 else lang.no) + "`",
inline=False)
embed.add_field(name=lang.permissions_permission_before, value=f"`{before}`", inline=False)
embed.add_field(name=lang.permissions_permission_set_by, value=ctx.author.mention, inline=False)
embed.add_field(name=lang.permissions_permission_total,
value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
list_user_permissions(member)]) if list_user_permissions(
member) else lang.none)
embed.set_thumbnail(url=member.avatar_url)
embed.set_footer(text=lang.member_id + ": " + str(member.id))
await ctx.send(embed=embed)
await log(ctx, embed=embed)
async def role(self, ctx: Context, role: Role, permission: Optional[str] = "", enabled: Optional[int] = -1):
lang = get_user_language(ctx.author.id)
if not permission:
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permissions_for("@" + str(role)),
value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
list_role_permissions(role)]) if list_role_permissions(
role) else lang.none)
await ctx.send(embed=embed)
return
if permission and enabled == -1:
perm = lang.yes if has_own_permission(permission, get_role_permissions(role)) else lang.no
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permission_for(permission, "@" + str(role)),
value=lang.enabled + f": `{perm}`")
await ctx.send(embed=embed)
return
if permission and enabled != -1:
before = lang.yes if has_own_permission(permission, get_role_permissions(role)) else lang.no
update_role_permission(role, permission, enabled > 0)
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permission_set_for(str(role)),
value="`" + permission.title().replace("_",
" ") + "` » `" + (
lang.yes if enabled > 0 else lang.no) + "`",
inline=False)
embed.add_field(name=lang.permissions_permission_before, value=f"`{before}`", inline=False)
embed.add_field(name=lang.permissions_permission_set_by, value=ctx.author.mention, inline=False)
embed.add_field(name=lang.permissions_permission_total,
value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
list_role_permissions(role)]) if list_role_permissions(
role) else lang.none)
embed.set_footer(text=lang.role_id + ": " + str(role.id))
await ctx.send(embed=embed)
await log(ctx, embed=embed)
def setup(bot):
bot.add_cog(Permissions(bot))
| true | true |
f7252f66bf5fa79b977b4129cd0bfdf672edf213 | 4,156 | py | Python | dkist/net/globus/tests/test_auth.py | DKISTDC/dkist | 3b97d7c0db144a717cfbe648b7402b8b8f9f2da2 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 21 | 2018-05-18T13:43:59.000Z | 2022-03-16T21:17:39.000Z | dkist/net/globus/tests/test_auth.py | Cadair/dkist | 2f4d930ea0e002db40e8ef17a79b0b4fb2e6d3f3 | [
"BSD-3-Clause"
] | 134 | 2017-12-07T16:09:24.000Z | 2022-03-17T16:13:55.000Z | dkist/net/globus/tests/test_auth.py | Cadair/dkist | 2f4d930ea0e002db40e8ef17a79b0b4fb2e6d3f3 | [
"BSD-3-Clause"
] | 4 | 2017-12-04T10:49:49.000Z | 2022-01-10T12:20:46.000Z | import json
import stat
import pathlib
import platform
import globus_sdk
import requests
from dkist.net.globus.auth import (ensure_globus_authorized, get_cache_contents,
get_cache_file_path, get_refresh_token_authorizer,
save_auth_cache, start_local_server)
def test_http_server():
server = start_local_server()
redirect_uri = "http://{a[0]}:{a[1]}".format(a=server.server_address)
inp_code = "wibble"
requests.get(redirect_uri + f"?code={inp_code}")
code = server.wait_for_code()
assert code == inp_code
def test_get_cache_file_path(mocker):
mocker.patch("appdirs.user_cache_dir", return_value="/tmp/test/")
path = get_cache_file_path()
assert isinstance(path, pathlib.Path)
assert path.parent == pathlib.Path("/tmp/test")
assert path.name == "globus_auth_cache.json"
def test_get_no_cache(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
# Test file not exists
cache = get_cache_contents()
assert isinstance(cache, dict)
assert not cache
def test_get_cache(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
with open(tmpdir / "globus_auth_cache.json", "w") as fd:
json.dump({"hello": "world"}, fd)
cache = get_cache_contents()
assert isinstance(cache, dict)
assert len(cache) == 1
assert cache == {"hello": "world"}
def test_get_cache_not_json(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
with open(tmpdir / "globus_auth_cache.json", "w") as fd:
fd.write("aslkjdasdjjdlsajdjklasjdj, akldjaskldjasd, lkjasdkljasldkjas")
cache = get_cache_contents()
assert isinstance(cache, dict)
assert not cache
def test_save_auth_cache(mocker, tmpdir):
filename = tmpdir / "globus_auth_cache.json"
assert not filename.exists() # Sanity check
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
save_auth_cache({"hello": "world"})
assert filename.exists()
statinfo = filename.stat()
# Test that the user can read and write
assert bool(statinfo.mode & stat.S_IRUSR)
assert bool(statinfo.mode & stat.S_IWUSR)
if platform.system() != 'Windows':
# Test that neither "Group" or "Other" have read permissions
assert not bool(statinfo.mode & stat.S_IRGRP)
assert not bool(statinfo.mode & stat.S_IROTH)
def test_get_refresh_token_authorizer(mocker):
# An example cache without real tokens
cache = {
"transfer.api.globus.org": {
"scope": "urn:globus:auth:scope:transfer.api.globus.org:all",
"access_token": "buscVeATmhfB0v1tzu8VmTfFRB1nwlF8bn1R9rQTI3Q",
"refresh_token": "YSbLZowAHfmhxehUqeOF3lFvoC0FlTT11QGupfWAOX4",
"token_type": "Bearer",
"expires_at_seconds": 1553362861,
"resource_server": "transfer.api.globus.org"
}
}
mocker.patch("dkist.net.globus.auth.get_cache_contents", return_value=cache)
auth = get_refresh_token_authorizer()['transfer.api.globus.org']
assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]
mocker.patch("dkist.net.globus.auth.do_native_app_authentication", return_value=cache)
auth = get_refresh_token_authorizer(force_reauth=True)['transfer.api.globus.org']
assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]
def test_ensure_auth_decorator(mocker):
error = globus_sdk.AuthAPIError(mocker.MagicMock())
mocker.patch.object(error, "http_status", 400)
mocker.patch.object(error, "message", "invalid_grant")
reauth = mocker.patch("dkist.net.globus.auth.get_refresh_token_authorizer")
called = [False]
@ensure_globus_authorized
def test_func():
if not called[0]:
called[0] = True
raise error
return True
assert test_func()
assert reauth.called_once_with(force_reauth=True)
| 33.788618 | 90 | 0.698027 | import json
import stat
import pathlib
import platform
import globus_sdk
import requests
from dkist.net.globus.auth import (ensure_globus_authorized, get_cache_contents,
get_cache_file_path, get_refresh_token_authorizer,
save_auth_cache, start_local_server)
def test_http_server():
server = start_local_server()
redirect_uri = "http://{a[0]}:{a[1]}".format(a=server.server_address)
inp_code = "wibble"
requests.get(redirect_uri + f"?code={inp_code}")
code = server.wait_for_code()
assert code == inp_code
def test_get_cache_file_path(mocker):
mocker.patch("appdirs.user_cache_dir", return_value="/tmp/test/")
path = get_cache_file_path()
assert isinstance(path, pathlib.Path)
assert path.parent == pathlib.Path("/tmp/test")
assert path.name == "globus_auth_cache.json"
def test_get_no_cache(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
cache = get_cache_contents()
assert isinstance(cache, dict)
assert not cache
def test_get_cache(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
with open(tmpdir / "globus_auth_cache.json", "w") as fd:
json.dump({"hello": "world"}, fd)
cache = get_cache_contents()
assert isinstance(cache, dict)
assert len(cache) == 1
assert cache == {"hello": "world"}
def test_get_cache_not_json(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
with open(tmpdir / "globus_auth_cache.json", "w") as fd:
fd.write("aslkjdasdjjdlsajdjklasjdj, akldjaskldjasd, lkjasdkljasldkjas")
cache = get_cache_contents()
assert isinstance(cache, dict)
assert not cache
def test_save_auth_cache(mocker, tmpdir):
filename = tmpdir / "globus_auth_cache.json"
assert not filename.exists()
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
save_auth_cache({"hello": "world"})
assert filename.exists()
statinfo = filename.stat()
assert bool(statinfo.mode & stat.S_IRUSR)
assert bool(statinfo.mode & stat.S_IWUSR)
if platform.system() != 'Windows':
assert not bool(statinfo.mode & stat.S_IRGRP)
assert not bool(statinfo.mode & stat.S_IROTH)
def test_get_refresh_token_authorizer(mocker):
cache = {
"transfer.api.globus.org": {
"scope": "urn:globus:auth:scope:transfer.api.globus.org:all",
"access_token": "buscVeATmhfB0v1tzu8VmTfFRB1nwlF8bn1R9rQTI3Q",
"refresh_token": "YSbLZowAHfmhxehUqeOF3lFvoC0FlTT11QGupfWAOX4",
"token_type": "Bearer",
"expires_at_seconds": 1553362861,
"resource_server": "transfer.api.globus.org"
}
}
mocker.patch("dkist.net.globus.auth.get_cache_contents", return_value=cache)
auth = get_refresh_token_authorizer()['transfer.api.globus.org']
assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]
mocker.patch("dkist.net.globus.auth.do_native_app_authentication", return_value=cache)
auth = get_refresh_token_authorizer(force_reauth=True)['transfer.api.globus.org']
assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]
def test_ensure_auth_decorator(mocker):
error = globus_sdk.AuthAPIError(mocker.MagicMock())
mocker.patch.object(error, "http_status", 400)
mocker.patch.object(error, "message", "invalid_grant")
reauth = mocker.patch("dkist.net.globus.auth.get_refresh_token_authorizer")
called = [False]
@ensure_globus_authorized
def test_func():
if not called[0]:
called[0] = True
raise error
return True
assert test_func()
assert reauth.called_once_with(force_reauth=True)
| true | true |
f7252fa094b50021b73b758dbb88c52ca1b4bf3a | 739 | py | Python | src/pytuya/devices/heater.py | python-tuya/python-tuya | c1938491a04bd6285d05defef0a9918f50d8bbc9 | [
"MIT"
] | 10 | 2017-07-11T16:58:33.000Z | 2021-03-15T23:19:28.000Z | src/pytuya/devices/heater.py | python-tuya/python-tuya | c1938491a04bd6285d05defef0a9918f50d8bbc9 | [
"MIT"
] | 3 | 2017-07-21T03:22:19.000Z | 2018-01-04T14:02:51.000Z | src/pytuya/devices/heater.py | python-tuya/python-tuya | c1938491a04bd6285d05defef0a9918f50d8bbc9 | [
"MIT"
] | null | null | null | from pytuya.devices.base import TuyaDevice
class TuyaHeater(TuyaDevice):
"""
Represents a Tuya Heater.
"""
def __init__(self, id, password, local_key, region):
super(TuyaHeater, self).__init__(id, password, local_key, region)
def state(self):
return self._last_reading.get('1', False)
def is_on(self):
return self.state()
def setting_temperature(self):
return self._last_reading.get('3', None)
def room_temperature(self):
return self._last_reading.get('4', None)
def key_lock(self):
return self._last_reading.get('2', False)
def timer(self):
return self._last_reading.get('5', 0)
def object_type(self):
return "Heater" | 24.633333 | 73 | 0.641407 | from pytuya.devices.base import TuyaDevice
class TuyaHeater(TuyaDevice):
def __init__(self, id, password, local_key, region):
super(TuyaHeater, self).__init__(id, password, local_key, region)
def state(self):
return self._last_reading.get('1', False)
def is_on(self):
return self.state()
def setting_temperature(self):
return self._last_reading.get('3', None)
def room_temperature(self):
return self._last_reading.get('4', None)
def key_lock(self):
return self._last_reading.get('2', False)
def timer(self):
return self._last_reading.get('5', 0)
def object_type(self):
return "Heater" | true | true |
f7252fb0603ecf1d70f00062b7fba0b9355d9f42 | 718 | py | Python | scripts/adapter.py | Skalwalker/BreastCancerRecognition | d934948103f6cc45eae65d6e11a351f8f69b3ae0 | [
"MIT"
] | null | null | null | scripts/adapter.py | Skalwalker/BreastCancerRecognition | d934948103f6cc45eae65d6e11a351f8f69b3ae0 | [
"MIT"
] | null | null | null | scripts/adapter.py | Skalwalker/BreastCancerRecognition | d934948103f6cc45eae65d6e11a351f8f69b3ae0 | [
"MIT"
] | 1 | 2020-11-10T15:09:34.000Z | 2020-11-10T15:09:34.000Z | import readfiles
import learnAlgorithms as learn
from plot import Plot as Plot
class Adapter(object):
def __init__(self, kernel, turnPlot, interactions):
self.log("Lendo Dados")
rf = readfiles.ReadFiles()
self.data = rf.getData()
self.labels = rf.getLabels()
self.la = learn.LearnAlgorithms(self.data, self.labels)
self.kernel = kernel
self.turnPlot = turnPlot
self.interactions = interactions
def run(self):
acs_vector, log_values = self.la.runSVM(self.kernel, self.turnPlot, self.interactions)
if(self.turnPlot):
Plot.plot_c(acs_vector, log_values)
def log(self, msg):
print('[Adapter] {}'.format(msg))
| 28.72 | 92 | 0.650418 | import readfiles
import learnAlgorithms as learn
from plot import Plot as Plot
class Adapter(object):
def __init__(self, kernel, turnPlot, interactions):
self.log("Lendo Dados")
rf = readfiles.ReadFiles()
self.data = rf.getData()
self.labels = rf.getLabels()
self.la = learn.LearnAlgorithms(self.data, self.labels)
self.kernel = kernel
self.turnPlot = turnPlot
self.interactions = interactions
def run(self):
acs_vector, log_values = self.la.runSVM(self.kernel, self.turnPlot, self.interactions)
if(self.turnPlot):
Plot.plot_c(acs_vector, log_values)
def log(self, msg):
print('[Adapter] {}'.format(msg))
| true | true |
f7252fb765341c7239f0b406db114d6920488e05 | 216,381 | py | Python | pysnmp-with-texts/Wellfleet-FRSW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/Wellfleet-FRSW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/Wellfleet-FRSW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module Wellfleet-FRSW-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Wellfleet-FRSW-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:40:14 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Counter32, IpAddress, Counter64, Bits, ModuleIdentity, MibIdentifier, TimeTicks, Unsigned32, iso, NotificationType, ObjectIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Counter32", "IpAddress", "Counter64", "Bits", "ModuleIdentity", "MibIdentifier", "TimeTicks", "Unsigned32", "iso", "NotificationType", "ObjectIdentity", "Gauge32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
wfFrswGroup, = mibBuilder.importSymbols("Wellfleet-COMMON-MIB", "wfFrswGroup")
wfFrSwDlcmiTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1), )
if mibBuilder.loadTexts: wfFrSwDlcmiTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiTable.setDescription('The Parameters for the Data Link Connection Management Interface corresponding to any interface. Incorporates the Error table.')
wfFrSwDlcmiEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwDlcmiCircuit"))
if mibBuilder.loadTexts: wfFrSwDlcmiEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEntry.setDescription('The parameters for a particular Data Link Connection Management Interface.')
wfFrSwDlcmiDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDelete.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3))).clone('init')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiState.setDescription('Indicates which state of DLCMI the interface is in')
wfFrSwDlcmiNniEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiNniEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiNniEnable.setDescription('Indicates whether a NNI is enabled for this entry.')
wfFrSwDlcmiCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCircuit.setDescription('Instance identifier; the circuit number of this entry.')
wfFrSwDlcmiManagementType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("none", 1), ("lmi", 2), ("t1617d", 3), ("t1617b", 4), ("annexa", 5), ("lmiswitch", 6), ("annexdswitch", 7), ("annexaswitch", 8), ("iwfoamenabled", 9), ("iwfoamdisabled", 10))).clone('t1617d')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiManagementType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiManagementType.setDescription('Indicates the Data Link Connection Management scheme that is active.')
wfFrSwL3NetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwL3NetAddress.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwL3NetAddress.setDescription('Indicates level 3 (IP) address of this frame relay interface')
wfFrSwDlcmiAddressLen = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4))).clone(namedValues=NamedValues(("twobyte", 2), ("threebyte", 3), ("fourbyte", 4))).clone('twobyte')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiAddressLen.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAddressLen.setDescription('Indicates the address length, including the control portion.')
wfFrSwDlcmiControlByteDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteDisable.setDescription('Indicates inclusion of control byte in q922 format.')
wfFrSwDlcmiPollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(15)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiPollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiPollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiFullEnquiryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiFullEnquiryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFullEnquiryInterval.setDescription('Indicates the number of status enquiries before a full status enquiry. (For bidirectional procedures.)')
wfFrSwDlcmiErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 11), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiErrorThreshold.setDescription('Indicates the number errors monitored before declaring the interface down.')
wfFrSwDlcmiMonitoredEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 12), Integer32().clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiMonitoredEvents.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMonitoredEvents.setDescription('Indicates the events over which error threshold is kept.')
wfFrSwDlcmiRecoveryCounts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiRecoveryCounts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiRecoveryCounts.setDescription('Indicates the number of correct polling cycles during recovery.')
wfFrSwDlcmiMaxSupportedVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1024)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiMaxSupportedVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMaxSupportedVCs.setDescription('Indicates the maximum number of VCs allowed.')
wfFrSwDlcmiVCsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiVCsInUse.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiVCsInUse.setDescription('Indicates the number of VCs that are currently configured on this interface.')
wfFrSwSwitchHdrErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSwitchHdrErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSwitchHdrErrors.setDescription('Indicates the number of frames dropped because they were received on the remote side with an invalid switch header.')
wfFrSwDlcmiSequenceCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceCount.setDescription("Indicates this switch's sequence counter; value of next to send.")
wfFrSwDlcmiLastReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiLastReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiLastReceived.setDescription('Indicates the sequence number just received from the end station.')
wfFrSwDlcmiActiveSeqCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiActiveSeqCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiActiveSeqCount.setDescription("Indicates the switch's sequence counter for sending status enquiry. (For bidirectional procedures.)")
wfFrSwDlcmiActiveReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiActiveReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiActiveReceived.setDescription('Indicates the sequence number just received from the enquiring station. (For bidirectional procedures.)')
wfFrSwDlcmiPolls = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiPolls.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiPolls.setDescription('This is the counter of where we are in the polling cycle.')
wfFrSwDlcmiAlarmTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiAlarmTimer.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAlarmTimer.setDescription('Counter of 1/2 second timeouts. Indicates when to expect poll.')
wfFrSwErrType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("reset", 1), ("other", 2), ("short", 3), ("long", 4), ("illegaldlci", 5), ("unknowndlci", 6), ("protoerr", 7), ("unknownie", 8), ("sequenceerr", 9), ("unknownrpt", 10), ("byteerr", 11), ("hdrerr", 12), ("formaterr", 13))).clone('reset')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrType.setDescription('Indicates the type of the last specific monitored error.')
wfFrSwErrData = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 24), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrData.setDescription('Contains as much of the error packet as possible.')
wfFrSwErrTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 25), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrTime.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrTime.setDescription('Indicates the time the last error occurred.')
wfFrSwBcMeasurementInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 2000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBcMeasurementInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBcMeasurementInterval.setDescription('Indicates the Committed Burst sample window interval in msec')
wfFrSwDlcmiMcastNoBufferErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiMcastNoBufferErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMcastNoBufferErrors.setDescription('Indicates the number of times a multicast failed partially or wholly because there are insufficient buffers available to create multiple copies of a multicast frame')
wfFrSwDlcmiFrameTooShortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooShortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooShortErrors.setDescription('Indicates the number of frames dropped that are too short to be accepted.')
wfFrSwDlcmiFrameTooLongErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooLongErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooLongErrors.setDescription('Indicates the number of frames dropped that are too long to be accepted.')
wfFrSwDlcmiIllegalDlciErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiIllegalDlciErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiIllegalDlciErrors.setDescription('Indicates the number of frames dropped that had an invalid DLCI value.')
wfFrSwDlcmiUnknownDlciErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownDlciErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownDlciErrors.setDescription('Indicates the number of frames dropped which had an unknown DLCI value.')
wfFrSwDlcmiProtocolErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiProtocolErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiProtocolErrors.setDescription('Indicates the number of frames dropped because of a DLCMI protocol violation.')
wfFrSwDlcmiUnknownIEErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownIEErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownIEErrors.setDescription('Indicates the number of frames dropped that had an unknown information element.')
wfFrSwDlcmiSequenceErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceErrors.setDescription('Indicates the number of frames dropped because of a DLCMI sequence error.')
wfFrSwDlcmiUnknownRPTErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownRPTErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownRPTErrors.setDescription('Indicates the number of frames dropped which had an unknown report type.')
wfFrSwDlcmiControlByteErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteErrors.setDescription('Indicates the number of frames dropped that had an unsupported control byte.')
wfFrSwDlcmiFormatErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFormatErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFormatErrors.setDescription('Indicates the number of frames dropped due to a frame format error.')
wfFrSwDlcmiOtherErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiOtherErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiOtherErrors.setDescription('Indicates the number of frames dropped due to unknown or other errors not counted by any error counter.')
wfFrSwDlcmiStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 39), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("running", 1), ("recovered", 2), ("fault", 3), ("start", 4))).clone('start')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiStatus.setDescription('Indicates which state of execution the DLCMI gate is in')
wfFrSwDlcmiNewVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiNewVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiNewVCs.setDescription('Indicates the number of newly added PVCs that we have not yet told the CPE about, by means of a full-status message.')
wfFrSwDlcmiDeletedVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDeletedVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDeletedVCs.setDescription('Indicates the number of deletedly added PVCs that we have not yet told the CPE about, by means of a full-status message.')
wfFrSwDlcmiFullStatusSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFullStatusSeq.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFullStatusSeq.setDescription('Indicates the expected sequence number for the next Status Enquiry message that will prove that the CPE received our last Full Status Message and knows about the deleted PVCs.')
wfFrSwDlcmiBidirect = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 43), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiBidirect.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiBidirect.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiDteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 44), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("running", 1), ("recovered", 2), ("fault", 3), ("start", 4))).clone('start')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteStatus.setDescription('Indicates which state of execution the DLCMI gate is in for bidirectional procedures.')
wfFrSwDlcmiDteSeqCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 45), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteSeqCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteSeqCount.setDescription("Indicates the switch's sequence counter for sending status enquiry. (For bidirectional procedures.)")
wfFrSwDlcmiDteReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 46), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteReceived.setDescription('Indicates the sequence number just received from the enquiring station. (For bidirectional procedures.)')
wfFrSwDlcmiDteLastReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 47), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteLastReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteLastReceived.setDescription('Indicates the sequence number just received from the end station.')
wfFrSwDlcmiDtePolls = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDtePolls.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDtePolls.setDescription('This is the counter of where we are in the polling cycle.')
wfFrSwDlcmiDtePollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 49), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDtePollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDtePollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiDteFullEnquiryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 50), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDteFullEnquiryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteFullEnquiryInterval.setDescription('Indicates the number of status enquiries before a full status enquiry. (For bidirectional procedures.)')
wfFrSwDlcmiDteErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 51), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDteErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteErrorThreshold.setDescription('Indicates the number errors monitored before declaring the interface down.')
wfFrSwDlcmiCrossNetEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 52), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetEnable.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiCrossNetPollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 53), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 86400)).clone(120)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetPollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetPollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiCrossNetErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 54), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetErrorThreshold.setDescription('Indicates the number missed heartbeat polls before declaring the cross-net PVC inactive.')
wfFrSwDlcmiCrossNetAsyncUpdateEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetAsyncUpdateEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetAsyncUpdateEnable.setDescription('Indicates whether we are to send to the other end of the network, status updates for dlcis as soon as there is a change of status for the dlci.')
wfFrSwDlcmiBcMeasurementEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiBcMeasurementEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiBcMeasurementEnable.setDescription('Indicates whether Committed Burst Measurement is enabled for this interface. If this attribute is set to DISABLE then DE bit setting in Frame Relay frames at this interface is disabled.')
wfFrSwDlcmiAsyncUpdateEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 57), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiAsyncUpdateEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAsyncUpdateEnable.setDescription('Indicates whether the link management entity should send an asynchronous single PVC update message when the state of a PVC is changed by a technician or by cross-net polling procedures. ')
wfFrSwDlcmiCrossNetListenEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 58), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetListenEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetListenEnable.setDescription("Indicates whether the link management entity should make a judgement of the PVC's status based on Cross Net updates.")
wfFrSwDlcmiSvcDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 59), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSvcDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSvcDisable.setDescription(' Indicates whether SVC is enabled or disabled for this access channel. ')
wfFrSwDlcmiL2AddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("e164", 1), ("x121", 2))).clone('e164')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiL2AddrType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiL2AddrType.setDescription(' Indicates the address type supported on this access channel. This information is needed when wFrSwDlcmiSVCDisable is enabled. ')
wfFrSwDlcmiEscapeMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("ingress", 2), ("egress", 3))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeMode.setDescription(' Identifies the Escape mode (none, ingress or egress) to be used for PVCs with wfFrSwVcEscapeMode set to enabled.')
wfFrSwDlcmiEscapeCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 62), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeCircuit.setDescription('Identifies the FR-DTE circuit number corresponding to an Escape PVC. Applies only to PVCs with wfFrSwVcEscapeEnable set to enabled.')
wfFrSwDlcmiEscapeVcCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 63), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeVcCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeVcCount.setDescription(' The number of PVCs on this DLCMI that are configured as Escape VCs')
wfFrSwDlcmiIwfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 64), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("sdlc2frsw", 2))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiIwfMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiIwfMode.setDescription(' Identifies the interworking mode (none, SDLC-to-FRSW) to be used for PVCs with wfFrSwVcEscapeMode set to enabled.')
wfFrSwDlcmiSvcBillingEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSvcBillingEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSvcBillingEnable.setDescription('Indicates whether the SVC Billing on this access channel set to enable.')
wfFrSwDlcmiSpvcAgent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("cra", 2), ("caa", 3), ("craandcaa", 4))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSpvcAgent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSpvcAgent.setDescription(' Indicates if an SPVC Call Request Agent, Call Accept Agent, or both are enabled on this FRSW circuit.')
wfFrSwDlcmiCallAccDlciSelectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("any", 1), ("specific", 2))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCallAccDlciSelectionType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCallAccDlciSelectionType.setDescription('Indicates to the Call Accept Agent to accept SPVC Call Setup requests for any available DLCI or for a specific DLCI. Call Setup requests with the wrong selection type will be rejected.')
# wfFrSwCctTable (1.3.6.1.4.1.18.3.5.9.6.2): per-virtual-circuit table for
# the Frame Relay switch, indexed by (wfFrSwCctNumber, wfFrSwCctDlci).
# Every object in this table is declared setStatus('obsolete'), so the whole
# table is retained for backward compatibility only.
wfFrSwCctTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2), )
if mibBuilder.loadTexts: wfFrSwCctTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctTable.setDescription('Frame Relay Circuit table gives information about a virtual circuit.')
wfFrSwCctEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwCctNumber"), (0, "Wellfleet-FRSW-MIB", "wfFrSwCctDlci"))
if mibBuilder.loadTexts: wfFrSwCctEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctEntry.setDescription('An entry in the Frame Relay (Virtual) Circuit table.')
# Row life-cycle and index columns (1-3), plus basic circuit state (4-5).
wfFrSwCctDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2), ("system", 3))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctDelete.setDescription('Indication to delete this frame relay interface.')
wfFrSwCctNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctNumber.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctNumber.setDescription('Instance identifier; the circuit number of this interface.')
# Named values here mark the valid DLCI ranges for 2-, 3- and 4-byte headers.
wfFrSwCctDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctDlci.setDescription('Instance identifier; this indicates the virtual circuit identifier')
wfFrSwCctState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 1), ("active", 2), ("inactive", 3), ("control", 4), ("user", 5))).clone('invalid')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctState.setDescription('Indicates whether the particular virtual circuit is operational.')
wfFrSwCctMulticast = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("multicast", 1), ("unicast", 2))).clone('unicast')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctMulticast.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctMulticast.setDescription('Indicates whether this dlci is used for multicast or single destination.')
# Traffic-contract parameters (columns 6-11): committed/excess burst and
# throughput, per direction.
wfFrSwCctInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInBc.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBc.setDescription('Indicates the Incoming Committed Burst bits for this virtual circuit.')
wfFrSwCctOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctOutBc.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutBc.setDescription('Indicates the Outgoing Committed Burst bits for this virtual circuit.')
wfFrSwCctInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctInBe.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBe.setDescription('Indicates the Incoming Excess Burst bits for this virtual circuit.')
wfFrSwCctOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctOutBe.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutBe.setDescription('Indicates the Outgoing Excess Burst bits for this virtual circuit.')
wfFrSwCctInThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctInThroughput.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInThroughput.setDescription('Indicates the incoming throughput in bits/sec for this virtual circuit.')
wfFrSwCctOutThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctOutThroughput.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutThroughput.setDescription('Indicates the outgoing throughput in bits/sec for this virtual circuit.')
# Timestamps (columns 12-13): sysUpTime of VC creation and last state change.
wfFrSwCctCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctCreationTime.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctCreationTime.setDescription('Indicates the value of sysUpTime when the VC was created.')
wfFrSwCctLastTimeChange = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLastTimeChange.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLastTimeChange.setDescription('Indicates the value of sysUpTime when last there was a change in VC state.')
# Local-interface traffic counters (columns 14-38): frames/octets sent,
# congestion bits (FECN/BECN/DE) set by this switch, discards, and receives.
wfFrSwCctLocalSentNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEFrames.setDescription('Indicates the number of frames without the DE bit sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEOctets.setDescription('Indicates the number of octets without DE bit sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEFrames.setDescription('Indicates the number of frames with DE bit set sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEOctets.setDescription('Indicates the number of octets with DE bit set sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSetFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the FECN bit .')
wfFrSwCctLocalSetFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the FECN bit.')
wfFrSwCctLocalSetBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the BECN bit.')
wfFrSwCctLocalSetBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the BECN bit.')
wfFrSwCctLocalSetDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the DE bit.')
wfFrSwCctLocalSetDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the DE bit.')
wfFrSwCctLocalDropNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEFrames.setDescription('Indicates the number of frames received over the local interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctLocalDropNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEOctets.setDescription('Indicates the number of octets in frames received over the local interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctLocalDropDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEFrames.setDescription('Indicates the number of frames received over the local interface, having the DE bit set, which were discarded.')
wfFrSwCctLocalDropDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEOctets.setDescription('Indicates the number of octets in frames received over the local interface, having the DE bit set, which were discarded.')
wfFrSwCctInactiveVCDropFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropFrames.setDescription('Indicates how many frames were discarded because the virtual circuit was inactive.')
wfFrSwCctInactiveVCDropOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropOctets.setDescription('Indicates how many Octets were discarded because the virtual circuit was inactive.')
wfFrSwCctLocalRecvNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEFrames.setDescription('Indicates the number of frames received on this virtual circuit over the local interface.')
wfFrSwCctLocalRecvNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEOctets.setDescription('Indicates the number of octets received on this virtual circuit over the local interface.')
wfFrSwCctLocalRecvDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEFrames.setDescription('Indicates the number of frames received over the local interface with the DE bit set.')
wfFrSwCctLocalRecvDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEOctets.setDescription('Indicates the number of octets in frames received over the local interface with the DE bit set.')
wfFrSwCctLocalRecvFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNFrames.setDescription('Indicates the number of frames received over the local interface with the FECN bit set.')
wfFrSwCctLocalRecvFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNOctets.setDescription('Indicates the number of octets in frames received over the local interface with the FECN bit set.')
wfFrSwCctLocalRecvBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNFrames.setDescription('Indicates the number of frames received over the local interface with the BECN bit set.')
wfFrSwCctLocalRecvBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNOctets.setDescription('Indicates the number of octets in frames received over the local interface with the BECN bit set.')
wfFrSwCctLocalRecentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecentNonDEOctets.setDescription('Indicates the number of octets received over the local interface during the most recent sampling period.')
# Remote-interface traffic counters (columns 39-58): the same sent/marked/
# dropped/received breakdown as above, for the remote end of the circuit.
wfFrSwCctRemoteSentNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEFrames.setDescription('Indicates the number of Non DE set frames sent over the remote interface.')
wfFrSwCctRemoteSentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEOctets.setDescription('Indicates the number of Non DE set octets sent over the remote interface.')
wfFrSwCctRemoteSentDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEFrames.setDescription('Indicates the number of DE set frames sent over the remote interface.')
wfFrSwCctRemoteSentDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEOctets.setDescription('Indicates the number of DE set octets sent over the remote interface.')
wfFrSwCctRemoteSetFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNFrames.setDescription('Indicates the number of frames sent to the remote interface on which this switch set the FECN bit.')
wfFrSwCctRemoteSetFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNOctets.setDescription('Indicates the number of octets in frames sent to the remote interface that on which this switch set the FECN bit.')
wfFrSwCctRemoteSetBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNFrames.setDescription('Indicates the number of frames sent to the remote interface on which this switch set the BECN bit.')
wfFrSwCctRemoteSetBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNOctets.setDescription('Indicates the number of octets in frames sent to the remote interface on which this switch set the BECN bit.')
wfFrSwCctRemoteDropNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEFrames.setDescription('Indicates the number of frames received over the remote interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctRemoteDropNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctRemoteDropDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEFrames.setDescription('Indicates the number of frames received over the remote interface, having the DE bit set, which were discarded.')
wfFrSwCctRemoteDropDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface, having the DE bit set, which were discarded.')
wfFrSwCctRemoteRecvNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEFrames.setDescription('Indicates the number of frames received on this virtual circuit over the remote interface.')
wfFrSwCctRemoteRecvNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEOctets.setDescription('Indicates the number of octets received on this virtual circuit over the remote interface.')
wfFrSwCctRemoteRecvDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 53), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEFrames.setDescription('Indicates the number of frames received over the remote interface with the DE bit set.')
wfFrSwCctRemoteRecvDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 54), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the DE bit set.')
wfFrSwCctRemoteRecvFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 55), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNFrames.setDescription('Indicates the number of frames received over the remote interface with the FECN bit set.')
wfFrSwCctRemoteRecvFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 56), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the FECN bit set.')
wfFrSwCctRemoteRecvBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 57), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNFrames.setDescription('Indicates the number of frames received over the remote interface with the BECN bit set.')
wfFrSwCctRemoteRecvBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 58), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the BECN bit set.')
# Congestion state, connection topology and status/bookkeeping (columns 59-69),
# including cross-network heartbeat-polling state for NNI signalling.
wfFrSwCctLocalBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalBecnState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalBecnState.setDescription('Indicates the local BECN state')
wfFrSwCctRemoteBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteBecnState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteBecnState.setDescription('Indicates the remote BECN state')
wfFrSwCctLocalOrRemoteConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("remote", 2))).clone('remote')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalOrRemoteConnection.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalOrRemoteConnection.setDescription('Indicates whether this connection is Local to Local Connection or Local to Remote connection.')
wfFrSwCctInBcOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInBcOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBcOctets.setDescription('Indicates the Incoming Committed Burst in octets for this virtual circuit.')
wfFrSwCctStateSet = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 63), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctStateSet.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctStateSet.setDescription('User access for setting the state of a virtual circuit')
wfFrSwCctReportedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 64), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acked", 1), ("unacked", 2), ("unreported", 3))).clone('unreported')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctReportedStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctReportedStatus.setDescription('Record keeping for circuit status')
wfFrSwCctReceivedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctReceivedStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctReceivedStatus.setDescription('State of a virtual circuit as reported by the network at an NNI')
wfFrSwCctCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctCrossNetStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctCrossNetStatus.setDescription('State of a virtual circuit as reported by the other end of the network under bidirectional signalling.')
wfFrSwCctXNetSent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unsent", 1), ("sent", 2))).clone('unsent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetSent.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetSent.setDescription('Whether we have sent a cross net status message for this VC yet.')
wfFrSwCctXNetReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 68), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("recv", 1), ("unrecv", 2))).clone('unrecv')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetReceived.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetReceived.setDescription('Whether we have received a cross net status message for this VC during the current polling interval. ')
wfFrSwCctXNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 69), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetErrors.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetErrors.setDescription('This is the count of the consecutive errors (usually timeouts) against this VC in cross-network heartbeat polling ')
# wfFrSwTupleTable (1.3.6.1.4.1.18.3.5.9.6.3): describes each virtual
# circuit as a pair of endpoints 'A' and 'B', each an (IP address, DLCI)
# tuple; the four endpoint columns together form the row index.  All
# objects here are declared setStatus('obsolete').
wfFrSwTupleTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3), )
if mibBuilder.loadTexts: wfFrSwTupleTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleTable.setDescription('The Parameters for the Tuple table, identifying the endpoints of virtual circuits as pairs of IP addresses and DLCI.')
wfFrSwTupleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwTupleIpAddrA"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleDlciA"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleIpAddrB"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleDlciB"))
if mibBuilder.loadTexts: wfFrSwTupleEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleEntry.setDescription('The parameters for a particular Tuple.')
# Row-deletion control, then the four index columns (endpoint A/B address
# and DLCI).
wfFrSwTupleDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwTupleDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDelete.setDescription('Indication to delete this tuple.')
wfFrSwTupleIpAddrA = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleIpAddrA.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleIpAddrA.setDescription("Instance indentifier; indicates the IP address associated with endpoint 'A' of a virtual circuit.")
wfFrSwTupleDlciA = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleDlciA.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDlciA.setDescription("Instance identfier; indicates the DLCI associated with endpoint 'A' of a virtual circuit.")
wfFrSwTupleIpAddrB = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleIpAddrB.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleIpAddrB.setDescription("Instance identfier; indicates the IP address associated with endpoint 'B' of a virtual circuit.")
wfFrSwTupleDlciB = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleDlciB.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDlciB.setDescription("Instance identifier; Indicates the DLCI associated with endpoint 'B' of a virtual circuit.")
# wfFrSwMcastTable (1.3.6.1.4.1.18.3.5.9.6.4): associates a multicast DLCI
# (and the IP address of the interface it is defined on) with the individual
# DLCIs that belong to the group; rows are indexed by wfFrSwMcastIndex.
# Unlike the two preceding tables, these objects are status 'mandatory'.
wfFrSwMcastTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4), )
if mibBuilder.loadTexts: wfFrSwMcastTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastTable.setDescription('The list of multicast addresses')
wfFrSwMcastEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwMcastIndex"))
if mibBuilder.loadTexts: wfFrSwMcastEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastEntry.setDescription('The parameters for a particular Multicast address.')
wfFrSwMcastDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastDelete.setDescription('Indication to delete this multicast instance.')
wfFrSwMcastIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwMcastIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIndex.setDescription('Index of this multicast DLCI instance')
wfFrSwMcastIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIpAddr.setDescription('IP address of the interface in which this multicast DLCI is defined.')
wfFrSwMcastDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastDlci.setDescription('Identifies the multicast DLCI with which the IndividualDlci is associated.')
wfFrSwMcastIndividualDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastIndividualDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIndividualDlci.setDescription('Indicates the DLCI associated with the above multicast DLCI.')
# --- wfFrSwUsage: frame-relay switch billing (usage accounting) group ---
# Subtree 1.3.6.1.4.1.18.3.5.9.6.5. Scalars below configure the PVC and SVC
# billing processes (file volume/directory/prefix, timer intervals), expose
# read-only timestamps and effective ("Cur*") values, and provide write-once
# action triggers (set non-zero to fire, then reset to zero).
wfFrSwUsage = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5))
# PVC billing configuration (read-write).
wfFrSwUsageEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageEnable.setDescription('Enable/Disable FRSW billing.')
wfFrSwUsageVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageVolume.setDescription("Indicates the file system volume number to which the billing usage data files will be written. The volume number corresponds to the slot number on which the volume resides. Note: Value 0 has the special meaning that no 'Store' and 'Flush' operations will take place. This translates to no Billing data will be written to the local file system. 'Update' operations will still be performed on each local slot. Full Billing statistics will still be available in the wfFrSwUsageTable MIB.")
wfFrSwUsageVolumeBackup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageVolumeBackup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageVolumeBackup.setDescription('Indicates the backup volume if wfFrSwUsageVolume becomes inoperative. Note: This feature is not implemented in this release.')
wfFrSwUsageDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageDirectory.setDescription('The name of the directory where the billing usage data files are stored. ')
wfFrSwUsageFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFilePrefix.setDescription('The base name of billing usage data files.')
# Timer intervals: base tick in seconds (default 20), the rest in minutes
# (defaults 10/10/60/60) and must convert to a multiple of the base tick.
wfFrSwUsageTimerInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageTimerInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageTimerInterval.setDescription('This number determines the timer interval (number of seconds) unit for the Billing process to perform its various timer driven tasks. i.e. updating billing usage data, writing billing usage data to file system and file system management activities.')
wfFrSwUsageUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to collect and update billing usage data in the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB follow by zeroing the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCleanupInterval.setDescription('This is the interval (number of minutes) for the Billing process to check and delete old billing usage data files. Note: When converted to seconds, this must be a multilple of wfFrSwUsageTimerInterval.')
# Read-only status: local time zone and timestamps of the last expiration of
# each interval timer (seconds since midnight Jan 1, 1976 GMT).
wfFrSwUsageLocalTimeZone = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLocalTimeZone.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageLocalTimeZone.setDescription('Indicates local time zone of the switch')
wfFrSwUsageUpdateTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageUpdateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateTimeStamp.setDescription('Time stamp of last wfFrSwUsageUpdateInterval timer expiration or the starting time of the current wfFrSwUsageUpdateInterval. This value is number of seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageStoreTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStoreTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreTimeStamp.setDescription('Time stamp of last wfFrSwUsageStoreInterval timer expiration or the starting time of the current wfFrSwUsageStoreInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT). ')
wfFrSwUsageFlushTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 14), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageFlushTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushTimeStamp.setDescription('Time stamp of last wfFrSwUsageFlushInterval timer expiration or the starting time of the current wfFrSwUsageFlushInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwUsageCleanupTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 15), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCleanupTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCleanupTimeStamp.setDescription('Time stamp of last wfFrSwUsageCleanupInterval timer expiration or the starting time of the current wfFrSwUsageCleanupInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
# One-shot action triggers: writing a non-zero value fires the operation;
# the manager must write zero again before re-triggering.
wfFrSwUsageUpdateData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageUpdateData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating of the billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageStoreData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageStoreData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageFlushData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFlushData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the billing usage data and followed by zeroing the wfFrSwBillingUsage MIB. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageFileCleanup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFileCleanup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFileCleanup.setDescription('Setting this attribute to a non-zero value will cause an immediate checking and deleting old billing usage data files. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
# Operational state plus read-only "Cur*" mirrors of each configured value;
# per the descriptions these track the read-write scalar unless it was set
# to an invalid value.
wfFrSwUsageState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageState.setDescription('current state FRSW billing.')
wfFrSwUsageCurVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurVolume.setDescription('current file system volume number used. This number is the same as wfFrSwUsageVolume except when the user sets wfFrSwUsageVolume to an invalid number.')
wfFrSwUsageCurVolumeBackup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurVolumeBackup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurVolumeBackup.setDescription('curent backup file system volume number used. This number is the same as wfFrSwUsageVolumeBackUp except when the user sets wfFrSwUsageVolume to an invalid number. Note: This feature is not implemented in this release.')
wfFrSwUsageCurDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 23), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurDirectory.setDescription('current directory name used. This number is the same as wfFrSwUsageDirectory except when the user sets wfFrSwUsageDirectory to an invalid name.')
wfFrSwUsageCurFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 24), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurFilePrefix.setDescription('current base file name used. This number is the same as wfFrSwUsageFilePrefix except when the user sets wfFrSwUsageFilePrefix to an invalid name.')
wfFrSwUsageCurTimerInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(20)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurTimerInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurTimerInterval.setDescription('current timer interval number used. This number is the same as wfFrSwUsageTimerInterval except when the user sets wfFrSwUsageTimerInterval to an invalid value.')
wfFrSwUsageCurUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurUpdateInterval.setDescription('current update interval number used. This number is the same as wfFrSwUsageUpdateInterval except when the user sets wfFrSwUsageUpdateInterval to an invalid value.')
wfFrSwUsageCurStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurStoreInterval.setDescription('current store timer interval number used. This number is the same as wfFrSwUsageStoreInterval except when the user sets wfFrSwUsageStoreInterval to an invalid value.')
wfFrSwUsageCurFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 28), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurFlushInterval.setDescription('current flush timer interval number used. This number is the same as wfFrSwUsageFlushInterval except when the user sets wfFrSwUsageFlushInterval to an invalid value.')
wfFrSwUsageCurCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurCleanupInterval.setDescription('current file cleanup timer interval number used. This number is the same as wfFrSwUsageCleanupInterval except when the user sets wfFrSwUsageCleanupInterval to an invalid value.')
wfFrSwUsageDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageDebug.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageDebug.setDescription('Enable/Disable printing of debug edl (trap) messages. NOTE: Do not enable this attribute in operational enviornment as it will likely flood the logging facility. This attribute is reserved for specialized debugging in a controlled lab enviornment.')
wfFrSwUsageCurDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurDebug.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurDebug.setDescription('current debug value used. This value is the same as wfFrSwUsageDebug except when the user sets wfFrSwUsageDeubg to an invalid value.')
wfFrSwUsageSwitchId = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 32), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSwitchId.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageSwitchId.setDescription('switch id used in the billing usage data file.')
wfFrSwUsageNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 33), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageNumEntries.setDescription('number of entries in wfFrSwUsageTable')
# SVC billing group: parallel set of scalars (config, timestamps, triggers,
# state, Cur* mirrors) for switched virtual circuits, OIDs .34 - .60.
wfFrSwSvcUsageEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageEnable.setDescription('Enable/Disable FRSW SVC billing.')
wfFrSwSvcUsageInterimRecordEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 35), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageInterimRecordEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageInterimRecordEnable.setDescription('Enable/Disable Writing FRSW SVC billing record while SVC connection is still up.')
wfFrSwSvcUsageVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 36), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageVolume.setDescription("Indicates the file system volume number to which the SVC billing usage data files will be written. The volume number corresponds to the slot number on which the volume resides. Note: Value 0 has the special meaning that no 'Store' and 'Flush' operations will take place. This translates to no Billing data will be written to the local file system. 'Update' operations will still be performed on each local slot. Full Billing statistics will still be available in the wfFrSwUsageTable MIB.")
wfFrSwSvcUsageDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 37), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageDirectory.setDescription('The name of the directory where the SVC billing usage data files are stored. ')
wfFrSwSvcUsageFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 38), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFilePrefix.setDescription('The base name of SVC billing usage data files.')
wfFrSwSvcUsageUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 39), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to collect and update billing usage data in the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 40), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 41), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB follow by zeroing the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 42), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupInterval.setDescription('This is the interval (number of minutes) for the SVC Billing process to check and delete old billing usage data files. Note: When converted to seconds, this must be a multilple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageUpdateTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 43), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageUpdateInterval timer expiration or the starting time of the current wfFrSwSvcUsageUpdateInterval. This value is number of seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwSvcUsageStoreTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 44), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageStoreInterval timer expiration or the starting time of the current wfFrSwSvcUsageStoreInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT). ')
wfFrSwSvcUsageFlushTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 45), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageFlushInterval timer expiration or the starting time of the current wfFrSwSvcUsageFlushInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwSvcUsageCleanupTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 46), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageCleanupInterval timer expiration or the starting time of the current wfFrSwSvcUsageCleanupInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwSvcUsageUpdateData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating of the SVC billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageStoreData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 48), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the SVC billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageFlushData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 49), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the SVC billing usage data and followed by zeroing the wfFrSwBillingUsage MIB. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageFileCleanup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 50), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFileCleanup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFileCleanup.setDescription('Setting this attribute to a non-zero value will cause an immediate checking and deleting old SVC billing usage data files. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 51), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageState.setDescription('current state FRSW SVC billing.')
wfFrSwSvcUsageCurVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 52), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurVolume.setDescription('current file system volume number used for SVC Billing. This number is the same as wfFrSwSvcUsageVolume except when the user sets wfFrSwSvcUsageVolume to an invalid number.')
wfFrSwSvcUsageCurDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 53), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurDirectory.setDescription('current directory name used for SVC Billing. This number is the same as wfFrSwSvcUsageDirectory except when the user sets wfFrSwSvcUsageDirectory to an invalid name.')
wfFrSwSvcUsageCurFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 54), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFilePrefix.setDescription('current base file name used for SVC Billing. This name is the same as wfFrSwSvcUsageFilePrefix except when the user sets wfFrSwSvcUsageFilePrefix to an invalid name.')
wfFrSwSvcUsageCurUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 55), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurUpdateInterval.setDescription('current update interval number used. This number is the same as wfFrSwSvcUsageUpdateInterval except when the user sets wfFrSwSvcUsageUpdateInterval to an invalid value.')
wfFrSwSvcUsageCurStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 56), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurStoreInterval.setDescription('current store timer interval number used. This number is the same as wfFrSwSvcUsageStoreInterval except when the user sets wfFrSwSvcUsageStoreInterval to an invalid value.')
wfFrSwSvcUsageCurFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 57), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFlushInterval.setDescription('current flush timer interval number used. This number is the same as wfFrSwSvcUsageFlushInterval except when the user sets wfFrSwSvcUsageFlushInterval to an invalid value.')
wfFrSwSvcUsageCurCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 58), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurCleanupInterval.setDescription('current file cleanup timer interval number used. This number is the same as wfFrSwSvcUsageCleanupInterval except when the user sets wfFrSwSvcUsageCleanupInterval to an invalid value.')
wfFrSwSvcUsageNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageNumEntries.setDescription('number of entries in wfFrSwSvcUsageTable')
wfFrSwSvcUsageVersionId = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageVersionId.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageVersionId.setDescription('The Software Version ID field is a two byte, right justified, binary formated value that identifies the particular version number of the software release. High nibble of byte 1 represents the major version number. Low nibble of byte 1 represents the release number. Byte 2 represents the integration number.')
# Misc identification and file-layout version scalars (.61 - .63).
wfFrSwUsageSwitchName = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 61), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageSwitchName.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageSwitchName.setDescription('The Switch name is a 6-bytes, right justified with leading blanks as necessary. It can be combination of letters, numbers and blanks. This ID identifies the particular networks equipment for SVC billing usage data process.')
wfFrSwPvcUsageFileLayout = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwPvcUsageFileLayout.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwPvcUsageFileLayout.setDescription('PVC usage file layout version')
wfFrSwSvcUsageFileLayout = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 63), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageFileLayout.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFileLayout.setDescription('SVC usage file layout version')
# --- wfFrSwUsageTable (obsolete): per-PVC billing usage records ---
# Subtree 1.3.6.1.4.1.18.3.5.9.6.6, indexed by (CircuitNumber, Dlci).
# Every object in this table is marked status 'obsolete'; it is kept for
# backward compatibility of the generated module. The table's remaining
# columns continue past this section of the file.
wfFrSwUsageTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6), )
if mibBuilder.loadTexts: wfFrSwUsageTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageTable.setDescription('The Billing usage table.')
wfFrSwUsageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwUsageCircuitNumber"), (0, "Wellfleet-FRSW-MIB", "wfFrSwUsageDlci"))
if mibBuilder.loadTexts: wfFrSwUsageEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEntry.setDescription('The parameters for Billing Usage.')
wfFrSwUsageDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageDelete.setDescription('Indicates status of this entry. FRSW_USAGE_CREATED is the normal case. FRSW_USAGE_DELETED means the corresponding tuple and vc instances were deleted some time during this collection interval. This billing instance will be deleted at the end of the next wfFrSwUsageFlush period after this billing record is written out to the file system.')
# Index columns: circuit number and DLCI (named values mark the valid
# 2/3/4-byte DLCI ranges).
wfFrSwUsageCircuitNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCircuitNumber.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageCircuitNumber.setDescription('Instance identifier; the circuit number of this interface. ')
wfFrSwUsageDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageDlci.setDescription('Instance identifier; this indicates which virtual circuit. ')
wfFrSwUsageIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageIPAddress.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageIPAddress.setDescription('(Local) IP address corresponding to wfFrSwUsageCircuitNumber of this virtual circuit. ')
# 64-bit quantities are split across paired High/Low 32-bit columns.
wfFrSwUsageStartTimeStampHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampHigh.setDescription('Time stamp of the starting time (the high 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageStartTimeStampLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampLow.setDescription('Time stamp of the starting time (the low 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageEndTimeStampHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampHigh.setDescription('Time stamp of the ending time (the high 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageEndTimeStampLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampLow.setDescription('Time stamp of the ending time (the low 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageSentNonDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesHigh.setDescription('Number (the high 32 bits) of local frames sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesLow.setDescription('Number (the low 32 bits) of local frames sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsHigh.setDescription('Number (the high 32 bits) of local octets sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsLow.setDescription('Number (the low 32 bits) of local octets sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesHigh.setDescription('Number (the high 32 bits) of local frames with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesLow.setDescription('Number (the low 32 bits) of local frames with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsHigh.setDescription('Number (the high 32 bits) of local octets with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsLow.setDescription('Number (the low 32 bits) of local octets with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageLastNonDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentNonDEFrames value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEFrames is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentNonDEFrames has wrapped around.')
wfFrSwUsageLastNonDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentNonDEFrames value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastNonDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentNonDEOctets value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEOctets is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentNonDEOctets has wrapped around.')
wfFrSwUsageLastNonDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentNonDEOctets value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentDEFrames value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEFrames is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentDEFrames has wrapped around.')
wfFrSwUsageLastDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentDEFrames value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentDEOctets value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentDEOctets is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentDEOctets has wrapped around.')
wfFrSwUsageLastDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentDEOctets value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageRemoteIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 25), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageRemoteIPAddress.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageRemoteIPAddress.setDescription('IP address of the other side (remote) of this PVC endpoint.')
wfFrSwUsageRemoteDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageRemoteDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageRemoteDlci.setDescription('DLCI number of the other side (remote) of this PVC endpoint.')
# --- wfFrSwVc table: definition and configuration/state columns -------------
# Frame Relay Virtual Circuit table at OID 1.3.6.1.4.1.18.3.5.9.6.7, with
# rows indexed by (wfFrSwVcCircuit, wfFrSwVcDlci).  Columns in this section
# cover row lifecycle, VC state, congestion/burst parameters, signalled
# status, destination addressing, traffic priority, and change time stamps.
wfFrSwVcTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7), )
if mibBuilder.loadTexts: wfFrSwVcTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTable.setDescription('Frame Relay Virtual Circuit table gives information about a virtual circuit.')
wfFrSwVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwVcCircuit"), (0, "Wellfleet-FRSW-MIB", "wfFrSwVcDlci"))
if mibBuilder.loadTexts: wfFrSwVcEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcEntry.setDescription('An entry in the Frame Relay (Virtual) Circuit table.')
# Row create/delete control column (read-write, default 'created').
wfFrSwVcDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2), ("system", 3), ("svc", 4), ("spvccra", 5), ("spvccaa", 6))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDelete.setDescription('Indication to delete this virtual circuit.')
# Index columns (read-only accessible-for-notify style instance identifiers).
wfFrSwVcCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCircuit.setDescription('Instance identifier; the circuit number of this interface (logical port).')
wfFrSwVcDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDlci.setDescription('Instance identifier; this indicates the virtual circuit identifier')
wfFrSwVcState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 1), ("active", 2), ("inactive", 3), ("control", 4), ("user", 5))).clone('invalid')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcState.setDescription('Indicates whether the particular virtual circuit is operational.')
wfFrSwVcStateSet = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcStateSet.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcStateSet.setDescription('User access for setting the state of a virtual circuit')
wfFrSwVcMulticast = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("multicast", 1), ("unicast", 2))).clone('unicast')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcMulticast.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcMulticast.setDescription('Indicates whether this dlci is used for multicast or a single destination.')
# Traffic-contract parameters: excess burst (Be), throughput (CIR), and
# committed burst (Bc) in each direction.  wfFrSwVcInBe defaults to
# 2147483647 (INT32_MAX).
wfFrSwVcInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 7), Integer32().clone(2147483647)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBe.setDescription('Indicates the maximum number Incoming Excess Burst bits that are allowed in a configured time interval (T).')
wfFrSwVcOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcOutBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutBe.setDescription('Indicates the Outgoing Excess Burst bits for this virtual circuit.')
wfFrSwVcInThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcInThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInThroughput.setDescription('Indicates the incoming throughput in bits/sec for this virtual circuit.')
wfFrSwVcOutThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcOutThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutThroughput.setDescription('Indicates the outgoing throughput in bits/sec for this virtual circuit.')
wfFrSwVcOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcOutBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutBc.setDescription('Indicates the Outgoing Committed Burst bits for this virtual circuit.')
wfFrSwVcInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBc.setDescription('Indicates the Incoming Committed Burst bits for this virtual circuit.')
wfFrSwVcInBcOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBcOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBcOctets.setDescription('Indicates the Incoming Committed Burst in octets for this virtual circuit.')
wfFrSwVcBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBecnState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBecnState.setDescription('Indicates the BECN state')
# Status-signalling bookkeeping (LMI/NNI reporting and cross-network
# bidirectional status exchange).
wfFrSwVcReportedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acked", 1), ("unacked", 2), ("unreported", 3))).clone('unreported')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcReportedStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcReportedStatus.setDescription('Record keeping for circuit status')
wfFrSwVcReceivedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcReceivedStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcReceivedStatus.setDescription('State of a virtual circuit as reported by the network at an NNI')
wfFrSwVcCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCrossNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCrossNetStatus.setDescription('State of a virtual circuit as reported by the other end of the network under bidirectional signalling.')
wfFrSwVcXNetSent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unsent", 1), ("sent", 2))).clone('unsent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetSent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetSent.setDescription('Whether we have sent a cross net status message for this VC yet.')
wfFrSwVcXNetReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("recv", 1), ("unrecv", 2))).clone('unrecv')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetReceived.setDescription('Whether we have received a cross net status message for this VC during the current polling interval.')
# Destination addressing: called IP address plus a DLCI constrained to the
# full 2-to-4-byte DLCI range 16..8257535.
wfFrSwVcCalledIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 20), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCalledIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCalledIpAddr.setDescription('Indicates the IP address associated with destination of a virtual circuit.')
wfFrSwVcCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCalledDlci.setDescription('Indicates the DLCI associated with destination of a virtual circuit.')
wfFrSwVcTrfPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 999))).clone(namedValues=NamedValues(("one", 1), ("two", 2), ("three", 3), ("default", 999))).clone('default')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcTrfPriority.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTrfPriority.setDescription('Defines the traffic priority level of all the incoming packets on this VC. FRSW_VCPRIORITY_DEFAULT - Set all incoming user traffic packets to the default priority used by the port. FRSW_VCPRIORITY_ONE - Set all incoming packets to priority 1. FRSW_VCPRIORITY_TWO - Set all incoming packets to priority 2. FRSW_VCPRIORITY_THREE - Set all incoming packets to priority 3. Priority 0 is reserved for network critical packets like OSPF, FR LMI and SMDS heartbeat and is not available for user traffic.')
# sysUpTime stamps for VC creation and last state change.
wfFrSwVcCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 23), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCreationTime.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCreationTime.setDescription('Indicates the value of sysUpTime when the VC was created.')
wfFrSwVcLastTimeChange = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 24), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcLastTimeChange.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcLastTimeChange.setDescription('Indicates the value of sysUpTime when last there was a change in VC state.')
# --- wfFrSwVc table: per-VC traffic counters --------------------------------
# Read-only Counter32 columns (sub-ids 25..52) counting frames and octets
# transmitted, marked (FECN/BECN/DE set by this switch), dropped, and
# received on each virtual circuit.
wfFrSwVcTxNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxNonDeFrames.setDescription('Indicates the number of frames without the DE bit sent on this virtual circuit over the interface.')
wfFrSwVcTxNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxNonDeOctets.setDescription('Indicates the number of octets without DE bit sent on this virtual circuit over the interface.')
wfFrSwVcTxDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxDeFrames.setDescription('Indicates the number of frames with DE bit set sent on this virtual circuit over the interface.')
wfFrSwVcTxDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxDeOctets.setDescription('Indicates the number of octets with DE bit set sent on this virtual circuit over the interface.')
# Frames/octets on which THIS switch set the congestion/discard bits.
wfFrSwVcSetFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetFecnFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the FECN bit.')
wfFrSwVcSetFecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetFecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetFecnOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the FECN bit.')
wfFrSwVcSetBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetBecnFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the BECN bit.')
wfFrSwVcSetBecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetBecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetBecnOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the BECN bit.')
wfFrSwVcSetDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetDeFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the DE bit.')
wfFrSwVcSetDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetDeOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the DE bit.')
# Discards, broken out by DE marking and by inactive-VC reason.
wfFrSwVcDropNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropNonDeFrames.setDescription('Indicates the number of frames received over the interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwVcDropNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropNonDeOctets.setDescription('Indicates the number of octets in frames received over the interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwVcDropDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropDeFrames.setDescription('Indicates the number of frames received over the interface, having the DE bit set, which were discarded.')
wfFrSwVcDropDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropDeOctets.setDescription('Indicates the number of octets in frames received over the interface, having the DE bit set, which were discarded.')
wfFrSwVcInactiveVcDropFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropFrames.setDescription('Indicates how many frames were discarded because the virtual circuit was inactive.')
wfFrSwVcInactiveVcDropOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropOctets.setDescription('Indicates how many Octets were discarded because the virtual circuit was inactive.')
# Receive counters, broken out by DE/FECN/BECN bits as received.
wfFrSwVcRecvNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeFrames.setDescription('Indicates the number of frames received on this virtual circuit over the interface.')
wfFrSwVcRecvNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeOctets.setDescription('Indicates the number of octets received on this virtual circuit over the interface.')
wfFrSwVcRecvDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvDeFrames.setDescription('Indicates the number of frames received over the interface with the DE bit set.')
wfFrSwVcRecvDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvDeOctets.setDescription('Indicates the number of octets in frames received over the interface with the DE bit set.')
wfFrSwVcRecvFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvFecnFrames.setDescription('Indicates the number of frames received over the interface with the FECN bit set.')
wfFrSwVcRecvFecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvFecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvFecnOctets.setDescription('Indicates the number of octets in frames received over the interface with the FECN bit set.')
wfFrSwVcRecvBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvBecnFrames.setDescription('Indicates the number of frames received over the interface with the BECN bit set.')
wfFrSwVcRecvBecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvBecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvBecnOctets.setDescription('Indicates the number of octets in frames received over the interface with the BECN bit set.')
wfFrSwVcRecentNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecentNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecentNonDeOctets.setDescription('Indicates the number of octets received over the interface during the most recent sampling period.')
wfFrSwVcXNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetErrors.setDescription('This is the count of the consecutive errors (usually timeouts) against this VC in cross-network heartbeat polling.')
wfFrSwVcDropExcessBurstFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstFrames.setDescription('Indicates the number of Excess Burst Frames dropped on this virtual circuit.')
wfFrSwVcDropExcessBurstOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstOctets.setDescription('Indicates the number of Excess Burst Octets dropped on this virtual circuit.')
# --- wfFrSwVc table: burst config, PVC source redirect, ATM IWF -------------
# Remaining columns (sub-ids 53..62): incoming excess-burst settings, the
# PVC source-redirect mechanism (action/type/state plus the backup remote
# endpoint and its cross-net status), and the ATM interworking mode.
wfFrSwVcInBeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 53), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBeOctets.setDescription('Indicates the maximum number Incoming Excess Burst bytes that are allowed in a configured time interval (T).')
# Configurable incoming Be in bits; defaults to 2147483647 (INT32_MAX).
wfFrSwVcCfgInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 54), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCfgInBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCfgInBe.setDescription('The number of Excess Burst in bits')
wfFrSwVcRedirectAction = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("redirecttoprimary", 2), ("redirecttobackup", 3), ("switchondemand", 4), ("swondemandtoprimary", 5), ("swondemandtobackup", 6))).clone('redirecttoprimary')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcRedirectAction.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectAction.setDescription("Perform pvc source redirect manually or based on cross-net updates: 'redirecttoprimary(2)' will force to switch to primary; 'redirecttobackup(3)' will force to switch to backup; 'switchondemand(4)' will switch based on cross-net status of the primary to and from primary; 'swondemandtoprimary(5)' will switch to primary from backup iff cross-net of primary became active; 'swondemandtobackup(6)' will switch to backup from primary iff cross-net of primary became inactive.")
wfFrSwVcRedirectType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("transparent", 1), ("intrusiven", 2), ("intrusivea", 3))).clone('intrusivea')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcRedirectType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectType.setDescription("Type of dte notification at switching time: 'transparent(1)' will not send notification to dte; 'intrusiven(2)' will send async update with NEW bit; 'intrusivea(3)' will send async update with A bit not set.")
wfFrSwVcRedirectState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 57), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 12, 13, 21))).clone(namedValues=NamedValues(("backupinactive", 1), ("primaryactive", 2), ("switchtobackup", 3), ("backupactive", 12), ("switchtoprimary", 13), ("holddown", 21))).clone('backupinactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRedirectState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectState.setDescription("PVC Source Redirect State: 'backupinactive(1)' - backup is not configured and/or cross-net status is inactive; will allow traffic only through primary. 'primaryactive(2)' - both primary and backup rx'ed 'active' cross-net status, currently primary is active and traffic only through primary. 'switchtobackup(3)' - primary cross-net status is inactive, but can not switch to backup due to manual (or semi-manual) operation of the redirect; will allow traffic only through primary. 'backupactive(12)' - cross-net status is 'inactive' for primary; will allow traffic only through backup. 'switchtoprimary(13)' - cross-net status is 'active' for primary, should by can not switch to primary due to manual (or semi-manual) operation of the redirect; will allow traffic only through backup. 'holddown(21)' - down state used as intermediate state at switching time (for not more then a second); all traffic is dropped.")
# Backup remote endpoint (IP + DLCI) and its cross-net health tracking.
wfFrSwVcBackupCalledIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 58), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcBackupCalledIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCalledIpAddr.setDescription(' Backup Called Ip Address of the remote end of the PVC.')
wfFrSwVcBackupCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 59), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcBackupCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCalledDlci.setDescription(' Backup Called Dlci of the remote end of the PVC.')
wfFrSwVcBackupCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetStatus.setDescription(' Cross net status of the backup remote end of the PVC.')
wfFrSwVcBackupCrossNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 61), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetErrors.setDescription(' Support counter of missed cross net update from backup remote end of the PVC, range: [0, wfFrSwDlcmiCrossNetErrorThreshold].')
# ATM interworking mode selector; its setDescription() call follows after
# this section in the file.
wfFrSwVcAtmIwfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 62), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("atmDisableIwfMode", 1), ("atmServiceIwfTransparentMode", 2), ("atmServiceIwfTranslationMode", 3), ("atmNetworkIwfMode", 4))).clone('atmDisableIwfMode')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfMode.setDescription('This attribute indicates the mode of FR-ATM interworking over this FR PVC or that FR-ATM interworking is not enabled on it.')
wfFrSwVcAtmIwfVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 63), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVPI.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVPI.setDescription('This is relevant only when the ATM/FR interworking is enabled for this PVC. This indicates the ATM virtual path identifier associated with the Frame Relay PVC described by this record virtual circuit identifier.')
wfFrSwVcAtmIwfVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 64), Integer32().subtype(subtypeSpec=ValueRangeConstraint(32, 65535)).clone(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVCI.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVCI.setDescription('This is relevant only when FR/ATM interworking is enabled for this PVC. This indicates the ATM virtual circuit identifier associated with the Frame Relay PVC described by this record.')
wfFrSwVcAtmIwfLossPriorityPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapDe", 1), ("atmiwfsetDe1", 2), ("atmiwfsetDe0", 3))).clone('atmiwfmapDe')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfLossPriorityPolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfLossPriorityPolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy for translating ATM CLP to FR DE on this PVC or simply setting FR DE to a constant value for all frames.')
wfFrSwVcAtmIwfDePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapClp", 1), ("atmiwfsetClp1", 2), ("atmiwfsetClp0", 3))).clone('atmiwfmapClp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfDePolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfDePolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy on this PVC for translating FR DE to ATM CLP or simply setting CLP to a constant value for all frames.')
wfFrSwVcAtmIwfEfciPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapFecn", 1), ("atmiwfsetFecn1", 2), ("atmiwfsetFecn0", 3))).clone('atmiwfmapFecn')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfEfciPolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfEfciPolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy on this PVC for translating FR FECN to ATM EFCI or simply setting ATM EFCI to a constant value for all frames.')
wfFrSwVcEscapeEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 68), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcEscapeEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcEscapeEnable.setDescription(' Identifies this PVC as either a standard FRSW PVC (escape disabled) or an Escape PVC (escape enabled). The type of Escape PVC (ingress node or egress node) is specified in the wfFrSwDlcmiEntry Object.')
wfFrSwVcSpvcCallState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 69), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inactive", 1), ("inprogress", 2), ("active", 3))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSpvcCallState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSpvcCallState.setDescription('Indicates to the state of the SPVC call for this DLCI.')
wfFrSwVcCallReqCalledAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 70), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledAddr.setDescription('Called E.164/X.121 Address for an SPVC Call Request Agent. The address type is determined by the wfFrSwDlcmiL2AddrType attribute in wfFrSwDlcmiEntry.')
wfFrSwVcCallReqDlciSelectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 71), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("any", 1), ("specific", 2))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqDlciSelectionType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqDlciSelectionType.setDescription("Indicates to the Calling End of an SPVC Call Request whether to use any available DLCI, or a specific DLCI. If 'specific' is chosen, the called DLCI value is specified in wfFrSwVcCallReqCalledDlci.")
wfFrSwVcCallReqCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 72), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledDlci.setDescription("Indicates to the Calling End of an SPVC Call Request the DLCI to be used at the destination of a virtual circuit. This value should be specified when 'specific' wfFrSwVcCallReqDlciSelectionType is chosen.")
wfFrSwVcCallReqRetryTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 73), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqRetryTimer.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqRetryTimer.setDescription('Indicates the number of minutes the Call Request Agent should wait for an SPVC CONNECT message before declaring a Call Setup request REJECTED.')
wfFrSwVcCallReqMaxRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 74), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqMaxRetries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqMaxRetries.setDescription('Indicates the number of times the Call Request Agent should retry failed Call Setup requests before declaring the SPVC invalid.')
# --- wfFrSwIsdnBaseTable: OID subtree 1.3.6.1.4.1.18.3.5.9.6.8 ---
# Per-slot FRSW-over-ISDN base configuration: selects whether the ANI (calling
# party) or DNIS (called party) number maps an ISDN call to a FRSW UNI.
# Indexed by wfFrSwIsdnBaseSlotNum.
wfFrSwIsdnBaseTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8), )
if mibBuilder.loadTexts: wfFrSwIsdnBaseTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseTable.setDescription('This is a FRSW over ISDN configuration table. This table specifies whether the Calling Party (ANI) or Called Party (DNIS) ISDN Phone Number should be used to map the ISDN call to a particular FRSW UNI. The table is indexed by the Slot Number where the PRI(s) exist.')
wfFrSwIsdnBaseEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnBaseSlotNum"))
if mibBuilder.loadTexts: wfFrSwIsdnBaseEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseEntry.setDescription('Instance Id for this table.')
wfFrSwIsdnBaseDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnBaseDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseDelete.setDescription('Indication to delete this FRSW ISDN interface. ')
wfFrSwIsdnBaseSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnBaseSlotNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseSlotNum.setDescription('This number is the Slot Number for the PRI interface(s) that are being configured for FRSW ISDN. There will be one of these tables for every slot where an FRSW ISDN PRI Interface exists.')
wfFrSwIsdnBaseAssocType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dnis", 1), ("ani", 2))).clone('dnis')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnBaseAssocType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseAssocType.setDescription('Indicates which ISDN Phone Number (ANI or DNIS) to use to do the ISDN call to FRSW UNI mapping.')
# --- wfFrSwIsdnAssocTable: OID subtree 1.3.6.1.4.1.18.3.5.9.6.9 ---
# Maps an ISDN phone number (per slot) to a FRSW UNI hunt-group index, with an
# optional screening enable. Indexed by (wfFrSwIsdnAssocSlotNum, wfFrSwIsdnAssocNum).
wfFrSwIsdnAssocTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9), )
if mibBuilder.loadTexts: wfFrSwIsdnAssocTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocTable.setDescription('This table defines the Association Table to be used for the FRSW over ISDN application. The table contains a list of ISDN Phone Numbers and the associated FRSW UNI Index Number. The table is indexed by the Slot Number and the ISDN Phone Number.')
wfFrSwIsdnAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnAssocSlotNum"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnAssocNum"))
if mibBuilder.loadTexts: wfFrSwIsdnAssocEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocEntry.setDescription('Instance Id for this table.')
wfFrSwIsdnAssocDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocDelete.setDescription('Indication to delete this Association Instance.')
wfFrSwIsdnAssocSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnAssocSlotNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocSlotNum.setDescription('Slot with which this ISDN Phone Number is associated.')
wfFrSwIsdnAssocNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnAssocNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocNum.setDescription('ISDN Phone Number that is used to look up the appropriate FRSW UNI Index. This number is compared with either the Calling Party Number (ANI) Information Element or the Called Party Number (DNIS) Information Element contained in the ISDN Call Setup Message.')
wfFrSwIsdnAssocScrnEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocScrnEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocScrnEnable.setDescription('Indicate whether allowed screening should be enabled or disabled for all of the UNIs contained in the FRSW UNI Index.')
# 0x7FFFFFFF default marks the UNI index as unconfigured (see description text).
wfFrSwIsdnAssocIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 5), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocIndex.setDescription('A number that indicates the FRSW UNI Index that is is associated with the ISDN Phone Number. This FRSW UNI Index is used as a key to obtain the UNIs and the Screening information from the wfFrSwIsdnScrnEntry and wfFrSwIsdnUniEntry mibs. The default for the index is 2**31 - 1 = 2147483647 = 0x7FFFFFFF, which represents an unconfigured index number.')
# --- wfFrSwIsdnUniTable: OID subtree 1.3.6.1.4.1.18.3.5.9.6.10 ---
# Collects FRSW UNIs into hunt groups identified by a UNI index.
# Indexed by (wfFrSwIsdnUniIndex, wfFrSwIsdnUniNum).
wfFrSwIsdnUniTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10), )
if mibBuilder.loadTexts: wfFrSwIsdnUniTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniTable.setDescription('This table is used by the FRSW over ISDN application. The table defines a list of FRSW UNIs that are to be collected into a hunt group identifiable by an Index Number.')
wfFrSwIsdnUniEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnUniIndex"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnUniNum"))
if mibBuilder.loadTexts: wfFrSwIsdnUniEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniEntry.setDescription('Instance Id for this table.')
wfFrSwIsdnUniDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnUniDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniDelete.setDescription('Indication to delete this FRSW UNI Index Instance.')
wfFrSwIsdnUniIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniIndex.setDescription('FRSW UNI Index -- a number that identifies a group of related FRSW UNIs that are collected together as a hunt group. This number ties this entry to an entry in wfFrSwIsdnAssocEntry.')
wfFrSwIsdnUniNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniNum.setDescription('A FRSW UNI/Circuit.')
wfFrSwIsdnUniState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("available", 1), ("inuse", 2))).clone('available')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniState.setDescription('State of this UNI (available or in-use).')
# --- wfFrSwIsdnScrnTable: OID subtree 1.3.6.1.4.1.18.3.5.9.6.11 ---
# Incoming-call screening: allowed ISDN phone numbers per FRSW UNI index.
# Consulted only when wfFrSwIsdnAssocScrnEnable is enabled for the index.
# Indexed by (wfFrSwIsdnScrnIndex, wfFrSwIsdnScrnNum).
wfFrSwIsdnScrnTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11), )
if mibBuilder.loadTexts: wfFrSwIsdnScrnTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnTable.setDescription('This is the incoming call screening table for the FRSW over ISDN application. The table consists of a FRSW UNI Index and a list of allowable ISDN Phone numbers for that FRSW UNI Index. The table is indexed by both the FRSW UNI Index and the ISDN Phone Number. This table is referenced only when the wfFrSwIsdnAssocScrnEnable is set to Enabled for this FRSW UNI Index.')
wfFrSwIsdnScrnEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnScrnIndex"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnScrnNum"))
if mibBuilder.loadTexts: wfFrSwIsdnScrnEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnEntry.setDescription(' Instance Id for this table. ')
wfFrSwIsdnScrnDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnScrnDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnDelete.setDescription(' Indication to delete this Scrn Instance. ')
wfFrSwIsdnScrnIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnScrnIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnIndex.setDescription('FRSW UNI Index - A number that ties this entry to an entry in wfFrSwIsdnAssocEntry.')
wfFrSwIsdnScrnNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnScrnNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnNum.setDescription('ISDN Phone Number of a user authorized to access the UNIs contained in the FRSW UNI Index. ')
wfFrSwSigTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12), )
if mibBuilder.loadTexts: wfFrSwSigTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTable.setDescription(" The Frame relay signalling table contains frame relay signalling entries indexed by the frame relay access channel circuit number. An instance of wfFrSwSigEntry is required for each frame relay access channel with frame relay signalling enabled. The absence of wfFrSwSigEntry for a given frame relay access channel implies that frame relay signalling is disabled for the circuit. Note that the terms 'incoming' and 'outgoing' refer to the frame mode call with respect to the network side of the interface. The terminology used by CCITT Q.933/Q.931 is different. ")
wfFrSwSigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwSigCircuit"))
if mibBuilder.loadTexts: wfFrSwSigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigEntry.setDescription(' An entry in the Frame Relay signalling port information table. ')
wfFrSwSigDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDelete.setDescription(' Indication to delete this instance ')
wfFrSwSigCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigCircuit.setDescription(' The circuit number for this frame relay access channel ')
wfFrSwSigSvcDlciLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigSvcDlciLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigSvcDlciLow.setDescription(' Lowest DLCI to be used for SVC, the default value is for 2 octet frame header ')
wfFrSwSigSvcDlciHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(991)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigSvcDlciHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigSvcDlciHigh.setDescription(' Highest DLCI to be used for SVC, the default value is for 2 octet frame header. ')
wfFrSwSigDlciAssign = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("increment", 1), ("decrement", 2))).clone('decrement')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDlciAssign.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDlciAssign.setDescription(" Determines if DLCI's are assigned starting at wfFrSwSigSvcDlciHigh and working towards wfFrSwSigSvcDlciLow or vice versa. ")
wfFrSwSigMaxNumOfSvcs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxNumOfSvcs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxNumOfSvcs.setDescription(' Indicates the maximum number of simultaneous switched virtual circuits allowed on the logical line. ')
wfFrSwSigNumOfSvcsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigNumOfSvcsInUse.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigNumOfSvcsInUse.setDescription(' Indicates the number of switched virtual circuits in use on the logical line. ')
wfFrSwSigDefaultThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultThroughput.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing throughput fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultMinAcceptThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultMinAcceptThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultMinAcceptThroughput.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing minimum acceptable throughput fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigDefaultBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultBc.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing Bc fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultBe.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing Be fields when they are not included in the setup message by the user. ')
wfFrSwSigMaxInThroughputPerSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxInThroughputPerSvc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxInThroughputPerSvc.setDescription(' This is the maximum incoming throughput that any single SVC may negotiate for a call. Calls requesting in excess of this attribute are rejected. ')
wfFrSwSigMaxOutThroughputPerSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxOutThroughputPerSvc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxOutThroughputPerSvc.setDescription(' This is the maximum outgoing throughput that any single SVC may negotiate for a call. Calls requesting in excess of this attribute are rejected. ')
wfFrSwSigTotalInNegotiableThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigTotalInNegotiableThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalInNegotiableThroughput.setDescription(' This is the total maximum incoming throughput that is available for all frame mode calls on the port. If the sum of the incoming throughput requested by a call and wfFrSwSigTotalInCurrentThroughput is in excess of this value, the call is rejected. ')
wfFrSwSigTotalInCurrentThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigTotalInCurrentThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalInCurrentThroughput.setDescription(" This is the total incoming throughput that has been negotiated for use by all SVC's on the port. ")
wfFrSwSigTotalOutNegotiableThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigTotalOutNegotiableThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalOutNegotiableThroughput.setDescription(' This is the total maximum outgoing throughput that is available for all frame mode calls on the port. If the sum of the outgoing throughput requested by a call and wfFrSwSigTotalOutCurrentThroughput is in excess of this value, the call is rejected. ')
wfFrSwSigTotalOutCurrentThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigTotalOutCurrentThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalOutCurrentThroughput.setDescription(" This is the total incoming throughput that has been negotiated for use by all SVC's on the port. ")
wfFrSwSigXNetClearingDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigXNetClearingDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigXNetClearingDisable.setDescription(' If cross-net polling (wfFrSwDlcmiCrossNetEnable) is enabled on this interface, and the error threshold (wfFrSwDlcmiCrossNetErrorThreshold) is exceeded, the network can clear the call. ')
wfFrSwSigCallingPartyIEMandatory = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigCallingPartyIEMandatory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigCallingPartyIEMandatory.setDescription(' Reject the call if the Calling Party IE is absent in the setup message or if the provided Calling Party IE fails address authentication tests againt the configured address(es) on the ingress logical line.')
wfFrSwSigT301 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT301.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT301.setDescription(' Timer number: T301 default time-out: 3 min state of call: call initiated cause for start: incoming setup normal stop: outgoing connect at the first expiry: clear call at the second expiry: timer not restarted ')
wfFrSwSigT303 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT303.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT303.setDescription(' Timer number: T303 default time-out: 4 s state of call: call present cause for start: outgoing setup normal stop: incoming connect/call-proceeding/ release-complete at the first expiry: retransmit setup, restart T303 at the second expiry: clear call ')
wfFrSwSigT305 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 90)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT305.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT305.setDescription(' Timer number: T305 default time-out: 30 s state of call: disconnect ind cause for start: outgoing disconnect normal stop: incoming release/disconnect at the first expiry: outgoing release at the second expiry: timer not restarted ')
# --- wfFrSwSig table columns: Q.933 signalling timers and packet counters ---
# Columns under OID 1.3.6.1.4.1.18.3.5.9.6.12.1 (the signalling table whose
# row object is defined earlier in this generated module).  Timer columns are
# read-write Integer32 values in seconds (range 2-90); counter columns are
# read-only Counter32 statistics.  Each object's full semantics are carried in
# its setDescription() text, loaded only when mibBuilder.loadTexts is set.

# T308 (release request) supervision timer, default 4 s.
wfFrSwSigT308 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT308.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT308.setDescription(' Timer number: T308 default time-out: 4 s state of call: release req cause for start: outgoing release normal stop: incoming release/release-complete at the first expiry: retransmit release, restart T308 at the second expiry: place access channel in maintenance ')
# T310 (incoming call proceeding) supervision timer, default 10 s.
wfFrSwSigT310 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT310.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT310.setDescription(' Timer number: T310 default time-out: 10 s state of call: incoming call proceeding cause for start: incoming call proceeding normal stop: incoming connect/disconnect at the first expiry: clear call at the second expiry: timer not restarted ')
# T322 (status enquiry) supervision timer, default 4 s.
wfFrSwSigT322 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT322.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT322.setDescription(' Timer number: T322 default time-out: 4 s state of call: any call state cause for start: outgoing status enquiry normal stop: incoming status/disconnect/ release/release-complete at the first expiry: retransmit status-enq, restart T322 at the second expiry: resend status enq and restart T322 ')
# Incoming signalling message counters (one Counter32 per Q.933 message type).
wfFrSwSigInSetupPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInSetupPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInSetupPkts.setDescription(' number of incoming setup packets ')
wfFrSwSigInCallProceedingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInCallProceedingPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInCallProceedingPkts.setDescription(' number of incoming call proceeding packets ')
wfFrSwSigInConnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInConnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInConnectPkts.setDescription(' number of incoming connect packets ')
wfFrSwSigInDisconnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInDisconnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInDisconnectPkts.setDescription(' number of incoming disconnect packets ')
wfFrSwSigInReleasePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInReleasePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInReleasePkts.setDescription(' number of incoming release packets ')
wfFrSwSigInReleaseCompletePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInReleaseCompletePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInReleaseCompletePkts.setDescription(' number of incoming release complete packets ')
wfFrSwSigInStatusEnquiryPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInStatusEnquiryPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInStatusEnquiryPkts.setDescription(' number of incoming status enquiry packets ')
wfFrSwSigInStatusPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInStatusPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInStatusPkts.setDescription(' number of incoming status packets ')
wfFrSwSigInUnknownPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInUnknownPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInUnknownPkts.setDescription(' number of incoming unknown packets ')
# Outgoing signalling message counters.
wfFrSwSigOutSetupPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutSetupPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutSetupPkts.setDescription(' number of outgoing setup packets ')
wfFrSwSigOutCallProceedingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutCallProceedingPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutCallProceedingPkts.setDescription(' number of outgoing call proceeding packets ')
wfFrSwSigOutConnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutConnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutConnectPkts.setDescription(' number of outgoing connect packets ')
wfFrSwSigOutDisconnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutDisconnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutDisconnectPkts.setDescription(' number of outgoing disconnect packets ')
wfFrSwSigOutReleasePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutReleasePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutReleasePkts.setDescription(' number of outgoing release packets ')
# Counter of outgoing RELEASE COMPLETE signalling messages (column 40 of the
# wfFrSwSig table).  Read-only Counter32.
# Fix: the generated description read 'number of outgoing release packest' —
# a typo, and the wrong message type for this object (that text belongs to
# wfFrSwSigOutReleasePkts, column 39).  Corrected to name the release
# complete message, mirroring the incoming-side counter's description.
wfFrSwSigOutReleaseCompletePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutReleaseCompletePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutReleaseCompletePkts.setDescription(' number of outgoing release complete packets ')
# --- wfFrSwSig table columns (continued): status counters, failure counters,
# and per-call policy knobs.  Same table as above (OID ...9.6.12.1).
wfFrSwSigOutStatusEnquiryPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutStatusEnquiryPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutStatusEnquiryPkts.setDescription(' number of outgoing status enquiry packets ')
wfFrSwSigOutStatusPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutStatusPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutStatusPkts.setDescription(' number of outgoing status packets ')
# Connection-failure statistics.
wfFrSwSigRejectedConnRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigRejectedConnRequests.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigRejectedConnRequests.setDescription(' number of connections rejected ')
wfFrSwSigNwrkAbortedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigNwrkAbortedConnections.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigNwrkAbortedConnections.setDescription(' number of connections aborted by network ')
wfFrSwSigL2Resets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigL2Resets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigL2Resets.setDescription(' number of L2 resets ')
# Policy switches: reject a SETUP carrying the given IE when set to disabled.
wfFrSwSigDlciIEAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 46), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDlciIEAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDlciIEAllowed.setDescription(' Reject the call if the Dlci IE is present in the setup message and wfFrSwSigDlciIEAllowed is set to disabled.')
wfFrSwSigX213PriorityIEAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 47), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigX213PriorityIEAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigX213PriorityIEAllowed.setDescription(' Reject the call if the X213 Priority IE is present in setup message and wfFrSwSigX213PriorityIEAllowed is set to disabled.')
# Maximum allowed excess burst (Be) for an SVC; defaults to INT32_MAX.
wfFrSwSigMaximumBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 48), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaximumBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaximumBe.setDescription('This value is the maximum allowed Be for a SVC connection')
# --- wfFrSwGlobalE164AddrTable (OID ...9.6.13) ---
# Directory Services mapping of non-overlapping E.164 address ranges to an
# internal IP network address.  Rows are indexed by the (low, high) range
# bounds; the delete column follows this module's created/deleted row
# convention.
wfFrSwGlobalE164AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13), )
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrTable.setDescription(' wfFrSwGlobalE164AddrTable is used by Directory Services to translate a range of E.164 addresses into an internal IP network address. E.164 ranges must not ever overlap. ')
wfFrSwGlobalE164AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalE164AddrLow"), (0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalE164AddrHigh"))
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrEntry.setDescription(' An entry in the Frame Relay Global E.164 Address Table. ')
# Row create/delete control column.
wfFrSwGlobalE164AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Index columns: 8-byte fixed-length, right-justified, zero-padded addresses.
wfFrSwGlobalE164AddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrLow.setDescription(' Instance identifier; the low end of the E.164 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalE164AddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrHigh.setDescription(' Instance identifier; the high end of the E.164 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Target of the translation for this address range.
wfFrSwGlobalE164AddrIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrIPAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrIPAddr.setDescription(' This is the internal IP network address associated with this range of E.164 addresses. ')
# --- wfFrSwGlobalX121AddrTable (OID ...9.6.14) ---
# X.121 analogue of the global E.164 table above: maps non-overlapping X.121
# address ranges to an internal IP network address, indexed by (low, high).
wfFrSwGlobalX121AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14), )
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrTable.setDescription(' wfFrSwGlobalX121AddrTable is used by Directory Services to translate a range of X.121 addresses into an internal IP network address. X.121 ranges must not ever overlap. ')
wfFrSwGlobalX121AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalX121AddrLow"), (0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalX121AddrHigh"))
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrEntry.setDescription(' An entry in the Frame Relay Global X.121 Address Table. ')
# Row create/delete control column.
wfFrSwGlobalX121AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Index columns: 8-byte fixed-length, right-justified, zero-padded addresses.
wfFrSwGlobalX121AddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrLow.setDescription(' Instance identifier; the low end of the X.121 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalX121AddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrHigh.setDescription(' Instance identifier; the high end of the X.121 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Target of the translation for this address range.
wfFrSwGlobalX121AddrIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrIPAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrIPAddr.setDescription(' This is the internal IP network address associated with this range of X.121 addresses. ')
# --- wfFrSwLocalE164AddrTable (OID ...9.6.15) ---
# E.164 addresses configured on the local BNX plus their Closed User Group
# (CUG) membership data.  Rows are indexed by (internal circuit number,
# E.164 address).
wfFrSwLocalE164AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15), )
if mibBuilder.loadTexts: wfFrSwLocalE164AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrTable.setDescription(' wfFrSwLocalE164AddrTable contains E.164 addresses on the local BNX and CUG (Closed User Group) related information. ')
wfFrSwLocalE164AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwLocalE164AddrCct"), (0, "Wellfleet-FRSW-MIB", "wfFrSwLocalE164Address"))
if mibBuilder.loadTexts: wfFrSwLocalE164AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrEntry.setDescription(' An entry in the Frame Relay Local E.164 Address Table. ')
# Row create/delete control column.
wfFrSwLocalE164AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Index columns.
wfFrSwLocalE164AddrCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCct.setDescription(' Instance identifier; internal CCT number associated with this E.164 address. ')
wfFrSwLocalE164Address = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalE164Address.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164Address.setDescription(' Instance identifier; an E.164 address. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Opaque CUG membership blob; the on-wire layout is spelled out in the
# description text below.  An empty value means no CUG restrictions.
wfFrSwLocalE164AddrCUG = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCUG.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCUG.setDescription(' CUG (Closed User Group) information. The structure of the CUG information consists zero or more groups (number of groups can be derived from the OCTET STRING data type of this MIB attribute) of COI structure information. Each COI structure consists of a COI group number (4-byte integer) and a COI list. Each COI list consists of a length field (4-byte integer) which specifies the number of bytes of COI bit-encoded information belonging to this group and the COI information structure. COI information structure is a bit mask field where each bit from left to right represents whether this E.164 address belongs to a particular COI number within this COI group number. Please note that COI group numbers can not be repeated and that the COI group numbers must be in increasing order in the CUG configuration MIB wfFrSwLocalE164AddrCUG defaults to zero COI groups which means no CUG related information and hence this local wfFrSwLocalE164Address is allowed to communicate with all users. ')
# local/nonlocal flag used by soft PVCs (SPVCs).
wfFrSwLocalE164AddrLocalFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("nonlocal", 2))).clone('local')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrLocalFlag.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrLocalFlag.setDescription(' Local/Non-Local Identifier Flag. Used for SPVCs.')
# --- wfFrSwLocalX121AddrTable (OID ...9.6.16) ---
# X.121 analogue of the local E.164 table above: addresses on the local BNX
# plus CUG membership data, indexed by (circuit number, X.121 address).
wfFrSwLocalX121AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16), )
if mibBuilder.loadTexts: wfFrSwLocalX121AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrTable.setDescription(' wfFrSwLocalX121AddrTable contains X.121 addresses on the local BNX and CUG (Closed User Group) related information. ')
wfFrSwLocalX121AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwLocalX121AddrCct"), (0, "Wellfleet-FRSW-MIB", "wfFrSwLocalX121Address"))
if mibBuilder.loadTexts: wfFrSwLocalX121AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrEntry.setDescription(' An entry in the Frame Relay Local X.121 Address Table. ')
# Row create/delete control column.
wfFrSwLocalX121AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Index columns.
wfFrSwLocalX121AddrCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCct.setDescription(' Instance identifier; internal CCT number associated with this X.121 address. ')
wfFrSwLocalX121Address = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalX121Address.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121Address.setDescription(' Instance identifier; a X.121 address. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Opaque CUG membership blob; layout is spelled out in the description below.
wfFrSwLocalX121AddrCUG = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCUG.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCUG.setDescription(' CUG (Closed User Group) information. The structure of the CUG information consists zero or more groups (number of groups can be derived from the OCTET STRING data type of this MIB attribute) of COI structure information. Each COI structure consists of a COI group number (4-byte integer) and a COI list. Each COI list consists of a length field (4-byte integer) which specifies the number of bytes of COI bit-encoded information belonging to this group and the COI information structure. COI information structure is a bit mask field where each bit from left to right represents whether this X.121 address belongs to a particular COI number within this COI group number. wfFrSwLocalX121AddrCUG defaults to zero COI groups which means no CUG related information and hence this local wfFrSwLocalX121Address is allowed to communicate with all users. ')
# local/nonlocal flag used by soft PVCs (SPVCs).
wfFrSwLocalX121AddrLocalFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("nonlocal", 2))).clone('local')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrLocalFlag.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrLocalFlag.setDescription(' Local/Non-Local Identifier Flag. Used for SPVCs.')
# --- wfFrSwBase group (OID ...9.6.17) ---
# Scalar base-configuration group: group create/delete flag, the box's
# circuit-less IP address, and a per-slot shutdown bit mask.
wfFrSwBase = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17))
wfFrSwBaseDelete = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseDelete.setDescription(' Indication to delete/create this base group ')
wfFrSwBaseIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseIpAddr.setDescription(" Indicates this BNX's (circuit-less) IP address ")
# Bit mask over slots 1-14; MSBit = slot 1 (see description text).
wfFrSwBaseShutDown = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 3), Counter32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseShutDown.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseShutDown.setDescription('Bit mask for slots to shutdown, slots 1-14. The MSBit represents slot 1, the next most significant bit represents slot 2, and so forth.')
# --- wfFrSwCngcMonTable (OID ...9.6.18) ---
# Congestion-control monitoring table, indexed by circuit number.  Besides a
# reset control it exposes a 4x4 matrix of read-only gauges: for each traffic
# priority 0-3, the percentage of time the circuit spent at congestion
# levels 1-4.
wfFrSwCngcMonTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18), )
if mibBuilder.loadTexts: wfFrSwCngcMonTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonTable.setDescription('This table is used by FRSW Congestion Control application. The table is used to Monitor the congestion level of a particular circuit.')
wfFrSwCngcMonEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwCngcMonCct"))
if mibBuilder.loadTexts: wfFrSwCngcMonEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonEntry.setDescription('Instance Id for this table.')
# Writable reset control and the circuit-number index column.
wfFrSwCngcMonReset = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCngcMonReset.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonReset.setDescription('Indication to reset Cngc Monitor Counters.')
wfFrSwCngcMonCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonCct.setDescription('Circuit to be monitored. ')
# Priority 0 traffic: congestion-level time percentages (columns 3-6).
wfFrSwCngcMonP0Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 0 Traffic.')
# Priority 1 traffic (columns 7-10).
wfFrSwCngcMonP1Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 1 Traffic.')
# Priority 2 traffic (columns 11-14).
wfFrSwCngcMonP2Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 2 Traffic.')
# Priority 3 traffic (columns 15-18).
wfFrSwCngcMonP3Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 3 Traffic.')
# --- wfFrSwVirtualIntfTable (OID ...9.6.19) ---
# Configuration of 'virtual' FRSW access lines, indexed by (slot, circuit).
wfFrSwVirtualIntfTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19), )
if mibBuilder.loadTexts: wfFrSwVirtualIntfTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfTable.setDescription("The table is used to create 'virtual' FRSW access lines.")
wfFrSwVirtualIntfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwVirtualIntfSlot"), (0, "Wellfleet-FRSW-MIB", "wfFrSwVirtualIntfCct"))
if mibBuilder.loadTexts: wfFrSwVirtualIntfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfEntry.setDescription('Instance Id for this table.')
# Row create/delete control column.
wfFrSwVirtualIntfDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVirtualIntfDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfDelete.setDescription('Indication to delete this virtual interface.')
# Index columns (slot, circuit) plus the assigned line number.
wfFrSwVirtualIntfSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfSlot.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfSlot.setDescription('Instance identifier; the slot number of this interface.')
wfFrSwVirtualIntfCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfCct.setDescription('Instance identifier; the circuit number of this interface.')
wfFrSwVirtualIntfLineNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfLineNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfLineNum.setDescription('Line number for this virtual interface.')
# --- wfFrSwExtFileSysTable (OID ...9.6.20) ---
# Per-slot configuration/status of a DRAM-backed extended file system:
# requested size (0 = disabled), the size actually allocated, and an
# operational state (up/fault/init/notpresent).
wfFrSwExtFileSysTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20), )
if mibBuilder.loadTexts: wfFrSwExtFileSysTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysTable.setDescription('This table is used by FRSW to extend the file system to DRAM device.')
wfFrSwExtFileSysEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwExtFileSysSlot"))
if mibBuilder.loadTexts: wfFrSwExtFileSysEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysEntry.setDescription('Instance Id for this table.')
# Row create/delete control column.
wfFrSwExtFileSysDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwExtFileSysDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysDelete.setDescription(' Indication to delete/create this entry. ')
# Slot index (1-14 per the description).
wfFrSwExtFileSysSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysSlot.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysSlot.setDescription('A unique value for each slot. Its value ranges between 1 and 14.')
# Requested size in bytes (0 disables; suggested multiple of 128 KiB).
wfFrSwExtFileSysSize = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwExtFileSysSize.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysSize.setDescription('The memory size of the extended file system in byte unit. The value zero also means that extended file system is disabled. Non-zero value means enabled. Its suggested that the size is in multiple of 128k bytes. Some of the well-known memory sizes and their correspond decimal values are as followed: Mem size Decimal Value ^^^^^^^^ ^^^^^^^^^^^^^ 128K 131072 256K 262144 512K 524288 1M 1048576 2M 2097152 4M 4194304 8M 8388608 ')
wfFrSwExtFileSysActualSize = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysActualSize.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysActualSize.setDescription('The actual memory size the system allocated.')
# Operational state; 'notpresent' corresponds to a configured size of zero.
wfFrSwExtFileSysState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("fault", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysState.setDescription('The status of the extended file system. State up indicates that the requested memory size for the extended file system has been allocated successfully and the extended file system is in operational state. State fault indicates that the requested memory size for the extended file system has NOT been allocated successfully and the extended file system is NOT in operational state. One reason for entering the fault state is insufficient available memory. State init indicates that the system is in the initialization cycle. The extended file system is not operational. State notpresent reflects the size of zero.')
# --- Symbol export (generated) ---
# Register every object defined by this generated module with the MIB
# builder under the module name "Wellfleet-FRSW-MIB", so that other
# modules (and the SNMP engine) can resolve them via importSymbols.
# The export is split across two calls; keyword order carries no meaning.
mibBuilder.exportSymbols("Wellfleet-FRSW-MIB", wfFrSwCctLocalSetFECNFrames=wfFrSwCctLocalSetFECNFrames, wfFrSwDlcmiEscapeMode=wfFrSwDlcmiEscapeMode, wfFrSwVcRedirectAction=wfFrSwVcRedirectAction, wfFrSwSigOutDisconnectPkts=wfFrSwSigOutDisconnectPkts, wfFrSwCctLocalSetDEFrames=wfFrSwCctLocalSetDEFrames, wfFrSwSigOutStatusPkts=wfFrSwSigOutStatusPkts, wfFrSwSigTotalInCurrentThroughput=wfFrSwSigTotalInCurrentThroughput, wfFrSwIsdnScrnTable=wfFrSwIsdnScrnTable, wfFrSwVcAtmIwfLossPriorityPolicy=wfFrSwVcAtmIwfLossPriorityPolicy, wfFrSwSigT303=wfFrSwSigT303, wfFrSwUsageCurVolumeBackup=wfFrSwUsageCurVolumeBackup, wfFrSwVcInactiveVcDropFrames=wfFrSwVcInactiveVcDropFrames, wfFrSwL3NetAddress=wfFrSwL3NetAddress, wfFrSwSigInConnectPkts=wfFrSwSigInConnectPkts, wfFrSwLocalE164AddrDelete=wfFrSwLocalE164AddrDelete, wfFrSwUsageUpdateData=wfFrSwUsageUpdateData, wfFrSwExtFileSysDelete=wfFrSwExtFileSysDelete, wfFrSwDlcmiActiveSeqCount=wfFrSwDlcmiActiveSeqCount, wfFrSwUsageCircuitNumber=wfFrSwUsageCircuitNumber, wfFrSwUsageStartTimeStampHigh=wfFrSwUsageStartTimeStampHigh, wfFrSwVcRecvDeOctets=wfFrSwVcRecvDeOctets, wfFrSwIsdnUniDelete=wfFrSwIsdnUniDelete, wfFrSwCngcMonP0Level1Percent=wfFrSwCngcMonP0Level1Percent, wfFrSwCctLocalRecvDEOctets=wfFrSwCctLocalRecvDEOctets, wfFrSwUsageVolumeBackup=wfFrSwUsageVolumeBackup, wfFrSwSigTotalInNegotiableThroughput=wfFrSwSigTotalInNegotiableThroughput, wfFrSwSigOutReleaseCompletePkts=wfFrSwSigOutReleaseCompletePkts, wfFrSwUsageSentDEOctetsHigh=wfFrSwUsageSentDEOctetsHigh, wfFrSwCctOutThroughput=wfFrSwCctOutThroughput, wfFrSwDlcmiDteLastReceived=wfFrSwDlcmiDteLastReceived, wfFrSwCctRemoteSentDEOctets=wfFrSwCctRemoteSentDEOctets, wfFrSwSvcUsageVersionId=wfFrSwSvcUsageVersionId, wfFrSwCctRemoteRecvNonDEOctets=wfFrSwCctRemoteRecvNonDEOctets, wfFrSwCctRemoteRecvDEOctets=wfFrSwCctRemoteRecvDEOctets, wfFrSwSvcUsageUpdateInterval=wfFrSwSvcUsageUpdateInterval, wfFrSwCctRemoteSentNonDEFrames=wfFrSwCctRemoteSentNonDEFrames, 
wfFrSwVcCallReqRetryTimer=wfFrSwVcCallReqRetryTimer, wfFrSwMcastDlci=wfFrSwMcastDlci, wfFrSwCctLocalBecnState=wfFrSwCctLocalBecnState, wfFrSwVcRecvBecnOctets=wfFrSwVcRecvBecnOctets, wfFrSwGlobalX121AddrDelete=wfFrSwGlobalX121AddrDelete, wfFrSwUsageVolume=wfFrSwUsageVolume, wfFrSwDlcmiCrossNetListenEnable=wfFrSwDlcmiCrossNetListenEnable, wfFrSwSvcUsageNumEntries=wfFrSwSvcUsageNumEntries, wfFrSwVcInBc=wfFrSwVcInBc, wfFrSwDlcmiDteErrorThreshold=wfFrSwDlcmiDteErrorThreshold, wfFrSwUsageState=wfFrSwUsageState, wfFrSwIsdnScrnNum=wfFrSwIsdnScrnNum, wfFrSwVcOutThroughput=wfFrSwVcOutThroughput, wfFrSwUsageStartTimeStampLow=wfFrSwUsageStartTimeStampLow, wfFrSwUsageCurDebug=wfFrSwUsageCurDebug, wfFrSwMcastIndividualDlci=wfFrSwMcastIndividualDlci, wfFrSwVcXNetReceived=wfFrSwVcXNetReceived, wfFrSwSvcUsageFlushData=wfFrSwSvcUsageFlushData, wfFrSwVcSetBecnOctets=wfFrSwVcSetBecnOctets, wfFrSwIsdnUniTable=wfFrSwIsdnUniTable, wfFrSwDlcmiCircuit=wfFrSwDlcmiCircuit, wfFrSwIsdnAssocNum=wfFrSwIsdnAssocNum, wfFrSwVcEscapeEnable=wfFrSwVcEscapeEnable, wfFrSwDlcmiDeletedVCs=wfFrSwDlcmiDeletedVCs, wfFrSwVcOutBe=wfFrSwVcOutBe, wfFrSwCctReceivedStatus=wfFrSwCctReceivedStatus, wfFrSwCctLocalRecvBECNFrames=wfFrSwCctLocalRecvBECNFrames, wfFrSwDlcmiAsyncUpdateEnable=wfFrSwDlcmiAsyncUpdateEnable, wfFrSwIsdnBaseSlotNum=wfFrSwIsdnBaseSlotNum, wfFrSwUsageFilePrefix=wfFrSwUsageFilePrefix, wfFrSwLocalE164AddrTable=wfFrSwLocalE164AddrTable, wfFrSwGlobalX121AddrTable=wfFrSwGlobalX121AddrTable, wfFrSwDlcmiCrossNetErrorThreshold=wfFrSwDlcmiCrossNetErrorThreshold, wfFrSwCctCreationTime=wfFrSwCctCreationTime, wfFrSwCctRemoteBecnState=wfFrSwCctRemoteBecnState, wfFrSwCctOutBe=wfFrSwCctOutBe, wfFrSwGlobalE164AddrLow=wfFrSwGlobalE164AddrLow, wfFrSwLocalX121AddrTable=wfFrSwLocalX121AddrTable, wfFrSwExtFileSysState=wfFrSwExtFileSysState, wfFrSwCctRemoteSetFECNFrames=wfFrSwCctRemoteSetFECNFrames, wfFrSwIsdnUniEntry=wfFrSwIsdnUniEntry, wfFrSwCctRemoteRecvFECNOctets=wfFrSwCctRemoteRecvFECNOctets, 
wfFrSwExtFileSysActualSize=wfFrSwExtFileSysActualSize, wfFrSwDlcmiDteFullEnquiryInterval=wfFrSwDlcmiDteFullEnquiryInterval, wfFrSwGlobalX121AddrLow=wfFrSwGlobalX121AddrLow, wfFrSwCctOutBc=wfFrSwCctOutBc, wfFrSwDlcmiDteReceived=wfFrSwDlcmiDteReceived, wfFrSwDlcmiSequenceCount=wfFrSwDlcmiSequenceCount, wfFrSwSigDlciIEAllowed=wfFrSwSigDlciIEAllowed, wfFrSwCctTable=wfFrSwCctTable, wfFrSwDlcmiVCsInUse=wfFrSwDlcmiVCsInUse, wfFrSwVcInactiveVcDropOctets=wfFrSwVcInactiveVcDropOctets, wfFrSwUsageEndTimeStampLow=wfFrSwUsageEndTimeStampLow, wfFrSwVcEntry=wfFrSwVcEntry, wfFrSwUsageCurCleanupInterval=wfFrSwUsageCurCleanupInterval, wfFrSwUsageEnable=wfFrSwUsageEnable, wfFrSwSvcUsageCurVolume=wfFrSwSvcUsageCurVolume, wfFrSwDlcmiTable=wfFrSwDlcmiTable, wfFrSwCctRemoteSentDEFrames=wfFrSwCctRemoteSentDEFrames, wfFrSwCctInThroughput=wfFrSwCctInThroughput, wfFrSwVcState=wfFrSwVcState, wfFrSwIsdnAssocIndex=wfFrSwIsdnAssocIndex, wfFrSwUsageSwitchName=wfFrSwUsageSwitchName, wfFrSwIsdnAssocEntry=wfFrSwIsdnAssocEntry, wfFrSwDlcmiState=wfFrSwDlcmiState, wfFrSwUsageTimerInterval=wfFrSwUsageTimerInterval, wfFrSwVcRecvNonDeFrames=wfFrSwVcRecvNonDeFrames, wfFrSwVcRecvFecnOctets=wfFrSwVcRecvFecnOctets, wfFrSwDlcmiDteStatus=wfFrSwDlcmiDteStatus, wfFrSwSvcUsageCurStoreInterval=wfFrSwSvcUsageCurStoreInterval, wfFrSwLocalX121AddrDelete=wfFrSwLocalX121AddrDelete, wfFrSwUsageStoreTimeStamp=wfFrSwUsageStoreTimeStamp, wfFrSwDlcmiManagementType=wfFrSwDlcmiManagementType, wfFrSwSigInStatusPkts=wfFrSwSigInStatusPkts, wfFrSwUsageLastNonDEFramesLow=wfFrSwUsageLastNonDEFramesLow, wfFrSwVcReceivedStatus=wfFrSwVcReceivedStatus, wfFrSwDlcmiControlByteDisable=wfFrSwDlcmiControlByteDisable, wfFrSwVcXNetSent=wfFrSwVcXNetSent, wfFrSwCngcMonP1Level1Percent=wfFrSwCngcMonP1Level1Percent, wfFrSwCngcMonP2Level2Percent=wfFrSwCngcMonP2Level2Percent, wfFrSwUsageUpdateTimeStamp=wfFrSwUsageUpdateTimeStamp, wfFrSwSigMaxNumOfSvcs=wfFrSwSigMaxNumOfSvcs, wfFrSwDlcmiAddressLen=wfFrSwDlcmiAddressLen, 
wfFrSwSigNwrkAbortedConnections=wfFrSwSigNwrkAbortedConnections, wfFrSwVcReportedStatus=wfFrSwVcReportedStatus, wfFrSwVirtualIntfLineNum=wfFrSwVirtualIntfLineNum, wfFrSwCngcMonTable=wfFrSwCngcMonTable, wfFrSwCctRemoteRecvBECNOctets=wfFrSwCctRemoteRecvBECNOctets, wfFrSwUsageSwitchId=wfFrSwUsageSwitchId, wfFrSwVcBecnState=wfFrSwVcBecnState, wfFrSwIsdnUniNum=wfFrSwIsdnUniNum, wfFrSwSvcUsageState=wfFrSwSvcUsageState, wfFrSwVcTxDeFrames=wfFrSwVcTxDeFrames, wfFrSwCctLocalSentDEOctets=wfFrSwCctLocalSentDEOctets, wfFrSwCctRemoteRecvFECNFrames=wfFrSwCctRemoteRecvFECNFrames, wfFrSwVcBackupCalledDlci=wfFrSwVcBackupCalledDlci, wfFrSwVcCallReqCalledDlci=wfFrSwVcCallReqCalledDlci, wfFrSwCctLocalRecvBECNOctets=wfFrSwCctLocalRecvBECNOctets, wfFrSwIsdnUniState=wfFrSwIsdnUniState, wfFrSwBcMeasurementInterval=wfFrSwBcMeasurementInterval, wfFrSwUsageStoreData=wfFrSwUsageStoreData, wfFrSwCctLocalRecvFECNFrames=wfFrSwCctLocalRecvFECNFrames, wfFrSwCctRemoteRecvBECNFrames=wfFrSwCctRemoteRecvBECNFrames, wfFrSwPvcUsageFileLayout=wfFrSwPvcUsageFileLayout, wfFrSwGlobalX121AddrHigh=wfFrSwGlobalX121AddrHigh, wfFrSwCngcMonP2Level4Percent=wfFrSwCngcMonP2Level4Percent, wfFrSwDlcmiBidirect=wfFrSwDlcmiBidirect, wfFrSwVcSetDeOctets=wfFrSwVcSetDeOctets, wfFrSwUsageSentDEOctetsLow=wfFrSwUsageSentDEOctetsLow, wfFrSwDlcmiUnknownIEErrors=wfFrSwDlcmiUnknownIEErrors, wfFrSwSigSvcDlciLow=wfFrSwSigSvcDlciLow, wfFrSwDlcmiSequenceErrors=wfFrSwDlcmiSequenceErrors, wfFrSwIsdnAssocSlotNum=wfFrSwIsdnAssocSlotNum, wfFrSwExtFileSysTable=wfFrSwExtFileSysTable, wfFrSwDlcmiControlByteErrors=wfFrSwDlcmiControlByteErrors, wfFrSwVirtualIntfSlot=wfFrSwVirtualIntfSlot, wfFrSwDlcmiStatus=wfFrSwDlcmiStatus, wfFrSwVcBackupCrossNetErrors=wfFrSwVcBackupCrossNetErrors, wfFrSwVirtualIntfEntry=wfFrSwVirtualIntfEntry, wfFrSwDlcmiPolls=wfFrSwDlcmiPolls, wfFrSwUsageDirectory=wfFrSwUsageDirectory, wfFrSwSvcUsageStoreTimeStamp=wfFrSwSvcUsageStoreTimeStamp, wfFrSwErrType=wfFrSwErrType, 
wfFrSwUsageLastNonDEOctetsHigh=wfFrSwUsageLastNonDEOctetsHigh, wfFrSwUsageCurFlushInterval=wfFrSwUsageCurFlushInterval, wfFrSwLocalX121Address=wfFrSwLocalX121Address, wfFrSwCctLocalSentNonDEFrames=wfFrSwCctLocalSentNonDEFrames, wfFrSwSigInDisconnectPkts=wfFrSwSigInDisconnectPkts, wfFrSwVcDropNonDeFrames=wfFrSwVcDropNonDeFrames, wfFrSwIsdnBaseDelete=wfFrSwIsdnBaseDelete, wfFrSwSigOutConnectPkts=wfFrSwSigOutConnectPkts, wfFrSwCngcMonP1Level2Percent=wfFrSwCngcMonP1Level2Percent, wfFrSwUsageUpdateInterval=wfFrSwUsageUpdateInterval, wfFrSwDlcmiCrossNetAsyncUpdateEnable=wfFrSwDlcmiCrossNetAsyncUpdateEnable, wfFrSwVcSetDeFrames=wfFrSwVcSetDeFrames, wfFrSwGlobalE164AddrDelete=wfFrSwGlobalE164AddrDelete, wfFrSwSigNumOfSvcsInUse=wfFrSwSigNumOfSvcsInUse, wfFrSwSigX213PriorityIEAllowed=wfFrSwSigX213PriorityIEAllowed, wfFrSwSvcUsageUpdateData=wfFrSwSvcUsageUpdateData, wfFrSwGlobalX121AddrIPAddr=wfFrSwGlobalX121AddrIPAddr, wfFrSwUsageSentDEFramesHigh=wfFrSwUsageSentDEFramesHigh, wfFrSwDlcmiBcMeasurementEnable=wfFrSwDlcmiBcMeasurementEnable, wfFrSwVcRecvDeFrames=wfFrSwVcRecvDeFrames, wfFrSwVcInBeOctets=wfFrSwVcInBeOctets, wfFrSwSigRejectedConnRequests=wfFrSwSigRejectedConnRequests, wfFrSwSvcUsageFilePrefix=wfFrSwSvcUsageFilePrefix, wfFrSwMcastIpAddr=wfFrSwMcastIpAddr, wfFrSwCngcMonP0Level2Percent=wfFrSwCngcMonP0Level2Percent, wfFrSwSvcUsageStoreInterval=wfFrSwSvcUsageStoreInterval, wfFrSwDlcmiCrossNetEnable=wfFrSwDlcmiCrossNetEnable, wfFrSwVcCalledDlci=wfFrSwVcCalledDlci, wfFrSwSigMaxInThroughputPerSvc=wfFrSwSigMaxInThroughputPerSvc, wfFrSwCctInBcOctets=wfFrSwCctInBcOctets, wfFrSwSigOutReleasePkts=wfFrSwSigOutReleasePkts, wfFrSwCctEntry=wfFrSwCctEntry, wfFrSwCngcMonP1Level3Percent=wfFrSwCngcMonP1Level3Percent, wfFrSwCctXNetSent=wfFrSwCctXNetSent, wfFrSwCctRemoteDropNonDEOctets=wfFrSwCctRemoteDropNonDEOctets, wfFrSwUsageCleanupTimeStamp=wfFrSwUsageCleanupTimeStamp, wfFrSwLocalE164AddrLocalFlag=wfFrSwLocalE164AddrLocalFlag, wfFrSwVirtualIntfCct=wfFrSwVirtualIntfCct, 
wfFrSwVcDropDeFrames=wfFrSwVcDropDeFrames, wfFrSwCctXNetReceived=wfFrSwCctXNetReceived, wfFrSwLocalE164AddrCUG=wfFrSwLocalE164AddrCUG, wfFrSwCctState=wfFrSwCctState, wfFrSwSvcUsageCurCleanupInterval=wfFrSwSvcUsageCurCleanupInterval, wfFrSwVcTable=wfFrSwVcTable, wfFrSwCctInactiveVCDropFrames=wfFrSwCctInactiveVCDropFrames, wfFrSwGlobalX121AddrEntry=wfFrSwGlobalX121AddrEntry, wfFrSwSvcUsageCurFilePrefix=wfFrSwSvcUsageCurFilePrefix, wfFrSwCngcMonEntry=wfFrSwCngcMonEntry, wfFrSwCctLocalDropNonDEFrames=wfFrSwCctLocalDropNonDEFrames, wfFrSwUsageFlushData=wfFrSwUsageFlushData, wfFrSwVirtualIntfDelete=wfFrSwVirtualIntfDelete, wfFrSwIsdnAssocScrnEnable=wfFrSwIsdnAssocScrnEnable, wfFrSwCngcMonP0Level4Percent=wfFrSwCngcMonP0Level4Percent, wfFrSwIsdnBaseTable=wfFrSwIsdnBaseTable, wfFrSwUsageDlci=wfFrSwUsageDlci, wfFrSwLocalX121AddrCct=wfFrSwLocalX121AddrCct, wfFrSwCctLocalSetDEOctets=wfFrSwCctLocalSetDEOctets, wfFrSwLocalE164AddrCct=wfFrSwLocalE164AddrCct, wfFrSwVcAtmIwfDePolicy=wfFrSwVcAtmIwfDePolicy, wfFrSwCctRemoteDropDEFrames=wfFrSwCctRemoteDropDEFrames, wfFrSwSvcUsageStoreData=wfFrSwSvcUsageStoreData, wfFrSwTupleDlciA=wfFrSwTupleDlciA, wfFrSwBaseShutDown=wfFrSwBaseShutDown, wfFrSwCctLastTimeChange=wfFrSwCctLastTimeChange, wfFrSwUsageTable=wfFrSwUsageTable, wfFrSwVcCreationTime=wfFrSwVcCreationTime, wfFrSwVcLastTimeChange=wfFrSwVcLastTimeChange, wfFrSwCctInBc=wfFrSwCctInBc, wfFrSwUsageCurDirectory=wfFrSwUsageCurDirectory, wfFrSwCctMulticast=wfFrSwCctMulticast, wfFrSwVcInBe=wfFrSwVcInBe, wfFrSwSigT301=wfFrSwSigT301, wfFrSwCctRemoteSentNonDEOctets=wfFrSwCctRemoteSentNonDEOctets, wfFrSwUsageFlushTimeStamp=wfFrSwUsageFlushTimeStamp, wfFrSwCctRemoteSetBECNOctets=wfFrSwCctRemoteSetBECNOctets, wfFrSwVcBackupCalledIpAddr=wfFrSwVcBackupCalledIpAddr, wfFrSwVcAtmIwfVPI=wfFrSwVcAtmIwfVPI, wfFrSwSigInReleaseCompletePkts=wfFrSwSigInReleaseCompletePkts, wfFrSwLocalX121AddrEntry=wfFrSwLocalX121AddrEntry, wfFrSwCctCrossNetStatus=wfFrSwCctCrossNetStatus, 
wfFrSwSvcUsageFileLayout=wfFrSwSvcUsageFileLayout, wfFrSwDlcmiFullStatusSeq=wfFrSwDlcmiFullStatusSeq, wfFrSwDlcmiSvcDisable=wfFrSwDlcmiSvcDisable, wfFrSwVcCallReqDlciSelectionType=wfFrSwVcCallReqDlciSelectionType, wfFrSwSigOutStatusEnquiryPkts=wfFrSwSigOutStatusEnquiryPkts, wfFrSwUsageSentNonDEFramesLow=wfFrSwUsageSentNonDEFramesLow, wfFrSwLocalE164AddrEntry=wfFrSwLocalE164AddrEntry, wfFrSwDlcmiDteSeqCount=wfFrSwDlcmiDteSeqCount, wfFrSwUsageFileCleanup=wfFrSwUsageFileCleanup, wfFrSwBaseDelete=wfFrSwBaseDelete, wfFrSwSvcUsageFlushInterval=wfFrSwSvcUsageFlushInterval, wfFrSwUsageLastDEFramesHigh=wfFrSwUsageLastDEFramesHigh, wfFrSwVcRedirectState=wfFrSwVcRedirectState, wfFrSwDlcmiAlarmTimer=wfFrSwDlcmiAlarmTimer, wfFrSwCctLocalOrRemoteConnection=wfFrSwCctLocalOrRemoteConnection)
# Second export call: remaining symbols of the same module.
mibBuilder.exportSymbols("Wellfleet-FRSW-MIB", wfFrSwSigOutCallProceedingPkts=wfFrSwSigOutCallProceedingPkts, wfFrSwDlcmiMaxSupportedVCs=wfFrSwDlcmiMaxSupportedVCs, wfFrSwDlcmiSpvcAgent=wfFrSwDlcmiSpvcAgent, wfFrSwCctRemoteDropNonDEFrames=wfFrSwCctRemoteDropNonDEFrames, wfFrSwVcCallReqMaxRetries=wfFrSwVcCallReqMaxRetries, wfFrSwSwitchHdrErrors=wfFrSwSwitchHdrErrors, wfFrSwDlcmiEscapeVcCount=wfFrSwDlcmiEscapeVcCount, wfFrSwVcCalledIpAddr=wfFrSwVcCalledIpAddr, wfFrSwUsageSentNonDEOctetsHigh=wfFrSwUsageSentNonDEOctetsHigh, wfFrSwBase=wfFrSwBase, wfFrSwDlcmiDtePolls=wfFrSwDlcmiDtePolls, wfFrSwCctRemoteSetBECNFrames=wfFrSwCctRemoteSetBECNFrames, wfFrSwVcTxNonDeOctets=wfFrSwVcTxNonDeOctets, wfFrSwDlcmiMcastNoBufferErrors=wfFrSwDlcmiMcastNoBufferErrors, wfFrSwSigCallingPartyIEMandatory=wfFrSwSigCallingPartyIEMandatory, wfFrSwUsageCurStoreInterval=wfFrSwUsageCurStoreInterval, wfFrSwDlcmiFrameTooLongErrors=wfFrSwDlcmiFrameTooLongErrors, wfFrSwSvcUsageFlushTimeStamp=wfFrSwSvcUsageFlushTimeStamp, wfFrSwCngcMonP1Level4Percent=wfFrSwCngcMonP1Level4Percent, wfFrSwVcBackupCrossNetStatus=wfFrSwVcBackupCrossNetStatus, wfFrSwSigXNetClearingDisable=wfFrSwSigXNetClearingDisable, wfFrSwSigTable=wfFrSwSigTable, wfFrSwCngcMonP3Level4Percent=wfFrSwCngcMonP3Level4Percent, wfFrSwCctLocalDropNonDEOctets=wfFrSwCctLocalDropNonDEOctets, wfFrSwCngcMonCct=wfFrSwCngcMonCct, wfFrSwVcDropExcessBurstFrames=wfFrSwVcDropExcessBurstFrames, wfFrSwUsageNumEntries=wfFrSwUsageNumEntries, wfFrSwTupleIpAddrB=wfFrSwTupleIpAddrB, wfFrSwSvcUsageUpdateTimeStamp=wfFrSwSvcUsageUpdateTimeStamp, wfFrSwSvcUsageCurUpdateInterval=wfFrSwSvcUsageCurUpdateInterval, wfFrSwDlcmiNniEnable=wfFrSwDlcmiNniEnable, wfFrSwSigDefaultMinAcceptThroughput=wfFrSwSigDefaultMinAcceptThroughput, wfFrSwUsageEntry=wfFrSwUsageEntry, wfFrSwCngcMonP2Level3Percent=wfFrSwCngcMonP2Level3Percent, wfFrSwCctDlci=wfFrSwCctDlci, wfFrSwUsageLastDEFramesLow=wfFrSwUsageLastDEFramesLow, wfFrSwSigDelete=wfFrSwSigDelete, 
wfFrSwUsageCurVolume=wfFrSwUsageCurVolume, wfFrSwCngcMonP3Level3Percent=wfFrSwCngcMonP3Level3Percent, wfFrSwSigInReleasePkts=wfFrSwSigInReleasePkts, wfFrSwCctReportedStatus=wfFrSwCctReportedStatus, wfFrSwDlcmiSvcBillingEnable=wfFrSwDlcmiSvcBillingEnable, wfFrSwDlcmiMonitoredEvents=wfFrSwDlcmiMonitoredEvents, wfFrSwVcCallReqCalledAddr=wfFrSwVcCallReqCalledAddr, wfFrSwSigT308=wfFrSwSigT308, wfFrSwVcCircuit=wfFrSwVcCircuit, wfFrSwBaseIpAddr=wfFrSwBaseIpAddr, wfFrSwVcDlci=wfFrSwVcDlci, wfFrSwDlcmiPollingInterval=wfFrSwDlcmiPollingInterval, wfFrSwGlobalE164AddrTable=wfFrSwGlobalE164AddrTable, wfFrSwCngcMonP3Level1Percent=wfFrSwCngcMonP3Level1Percent, wfFrSwUsageCurFilePrefix=wfFrSwUsageCurFilePrefix, wfFrSwCctLocalDropDEOctets=wfFrSwCctLocalDropDEOctets, wfFrSwUsageLocalTimeZone=wfFrSwUsageLocalTimeZone, wfFrSwVcOutBc=wfFrSwVcOutBc, wfFrSwVcAtmIwfVCI=wfFrSwVcAtmIwfVCI, wfFrSwVcCfgInBe=wfFrSwVcCfgInBe, wfFrSwVcDropNonDeOctets=wfFrSwVcDropNonDeOctets, wfFrSwVcInBcOctets=wfFrSwVcInBcOctets, wfFrSwSigCircuit=wfFrSwSigCircuit, wfFrSwVcRecentNonDeOctets=wfFrSwVcRecentNonDeOctets, wfFrSwVcCrossNetStatus=wfFrSwVcCrossNetStatus, wfFrSwTupleEntry=wfFrSwTupleEntry, wfFrSwExtFileSysSlot=wfFrSwExtFileSysSlot, wfFrSwSvcUsageCurDirectory=wfFrSwSvcUsageCurDirectory, wfFrSwUsage=wfFrSwUsage, wfFrSwTupleDlciB=wfFrSwTupleDlciB, wfFrSwUsageDebug=wfFrSwUsageDebug, wfFrSwLocalX121AddrCUG=wfFrSwLocalX121AddrCUG, wfFrSwIsdnUniIndex=wfFrSwIsdnUniIndex, wfFrSwCctLocalSentDEFrames=wfFrSwCctLocalSentDEFrames, wfFrSwSvcUsageDirectory=wfFrSwSvcUsageDirectory, wfFrSwDlcmiErrorThreshold=wfFrSwDlcmiErrorThreshold, wfFrSwDlcmiFormatErrors=wfFrSwDlcmiFormatErrors, wfFrSwDlcmiDtePollingInterval=wfFrSwDlcmiDtePollingInterval, wfFrSwCctLocalRecvNonDEOctets=wfFrSwCctLocalRecvNonDEOctets, wfFrSwSigMaxOutThroughputPerSvc=wfFrSwSigMaxOutThroughputPerSvc, wfFrSwVcInThroughput=wfFrSwVcInThroughput, wfFrSwCctXNetErrors=wfFrSwCctXNetErrors, wfFrSwMcastEntry=wfFrSwMcastEntry, wfFrSwCctStateSet=wfFrSwCctStateSet, 
wfFrSwCctLocalSetBECNOctets=wfFrSwCctLocalSetBECNOctets, wfFrSwCctLocalRecvNonDEFrames=wfFrSwCctLocalRecvNonDEFrames, wfFrSwVcTxDeOctets=wfFrSwVcTxDeOctets, wfFrSwSvcUsageCleanupInterval=wfFrSwSvcUsageCleanupInterval, wfFrSwUsageEndTimeStampHigh=wfFrSwUsageEndTimeStampHigh, wfFrSwSigDefaultBe=wfFrSwSigDefaultBe, wfFrSwVcSpvcCallState=wfFrSwVcSpvcCallState, wfFrSwVcDropExcessBurstOctets=wfFrSwVcDropExcessBurstOctets, wfFrSwGlobalE164AddrHigh=wfFrSwGlobalE164AddrHigh, wfFrSwTupleDelete=wfFrSwTupleDelete, wfFrSwCctRemoteDropDEOctets=wfFrSwCctRemoteDropDEOctets, wfFrSwSigOutSetupPkts=wfFrSwSigOutSetupPkts, wfFrSwIsdnScrnIndex=wfFrSwIsdnScrnIndex, wfFrSwCctRemoteRecvDEFrames=wfFrSwCctRemoteRecvDEFrames, wfFrSwTupleTable=wfFrSwTupleTable, wfFrSwUsageLastDEOctetsLow=wfFrSwUsageLastDEOctetsLow, wfFrSwIsdnBaseAssocType=wfFrSwIsdnBaseAssocType, wfFrSwLocalX121AddrLocalFlag=wfFrSwLocalX121AddrLocalFlag, wfFrSwVcDropDeOctets=wfFrSwVcDropDeOctets, wfFrSwCctDelete=wfFrSwCctDelete, wfFrSwDlcmiFullEnquiryInterval=wfFrSwDlcmiFullEnquiryInterval, wfFrSwIsdnScrnDelete=wfFrSwIsdnScrnDelete, wfFrSwIsdnBaseEntry=wfFrSwIsdnBaseEntry, wfFrSwVcAtmIwfEfciPolicy=wfFrSwVcAtmIwfEfciPolicy, wfFrSwVcStateSet=wfFrSwVcStateSet, wfFrSwDlcmiEntry=wfFrSwDlcmiEntry, wfFrSwVcTrfPriority=wfFrSwVcTrfPriority, wfFrSwDlcmiActiveReceived=wfFrSwDlcmiActiveReceived, wfFrSwDlcmiProtocolErrors=wfFrSwDlcmiProtocolErrors, wfFrSwSigDlciAssign=wfFrSwSigDlciAssign, wfFrSwExtFileSysSize=wfFrSwExtFileSysSize, wfFrSwSvcUsageInterimRecordEnable=wfFrSwSvcUsageInterimRecordEnable, wfFrSwDlcmiNewVCs=wfFrSwDlcmiNewVCs, wfFrSwUsageLastNonDEOctetsLow=wfFrSwUsageLastNonDEOctetsLow, wfFrSwDlcmiDelete=wfFrSwDlcmiDelete, wfFrSwUsageCurUpdateInterval=wfFrSwUsageCurUpdateInterval, wfFrSwCngcMonP0Level3Percent=wfFrSwCngcMonP0Level3Percent, wfFrSwVcSetBecnFrames=wfFrSwVcSetBecnFrames, wfFrSwUsageRemoteDlci=wfFrSwUsageRemoteDlci, wfFrSwUsageCurTimerInterval=wfFrSwUsageCurTimerInterval, wfFrSwIsdnAssocDelete=wfFrSwIsdnAssocDelete, 
wfFrSwSigTotalOutCurrentThroughput=wfFrSwSigTotalOutCurrentThroughput, wfFrSwDlcmiIwfMode=wfFrSwDlcmiIwfMode, wfFrSwSigDefaultBc=wfFrSwSigDefaultBc, wfFrSwDlcmiRecoveryCounts=wfFrSwDlcmiRecoveryCounts, wfFrSwUsageLastDEOctetsHigh=wfFrSwUsageLastDEOctetsHigh, wfFrSwVcSetFecnOctets=wfFrSwVcSetFecnOctets, wfFrSwVcDelete=wfFrSwVcDelete, wfFrSwVcRecvBecnFrames=wfFrSwVcRecvBecnFrames, wfFrSwExtFileSysEntry=wfFrSwExtFileSysEntry, wfFrSwCngcMonReset=wfFrSwCngcMonReset, wfFrSwSigMaximumBe=wfFrSwSigMaximumBe, wfFrSwSigT305=wfFrSwSigT305, wfFrSwSvcUsageEnable=wfFrSwSvcUsageEnable, wfFrSwSigT322=wfFrSwSigT322, wfFrSwSvcUsageVolume=wfFrSwSvcUsageVolume, wfFrSwDlcmiIllegalDlciErrors=wfFrSwDlcmiIllegalDlciErrors, wfFrSwIsdnAssocTable=wfFrSwIsdnAssocTable, wfFrSwCctRemoteRecvNonDEFrames=wfFrSwCctRemoteRecvNonDEFrames, wfFrSwDlcmiCrossNetPollingInterval=wfFrSwDlcmiCrossNetPollingInterval, wfFrSwLocalE164Address=wfFrSwLocalE164Address, wfFrSwUsageStoreInterval=wfFrSwUsageStoreInterval, wfFrSwSigInSetupPkts=wfFrSwSigInSetupPkts, wfFrSwUsageSentNonDEOctetsLow=wfFrSwUsageSentNonDEOctetsLow, wfFrSwSigSvcDlciHigh=wfFrSwSigSvcDlciHigh, wfFrSwDlcmiL2AddrType=wfFrSwDlcmiL2AddrType, wfFrSwMcastIndex=wfFrSwMcastIndex, wfFrSwDlcmiUnknownRPTErrors=wfFrSwDlcmiUnknownRPTErrors, wfFrSwUsageSentNonDEFramesHigh=wfFrSwUsageSentNonDEFramesHigh, wfFrSwDlcmiLastReceived=wfFrSwDlcmiLastReceived, wfFrSwCctLocalSentNonDEOctets=wfFrSwCctLocalSentNonDEOctets, wfFrSwSigInCallProceedingPkts=wfFrSwSigInCallProceedingPkts, wfFrSwSvcUsageFileCleanup=wfFrSwSvcUsageFileCleanup, wfFrSwSigEntry=wfFrSwSigEntry, wfFrSwMcastTable=wfFrSwMcastTable, wfFrSwSigL2Resets=wfFrSwSigL2Resets, wfFrSwDlcmiOtherErrors=wfFrSwDlcmiOtherErrors, wfFrSwErrTime=wfFrSwErrTime, wfFrSwUsageFlushInterval=wfFrSwUsageFlushInterval, wfFrSwVcTxNonDeFrames=wfFrSwVcTxNonDeFrames, wfFrSwUsageCleanupInterval=wfFrSwUsageCleanupInterval, wfFrSwIsdnScrnEntry=wfFrSwIsdnScrnEntry, wfFrSwUsageRemoteIPAddress=wfFrSwUsageRemoteIPAddress, 
wfFrSwSigInStatusEnquiryPkts=wfFrSwSigInStatusEnquiryPkts, wfFrSwVirtualIntfTable=wfFrSwVirtualIntfTable, wfFrSwCngcMonP3Level2Percent=wfFrSwCngcMonP3Level2Percent, wfFrSwUsageLastNonDEFramesHigh=wfFrSwUsageLastNonDEFramesHigh, wfFrSwCctLocalSetFECNOctets=wfFrSwCctLocalSetFECNOctets, wfFrSwVcAtmIwfMode=wfFrSwVcAtmIwfMode, wfFrSwVcRedirectType=wfFrSwVcRedirectType, wfFrSwSigT310=wfFrSwSigT310, wfFrSwCctLocalRecvFECNOctets=wfFrSwCctLocalRecvFECNOctets, wfFrSwGlobalE164AddrIPAddr=wfFrSwGlobalE164AddrIPAddr, wfFrSwDlcmiFrameTooShortErrors=wfFrSwDlcmiFrameTooShortErrors, wfFrSwVcMulticast=wfFrSwVcMulticast, wfFrSwUsageIPAddress=wfFrSwUsageIPAddress, wfFrSwSigDefaultThroughput=wfFrSwSigDefaultThroughput, wfFrSwCctLocalRecentNonDEOctets=wfFrSwCctLocalRecentNonDEOctets, wfFrSwUsageSentDEFramesLow=wfFrSwUsageSentDEFramesLow, wfFrSwSvcUsageCleanupTimeStamp=wfFrSwSvcUsageCleanupTimeStamp, wfFrSwSigInUnknownPkts=wfFrSwSigInUnknownPkts, wfFrSwCctInactiveVCDropOctets=wfFrSwCctInactiveVCDropOctets, wfFrSwDlcmiEscapeCircuit=wfFrSwDlcmiEscapeCircuit, wfFrSwUsageDelete=wfFrSwUsageDelete, wfFrSwCctNumber=wfFrSwCctNumber, wfFrSwMcastDelete=wfFrSwMcastDelete, wfFrSwSigTotalOutNegotiableThroughput=wfFrSwSigTotalOutNegotiableThroughput, wfFrSwVcRecvNonDeOctets=wfFrSwVcRecvNonDeOctets, wfFrSwCngcMonP2Level1Percent=wfFrSwCngcMonP2Level1Percent, wfFrSwVcRecvFecnFrames=wfFrSwVcRecvFecnFrames, wfFrSwCctInBe=wfFrSwCctInBe, wfFrSwCctLocalDropDEFrames=wfFrSwCctLocalDropDEFrames, wfFrSwCctLocalSetBECNFrames=wfFrSwCctLocalSetBECNFrames, wfFrSwDlcmiUnknownDlciErrors=wfFrSwDlcmiUnknownDlciErrors, wfFrSwCctLocalRecvDEFrames=wfFrSwCctLocalRecvDEFrames, wfFrSwSvcUsageCurFlushInterval=wfFrSwSvcUsageCurFlushInterval, wfFrSwGlobalE164AddrEntry=wfFrSwGlobalE164AddrEntry, wfFrSwErrData=wfFrSwErrData, wfFrSwVcSetFecnFrames=wfFrSwVcSetFecnFrames, wfFrSwDlcmiCallAccDlciSelectionType=wfFrSwDlcmiCallAccDlciSelectionType, wfFrSwTupleIpAddrA=wfFrSwTupleIpAddrA, 
wfFrSwCctRemoteSetFECNOctets=wfFrSwCctRemoteSetFECNOctets, wfFrSwVcXNetErrors=wfFrSwVcXNetErrors)
# NOTE(review): the following row is stray dataset-metadata residue
# (avg_line_length | max_line_length | alphanum_fraction) that leaked into
# the file during extraction; it is not valid Python, so it is commented
# out here and should be deleted outright:
# | 156.006489 | 12,584 | 0.795407 |
# --- Generated import preamble (pysnmp mibBuilder) ---
# Standard pysnmp-codegen prologue: pull ASN.1 base types, constraint
# classes, SMI node classes, and textual conventions out of already-loaded
# MIB modules via mibBuilder rather than plain Python imports.
# NOTE(review): this preamble re-appears mid-file, after the exportSymbols
# calls above — it looks like two generated module dumps were concatenated;
# confirm against the original generator output.
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Counter32, IpAddress, Counter64, Bits, ModuleIdentity, MibIdentifier, TimeTicks, Unsigned32, iso, NotificationType, ObjectIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Counter32", "IpAddress", "Counter64", "Bits", "ModuleIdentity", "MibIdentifier", "TimeTicks", "Unsigned32", "iso", "NotificationType", "ObjectIdentity", "Gauge32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Enterprise-specific anchor node from the Wellfleet common MIB.
wfFrswGroup, = mibBuilder.importSymbols("Wellfleet-COMMON-MIB", "wfFrswGroup")
# --- wfFrSwDlcmi table (OID prefix 1.3.6.1.4.1.18.3.5.9.6.1) ---
# Machine-generated pysnmp objects for the Data Link Connection Management
# Interface (DLCMI) parameter/statistics table, one row per circuit.
# Pattern per object: construct the node, then (under the loadTexts guard)
# attach its MIB STATUS and DESCRIPTION strings.
wfFrSwDlcmiTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1), )
if mibBuilder.loadTexts: wfFrSwDlcmiTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiTable.setDescription('The Parameters for the Data Link Connection Management Interface corresponding to any interface. Incorporates the Error table.')
# Conceptual row, indexed by the circuit-number column (column 4 below).
wfFrSwDlcmiEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwDlcmiCircuit"))
if mibBuilder.loadTexts: wfFrSwDlcmiEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEntry.setDescription('The parameters for a particular Data Link Connection Management Interface.')
# Columns 1-3: row lifecycle control, operational state, NNI enable flag.
wfFrSwDlcmiDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDelete.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3))).clone('init')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiState.setDescription('Indicates which state of DLCMI the interface is in')
wfFrSwDlcmiNniEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiNniEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiNniEnable.setDescription('Indicates whether a NNI is enabled for this entry.')
# Column 4: instance identifier — the circuit number this row describes.
wfFrSwDlcmiCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCircuit.setDescription('Instance identifier; the circuit number of this entry.')
# Columns 5-8: management scheme, L3 address, address length, control byte.
wfFrSwDlcmiManagementType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("none", 1), ("lmi", 2), ("t1617d", 3), ("t1617b", 4), ("annexa", 5), ("lmiswitch", 6), ("annexdswitch", 7), ("annexaswitch", 8), ("iwfoamenabled", 9), ("iwfoamdisabled", 10))).clone('t1617d')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiManagementType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiManagementType.setDescription('Indicates the Data Link Connection Management scheme that is active.')
wfFrSwL3NetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwL3NetAddress.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwL3NetAddress.setDescription('Indicates level 3 (IP) address of this frame relay interface')
wfFrSwDlcmiAddressLen = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4))).clone(namedValues=NamedValues(("twobyte", 2), ("threebyte", 3), ("fourbyte", 4))).clone('twobyte')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiAddressLen.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAddressLen.setDescription('Indicates the address length, including the control portion.')
wfFrSwDlcmiControlByteDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteDisable.setDescription('Indicates inclusion of control byte in q922 format.')
# Columns 9-14: polling / error-threshold tuning knobs (ranges per subtype).
wfFrSwDlcmiPollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(15)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiPollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiPollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiFullEnquiryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiFullEnquiryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFullEnquiryInterval.setDescription('Indicates the number of status enquiries before a full status enquiry. (For bidirectional procedures.)')
wfFrSwDlcmiErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 11), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiErrorThreshold.setDescription('Indicates the number errors monitored before declaring the interface down.')
wfFrSwDlcmiMonitoredEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 12), Integer32().clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiMonitoredEvents.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMonitoredEvents.setDescription('Indicates the events over which error threshold is kept.')
wfFrSwDlcmiRecoveryCounts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiRecoveryCounts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiRecoveryCounts.setDescription('Indicates the number of correct polling cycles during recovery.')
wfFrSwDlcmiMaxSupportedVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1024)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiMaxSupportedVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMaxSupportedVCs.setDescription('Indicates the maximum number of VCs allowed.')
# Columns 15-21: VC usage gauge, error counter, and sequence-number state.
wfFrSwDlcmiVCsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiVCsInUse.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiVCsInUse.setDescription('Indicates the number of VCs that are currently configured on this interface.')
wfFrSwSwitchHdrErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSwitchHdrErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSwitchHdrErrors.setDescription('Indicates the number of frames dropped because they were received on the remote side with an invalid switch header.')
wfFrSwDlcmiSequenceCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceCount.setDescription("Indicates this switch's sequence counter; value of next to send.")
wfFrSwDlcmiLastReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiLastReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiLastReceived.setDescription('Indicates the sequence number just received from the end station.')
wfFrSwDlcmiActiveSeqCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiActiveSeqCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiActiveSeqCount.setDescription("Indicates the switch's sequence counter for sending status enquiry. (For bidirectional procedures.)")
wfFrSwDlcmiActiveReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiActiveReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiActiveReceived.setDescription('Indicates the sequence number just received from the enquiring station. (For bidirectional procedures.)')
# Column 21: polls counter (its setDescription continues past this chunk).
wfFrSwDlcmiPolls = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiPolls.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiPolls.setDescription('This is the counter of where we are in the polling cycle.')
wfFrSwDlcmiAlarmTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiAlarmTimer.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAlarmTimer.setDescription('Counter of 1/2 second timeouts. Indicates when to expect poll.')
wfFrSwErrType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("reset", 1), ("other", 2), ("short", 3), ("long", 4), ("illegaldlci", 5), ("unknowndlci", 6), ("protoerr", 7), ("unknownie", 8), ("sequenceerr", 9), ("unknownrpt", 10), ("byteerr", 11), ("hdrerr", 12), ("formaterr", 13))).clone('reset')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrType.setDescription('Indicates the type of the last specific monitored error.')
wfFrSwErrData = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 24), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrData.setDescription('Contains as much of the error packet as possible.')
wfFrSwErrTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 25), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrTime.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrTime.setDescription('Indicates the time the last error occurred.')
wfFrSwBcMeasurementInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 2000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBcMeasurementInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBcMeasurementInterval.setDescription('Indicates the Committed Burst sample window interval in msec')
wfFrSwDlcmiMcastNoBufferErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiMcastNoBufferErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMcastNoBufferErrors.setDescription('Indicates the number of times a multicast failed partially or wholly because there are insufficient buffers available to create multiple copies of a multicast frame')
wfFrSwDlcmiFrameTooShortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooShortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooShortErrors.setDescription('Indicates the number of frames dropped that are too short to be accepted.')
wfFrSwDlcmiFrameTooLongErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooLongErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooLongErrors.setDescription('Indicates the number of frames dropped that are too long to be accepted.')
wfFrSwDlcmiIllegalDlciErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiIllegalDlciErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiIllegalDlciErrors.setDescription('Indicates the number of frames dropped that had an invalid DLCI value.')
wfFrSwDlcmiUnknownDlciErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownDlciErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownDlciErrors.setDescription('Indicates the number of frames dropped which had an unknown DLCI value.')
wfFrSwDlcmiProtocolErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiProtocolErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiProtocolErrors.setDescription('Indicates the number of frames dropped because of a DLCMI protocol violation.')
wfFrSwDlcmiUnknownIEErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownIEErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownIEErrors.setDescription('Indicates the number of frames dropped that had an unknown information element.')
wfFrSwDlcmiSequenceErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceErrors.setDescription('Indicates the number of frames dropped because of a DLCMI sequence error.')
wfFrSwDlcmiUnknownRPTErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownRPTErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownRPTErrors.setDescription('Indicates the number of frames dropped which had an unknown report type.')
wfFrSwDlcmiControlByteErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteErrors.setDescription('Indicates the number of frames dropped that had an unsupported control byte.')
wfFrSwDlcmiFormatErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFormatErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFormatErrors.setDescription('Indicates the number of frames dropped due to a frame format error.')
wfFrSwDlcmiOtherErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiOtherErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiOtherErrors.setDescription('Indicates the number of frames dropped due to unknown or other errors not counted by any error counter.')
wfFrSwDlcmiStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 39), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("running", 1), ("recovered", 2), ("fault", 3), ("start", 4))).clone('start')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiStatus.setDescription('Indicates which state of execution the DLCMI gate is in')
wfFrSwDlcmiNewVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiNewVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiNewVCs.setDescription('Indicates the number of newly added PVCs that we have not yet told the CPE about, by means of a full-status message.')
wfFrSwDlcmiDeletedVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDeletedVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDeletedVCs.setDescription('Indicates the number of deletedly added PVCs that we have not yet told the CPE about, by means of a full-status message.')
wfFrSwDlcmiFullStatusSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFullStatusSeq.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFullStatusSeq.setDescription('Indicates the expected sequence number for the next Status Enquiry message that will prove that the CPE received our last Full Status Message and knows about the deleted PVCs.')
wfFrSwDlcmiBidirect = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 43), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiBidirect.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiBidirect.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiDteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 44), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("running", 1), ("recovered", 2), ("fault", 3), ("start", 4))).clone('start')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteStatus.setDescription('Indicates which state of execution the DLCMI gate is in for bidirectional procedures.')
wfFrSwDlcmiDteSeqCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 45), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteSeqCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteSeqCount.setDescription("Indicates the switch's sequence counter for sending status enquiry. (For bidirectional procedures.)")
wfFrSwDlcmiDteReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 46), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteReceived.setDescription('Indicates the sequence number just received from the enquiring station. (For bidirectional procedures.)')
wfFrSwDlcmiDteLastReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 47), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteLastReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteLastReceived.setDescription('Indicates the sequence number just received from the end station.')
wfFrSwDlcmiDtePolls = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDtePolls.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDtePolls.setDescription('This is the counter of where we are in the polling cycle.')
wfFrSwDlcmiDtePollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 49), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDtePollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDtePollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiDteFullEnquiryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 50), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDteFullEnquiryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteFullEnquiryInterval.setDescription('Indicates the number of status enquiries before a full status enquiry. (For bidirectional procedures.)')
wfFrSwDlcmiDteErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 51), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDteErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteErrorThreshold.setDescription('Indicates the number errors monitored before declaring the interface down.')
wfFrSwDlcmiCrossNetEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 52), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetEnable.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiCrossNetPollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 53), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 86400)).clone(120)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetPollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetPollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiCrossNetErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 54), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetErrorThreshold.setDescription('Indicates the number missed heartbeat polls before declaring the cross-net PVC inactive.')
wfFrSwDlcmiCrossNetAsyncUpdateEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetAsyncUpdateEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetAsyncUpdateEnable.setDescription('Indicates whether we are to send to the other end of the network, status updates for dlcis as soon as there is a change of status for the dlci.')
wfFrSwDlcmiBcMeasurementEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiBcMeasurementEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiBcMeasurementEnable.setDescription('Indicates whether Committed Burst Measurement is enabled for this interface. If this attribute is set to DISABLE then DE bit setting in Frame Relay frames at this interface is disabled.')
wfFrSwDlcmiAsyncUpdateEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 57), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiAsyncUpdateEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAsyncUpdateEnable.setDescription('Indicates whether the link management entity should send an asynchronous single PVC update message when the state of a PVC is changed by a technician or by cross-net polling procedures. ')
wfFrSwDlcmiCrossNetListenEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 58), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetListenEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetListenEnable.setDescription("Indicates whether the link management entity should make a judgement of the PVC's status based on Cross Net updates.")
wfFrSwDlcmiSvcDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 59), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSvcDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSvcDisable.setDescription(' Indicates whether SVC is enabled or disabled for this access channel. ')
wfFrSwDlcmiL2AddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("e164", 1), ("x121", 2))).clone('e164')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiL2AddrType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiL2AddrType.setDescription(' Indicates the address type supported on this access channel. This information is needed when wFrSwDlcmiSVCDisable is enabled. ')
wfFrSwDlcmiEscapeMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("ingress", 2), ("egress", 3))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeMode.setDescription(' Identifies the Escape mode (none, ingress or egress) to be used for PVCs with wfFrSwVcEscapeMode set to enabled.')
wfFrSwDlcmiEscapeCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 62), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeCircuit.setDescription('Identifies the FR-DTE circuit number corresponding to an Escape PVC. Applies only to PVCs with wfFrSwVcEscapeEnable set to enabled.')
wfFrSwDlcmiEscapeVcCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 63), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeVcCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeVcCount.setDescription(' The number of PVCs on this DLCMI that are configured as Escape VCs')
wfFrSwDlcmiIwfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 64), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("sdlc2frsw", 2))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiIwfMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiIwfMode.setDescription(' Identifies the interworking mode (none, SDLC-to-FRSW) to be used for PVCs with wfFrSwVcEscapeMode set to enabled.')
wfFrSwDlcmiSvcBillingEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSvcBillingEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSvcBillingEnable.setDescription('Indicates whether the SVC Billing on this access channel set to enable.')
wfFrSwDlcmiSpvcAgent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("cra", 2), ("caa", 3), ("craandcaa", 4))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSpvcAgent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSpvcAgent.setDescription(' Indicates if an SPVC Call Request Agent, Call Accept Agent, or both are enabled on this FRSW circuit.')
wfFrSwDlcmiCallAccDlciSelectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("any", 1), ("specific", 2))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCallAccDlciSelectionType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCallAccDlciSelectionType.setDescription('Indicates to the Call Accept Agent to accept SPVC Call Setup requests for any available DLCI or for a specific DLCI. Call Setup requests with the wrong selection type will be rejected.')
# --- Wellfleet-FRSW-MIB: per-circuit (VC) table -- status 'obsolete' --------
# Machine-generated pysnmp objects for the Frame Relay Circuit table under
# OID prefix 1.3.6.1.4.1.18.3.5.9.6.2, indexed by (wfFrSwCctNumber,
# wfFrSwCctDlci).  Every object in this table is marked
# setStatus('obsolete') by the source MIB; kept for backward compatibility.
wfFrSwCctTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2), )
if mibBuilder.loadTexts: wfFrSwCctTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctTable.setDescription('Frame Relay Circuit table gives information about a virtual circuit.')
wfFrSwCctEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwCctNumber"), (0, "Wellfleet-FRSW-MIB", "wfFrSwCctDlci"))
if mibBuilder.loadTexts: wfFrSwCctEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctEntry.setDescription('An entry in the Frame Relay (Virtual) Circuit table.')
wfFrSwCctDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2), ("system", 3))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctDelete.setDescription('Indication to delete this frame relay interface.')
# Index columns: circuit number + DLCI (named values mark the legal DLCI
# ranges for 2-, 3- and 4-byte address formats).
wfFrSwCctNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctNumber.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctNumber.setDescription('Instance identifier; the circuit number of this interface.')
wfFrSwCctDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctDlci.setDescription('Instance identifier; this indicates the virtual circuit identifier')
wfFrSwCctState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 1), ("active", 2), ("inactive", 3), ("control", 4), ("user", 5))).clone('invalid')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctState.setDescription('Indicates whether the particular virtual circuit is operational.')
wfFrSwCctMulticast = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("multicast", 1), ("unicast", 2))).clone('unicast')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctMulticast.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctMulticast.setDescription('Indicates whether this dlci is used for multicast or single destination.')
# Traffic-contract parameters: committed/excess burst and throughput,
# each split into incoming and outgoing directions.
wfFrSwCctInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInBc.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBc.setDescription('Indicates the Incoming Committed Burst bits for this virtual circuit.')
wfFrSwCctOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctOutBc.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutBc.setDescription('Indicates the Outgoing Committed Burst bits for this virtual circuit.')
wfFrSwCctInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctInBe.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBe.setDescription('Indicates the Incoming Excess Burst bits for this virtual circuit.')
wfFrSwCctOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctOutBe.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutBe.setDescription('Indicates the Outgoing Excess Burst bits for this virtual circuit.')
wfFrSwCctInThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctInThroughput.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInThroughput.setDescription('Indicates the incoming throughput in bits/sec for this virtual circuit.')
wfFrSwCctOutThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctOutThroughput.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutThroughput.setDescription('Indicates the outgoing throughput in bits/sec for this virtual circuit.')
wfFrSwCctCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctCreationTime.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctCreationTime.setDescription('Indicates the value of sysUpTime when the VC was created.')
wfFrSwCctLastTimeChange = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLastTimeChange.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLastTimeChange.setDescription('Indicates the value of sysUpTime when last there was a change in VC state.')
# Per-VC traffic counters on the local interface, partitioned by DE / FECN /
# BECN bit handling (frames and octets counted separately).
wfFrSwCctLocalSentNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEFrames.setDescription('Indicates the number of frames without the DE bit sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEOctets.setDescription('Indicates the number of octets without DE bit sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEFrames.setDescription('Indicates the number of frames with DE bit set sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEOctets.setDescription('Indicates the number of octets with DE bit set sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSetFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the FECN bit .')
wfFrSwCctLocalSetFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the FECN bit.')
wfFrSwCctLocalSetBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the BECN bit.')
wfFrSwCctLocalSetBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the BECN bit.')
wfFrSwCctLocalSetDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the DE bit.')
wfFrSwCctLocalSetDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the DE bit.')
wfFrSwCctLocalDropNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEFrames.setDescription('Indicates the number of frames received over the local interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctLocalDropNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEOctets.setDescription('Indicates the number of octets in frames received over the local interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctLocalDropDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEFrames.setDescription('Indicates the number of frames received over the local interface, having the DE bit set, which were discarded.')
wfFrSwCctLocalDropDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEOctets.setDescription('Indicates the number of octets in frames received over the local interface, having the DE bit set, which were discarded.')
wfFrSwCctInactiveVCDropFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropFrames.setDescription('Indicates how many frames were discarded because the virtual circuit was inactive.')
wfFrSwCctInactiveVCDropOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropOctets.setDescription('Indicates how many Octets were discarded because the virtual circuit was inactive.')
wfFrSwCctLocalRecvNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEFrames.setDescription('Indicates the number of frames received on this virtual circuit over the local interface.')
wfFrSwCctLocalRecvNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEOctets.setDescription('Indicates the number of octets received on this virtual circuit over the local interface.')
wfFrSwCctLocalRecvDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEFrames.setDescription('Indicates the number of frames received over the local interface with the DE bit set.')
wfFrSwCctLocalRecvDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEOctets.setDescription('Indicates the number of octets in frames received over the local interface with the DE bit set.')
wfFrSwCctLocalRecvFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNFrames.setDescription('Indicates the number of frames received over the local interface with the FECN bit set.')
wfFrSwCctLocalRecvFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNOctets.setDescription('Indicates the number of octets in frames received over the local interface with the FECN bit set.')
wfFrSwCctLocalRecvBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNFrames.setDescription('Indicates the number of frames received over the local interface with the BECN bit set.')
wfFrSwCctLocalRecvBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNOctets.setDescription('Indicates the number of octets in frames received over the local interface with the BECN bit set.')
wfFrSwCctLocalRecentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecentNonDEOctets.setDescription('Indicates the number of octets received over the local interface during the most recent sampling period.')
wfFrSwCctRemoteSentNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEFrames.setDescription('Indicates the number of Non DE set frames sent over the remote interface.')
wfFrSwCctRemoteSentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEOctets.setDescription('Indicates the number of Non DE set octets sent over the remote interface.')
wfFrSwCctRemoteSentDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEFrames.setDescription('Indicates the number of DE set frames sent over the remote interface.')
wfFrSwCctRemoteSentDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEOctets.setDescription('Indicates the number of DE set octets sent over the remote interface.')
wfFrSwCctRemoteSetFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNFrames.setDescription('Indicates the number of frames sent to the remote interface on which this switch set the FECN bit.')
wfFrSwCctRemoteSetFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNOctets.setDescription('Indicates the number of octets in frames sent to the remote interface that on which this switch set the FECN bit.')
wfFrSwCctRemoteSetBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNFrames.setDescription('Indicates the number of frames sent to the remote interface on which this switch set the BECN bit.')
wfFrSwCctRemoteSetBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNOctets.setDescription('Indicates the number of octets in frames sent to the remote interface on which this switch set the BECN bit.')
wfFrSwCctRemoteDropNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEFrames.setDescription('Indicates the number of frames received over the remote interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctRemoteDropNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctRemoteDropDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEFrames.setDescription('Indicates the number of frames received over the remote interface, having the DE bit set, which were discarded.')
wfFrSwCctRemoteDropDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface, having the DE bit set, which were discarded.')
wfFrSwCctRemoteRecvNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEFrames.setDescription('Indicates the number of frames received on this virtual circuit over the remote interface.')
wfFrSwCctRemoteRecvNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEOctets.setDescription('Indicates the number of octets received on this virtual circuit over the remote interface.')
wfFrSwCctRemoteRecvDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 53), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEFrames.setDescription('Indicates the number of frames received over the remote interface with the DE bit set.')
wfFrSwCctRemoteRecvDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 54), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the DE bit set.')
wfFrSwCctRemoteRecvFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 55), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNFrames.setDescription('Indicates the number of frames received over the remote interface with the FECN bit set.')
wfFrSwCctRemoteRecvFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 56), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the FECN bit set.')
wfFrSwCctRemoteRecvBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 57), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNFrames.setDescription('Indicates the number of frames received over the remote interface with the BECN bit set.')
wfFrSwCctRemoteRecvBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 58), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the BECN bit set.')
wfFrSwCctLocalBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalBecnState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalBecnState.setDescription('Indicates the local BECN state')
wfFrSwCctRemoteBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteBecnState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteBecnState.setDescription('Indicates the remote BECN state')
wfFrSwCctLocalOrRemoteConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("remote", 2))).clone('remote')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalOrRemoteConnection.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalOrRemoteConnection.setDescription('Indicates whether this connection is Local to Local Connection or Local to Remote connection.')
wfFrSwCctInBcOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInBcOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBcOctets.setDescription('Indicates the Incoming Committed Burst in octets for this virtual circuit.')
wfFrSwCctStateSet = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 63), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctStateSet.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctStateSet.setDescription('User access for setting the state of a virtual circuit')
wfFrSwCctReportedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 64), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acked", 1), ("unacked", 2), ("unreported", 3))).clone('unreported')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctReportedStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctReportedStatus.setDescription('Record keeping for circuit status')
wfFrSwCctReceivedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctReceivedStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctReceivedStatus.setDescription('State of a virtual circuit as reported by the network at an NNI')
wfFrSwCctCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctCrossNetStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctCrossNetStatus.setDescription('State of a virtual circuit as reported by the other end of the network under bidirectional signalling.')
wfFrSwCctXNetSent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unsent", 1), ("sent", 2))).clone('unsent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetSent.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetSent.setDescription('Whether we have sent a cross net status message for this VC yet.')
wfFrSwCctXNetReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 68), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("recv", 1), ("unrecv", 2))).clone('unrecv')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetReceived.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetReceived.setDescription('Whether we have received a cross net status message for this VC during the current polling interval. ')
wfFrSwCctXNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 69), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetErrors.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetErrors.setDescription('This is the count of the consecutive errors (usually timeouts) against this VC in cross-network heartbeat polling ')
# --------------------------------------------------------------------------
# wfFrSwTupleTable (obsolete): maps a virtual circuit to its two endpoints,
# each identified by an (IP address, DLCI) pair.  The row is indexed by all
# four endpoint columns; only wfFrSwTupleDelete is writable (row removal).
# --------------------------------------------------------------------------
wfFrSwTupleTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3), )
if mibBuilder.loadTexts: wfFrSwTupleTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleTable.setDescription('The Parameters for the Tuple table, identifying the endpoints of virtual circuits as pairs of IP addresses and DLCI.')
wfFrSwTupleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwTupleIpAddrA"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleDlciA"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleIpAddrB"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleDlciB"))
if mibBuilder.loadTexts: wfFrSwTupleEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleEntry.setDescription('The parameters for a particular Tuple.')
# Setting wfFrSwTupleDelete to deleted(2) requests removal of the tuple row.
wfFrSwTupleDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwTupleDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDelete.setDescription('Indication to delete this tuple.')
# Endpoint 'A' identification (index columns 2-3).
wfFrSwTupleIpAddrA = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleIpAddrA.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleIpAddrA.setDescription("Instance indentifier; indicates the IP address associated with endpoint 'A' of a virtual circuit.")
wfFrSwTupleDlciA = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleDlciA.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDlciA.setDescription("Instance identfier; indicates the DLCI associated with endpoint 'A' of a virtual circuit.")
# Endpoint 'B' identification (index columns 4-5).
wfFrSwTupleIpAddrB = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleIpAddrB.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleIpAddrB.setDescription("Instance identfier; indicates the IP address associated with endpoint 'B' of a virtual circuit.")
wfFrSwTupleDlciB = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleDlciB.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDlciB.setDescription("Instance identifier; Indicates the DLCI associated with endpoint 'B' of a virtual circuit.")
# --------------------------------------------------------------------------
# wfFrSwMcastTable (status 'mandatory', i.e. current): associates a
# multicast DLCI on an interface with an individual member DLCI.  Rows are
# indexed by wfFrSwMcastIndex; all columns except the index are writable.
# --------------------------------------------------------------------------
wfFrSwMcastTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4), )
if mibBuilder.loadTexts: wfFrSwMcastTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastTable.setDescription('The list of multicast addresses')
wfFrSwMcastEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwMcastIndex"))
if mibBuilder.loadTexts: wfFrSwMcastEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastEntry.setDescription('The parameters for a particular Multicast address.')
# Setting wfFrSwMcastDelete to deleted(2) requests removal of this row.
wfFrSwMcastDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastDelete.setDescription('Indication to delete this multicast instance.')
wfFrSwMcastIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwMcastIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIndex.setDescription('Index of this multicast DLCI instance')
wfFrSwMcastIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIpAddr.setDescription('IP address of the interface in which this multicast DLCI is defined.')
wfFrSwMcastDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastDlci.setDescription('Identifies the multicast DLCI with which the IndividualDlci is associated.')
wfFrSwMcastIndividualDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastIndividualDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIndividualDlci.setDescription('Indicates the DLCI associated with the above multicast DLCI.')
# --------------------------------------------------------------------------
# wfFrSwUsage group: scalar objects controlling and reporting FRSW billing
# (usage-data collection).  The group splits into:
#   * configuration scalars (readwrite): volume, directory, file prefix,
#     and the timer/update/store/flush/cleanup intervals;
#   * timestamps of the last timer expirations (readonly TimeTicks);
#   * one-shot triggers (readwrite Integer32 set non-zero to act, then
#     reset to zero, per their descriptions);
#   * "Cur*" readonly mirrors reporting the values actually in effect.
# --------------------------------------------------------------------------
wfFrSwUsage = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5))
wfFrSwUsageEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageEnable.setDescription('Enable/Disable FRSW billing.')
# Volume 0 is special: no Store/Flush to the file system (see description).
wfFrSwUsageVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageVolume.setDescription("Indicates the file system volume number to which the billing usage data files will be written. The volume number corresponds to the slot number on which the volume resides. Note: Value 0 has the special meaning that no 'Store' and 'Flush' operations will take place. This translates to no Billing data will be written to the local file system. 'Update' operations will still be performed on each local slot. Full Billing statistics will still be available in the wfFrSwUsageTable MIB.")
wfFrSwUsageVolumeBackup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageVolumeBackup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageVolumeBackup.setDescription('Indicates the backup volume if wfFrSwUsageVolume becomes inoperative. Note: This feature is not implemented in this release.')
wfFrSwUsageDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageDirectory.setDescription('The name of the directory where the billing usage data files are stored. ')
wfFrSwUsageFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFilePrefix.setDescription('The base name of billing usage data files.')
# Interval scalars.  Per their descriptions, the minute-based intervals
# below must (in seconds) be multiples of wfFrSwUsageTimerInterval.
wfFrSwUsageTimerInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageTimerInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageTimerInterval.setDescription('This number determines the timer interval (number of seconds) unit for the Billing process to perform its various timer driven tasks. i.e. updating billing usage data, writing billing usage data to file system and file system management activities.')
wfFrSwUsageUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to collect and update billing usage data in the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB follow by zeroing the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCleanupInterval.setDescription('This is the interval (number of minutes) for the Billing process to check and delete old billing usage data files. Note: When converted to seconds, this must be a multilple of wfFrSwUsageTimerInterval.')
wfFrSwUsageLocalTimeZone = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLocalTimeZone.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageLocalTimeZone.setDescription('Indicates local time zone of the switch')
# Last-activity timestamps (seconds since midnight Jan 1, 1976 GMT,
# per the descriptions below).
wfFrSwUsageUpdateTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageUpdateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateTimeStamp.setDescription('Time stamp of last wfFrSwUsageUpdateInterval timer expiration or the starting time of the current wfFrSwUsageUpdateInterval. This value is number of seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageStoreTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStoreTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreTimeStamp.setDescription('Time stamp of last wfFrSwUsageStoreInterval timer expiration or the starting time of the current wfFrSwUsageStoreInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT). ')
wfFrSwUsageFlushTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 14), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageFlushTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushTimeStamp.setDescription('Time stamp of last wfFrSwUsageFlushInterval timer expiration or the starting time of the current wfFrSwUsageFlushInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwUsageCleanupTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 15), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCleanupTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCleanupTimeStamp.setDescription('Time stamp of last wfFrSwUsageCleanupInterval timer expiration or the starting time of the current wfFrSwUsageCleanupInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
# One-shot action triggers: write non-zero to fire, then reset to zero.
wfFrSwUsageUpdateData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageUpdateData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating of the billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageStoreData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageStoreData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageFlushData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFlushData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the billing usage data and followed by zeroing the wfFrSwBillingUsage MIB. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageFileCleanup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFileCleanup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFileCleanup.setDescription('Setting this attribute to a non-zero value will cause an immediate checking and deleting old billing usage data files. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageState.setDescription('current state FRSW billing.')
# Readonly "Cur*" mirrors: values actually in effect, which track the
# writable scalars above unless the user configured an invalid value.
wfFrSwUsageCurVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurVolume.setDescription('current file system volume number used. This number is the same as wfFrSwUsageVolume except when the user sets wfFrSwUsageVolume to an invalid number.')
wfFrSwUsageCurVolumeBackup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurVolumeBackup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurVolumeBackup.setDescription('curent backup file system volume number used. This number is the same as wfFrSwUsageVolumeBackUp except when the user sets wfFrSwUsageVolume to an invalid number. Note: This feature is not implemented in this release.')
wfFrSwUsageCurDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 23), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurDirectory.setDescription('current directory name used. This number is the same as wfFrSwUsageDirectory except when the user sets wfFrSwUsageDirectory to an invalid name.')
wfFrSwUsageCurFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 24), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurFilePrefix.setDescription('current base file name used. This number is the same as wfFrSwUsageFilePrefix except when the user sets wfFrSwUsageFilePrefix to an invalid name.')
wfFrSwUsageCurTimerInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(20)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurTimerInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurTimerInterval.setDescription('current timer interval number used. This number is the same as wfFrSwUsageTimerInterval except when the user sets wfFrSwUsageTimerInterval to an invalid value.')
wfFrSwUsageCurUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurUpdateInterval.setDescription('current update interval number used. This number is the same as wfFrSwUsageUpdateInterval except when the user sets wfFrSwUsageUpdateInterval to an invalid value.')
wfFrSwUsageCurStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurStoreInterval.setDescription('current store timer interval number used. This number is the same as wfFrSwUsageStoreInterval except when the user sets wfFrSwUsageStoreInterval to an invalid value.')
wfFrSwUsageCurFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 28), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurFlushInterval.setDescription('current flush timer interval number used. This number is the same as wfFrSwUsageFlushInterval except when the user sets wfFrSwUsageFlushInterval to an invalid value.')
wfFrSwUsageCurCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurCleanupInterval.setDescription('current file cleanup timer interval number used. This number is the same as wfFrSwUsageCleanupInterval except when the user sets wfFrSwUsageCleanupInterval to an invalid value.')
wfFrSwUsageDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageDebug.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageDebug.setDescription('Enable/Disable printing of debug edl (trap) messages. NOTE: Do not enable this attribute in operational enviornment as it will likely flood the logging facility. This attribute is reserved for specialized debugging in a controlled lab enviornment.')
wfFrSwUsageCurDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurDebug.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurDebug.setDescription('current debug value used. This value is the same as wfFrSwUsageDebug except when the user sets wfFrSwUsageDeubg to an invalid value.')
wfFrSwUsageSwitchId = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 32), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSwitchId.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageSwitchId.setDescription('switch id used in the billing usage data file.')
wfFrSwUsageNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 33), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageNumEntries.setDescription('number of entries in wfFrSwUsageTable')
wfFrSwSvcUsageEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageEnable.setDescription('Enable/Disable FRSW SVC billing.')
wfFrSwSvcUsageInterimRecordEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 35), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageInterimRecordEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageInterimRecordEnable.setDescription('Enable/Disable Writing FRSW SVC billing record while SVC connection is still up.')
wfFrSwSvcUsageVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 36), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageVolume.setDescription("Indicates the file system volume number to which the SVC billing usage data files will be written. The volume number corresponds to the slot number on which the volume resides. Note: Value 0 has the special meaning that no 'Store' and 'Flush' operations will take place. This translates to no Billing data will be written to the local file system. 'Update' operations will still be performed on each local slot. Full Billing statistics will still be available in the wfFrSwUsageTable MIB.")
wfFrSwSvcUsageDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 37), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageDirectory.setDescription('The name of the directory where the SVC billing usage data files are stored. ')
wfFrSwSvcUsageFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 38), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFilePrefix.setDescription('The base name of SVC billing usage data files.')
wfFrSwSvcUsageUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 39), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to collect and update billing usage data in the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 40), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 41), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB follow by zeroing the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 42), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupInterval.setDescription('This is the interval (number of minutes) for the SVC Billing process to check and delete old billing usage data files. Note: When converted to seconds, this must be a multilple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageUpdateTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 43), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageUpdateInterval timer expiration or the starting time of the current wfFrSwSvcUsageUpdateInterval. This value is number of seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwSvcUsageStoreTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 44), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageStoreInterval timer expiration or the starting time of the current wfFrSwSvcUsageStoreInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT). ')
wfFrSwSvcUsageFlushTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 45), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageFlushInterval timer expiration or the starting time of the current wfFrSwSvcUsageFlushInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwSvcUsageCleanupTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 46), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageCleanupInterval timer expiration or the starting time of the current wfFrSwSvcUsageCleanupInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwSvcUsageUpdateData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating of the SVC billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageStoreData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 48), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the SVC billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageFlushData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 49), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the SVC billing usage data and followed by zeroing the wfFrSwBillingUsage MIB. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageFileCleanup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 50), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFileCleanup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFileCleanup.setDescription('Setting this attribute to a non-zero value will cause an immediate checking and deleting old SVC billing usage data files. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 51), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageState.setDescription('current state FRSW SVC billing.')
wfFrSwSvcUsageCurVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 52), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurVolume.setDescription('current file system volume number used for SVC Billing. This number is the same as wfFrSwSvcUsageVolume except when the user sets wfFrSwSvcUsageVolume to an invalid number.')
wfFrSwSvcUsageCurDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 53), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurDirectory.setDescription('current directory name used for SVC Billing. This number is the same as wfFrSwSvcUsageDirectory except when the user sets wfFrSwSvcUsageDirectory to an invalid name.')
wfFrSwSvcUsageCurFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 54), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFilePrefix.setDescription('current base file name used for SVC Billing. This name is the same as wfFrSwSvcUsageFilePrefix except when the user sets wfFrSwSvcUsageFilePrefix to an invalid name.')
wfFrSwSvcUsageCurUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 55), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurUpdateInterval.setDescription('current update interval number used. This number is the same as wfFrSwSvcUsageUpdateInterval except when the user sets wfFrSwSvcUsageUpdateInterval to an invalid value.')
wfFrSwSvcUsageCurStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 56), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurStoreInterval.setDescription('current store timer interval number used. This number is the same as wfFrSwSvcUsageStoreInterval except when the user sets wfFrSwSvcUsageStoreInterval to an invalid value.')
wfFrSwSvcUsageCurFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 57), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFlushInterval.setDescription('current flush timer interval number used. This number is the same as wfFrSwSvcUsageFlushInterval except when the user sets wfFrSwSvcUsageFlushInterval to an invalid value.')
wfFrSwSvcUsageCurCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 58), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurCleanupInterval.setDescription('current file cleanup timer interval number used. This number is the same as wfFrSwSvcUsageCleanupInterval except when the user sets wfFrSwSvcUsageCleanupInterval to an invalid value.')
wfFrSwSvcUsageNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageNumEntries.setDescription('number of entries in wfFrSwSvcUsageTable')
wfFrSwSvcUsageVersionId = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageVersionId.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageVersionId.setDescription('The Software Version ID field is a two byte, right justified, binary formated value that identifies the particular version number of the software release. High nibble of byte 1 represents the major version number. Low nibble of byte 1 represents the release number. Byte 2 represents the integration number.')
wfFrSwUsageSwitchName = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 61), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageSwitchName.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageSwitchName.setDescription('The Switch name is a 6-bytes, right justified with leading blanks as necessary. It can be combination of letters, numbers and blanks. This ID identifies the particular networks equipment for SVC billing usage data process.')
wfFrSwPvcUsageFileLayout = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwPvcUsageFileLayout.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwPvcUsageFileLayout.setDescription('PVC usage file layout version')
wfFrSwSvcUsageFileLayout = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 63), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageFileLayout.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFileLayout.setDescription('SVC usage file layout version')
# --- wfFrSwUsageTable (OID 1.3.6.1.4.1.18.3.5.9.6.6) ---
# Obsolete per-PVC billing-usage table, indexed by (circuit number, DLCI).
# 64-bit counters are modelled as High/Low pairs of 32-bit Integer32 columns.
# All objects here carry STATUS 'obsolete' in the source MIB.
wfFrSwUsageTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6), )
if mibBuilder.loadTexts: wfFrSwUsageTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageTable.setDescription('The Billing usage table.')
wfFrSwUsageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwUsageCircuitNumber"), (0, "Wellfleet-FRSW-MIB", "wfFrSwUsageDlci"))
if mibBuilder.loadTexts: wfFrSwUsageEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEntry.setDescription('The parameters for Billing Usage.')
wfFrSwUsageDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageDelete.setDescription('Indicates status of this entry. FRSW_USAGE_CREATED is the normal case. FRSW_USAGE_DELETED means the corresponding tuple and vc instances were deleted some time during this collection interval. This billing instance will be deleted at the end of the next wfFrSwUsageFlush period after this billing record is written out to the file system.')
wfFrSwUsageCircuitNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCircuitNumber.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageCircuitNumber.setDescription('Instance identifier; the circuit number of this interface. ')
wfFrSwUsageDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageDlci.setDescription('Instance identifier; this indicates which virtual circuit. ')
wfFrSwUsageIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageIPAddress.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageIPAddress.setDescription('(Local) IP address corresponding to wfFrSwUsageCircuitNumber of this virtual circuit. ')
# Interval boundary timestamps (High/Low halves of a 64-bit value).
wfFrSwUsageStartTimeStampHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampHigh.setDescription('Time stamp of the starting time (the high 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageStartTimeStampLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampLow.setDescription('Time stamp of the starting time (the low 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageEndTimeStampHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampHigh.setDescription('Time stamp of the ending time (the high 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageEndTimeStampLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampLow.setDescription('Time stamp of the ending time (the low 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
# Sent frame/octet counters for the interval, split by DE (discard-eligible)
# bit, each as a High/Low pair.
wfFrSwUsageSentNonDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesHigh.setDescription('Number (the high 32 bits) of local frames sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesLow.setDescription('Number (the low 32 bits) of local frames sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsHigh.setDescription('Number (the high 32 bits) of local octets sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsLow.setDescription('Number (the low 32 bits) of local octets sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesHigh.setDescription('Number (the high 32 bits) of local frames with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesLow.setDescription('Number (the low 32 bits) of local frames with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsHigh.setDescription('Number (the high 32 bits) of local octets with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsLow.setDescription('Number (the low 32 bits) of local octets with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
# Snapshots of the underlying 32-bit wfFrSwCctLocalSent* counters at the end
# of the interval; the High half counts wrap-arounds of the 32-bit counter.
wfFrSwUsageLastNonDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentNonDEFrames value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEFrames is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentNonDEFrames has wrapped around.')
wfFrSwUsageLastNonDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentNonDEFrames value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastNonDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentNonDEOctets value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEOctets is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentNonDEOctets has wrapped around.')
wfFrSwUsageLastNonDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentNonDEOctets value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentDEFrames value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEFrames is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentDEFrames has wrapped around.')
wfFrSwUsageLastDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentDEFrames value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentDEOctets value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentDEOctets is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentDEOctets has wrapped around.')
wfFrSwUsageLastDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentDEOctets value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageRemoteIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 25), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageRemoteIPAddress.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageRemoteIPAddress.setDescription('IP address of the other side (remote) of this PVC endpoint.')
wfFrSwUsageRemoteDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageRemoteDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageRemoteDlci.setDescription('DLCI number of the other side (remote) of this PVC endpoint.')
# --- wfFrSwVcTable (OID 1.3.6.1.4.1.18.3.5.9.6.7) ---
# Frame Relay virtual-circuit table, indexed by (circuit, DLCI).  The table
# continues past this section with further wfFrSwVc* columns.
wfFrSwVcTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7), )
if mibBuilder.loadTexts: wfFrSwVcTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTable.setDescription('Frame Relay Virtual Circuit table gives information about a virtual circuit.')
wfFrSwVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwVcCircuit"), (0, "Wellfleet-FRSW-MIB", "wfFrSwVcDlci"))
if mibBuilder.loadTexts: wfFrSwVcEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcEntry.setDescription('An entry in the Frame Relay (Virtual) Circuit table.')
wfFrSwVcDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2), ("system", 3), ("svc", 4), ("spvccra", 5), ("spvccaa", 6))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDelete.setDescription('Indication to delete this virtual circuit.')
wfFrSwVcCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCircuit.setDescription('Instance identifier; the circuit number of this interface (logical port).')
wfFrSwVcDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDlci.setDescription('Instance identifier; this indicates the virtual circuit identifier')
wfFrSwVcState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 1), ("active", 2), ("inactive", 3), ("control", 4), ("user", 5))).clone('invalid')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcState.setDescription('Indicates whether the particular virtual circuit is operational.')
wfFrSwVcStateSet = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcStateSet.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcStateSet.setDescription('User access for setting the state of a virtual circuit')
wfFrSwVcMulticast = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("multicast", 1), ("unicast", 2))).clone('unicast')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcMulticast.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcMulticast.setDescription('Indicates whether this dlci is used for multicast or a single destination.')
wfFrSwVcInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 7), Integer32().clone(2147483647)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBe.setDescription('Indicates the maximum number Incoming Excess Burst bits that are allowed in a configured time interval (T).')
wfFrSwVcOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcOutBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutBe.setDescription('Indicates the Outgoing Excess Burst bits for this virtual circuit.')
wfFrSwVcInThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcInThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInThroughput.setDescription('Indicates the incoming throughput in bits/sec for this virtual circuit.')
wfFrSwVcOutThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcOutThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutThroughput.setDescription('Indicates the outgoing throughput in bits/sec for this virtual circuit.')
wfFrSwVcOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcOutBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutBc.setDescription('Indicates the Outgoing Committed Burst bits for this virtual circuit.')
wfFrSwVcInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBc.setDescription('Indicates the Incoming Committed Burst bits for this virtual circuit.')
wfFrSwVcInBcOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBcOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBcOctets.setDescription('Indicates the Incoming Committed Burst in octets for this virtual circuit.')
wfFrSwVcBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBecnState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBecnState.setDescription('Indicates the BECN state')
wfFrSwVcReportedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acked", 1), ("unacked", 2), ("unreported", 3))).clone('unreported')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcReportedStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcReportedStatus.setDescription('Record keeping for circuit status')
wfFrSwVcReceivedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcReceivedStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcReceivedStatus.setDescription('State of a virtual circuit as reported by the network at an NNI')
wfFrSwVcCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCrossNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCrossNetStatus.setDescription('State of a virtual circuit as reported by the other end of the network under bidirectional signalling.')
wfFrSwVcXNetSent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unsent", 1), ("sent", 2))).clone('unsent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetSent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetSent.setDescription('Whether we have sent a cross net status message for this VC yet.')
wfFrSwVcXNetReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("recv", 1), ("unrecv", 2))).clone('unrecv')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetReceived.setDescription('Whether we have received a cross net status message for this VC during the current polling interval.')
wfFrSwVcCalledIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 20), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCalledIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCalledIpAddr.setDescription('Indicates the IP address associated with destination of a virtual circuit.')
wfFrSwVcCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCalledDlci.setDescription('Indicates the DLCI associated with destination of a virtual circuit.')
wfFrSwVcTrfPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 999))).clone(namedValues=NamedValues(("one", 1), ("two", 2), ("three", 3), ("default", 999))).clone('default')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcTrfPriority.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTrfPriority.setDescription('Defines the traffic priority level of all the incoming packets on this VC. FRSW_VCPRIORITY_DEFAULT - Set all incoming user traffic packets to the default priority used by the port. FRSW_VCPRIORITY_ONE - Set all incoming packets to priority 1. FRSW_VCPRIORITY_TWO - Set all incoming packets to priority 2. FRSW_VCPRIORITY_THREE - Set all incoming packets to priority 3. Priority 0 is reserved for network critical packets like OSPF, FR LMI and SMDS heartbeat and is not available for user traffic.')
wfFrSwVcCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 23), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCreationTime.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCreationTime.setDescription('Indicates the value of sysUpTime when the VC was created.')
wfFrSwVcLastTimeChange = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 24), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcLastTimeChange.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcLastTimeChange.setDescription('Indicates the value of sysUpTime when last there was a change in VC state.')
wfFrSwVcTxNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxNonDeFrames.setDescription('Indicates the number of frames without the DE bit sent on this virtual circuit over the interface.')
wfFrSwVcTxNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxNonDeOctets.setDescription('Indicates the number of octets without DE bit sent on this virtual circuit over the interface.')
wfFrSwVcTxDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxDeFrames.setDescription('Indicates the number of frames with DE bit set sent on this virtual circuit over the interface.')
wfFrSwVcTxDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxDeOctets.setDescription('Indicates the number of octets with DE bit set sent on this virtual circuit over the interface.')
wfFrSwVcSetFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetFecnFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the FECN bit.')
wfFrSwVcSetFecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetFecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetFecnOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the FECN bit.')
wfFrSwVcSetBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetBecnFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the BECN bit.')
wfFrSwVcSetBecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetBecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetBecnOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the BECN bit.')
wfFrSwVcSetDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetDeFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the DE bit.')
wfFrSwVcSetDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetDeOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the DE bit.')
wfFrSwVcDropNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropNonDeFrames.setDescription('Indicates the number of frames received over the interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwVcDropNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropNonDeOctets.setDescription('Indicates the number of octets in frames received over the interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwVcDropDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropDeFrames.setDescription('Indicates the number of frames received over the interface, having the DE bit set, which were discarded.')
wfFrSwVcDropDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropDeOctets.setDescription('Indicates the number of octets in frames received over the interface, having the DE bit set, which were discarded.')
wfFrSwVcInactiveVcDropFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropFrames.setDescription('Indicates how many frames were discarded because the virtual circuit was inactive.')
wfFrSwVcInactiveVcDropOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropOctets.setDescription('Indicates how many Octets were discarded because the virtual circuit was inactive.')
wfFrSwVcRecvNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeFrames.setDescription('Indicates the number of frames received on this virtual circuit over the interface.')
wfFrSwVcRecvNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeOctets.setDescription('Indicates the number of octets received on this virtual circuit over the interface.')
wfFrSwVcRecvDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvDeFrames.setDescription('Indicates the number of frames received over the interface with the DE bit set.')
wfFrSwVcRecvDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvDeOctets.setDescription('Indicates the number of octets in frames received over the interface with the DE bit set.')
wfFrSwVcRecvFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvFecnFrames.setDescription('Indicates the number of frames received over the interface with the FECN bit set.')
wfFrSwVcRecvFecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvFecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvFecnOctets.setDescription('Indicates the number of octets in frames received over the interface with the FECN bit set.')
wfFrSwVcRecvBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvBecnFrames.setDescription('Indicates the number of frames received over the interface with the BECN bit set.')
wfFrSwVcRecvBecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvBecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvBecnOctets.setDescription('Indicates the number of octets in frames received over the interface with the BECN bit set.')
wfFrSwVcRecentNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecentNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecentNonDeOctets.setDescription('Indicates the number of octets received over the interface during the most recent sampling period.')
wfFrSwVcXNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetErrors.setDescription('This is the count of the consecutive errors (usually timeouts) against this VC in cross-network heartbeat polling.')
wfFrSwVcDropExcessBurstFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstFrames.setDescription('Indicates the number of Excess Burst Frames dropped on this virtual circuit.')
wfFrSwVcDropExcessBurstOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstOctets.setDescription('Indicates the number of Excess Burst Octets dropped on this virtual circuit.')
wfFrSwVcInBeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 53), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBeOctets.setDescription('Indicates the maximum number Incoming Excess Burst bytes that are allowed in a configured time interval (T).')
wfFrSwVcCfgInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 54), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCfgInBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCfgInBe.setDescription('The number of Excess Burst in bits')
wfFrSwVcRedirectAction = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("redirecttoprimary", 2), ("redirecttobackup", 3), ("switchondemand", 4), ("swondemandtoprimary", 5), ("swondemandtobackup", 6))).clone('redirecttoprimary')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcRedirectAction.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectAction.setDescription("Perform pvc source redirect manually or based on cross-net updates: 'redirecttoprimary(2)' will force to switch to primary; 'redirecttobackup(3)' will force to switch to backup; 'switchondemand(4)' will switch based on cross-net status of the primary to and from primary; 'swondemandtoprimary(5)' will switch to primary from backup iff cross-net of primary became active; 'swondemandtobackup(6)' will switch to backup from primary iff cross-net of primary became inactive.")
wfFrSwVcRedirectType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("transparent", 1), ("intrusiven", 2), ("intrusivea", 3))).clone('intrusivea')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcRedirectType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectType.setDescription("Type of dte notification at switching time: 'transparent(1)' will not send notification to dte; 'intrusiven(2)' will send async update with NEW bit; 'intrusivea(3)' will send async update with A bit not set.")
wfFrSwVcRedirectState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 57), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 12, 13, 21))).clone(namedValues=NamedValues(("backupinactive", 1), ("primaryactive", 2), ("switchtobackup", 3), ("backupactive", 12), ("switchtoprimary", 13), ("holddown", 21))).clone('backupinactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRedirectState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectState.setDescription("PVC Source Redirect State: 'backupinactive(1)' - backup is not configured and/or cross-net status is inactive; will allow traffic only through primary. 'primaryactive(2)' - both primary and backup rx'ed 'active' cross-net status, currently primary is active and traffic only through primary. 'switchtobackup(3)' - primary cross-net status is inactive, but can not switch to backup due to manual (or semi-manual) operation of the redirect; will allow traffic only through primary. 'backupactive(12)' - cross-net status is 'inactive' for primary; will allow traffic only through backup. 'switchtoprimary(13)' - cross-net status is 'active' for primary, should by can not switch to primary due to manual (or semi-manual) operation of the redirect; will allow traffic only through backup. 'holddown(21)' - down state used as intermediate state at switching time (for not more then a second); all traffic is dropped.")
wfFrSwVcBackupCalledIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 58), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcBackupCalledIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCalledIpAddr.setDescription(' Backup Called Ip Address of the remote end of the PVC.')
wfFrSwVcBackupCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 59), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcBackupCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCalledDlci.setDescription(' Backup Called Dlci of the remote end of the PVC.')
wfFrSwVcBackupCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetStatus.setDescription(' Cross net status of the backup remote end of the PVC.')
wfFrSwVcBackupCrossNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 61), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetErrors.setDescription(' Support counter of missed cross net update from backup remote end of the PVC, range: [0, wfFrSwDlcmiCrossNetErrorThreshold].')
wfFrSwVcAtmIwfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 62), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("atmDisableIwfMode", 1), ("atmServiceIwfTransparentMode", 2), ("atmServiceIwfTranslationMode", 3), ("atmNetworkIwfMode", 4))).clone('atmDisableIwfMode')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfMode.setDescription('This attribute indicates the mode of FR-ATM interworking over this FR PVC or that FR-ATM interworking is not enabled on it.')
wfFrSwVcAtmIwfVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 63), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVPI.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVPI.setDescription('This is relevant only when the ATM/FR interworking is enabled for this PVC. This indicates the ATM virtual path identifier associated with the Frame Relay PVC described by this record virtual circuit identifier.')
wfFrSwVcAtmIwfVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 64), Integer32().subtype(subtypeSpec=ValueRangeConstraint(32, 65535)).clone(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVCI.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVCI.setDescription('This is relevant only when FR/ATM interworking is enabled for this PVC. This indicates the ATM virtual circuit identifier associated with the Frame Relay PVC described by this record.')
wfFrSwVcAtmIwfLossPriorityPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapDe", 1), ("atmiwfsetDe1", 2), ("atmiwfsetDe0", 3))).clone('atmiwfmapDe')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfLossPriorityPolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfLossPriorityPolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy for translating ATM CLP to FR DE on this PVC or simply setting FR DE to a constant value for all frames.')
wfFrSwVcAtmIwfDePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapClp", 1), ("atmiwfsetClp1", 2), ("atmiwfsetClp0", 3))).clone('atmiwfmapClp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfDePolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfDePolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy on this PVC for translating FR DE to ATM CLP or simply setting CLP to a constant value for all frames.')
wfFrSwVcAtmIwfEfciPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapFecn", 1), ("atmiwfsetFecn1", 2), ("atmiwfsetFecn0", 3))).clone('atmiwfmapFecn')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfEfciPolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfEfciPolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy on this PVC for translating FR FECN to ATM EFCI or simply setting ATM EFCI to a constant value for all frames.')
wfFrSwVcEscapeEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 68), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcEscapeEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcEscapeEnable.setDescription(' Identifies this PVC as either a standard FRSW PVC (escape disabled) or an Escape PVC (escape enabled). The type of Escape PVC (ingress node or egress node) is specified in the wfFrSwDlcmiEntry Object.')
wfFrSwVcSpvcCallState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 69), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inactive", 1), ("inprogress", 2), ("active", 3))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSpvcCallState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSpvcCallState.setDescription('Indicates to the state of the SPVC call for this DLCI.')
wfFrSwVcCallReqCalledAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 70), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledAddr.setDescription('Called E.164/X.121 Address for an SPVC Call Request Agent. The address type is determined by the wfFrSwDlcmiL2AddrType attribute in wfFrSwDlcmiEntry.')
# --- wfFrSwVcCall table (cont.): SPVC Call Request Agent parameters ---
# Each MIB object is registered in three steps (generated pysnmp idiom):
# construct the MibTableColumn with its OID + SMI syntax, then attach
# STATUS and DESCRIPTION texts only when the builder loads texts.
# DLCI selection mode at the calling end: any free DLCI, or the specific
# one named in wfFrSwVcCallReqCalledDlci (default 'any').
wfFrSwVcCallReqDlciSelectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 71), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("any", 1), ("specific", 2))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqDlciSelectionType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqDlciSelectionType.setDescription("Indicates to the Calling End of an SPVC Call Request whether to use any available DLCI, or a specific DLCI. If 'specific' is chosen, the called DLCI value is specified in wfFrSwVcCallReqCalledDlci.")
# Destination DLCI used when the selection type above is 'specific'.
wfFrSwVcCallReqCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 72), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledDlci.setDescription("Indicates to the Calling End of an SPVC Call Request the DLCI to be used at the destination of a virtual circuit. This value should be specified when 'specific' wfFrSwVcCallReqDlciSelectionType is chosen.")
# Minutes to wait for an SPVC CONNECT before a setup is declared REJECTED
# (range 1..60, default 3).
wfFrSwVcCallReqRetryTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 73), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqRetryTimer.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqRetryTimer.setDescription('Indicates the number of minutes the Call Request Agent should wait for an SPVC CONNECT message before declaring a Call Setup request REJECTED.')
# Retry budget for failed setups; default 2**31-1 effectively means
# "retry indefinitely".
wfFrSwVcCallReqMaxRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 74), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqMaxRetries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqMaxRetries.setDescription('Indicates the number of times the Call Request Agent should retry failed Call Setup requests before declaring the SPVC invalid.')
# --- wfFrSwIsdnBaseTable: per-slot FRSW-over-ISDN base configuration ---
# One row per slot hosting PRI interfaces; selects whether the Calling
# Party (ANI) or Called Party (DNIS) number maps an ISDN call to a UNI.
wfFrSwIsdnBaseTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8), )
if mibBuilder.loadTexts: wfFrSwIsdnBaseTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseTable.setDescription('This is a FRSW over ISDN configuration table. This table specifies whether the Calling Party (ANI) or Called Party (DNIS) ISDN Phone Number should be used to map the ISDN call to a particular FRSW UNI. The table is indexed by the Slot Number where the PRI(s) exist.')
# Row template; indexed solely by the slot number.
wfFrSwIsdnBaseEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnBaseSlotNum"))
if mibBuilder.loadTexts: wfFrSwIsdnBaseEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseEntry.setDescription('Instance Id for this table.')
# Row create/delete toggle (Wellfleet-style row management, default 'created').
wfFrSwIsdnBaseDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnBaseDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseDelete.setDescription('Indication to delete this FRSW ISDN interface. ')
# Index column: slot number of the PRI interface(s) (read-only).
wfFrSwIsdnBaseSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnBaseSlotNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseSlotNum.setDescription('This number is the Slot Number for the PRI interface(s) that are being configured for FRSW ISDN. There will be one of these tables for every slot where an FRSW ISDN PRI Interface exists.')
# dnis(1) or ani(2): which phone number keys the call-to-UNI association.
wfFrSwIsdnBaseAssocType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dnis", 1), ("ani", 2))).clone('dnis')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnBaseAssocType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseAssocType.setDescription('Indicates which ISDN Phone Number (ANI or DNIS) to use to do the ISDN call to FRSW UNI mapping.')
# --- wfFrSwIsdnAssocTable: ISDN phone number -> FRSW UNI index mapping ---
# Indexed by (slot number, ISDN phone number); each row associates a phone
# number with a FRSW UNI hunt-group index used by wfFrSwIsdnUni/ScrnEntry.
wfFrSwIsdnAssocTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9), )
if mibBuilder.loadTexts: wfFrSwIsdnAssocTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocTable.setDescription('This table defines the Association Table to be used for the FRSW over ISDN application. The table contains a list of ISDN Phone Numbers and the associated FRSW UNI Index Number. The table is indexed by the Slot Number and the ISDN Phone Number.')
wfFrSwIsdnAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnAssocSlotNum"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnAssocNum"))
if mibBuilder.loadTexts: wfFrSwIsdnAssocEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocEntry.setDescription('Instance Id for this table.')
# Row create/delete toggle.
wfFrSwIsdnAssocDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocDelete.setDescription('Indication to delete this Association Instance.')
# Index column 1: owning slot.
wfFrSwIsdnAssocSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnAssocSlotNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocSlotNum.setDescription('Slot with which this ISDN Phone Number is associated.')
# Index column 2: the ANI/DNIS number matched from the ISDN SETUP message.
wfFrSwIsdnAssocNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnAssocNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocNum.setDescription('ISDN Phone Number that is used to look up the appropriate FRSW UNI Index. This number is compared with either the Calling Party Number (ANI) Information Element or the Called Party Number (DNIS) Information Element contained in the ISDN Call Setup Message.')
# Enables caller screening (wfFrSwIsdnScrnTable) for this UNI index;
# disabled by default.
wfFrSwIsdnAssocScrnEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocScrnEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocScrnEnable.setDescription('Indicate whether allowed screening should be enabled or disabled for all of the UNIs contained in the FRSW UNI Index.')
# FRSW UNI hunt-group index tied to this phone number; keys into
# wfFrSwIsdnUniEntry / wfFrSwIsdnScrnEntry. Default 0x7FFFFFFF marks an
# unconfigured index. (Fixed duplicated word "is is" in the DESCRIPTION.)
wfFrSwIsdnAssocIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 5), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocIndex.setDescription('A number that indicates the FRSW UNI Index that is associated with the ISDN Phone Number. This FRSW UNI Index is used as a key to obtain the UNIs and the Screening information from the wfFrSwIsdnScrnEntry and wfFrSwIsdnUniEntry mibs. The default for the index is 2**31 - 1 = 2147483647 = 0x7FFFFFFF, which represents an unconfigured index number.')
# --- wfFrSwIsdnUniTable: hunt groups of FRSW UNIs, keyed by UNI index ---
# Collects FRSW UNIs under a shared index so an inbound ISDN call (mapped
# via wfFrSwIsdnAssocTable) can be assigned an available UNI.
wfFrSwIsdnUniTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10), )
if mibBuilder.loadTexts: wfFrSwIsdnUniTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniTable.setDescription('This table is used by the FRSW over ISDN application. The table defines a list of FRSW UNIs that are to be collected into a hunt group identifiable by an Index Number.')
wfFrSwIsdnUniEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnUniIndex"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnUniNum"))
if mibBuilder.loadTexts: wfFrSwIsdnUniEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniEntry.setDescription('Instance Id for this table.')
# Row create/delete toggle.
wfFrSwIsdnUniDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnUniDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniDelete.setDescription('Indication to delete this FRSW UNI Index Instance.')
# Index column 1: hunt-group number linking back to wfFrSwIsdnAssocEntry.
wfFrSwIsdnUniIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniIndex.setDescription('FRSW UNI Index -- a number that identifies a group of related FRSW UNIs that are collected together as a hunt group. This number ties this entry to an entry in wfFrSwIsdnAssocEntry.')
# Index column 2: the individual UNI/circuit in this hunt group.
wfFrSwIsdnUniNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniNum.setDescription('A FRSW UNI/Circuit.')
# Operational state: available(1) or inuse(2); read-only status column.
wfFrSwIsdnUniState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("available", 1), ("inuse", 2))).clone('available')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniState.setDescription('State of this UNI (available or in-use).')
# --- wfFrSwIsdnScrnTable: incoming-call screening (allow list) ---
# Consulted only when wfFrSwIsdnAssocScrnEnable is enabled for the UNI
# index; lists phone numbers authorized to reach that hunt group.
wfFrSwIsdnScrnTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11), )
if mibBuilder.loadTexts: wfFrSwIsdnScrnTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnTable.setDescription('This is the incoming call screening table for the FRSW over ISDN application. The table consists of a FRSW UNI Index and a list of allowable ISDN Phone numbers for that FRSW UNI Index. The table is indexed by both the FRSW UNI Index and the ISDN Phone Number. This table is referenced only when the wfFrSwIsdnAssocScrnEnable is set to Enabled for this FRSW UNI Index.')
wfFrSwIsdnScrnEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnScrnIndex"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnScrnNum"))
if mibBuilder.loadTexts: wfFrSwIsdnScrnEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnEntry.setDescription(' Instance Id for this table. ')
# Row create/delete toggle.
wfFrSwIsdnScrnDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnScrnDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnDelete.setDescription(' Indication to delete this Scrn Instance. ')
# Index column 1: UNI hunt-group index (ties to wfFrSwIsdnAssocEntry).
wfFrSwIsdnScrnIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnScrnIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnIndex.setDescription('FRSW UNI Index - A number that ties this entry to an entry in wfFrSwIsdnAssocEntry.')
# Index column 2: authorized caller's ISDN number.
wfFrSwIsdnScrnNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnScrnNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnNum.setDescription('ISDN Phone Number of a user authorized to access the UNIs contained in the FRSW UNI Index. ')
# --- wfFrSwSigTable: Frame Relay SVC signalling per access channel ---
# One row per frame relay access channel with signalling enabled, indexed
# by circuit number; absence of a row means signalling is disabled.
wfFrSwSigTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12), )
if mibBuilder.loadTexts: wfFrSwSigTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTable.setDescription(" The Frame relay signalling table contains frame relay signalling entries indexed by the frame relay access channel circuit number. An instance of wfFrSwSigEntry is required for each frame relay access channel with frame relay signalling enabled. The absence of wfFrSwSigEntry for a given frame relay access channel implies that frame relay signalling is disabled for the circuit. Note that the terms 'incoming' and 'outgoing' refer to the frame mode call with respect to the network side of the interface. The terminology used by CCITT Q.933/Q.931 is different. ")
wfFrSwSigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwSigCircuit"))
if mibBuilder.loadTexts: wfFrSwSigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigEntry.setDescription(' An entry in the Frame Relay signalling port information table. ')
# Row create/delete toggle.
wfFrSwSigDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDelete.setDescription(' Indication to delete this instance ')
# Index column: access channel circuit number.
wfFrSwSigCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigCircuit.setDescription(' The circuit number for this frame relay access channel ')
# SVC DLCI pool [low, high]; defaults 16..991 fit a 2-octet frame header.
wfFrSwSigSvcDlciLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigSvcDlciLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigSvcDlciLow.setDescription(' Lowest DLCI to be used for SVC, the default value is for 2 octet frame header ')
wfFrSwSigSvcDlciHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(991)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigSvcDlciHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigSvcDlciHigh.setDescription(' Highest DLCI to be used for SVC, the default value is for 2 octet frame header. ')
# DLCI allocation direction within the pool (default: from high down).
wfFrSwSigDlciAssign = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("increment", 1), ("decrement", 2))).clone('decrement')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDlciAssign.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDlciAssign.setDescription(" Determines if DLCI's are assigned starting at wfFrSwSigSvcDlciHigh and working towards wfFrSwSigSvcDlciLow or vice versa. ")
# SVC capacity on this logical line: configured maximum and current count.
wfFrSwSigMaxNumOfSvcs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxNumOfSvcs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxNumOfSvcs.setDescription(' Indicates the maximum number of simultaneous switched virtual circuits allowed on the logical line. ')
wfFrSwSigNumOfSvcsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigNumOfSvcsInUse.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigNumOfSvcsInUse.setDescription(' Indicates the number of switched virtual circuits in use on the logical line. ')
# Defaults supplied by the network for Link Layer Core Parameters IE
# fields absent from the user's SETUP message.
wfFrSwSigDefaultThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultThroughput.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing throughput fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultMinAcceptThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultMinAcceptThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultMinAcceptThroughput.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing minimum acceptable throughput fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigDefaultBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultBc.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing Bc fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultBe.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing Be fields when they are not included in the setup message by the user. ')
# Per-SVC and aggregate throughput admission limits; calls exceeding the
# relevant cap are rejected.
wfFrSwSigMaxInThroughputPerSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxInThroughputPerSvc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxInThroughputPerSvc.setDescription(' This is the maximum incoming throughput that any single SVC may negotiate for a call. Calls requesting in excess of this attribute are rejected. ')
wfFrSwSigMaxOutThroughputPerSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxOutThroughputPerSvc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxOutThroughputPerSvc.setDescription(' This is the maximum outgoing throughput that any single SVC may negotiate for a call. Calls requesting in excess of this attribute are rejected. ')
wfFrSwSigTotalInNegotiableThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigTotalInNegotiableThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalInNegotiableThroughput.setDescription(' This is the total maximum incoming throughput that is available for all frame mode calls on the port. If the sum of the incoming throughput requested by a call and wfFrSwSigTotalInCurrentThroughput is in excess of this value, the call is rejected. ')
wfFrSwSigTotalInCurrentThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigTotalInCurrentThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalInCurrentThroughput.setDescription(" This is the total incoming throughput that has been negotiated for use by all SVC's on the port. ")
wfFrSwSigTotalOutNegotiableThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigTotalOutNegotiableThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalOutNegotiableThroughput.setDescription(' This is the total maximum outgoing throughput that is available for all frame mode calls on the port. If the sum of the outgoing throughput requested by a call and wfFrSwSigTotalOutCurrentThroughput is in excess of this value, the call is rejected. ')
# Aggregate negotiated OUTGOING throughput across all SVCs on the port.
# Fixed copy-paste defect in the DESCRIPTION: it read "total incoming
# throughput", duplicating the text of wfFrSwSigTotalInCurrentThroughput,
# although this object is the outgoing counterpart (OID ...12.1.17).
wfFrSwSigTotalOutCurrentThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigTotalOutCurrentThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalOutCurrentThroughput.setDescription(" This is the total outgoing throughput that has been negotiated for use by all SVC's on the port. ")
# Allows the network to clear a call when cross-net polling is enabled
# and its error threshold is exceeded (default: enable).
wfFrSwSigXNetClearingDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigXNetClearingDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigXNetClearingDisable.setDescription(' If cross-net polling (wfFrSwDlcmiCrossNetEnable) is enabled on this interface, and the error threshold (wfFrSwDlcmiCrossNetErrorThreshold) is exceeded, the network can clear the call. ')
# When enabled, reject calls whose SETUP lacks a Calling Party IE or
# whose Calling Party IE fails address authentication against the
# ingress line's configured address(es). (Fixed "againt" typo in the
# DESCRIPTION text.)
wfFrSwSigCallingPartyIEMandatory = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigCallingPartyIEMandatory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigCallingPartyIEMandatory.setDescription(' Reject the call if the Calling Party IE is absent in the setup message or if the provided Calling Party IE fails address authentication tests against the configured address(es) on the ingress logical line.')
# --- Q.933/Q.931 network-side protocol timers (seconds unless noted) ---
# T301: await outgoing CONNECT after incoming SETUP; clears the call on
# expiry (value in minutes, default 3).
wfFrSwSigT301 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT301.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT301.setDescription(' Timer number: T301 default time-out: 3 min state of call: call initiated cause for start: incoming setup normal stop: outgoing connect at the first expiry: clear call at the second expiry: timer not restarted ')
# T303: SETUP retransmission timer (default 4 s; retransmit once, then clear).
wfFrSwSigT303 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT303.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT303.setDescription(' Timer number: T303 default time-out: 4 s state of call: call present cause for start: outgoing setup normal stop: incoming connect/call-proceeding/ release-complete at the first expiry: retransmit setup, restart T303 at the second expiry: clear call ')
# T305: disconnect supervision timer (default 30 s).
wfFrSwSigT305 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 90)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT305.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT305.setDescription(' Timer number: T305 default time-out: 30 s state of call: disconnect ind cause for start: outgoing disconnect normal stop: incoming release/disconnect at the first expiry: outgoing release at the second expiry: timer not restarted ')
# T308: RELEASE retransmission timer (default 4 s).
wfFrSwSigT308 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT308.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT308.setDescription(' Timer number: T308 default time-out: 4 s state of call: release req cause for start: outgoing release normal stop: incoming release/release-complete at the first expiry: retransmit release, restart T308 at the second expiry: place access channel in maintenance ')
# T310: await CONNECT after incoming CALL PROCEEDING (default 10 s).
wfFrSwSigT310 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT310.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT310.setDescription(' Timer number: T310 default time-out: 10 s state of call: incoming call proceeding cause for start: incoming call proceeding normal stop: incoming connect/disconnect at the first expiry: clear call at the second expiry: timer not restarted ')
# T322: STATUS ENQUIRY retransmission timer (default 4 s).
wfFrSwSigT322 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT322.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT322.setDescription(' Timer number: T322 default time-out: 4 s state of call: any call state cause for start: outgoing status enquiry normal stop: incoming status/disconnect/ release/release-complete at the first expiry: retransmit status-enq, restart T322 at the second expiry: resend status enq and restart T322 ')
# --- Read-only Counter32 columns: signalling message counts per channel ---
# Incoming (received) message counters, one per Q.933 message type.
wfFrSwSigInSetupPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInSetupPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInSetupPkts.setDescription(' number of incoming setup packets ')
wfFrSwSigInCallProceedingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInCallProceedingPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInCallProceedingPkts.setDescription(' number of incoming call proceeding packets ')
wfFrSwSigInConnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInConnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInConnectPkts.setDescription(' number of incoming connect packets ')
wfFrSwSigInDisconnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInDisconnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInDisconnectPkts.setDescription(' number of incoming disconnect packets ')
wfFrSwSigInReleasePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInReleasePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInReleasePkts.setDescription(' number of incoming release packets ')
wfFrSwSigInReleaseCompletePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInReleaseCompletePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInReleaseCompletePkts.setDescription(' number of incoming release complete packets ')
wfFrSwSigInStatusEnquiryPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInStatusEnquiryPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInStatusEnquiryPkts.setDescription(' number of incoming status enquiry packets ')
wfFrSwSigInStatusPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInStatusPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInStatusPkts.setDescription(' number of incoming status packets ')
wfFrSwSigInUnknownPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInUnknownPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInUnknownPkts.setDescription(' number of incoming unknown packets ')
# Outgoing (transmitted) message counters, mirroring the incoming set.
wfFrSwSigOutSetupPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutSetupPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutSetupPkts.setDescription(' number of outgoing setup packets ')
wfFrSwSigOutCallProceedingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutCallProceedingPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutCallProceedingPkts.setDescription(' number of outgoing call proceeding packets ')
wfFrSwSigOutConnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutConnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutConnectPkts.setDescription(' number of outgoing connect packets ')
wfFrSwSigOutDisconnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutDisconnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutDisconnectPkts.setDescription(' number of outgoing disconnect packets ')
wfFrSwSigOutReleasePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutReleasePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutReleasePkts.setDescription(' number of outgoing release packets ')
# Counter of outgoing RELEASE COMPLETE messages. Fixed the DESCRIPTION,
# which read "release packest" (typo, and it omitted "complete",
# duplicating wfFrSwSigOutReleasePkts' text).
wfFrSwSigOutReleaseCompletePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutReleaseCompletePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutReleaseCompletePkts.setDescription(' number of outgoing release complete packets ')
# Remaining outgoing-message counters and error/event counters.
wfFrSwSigOutStatusEnquiryPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutStatusEnquiryPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutStatusEnquiryPkts.setDescription(' number of outgoing status enquiry packets ')
wfFrSwSigOutStatusPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutStatusPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutStatusPkts.setDescription(' number of outgoing status packets ')
wfFrSwSigRejectedConnRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigRejectedConnRequests.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigRejectedConnRequests.setDescription(' number of connections rejected ')
wfFrSwSigNwrkAbortedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigNwrkAbortedConnections.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigNwrkAbortedConnections.setDescription(' number of connections aborted by network ')
wfFrSwSigL2Resets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigL2Resets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigL2Resets.setDescription(' number of L2 resets ')
# Optional-IE policy knobs: when disabled, a SETUP carrying the IE is rejected.
wfFrSwSigDlciIEAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 46), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDlciIEAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDlciIEAllowed.setDescription(' Reject the call if the Dlci IE is present in the setup message and wfFrSwSigDlciIEAllowed is set to disabled.')
wfFrSwSigX213PriorityIEAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 47), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigX213PriorityIEAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigX213PriorityIEAllowed.setDescription(' Reject the call if the X213 Priority IE is present in setup message and wfFrSwSigX213PriorityIEAllowed is set to disabled.')
# Upper bound on negotiable Be (excess burst) per SVC connection.
wfFrSwSigMaximumBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 48), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaximumBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaximumBe.setDescription('This value is the maximum allowed Be for a SVC connection')
# --- wfFrSwGlobalE164AddrTable: E.164 range -> internal IP translation ---
# Directory Services lookup table; ranges are [low, high] and must never
# overlap. Indexed by the (low, high) address pair.
wfFrSwGlobalE164AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13), )
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrTable.setDescription(' wfFrSwGlobalE164AddrTable is used by Directory Services to translate a range of E.164 addresses into an internal IP network address. E.164 ranges must not ever overlap. ')
wfFrSwGlobalE164AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalE164AddrLow"), (0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalE164AddrHigh"))
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrEntry.setDescription(' An entry in the Frame Relay Global E.164 Address Table. ')
# Row create/delete toggle.
wfFrSwGlobalE164AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Index columns: 8-byte fixed-length, zero-padded range bounds.
wfFrSwGlobalE164AddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrLow.setDescription(' Instance identifier; the low end of the E.164 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalE164AddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrHigh.setDescription(' Instance identifier; the high end of the E.164 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Payload column: the internal IP address this E.164 range maps to.
wfFrSwGlobalE164AddrIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrIPAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrIPAddr.setDescription(' This is the internal IP network address associated with this range of E.164 addresses. ')
# --- wfFrSwGlobalX121AddrTable (OID ...9.6.14) ---
# X.121 counterpart of the global E.164 table above: maps a non-overlapping
# X.121 address range (row index low/high) to an internal IP network address.
wfFrSwGlobalX121AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14), )
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrTable.setDescription(' wfFrSwGlobalX121AddrTable is used by Directory Services to translate a range of X.121 addresses into an internal IP network address. X.121 ranges must not ever overlap. ')
# Row indexed by (AddrLow, AddrHigh).
wfFrSwGlobalX121AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalX121AddrLow"), (0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalX121AddrHigh"))
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrEntry.setDescription(' An entry in the Frame Relay Global X.121 Address Table. ')
# Column 1: created(1)/deleted(2) row-lifecycle flag (read-write).
wfFrSwGlobalX121AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Columns 2-3: range bounds, 8-byte fixed-length octet strings per the
# descriptions (right-justified, zero-padded); read-only index components.
wfFrSwGlobalX121AddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrLow.setDescription(' Instance identifier; the low end of the X.121 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalX121AddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrHigh.setDescription(' Instance identifier; the high end of the X.121 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Column 4: the internal IP address associated with this X.121 range.
wfFrSwGlobalX121AddrIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrIPAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrIPAddr.setDescription(' This is the internal IP network address associated with this range of X.121 addresses. ')
# --- wfFrSwLocalE164AddrTable (OID ...9.6.15) ---
# E.164 addresses local to this BNX plus Closed User Group (CUG) data.
# Row index is (circuit number, E.164 address).
wfFrSwLocalE164AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15), )
if mibBuilder.loadTexts: wfFrSwLocalE164AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrTable.setDescription(' wfFrSwLocalE164AddrTable contains E.164 addresses on the local BNX and CUG (Closed User Group) related information. ')
wfFrSwLocalE164AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwLocalE164AddrCct"), (0, "Wellfleet-FRSW-MIB", "wfFrSwLocalE164Address"))
if mibBuilder.loadTexts: wfFrSwLocalE164AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrEntry.setDescription(' An entry in the Frame Relay Local E.164 Address Table. ')
# Column 1: created(1)/deleted(2) row-lifecycle flag (read-write).
wfFrSwLocalE164AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Columns 2-3: read-only index components (internal CCT number, address).
wfFrSwLocalE164AddrCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCct.setDescription(' Instance identifier; internal CCT number associated with this E.164 address. ')
wfFrSwLocalE164Address = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalE164Address.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164Address.setDescription(' Instance identifier; an E.164 address. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Column 4: packed CUG membership blob — per the description, zero or more
# (COI group number, COI length, COI bitmask) structures; empty means the
# address may communicate with all users.
wfFrSwLocalE164AddrCUG = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCUG.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCUG.setDescription(' CUG (Closed User Group) information. The structure of the CUG information consists zero or more groups (number of groups can be derived from the OCTET STRING data type of this MIB attribute) of COI structure information. Each COI structure consists of a COI group number (4-byte integer) and a COI list. Each COI list consists of a length field (4-byte integer) which specifies the number of bytes of COI bit-encoded information belonging to this group and the COI information structure. COI information structure is a bit mask field where each bit from left to right represents whether this E.164 address belongs to a particular COI number within this COI group number. Please note that COI group numbers can not be repeated and that the COI group numbers must be in increasing order in the CUG configuration MIB wfFrSwLocalE164AddrCUG defaults to zero COI groups which means no CUG related information and hence this local wfFrSwLocalE164Address is allowed to communicate with all users. ')
# Column 5: local(1)/nonlocal(2) flag used for SPVCs; defaults to local.
wfFrSwLocalE164AddrLocalFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("nonlocal", 2))).clone('local')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrLocalFlag.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrLocalFlag.setDescription(' Local/Non-Local Identifier Flag. Used for SPVCs.')
# --- wfFrSwLocalX121AddrTable (OID ...9.6.16) ---
# X.121 counterpart of the local E.164 table above: local BNX X.121
# addresses plus CUG data; row index is (circuit number, X.121 address).
wfFrSwLocalX121AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16), )
if mibBuilder.loadTexts: wfFrSwLocalX121AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrTable.setDescription(' wfFrSwLocalX121AddrTable contains X.121 addresses on the local BNX and CUG (Closed User Group) related information. ')
wfFrSwLocalX121AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwLocalX121AddrCct"), (0, "Wellfleet-FRSW-MIB", "wfFrSwLocalX121Address"))
if mibBuilder.loadTexts: wfFrSwLocalX121AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrEntry.setDescription(' An entry in the Frame Relay Local X.121 Address Table. ')
# Column 1: created(1)/deleted(2) row-lifecycle flag (read-write).
wfFrSwLocalX121AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Columns 2-3: read-only index components (internal CCT number, address).
wfFrSwLocalX121AddrCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCct.setDescription(' Instance identifier; internal CCT number associated with this X.121 address. ')
wfFrSwLocalX121Address = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalX121Address.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121Address.setDescription(' Instance identifier; a X.121 address. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Column 4: packed CUG membership blob — same (group number, length,
# bitmask) layout as wfFrSwLocalE164AddrCUG; empty means unrestricted.
wfFrSwLocalX121AddrCUG = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCUG.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCUG.setDescription(' CUG (Closed User Group) information. The structure of the CUG information consists zero or more groups (number of groups can be derived from the OCTET STRING data type of this MIB attribute) of COI structure information. Each COI structure consists of a COI group number (4-byte integer) and a COI list. Each COI list consists of a length field (4-byte integer) which specifies the number of bytes of COI bit-encoded information belonging to this group and the COI information structure. COI information structure is a bit mask field where each bit from left to right represents whether this X.121 address belongs to a particular COI number within this COI group number. wfFrSwLocalX121AddrCUG defaults to zero COI groups which means no CUG related information and hence this local wfFrSwLocalX121Address is allowed to communicate with all users. ')
# Column 5: local(1)/nonlocal(2) flag used for SPVCs; defaults to local.
wfFrSwLocalX121AddrLocalFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("nonlocal", 2))).clone('local')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrLocalFlag.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrLocalFlag.setDescription(' Local/Non-Local Identifier Flag. Used for SPVCs.')
# --- wfFrSwBase group (OID ...9.6.17) ---
# Scalar base group for the switch: lifecycle flag, circuit-less IP
# address, and a per-slot shutdown bitmask.
wfFrSwBase = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17))
# created(1)/deleted(2) flag for the whole base group.
wfFrSwBaseDelete = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseDelete.setDescription(' Indication to delete/create this base group ')
# The BNX's circuit-less IP address.
wfFrSwBaseIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseIpAddr.setDescription(" Indicates this BNX's (circuit-less) IP address ")
# Slot shutdown bitmask (slots 1-14, MSBit = slot 1).  NOTE(review): typed
# Counter32 in the generated code even though it is used as a bitmask.
wfFrSwBaseShutDown = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 3), Counter32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseShutDown.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseShutDown.setDescription('Bit mask for slots to shutdown, slots 1-14. The MSBit represents slot 1, the next most significant bit represents slot 2, and so forth.')
# --- wfFrSwCngcMonTable (OID ...9.6.18) ---
# Congestion-control monitor: per-circuit gauges giving the percentage of
# time spent at congestion levels 1-4 for each traffic priority 0-3
# (columns 3-18 follow a regular P{priority}Level{level} grid).
wfFrSwCngcMonTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18), )
if mibBuilder.loadTexts: wfFrSwCngcMonTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonTable.setDescription('This table is used by FRSW Congestion Control application. The table is used to Monitor the congestion level of a particular circuit.')
# Row indexed by the monitored circuit number.
wfFrSwCngcMonEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwCngcMonCct"))
if mibBuilder.loadTexts: wfFrSwCngcMonEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonEntry.setDescription('Instance Id for this table.')
# Column 1: write here to reset the monitor counters.
wfFrSwCngcMonReset = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCngcMonReset.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonReset.setDescription('Indication to reset Cngc Monitor Counters.')
# Column 2: read-only index component — the circuit being monitored.
wfFrSwCngcMonCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonCct.setDescription('Circuit to be monitored. ')
# Priority 0 traffic: % time at congestion levels 1-4 (columns 3-6).
wfFrSwCngcMonP0Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 0 Traffic.')
# Priority 1 traffic: % time at congestion levels 1-4 (columns 7-10).
wfFrSwCngcMonP1Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 1 Traffic.')
# Priority 2 traffic: % time at congestion levels 1-4 (columns 11-14).
wfFrSwCngcMonP2Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 2 Traffic.')
# Priority 3 traffic: % time at congestion levels 1-4 (columns 15-18).
wfFrSwCngcMonP3Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 3 Traffic.')
# --- wfFrSwVirtualIntfTable (OID ...9.6.19) ---
# Table for creating 'virtual' FRSW access lines; rows are indexed by
# (slot number, circuit number).
wfFrSwVirtualIntfTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19), )
if mibBuilder.loadTexts: wfFrSwVirtualIntfTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfTable.setDescription("The table is used to create 'virtual' FRSW access lines.")
wfFrSwVirtualIntfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwVirtualIntfSlot"), (0, "Wellfleet-FRSW-MIB", "wfFrSwVirtualIntfCct"))
if mibBuilder.loadTexts: wfFrSwVirtualIntfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfEntry.setDescription('Instance Id for this table.')
# Column 1: created(1)/deleted(2) row-lifecycle flag (read-write).
wfFrSwVirtualIntfDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVirtualIntfDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfDelete.setDescription('Indication to delete this virtual interface.')
# Columns 2-3: read-only index components (slot, circuit).
wfFrSwVirtualIntfSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfSlot.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfSlot.setDescription('Instance identifier; the slot number of this interface.')
wfFrSwVirtualIntfCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfCct.setDescription('Instance identifier; the circuit number of this interface.')
# Column 4: line number assigned to the virtual interface.
wfFrSwVirtualIntfLineNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfLineNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfLineNum.setDescription('Line number for this virtual interface.')
# --- wfFrSwExtFileSysTable (OID ...9.6.20) ---
# Per-slot extension of the file system into DRAM: requested size,
# actually-allocated size, and an operational state column.
wfFrSwExtFileSysTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20), )
if mibBuilder.loadTexts: wfFrSwExtFileSysTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysTable.setDescription('This table is used by FRSW to extend the file system to DRAM device.')
# Row indexed by slot number.
wfFrSwExtFileSysEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwExtFileSysSlot"))
if mibBuilder.loadTexts: wfFrSwExtFileSysEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysEntry.setDescription('Instance Id for this table.')
# Column 1: created(1)/deleted(2) row-lifecycle flag (read-write).
wfFrSwExtFileSysDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwExtFileSysDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysDelete.setDescription(' Indication to delete/create this entry. ')
# Column 2: read-only index — slot number, 1..14 per the description.
wfFrSwExtFileSysSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysSlot.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysSlot.setDescription('A unique value for each slot. Its value ranges between 1 and 14.')
# Column 3: requested size in bytes (0 disables; suggested multiples of 128K).
wfFrSwExtFileSysSize = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwExtFileSysSize.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysSize.setDescription('The memory size of the extended file system in byte unit. The value zero also means that extended file system is disabled. Non-zero value means enabled. Its suggested that the size is in multiple of 128k bytes. Some of the well-known memory sizes and their correspond decimal values are as followed: Mem size Decimal Value ^^^^^^^^ ^^^^^^^^^^^^^ 128K 131072 256K 262144 512K 524288 1M 1048576 2M 2097152 4M 4194304 8M 8388608 ')
# Column 4: the size the system actually allocated (read-only).
wfFrSwExtFileSysActualSize = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysActualSize.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysActualSize.setDescription('The actual memory size the system allocated.')
# Column 5: operational state — up(1)/fault(2)/init(3)/notpresent(4);
# defaults to notpresent (which mirrors a configured size of zero).
wfFrSwExtFileSysState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("fault", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysState.setDescription('The status of the extended file system. State up indicates that the requested memory size for the extended file system has been allocated successfully and the extended file system is in operational state. State fault indicates that the requested memory size for the extended file system has NOT been allocated successfully and the extended file system is NOT in operational state. One reason for entering the fault state is insufficient available memory. State init indicates that the system is in the initialization cycle. The extended file system is not operational. State notpresent reflects the size of zero.')
mibBuilder.exportSymbols("Wellfleet-FRSW-MIB", wfFrSwCctLocalSetFECNFrames=wfFrSwCctLocalSetFECNFrames, wfFrSwDlcmiEscapeMode=wfFrSwDlcmiEscapeMode, wfFrSwVcRedirectAction=wfFrSwVcRedirectAction, wfFrSwSigOutDisconnectPkts=wfFrSwSigOutDisconnectPkts, wfFrSwCctLocalSetDEFrames=wfFrSwCctLocalSetDEFrames, wfFrSwSigOutStatusPkts=wfFrSwSigOutStatusPkts, wfFrSwSigTotalInCurrentThroughput=wfFrSwSigTotalInCurrentThroughput, wfFrSwIsdnScrnTable=wfFrSwIsdnScrnTable, wfFrSwVcAtmIwfLossPriorityPolicy=wfFrSwVcAtmIwfLossPriorityPolicy, wfFrSwSigT303=wfFrSwSigT303, wfFrSwUsageCurVolumeBackup=wfFrSwUsageCurVolumeBackup, wfFrSwVcInactiveVcDropFrames=wfFrSwVcInactiveVcDropFrames, wfFrSwL3NetAddress=wfFrSwL3NetAddress, wfFrSwSigInConnectPkts=wfFrSwSigInConnectPkts, wfFrSwLocalE164AddrDelete=wfFrSwLocalE164AddrDelete, wfFrSwUsageUpdateData=wfFrSwUsageUpdateData, wfFrSwExtFileSysDelete=wfFrSwExtFileSysDelete, wfFrSwDlcmiActiveSeqCount=wfFrSwDlcmiActiveSeqCount, wfFrSwUsageCircuitNumber=wfFrSwUsageCircuitNumber, wfFrSwUsageStartTimeStampHigh=wfFrSwUsageStartTimeStampHigh, wfFrSwVcRecvDeOctets=wfFrSwVcRecvDeOctets, wfFrSwIsdnUniDelete=wfFrSwIsdnUniDelete, wfFrSwCngcMonP0Level1Percent=wfFrSwCngcMonP0Level1Percent, wfFrSwCctLocalRecvDEOctets=wfFrSwCctLocalRecvDEOctets, wfFrSwUsageVolumeBackup=wfFrSwUsageVolumeBackup, wfFrSwSigTotalInNegotiableThroughput=wfFrSwSigTotalInNegotiableThroughput, wfFrSwSigOutReleaseCompletePkts=wfFrSwSigOutReleaseCompletePkts, wfFrSwUsageSentDEOctetsHigh=wfFrSwUsageSentDEOctetsHigh, wfFrSwCctOutThroughput=wfFrSwCctOutThroughput, wfFrSwDlcmiDteLastReceived=wfFrSwDlcmiDteLastReceived, wfFrSwCctRemoteSentDEOctets=wfFrSwCctRemoteSentDEOctets, wfFrSwSvcUsageVersionId=wfFrSwSvcUsageVersionId, wfFrSwCctRemoteRecvNonDEOctets=wfFrSwCctRemoteRecvNonDEOctets, wfFrSwCctRemoteRecvDEOctets=wfFrSwCctRemoteRecvDEOctets, wfFrSwSvcUsageUpdateInterval=wfFrSwSvcUsageUpdateInterval, wfFrSwCctRemoteSentNonDEFrames=wfFrSwCctRemoteSentNonDEFrames, 
wfFrSwVcCallReqRetryTimer=wfFrSwVcCallReqRetryTimer, wfFrSwMcastDlci=wfFrSwMcastDlci, wfFrSwCctLocalBecnState=wfFrSwCctLocalBecnState, wfFrSwVcRecvBecnOctets=wfFrSwVcRecvBecnOctets, wfFrSwGlobalX121AddrDelete=wfFrSwGlobalX121AddrDelete, wfFrSwUsageVolume=wfFrSwUsageVolume, wfFrSwDlcmiCrossNetListenEnable=wfFrSwDlcmiCrossNetListenEnable, wfFrSwSvcUsageNumEntries=wfFrSwSvcUsageNumEntries, wfFrSwVcInBc=wfFrSwVcInBc, wfFrSwDlcmiDteErrorThreshold=wfFrSwDlcmiDteErrorThreshold, wfFrSwUsageState=wfFrSwUsageState, wfFrSwIsdnScrnNum=wfFrSwIsdnScrnNum, wfFrSwVcOutThroughput=wfFrSwVcOutThroughput, wfFrSwUsageStartTimeStampLow=wfFrSwUsageStartTimeStampLow, wfFrSwUsageCurDebug=wfFrSwUsageCurDebug, wfFrSwMcastIndividualDlci=wfFrSwMcastIndividualDlci, wfFrSwVcXNetReceived=wfFrSwVcXNetReceived, wfFrSwSvcUsageFlushData=wfFrSwSvcUsageFlushData, wfFrSwVcSetBecnOctets=wfFrSwVcSetBecnOctets, wfFrSwIsdnUniTable=wfFrSwIsdnUniTable, wfFrSwDlcmiCircuit=wfFrSwDlcmiCircuit, wfFrSwIsdnAssocNum=wfFrSwIsdnAssocNum, wfFrSwVcEscapeEnable=wfFrSwVcEscapeEnable, wfFrSwDlcmiDeletedVCs=wfFrSwDlcmiDeletedVCs, wfFrSwVcOutBe=wfFrSwVcOutBe, wfFrSwCctReceivedStatus=wfFrSwCctReceivedStatus, wfFrSwCctLocalRecvBECNFrames=wfFrSwCctLocalRecvBECNFrames, wfFrSwDlcmiAsyncUpdateEnable=wfFrSwDlcmiAsyncUpdateEnable, wfFrSwIsdnBaseSlotNum=wfFrSwIsdnBaseSlotNum, wfFrSwUsageFilePrefix=wfFrSwUsageFilePrefix, wfFrSwLocalE164AddrTable=wfFrSwLocalE164AddrTable, wfFrSwGlobalX121AddrTable=wfFrSwGlobalX121AddrTable, wfFrSwDlcmiCrossNetErrorThreshold=wfFrSwDlcmiCrossNetErrorThreshold, wfFrSwCctCreationTime=wfFrSwCctCreationTime, wfFrSwCctRemoteBecnState=wfFrSwCctRemoteBecnState, wfFrSwCctOutBe=wfFrSwCctOutBe, wfFrSwGlobalE164AddrLow=wfFrSwGlobalE164AddrLow, wfFrSwLocalX121AddrTable=wfFrSwLocalX121AddrTable, wfFrSwExtFileSysState=wfFrSwExtFileSysState, wfFrSwCctRemoteSetFECNFrames=wfFrSwCctRemoteSetFECNFrames, wfFrSwIsdnUniEntry=wfFrSwIsdnUniEntry, wfFrSwCctRemoteRecvFECNOctets=wfFrSwCctRemoteRecvFECNOctets, 
wfFrSwExtFileSysActualSize=wfFrSwExtFileSysActualSize, wfFrSwDlcmiDteFullEnquiryInterval=wfFrSwDlcmiDteFullEnquiryInterval, wfFrSwGlobalX121AddrLow=wfFrSwGlobalX121AddrLow, wfFrSwCctOutBc=wfFrSwCctOutBc, wfFrSwDlcmiDteReceived=wfFrSwDlcmiDteReceived, wfFrSwDlcmiSequenceCount=wfFrSwDlcmiSequenceCount, wfFrSwSigDlciIEAllowed=wfFrSwSigDlciIEAllowed, wfFrSwCctTable=wfFrSwCctTable, wfFrSwDlcmiVCsInUse=wfFrSwDlcmiVCsInUse, wfFrSwVcInactiveVcDropOctets=wfFrSwVcInactiveVcDropOctets, wfFrSwUsageEndTimeStampLow=wfFrSwUsageEndTimeStampLow, wfFrSwVcEntry=wfFrSwVcEntry, wfFrSwUsageCurCleanupInterval=wfFrSwUsageCurCleanupInterval, wfFrSwUsageEnable=wfFrSwUsageEnable, wfFrSwSvcUsageCurVolume=wfFrSwSvcUsageCurVolume, wfFrSwDlcmiTable=wfFrSwDlcmiTable, wfFrSwCctRemoteSentDEFrames=wfFrSwCctRemoteSentDEFrames, wfFrSwCctInThroughput=wfFrSwCctInThroughput, wfFrSwVcState=wfFrSwVcState, wfFrSwIsdnAssocIndex=wfFrSwIsdnAssocIndex, wfFrSwUsageSwitchName=wfFrSwUsageSwitchName, wfFrSwIsdnAssocEntry=wfFrSwIsdnAssocEntry, wfFrSwDlcmiState=wfFrSwDlcmiState, wfFrSwUsageTimerInterval=wfFrSwUsageTimerInterval, wfFrSwVcRecvNonDeFrames=wfFrSwVcRecvNonDeFrames, wfFrSwVcRecvFecnOctets=wfFrSwVcRecvFecnOctets, wfFrSwDlcmiDteStatus=wfFrSwDlcmiDteStatus, wfFrSwSvcUsageCurStoreInterval=wfFrSwSvcUsageCurStoreInterval, wfFrSwLocalX121AddrDelete=wfFrSwLocalX121AddrDelete, wfFrSwUsageStoreTimeStamp=wfFrSwUsageStoreTimeStamp, wfFrSwDlcmiManagementType=wfFrSwDlcmiManagementType, wfFrSwSigInStatusPkts=wfFrSwSigInStatusPkts, wfFrSwUsageLastNonDEFramesLow=wfFrSwUsageLastNonDEFramesLow, wfFrSwVcReceivedStatus=wfFrSwVcReceivedStatus, wfFrSwDlcmiControlByteDisable=wfFrSwDlcmiControlByteDisable, wfFrSwVcXNetSent=wfFrSwVcXNetSent, wfFrSwCngcMonP1Level1Percent=wfFrSwCngcMonP1Level1Percent, wfFrSwCngcMonP2Level2Percent=wfFrSwCngcMonP2Level2Percent, wfFrSwUsageUpdateTimeStamp=wfFrSwUsageUpdateTimeStamp, wfFrSwSigMaxNumOfSvcs=wfFrSwSigMaxNumOfSvcs, wfFrSwDlcmiAddressLen=wfFrSwDlcmiAddressLen, 
wfFrSwSigNwrkAbortedConnections=wfFrSwSigNwrkAbortedConnections, wfFrSwVcReportedStatus=wfFrSwVcReportedStatus, wfFrSwVirtualIntfLineNum=wfFrSwVirtualIntfLineNum, wfFrSwCngcMonTable=wfFrSwCngcMonTable, wfFrSwCctRemoteRecvBECNOctets=wfFrSwCctRemoteRecvBECNOctets, wfFrSwUsageSwitchId=wfFrSwUsageSwitchId, wfFrSwVcBecnState=wfFrSwVcBecnState, wfFrSwIsdnUniNum=wfFrSwIsdnUniNum, wfFrSwSvcUsageState=wfFrSwSvcUsageState, wfFrSwVcTxDeFrames=wfFrSwVcTxDeFrames, wfFrSwCctLocalSentDEOctets=wfFrSwCctLocalSentDEOctets, wfFrSwCctRemoteRecvFECNFrames=wfFrSwCctRemoteRecvFECNFrames, wfFrSwVcBackupCalledDlci=wfFrSwVcBackupCalledDlci, wfFrSwVcCallReqCalledDlci=wfFrSwVcCallReqCalledDlci, wfFrSwCctLocalRecvBECNOctets=wfFrSwCctLocalRecvBECNOctets, wfFrSwIsdnUniState=wfFrSwIsdnUniState, wfFrSwBcMeasurementInterval=wfFrSwBcMeasurementInterval, wfFrSwUsageStoreData=wfFrSwUsageStoreData, wfFrSwCctLocalRecvFECNFrames=wfFrSwCctLocalRecvFECNFrames, wfFrSwCctRemoteRecvBECNFrames=wfFrSwCctRemoteRecvBECNFrames, wfFrSwPvcUsageFileLayout=wfFrSwPvcUsageFileLayout, wfFrSwGlobalX121AddrHigh=wfFrSwGlobalX121AddrHigh, wfFrSwCngcMonP2Level4Percent=wfFrSwCngcMonP2Level4Percent, wfFrSwDlcmiBidirect=wfFrSwDlcmiBidirect, wfFrSwVcSetDeOctets=wfFrSwVcSetDeOctets, wfFrSwUsageSentDEOctetsLow=wfFrSwUsageSentDEOctetsLow, wfFrSwDlcmiUnknownIEErrors=wfFrSwDlcmiUnknownIEErrors, wfFrSwSigSvcDlciLow=wfFrSwSigSvcDlciLow, wfFrSwDlcmiSequenceErrors=wfFrSwDlcmiSequenceErrors, wfFrSwIsdnAssocSlotNum=wfFrSwIsdnAssocSlotNum, wfFrSwExtFileSysTable=wfFrSwExtFileSysTable, wfFrSwDlcmiControlByteErrors=wfFrSwDlcmiControlByteErrors, wfFrSwVirtualIntfSlot=wfFrSwVirtualIntfSlot, wfFrSwDlcmiStatus=wfFrSwDlcmiStatus, wfFrSwVcBackupCrossNetErrors=wfFrSwVcBackupCrossNetErrors, wfFrSwVirtualIntfEntry=wfFrSwVirtualIntfEntry, wfFrSwDlcmiPolls=wfFrSwDlcmiPolls, wfFrSwUsageDirectory=wfFrSwUsageDirectory, wfFrSwSvcUsageStoreTimeStamp=wfFrSwSvcUsageStoreTimeStamp, wfFrSwErrType=wfFrSwErrType, 
wfFrSwUsageLastNonDEOctetsHigh=wfFrSwUsageLastNonDEOctetsHigh, wfFrSwUsageCurFlushInterval=wfFrSwUsageCurFlushInterval, wfFrSwLocalX121Address=wfFrSwLocalX121Address, wfFrSwCctLocalSentNonDEFrames=wfFrSwCctLocalSentNonDEFrames, wfFrSwSigInDisconnectPkts=wfFrSwSigInDisconnectPkts, wfFrSwVcDropNonDeFrames=wfFrSwVcDropNonDeFrames, wfFrSwIsdnBaseDelete=wfFrSwIsdnBaseDelete, wfFrSwSigOutConnectPkts=wfFrSwSigOutConnectPkts, wfFrSwCngcMonP1Level2Percent=wfFrSwCngcMonP1Level2Percent, wfFrSwUsageUpdateInterval=wfFrSwUsageUpdateInterval, wfFrSwDlcmiCrossNetAsyncUpdateEnable=wfFrSwDlcmiCrossNetAsyncUpdateEnable, wfFrSwVcSetDeFrames=wfFrSwVcSetDeFrames, wfFrSwGlobalE164AddrDelete=wfFrSwGlobalE164AddrDelete, wfFrSwSigNumOfSvcsInUse=wfFrSwSigNumOfSvcsInUse, wfFrSwSigX213PriorityIEAllowed=wfFrSwSigX213PriorityIEAllowed, wfFrSwSvcUsageUpdateData=wfFrSwSvcUsageUpdateData, wfFrSwGlobalX121AddrIPAddr=wfFrSwGlobalX121AddrIPAddr, wfFrSwUsageSentDEFramesHigh=wfFrSwUsageSentDEFramesHigh, wfFrSwDlcmiBcMeasurementEnable=wfFrSwDlcmiBcMeasurementEnable, wfFrSwVcRecvDeFrames=wfFrSwVcRecvDeFrames, wfFrSwVcInBeOctets=wfFrSwVcInBeOctets, wfFrSwSigRejectedConnRequests=wfFrSwSigRejectedConnRequests, wfFrSwSvcUsageFilePrefix=wfFrSwSvcUsageFilePrefix, wfFrSwMcastIpAddr=wfFrSwMcastIpAddr, wfFrSwCngcMonP0Level2Percent=wfFrSwCngcMonP0Level2Percent, wfFrSwSvcUsageStoreInterval=wfFrSwSvcUsageStoreInterval, wfFrSwDlcmiCrossNetEnable=wfFrSwDlcmiCrossNetEnable, wfFrSwVcCalledDlci=wfFrSwVcCalledDlci, wfFrSwSigMaxInThroughputPerSvc=wfFrSwSigMaxInThroughputPerSvc, wfFrSwCctInBcOctets=wfFrSwCctInBcOctets, wfFrSwSigOutReleasePkts=wfFrSwSigOutReleasePkts, wfFrSwCctEntry=wfFrSwCctEntry, wfFrSwCngcMonP1Level3Percent=wfFrSwCngcMonP1Level3Percent, wfFrSwCctXNetSent=wfFrSwCctXNetSent, wfFrSwCctRemoteDropNonDEOctets=wfFrSwCctRemoteDropNonDEOctets, wfFrSwUsageCleanupTimeStamp=wfFrSwUsageCleanupTimeStamp, wfFrSwLocalE164AddrLocalFlag=wfFrSwLocalE164AddrLocalFlag, wfFrSwVirtualIntfCct=wfFrSwVirtualIntfCct, 
wfFrSwVcDropDeFrames=wfFrSwVcDropDeFrames, wfFrSwCctXNetReceived=wfFrSwCctXNetReceived, wfFrSwLocalE164AddrCUG=wfFrSwLocalE164AddrCUG, wfFrSwCctState=wfFrSwCctState, wfFrSwSvcUsageCurCleanupInterval=wfFrSwSvcUsageCurCleanupInterval, wfFrSwVcTable=wfFrSwVcTable, wfFrSwCctInactiveVCDropFrames=wfFrSwCctInactiveVCDropFrames, wfFrSwGlobalX121AddrEntry=wfFrSwGlobalX121AddrEntry, wfFrSwSvcUsageCurFilePrefix=wfFrSwSvcUsageCurFilePrefix, wfFrSwCngcMonEntry=wfFrSwCngcMonEntry, wfFrSwCctLocalDropNonDEFrames=wfFrSwCctLocalDropNonDEFrames, wfFrSwUsageFlushData=wfFrSwUsageFlushData, wfFrSwVirtualIntfDelete=wfFrSwVirtualIntfDelete, wfFrSwIsdnAssocScrnEnable=wfFrSwIsdnAssocScrnEnable, wfFrSwCngcMonP0Level4Percent=wfFrSwCngcMonP0Level4Percent, wfFrSwIsdnBaseTable=wfFrSwIsdnBaseTable, wfFrSwUsageDlci=wfFrSwUsageDlci, wfFrSwLocalX121AddrCct=wfFrSwLocalX121AddrCct, wfFrSwCctLocalSetDEOctets=wfFrSwCctLocalSetDEOctets, wfFrSwLocalE164AddrCct=wfFrSwLocalE164AddrCct, wfFrSwVcAtmIwfDePolicy=wfFrSwVcAtmIwfDePolicy, wfFrSwCctRemoteDropDEFrames=wfFrSwCctRemoteDropDEFrames, wfFrSwSvcUsageStoreData=wfFrSwSvcUsageStoreData, wfFrSwTupleDlciA=wfFrSwTupleDlciA, wfFrSwBaseShutDown=wfFrSwBaseShutDown, wfFrSwCctLastTimeChange=wfFrSwCctLastTimeChange, wfFrSwUsageTable=wfFrSwUsageTable, wfFrSwVcCreationTime=wfFrSwVcCreationTime, wfFrSwVcLastTimeChange=wfFrSwVcLastTimeChange, wfFrSwCctInBc=wfFrSwCctInBc, wfFrSwUsageCurDirectory=wfFrSwUsageCurDirectory, wfFrSwCctMulticast=wfFrSwCctMulticast, wfFrSwVcInBe=wfFrSwVcInBe, wfFrSwSigT301=wfFrSwSigT301, wfFrSwCctRemoteSentNonDEOctets=wfFrSwCctRemoteSentNonDEOctets, wfFrSwUsageFlushTimeStamp=wfFrSwUsageFlushTimeStamp, wfFrSwCctRemoteSetBECNOctets=wfFrSwCctRemoteSetBECNOctets, wfFrSwVcBackupCalledIpAddr=wfFrSwVcBackupCalledIpAddr, wfFrSwVcAtmIwfVPI=wfFrSwVcAtmIwfVPI, wfFrSwSigInReleaseCompletePkts=wfFrSwSigInReleaseCompletePkts, wfFrSwLocalX121AddrEntry=wfFrSwLocalX121AddrEntry, wfFrSwCctCrossNetStatus=wfFrSwCctCrossNetStatus, 
wfFrSwSvcUsageFileLayout=wfFrSwSvcUsageFileLayout, wfFrSwDlcmiFullStatusSeq=wfFrSwDlcmiFullStatusSeq, wfFrSwDlcmiSvcDisable=wfFrSwDlcmiSvcDisable, wfFrSwVcCallReqDlciSelectionType=wfFrSwVcCallReqDlciSelectionType, wfFrSwSigOutStatusEnquiryPkts=wfFrSwSigOutStatusEnquiryPkts, wfFrSwUsageSentNonDEFramesLow=wfFrSwUsageSentNonDEFramesLow, wfFrSwLocalE164AddrEntry=wfFrSwLocalE164AddrEntry, wfFrSwDlcmiDteSeqCount=wfFrSwDlcmiDteSeqCount, wfFrSwUsageFileCleanup=wfFrSwUsageFileCleanup, wfFrSwBaseDelete=wfFrSwBaseDelete, wfFrSwSvcUsageFlushInterval=wfFrSwSvcUsageFlushInterval, wfFrSwUsageLastDEFramesHigh=wfFrSwUsageLastDEFramesHigh, wfFrSwVcRedirectState=wfFrSwVcRedirectState, wfFrSwDlcmiAlarmTimer=wfFrSwDlcmiAlarmTimer, wfFrSwCctLocalOrRemoteConnection=wfFrSwCctLocalOrRemoteConnection)
mibBuilder.exportSymbols("Wellfleet-FRSW-MIB", wfFrSwSigOutCallProceedingPkts=wfFrSwSigOutCallProceedingPkts, wfFrSwDlcmiMaxSupportedVCs=wfFrSwDlcmiMaxSupportedVCs, wfFrSwDlcmiSpvcAgent=wfFrSwDlcmiSpvcAgent, wfFrSwCctRemoteDropNonDEFrames=wfFrSwCctRemoteDropNonDEFrames, wfFrSwVcCallReqMaxRetries=wfFrSwVcCallReqMaxRetries, wfFrSwSwitchHdrErrors=wfFrSwSwitchHdrErrors, wfFrSwDlcmiEscapeVcCount=wfFrSwDlcmiEscapeVcCount, wfFrSwVcCalledIpAddr=wfFrSwVcCalledIpAddr, wfFrSwUsageSentNonDEOctetsHigh=wfFrSwUsageSentNonDEOctetsHigh, wfFrSwBase=wfFrSwBase, wfFrSwDlcmiDtePolls=wfFrSwDlcmiDtePolls, wfFrSwCctRemoteSetBECNFrames=wfFrSwCctRemoteSetBECNFrames, wfFrSwVcTxNonDeOctets=wfFrSwVcTxNonDeOctets, wfFrSwDlcmiMcastNoBufferErrors=wfFrSwDlcmiMcastNoBufferErrors, wfFrSwSigCallingPartyIEMandatory=wfFrSwSigCallingPartyIEMandatory, wfFrSwUsageCurStoreInterval=wfFrSwUsageCurStoreInterval, wfFrSwDlcmiFrameTooLongErrors=wfFrSwDlcmiFrameTooLongErrors, wfFrSwSvcUsageFlushTimeStamp=wfFrSwSvcUsageFlushTimeStamp, wfFrSwCngcMonP1Level4Percent=wfFrSwCngcMonP1Level4Percent, wfFrSwVcBackupCrossNetStatus=wfFrSwVcBackupCrossNetStatus, wfFrSwSigXNetClearingDisable=wfFrSwSigXNetClearingDisable, wfFrSwSigTable=wfFrSwSigTable, wfFrSwCngcMonP3Level4Percent=wfFrSwCngcMonP3Level4Percent, wfFrSwCctLocalDropNonDEOctets=wfFrSwCctLocalDropNonDEOctets, wfFrSwCngcMonCct=wfFrSwCngcMonCct, wfFrSwVcDropExcessBurstFrames=wfFrSwVcDropExcessBurstFrames, wfFrSwUsageNumEntries=wfFrSwUsageNumEntries, wfFrSwTupleIpAddrB=wfFrSwTupleIpAddrB, wfFrSwSvcUsageUpdateTimeStamp=wfFrSwSvcUsageUpdateTimeStamp, wfFrSwSvcUsageCurUpdateInterval=wfFrSwSvcUsageCurUpdateInterval, wfFrSwDlcmiNniEnable=wfFrSwDlcmiNniEnable, wfFrSwSigDefaultMinAcceptThroughput=wfFrSwSigDefaultMinAcceptThroughput, wfFrSwUsageEntry=wfFrSwUsageEntry, wfFrSwCngcMonP2Level3Percent=wfFrSwCngcMonP2Level3Percent, wfFrSwCctDlci=wfFrSwCctDlci, wfFrSwUsageLastDEFramesLow=wfFrSwUsageLastDEFramesLow, wfFrSwSigDelete=wfFrSwSigDelete, 
wfFrSwUsageCurVolume=wfFrSwUsageCurVolume, wfFrSwCngcMonP3Level3Percent=wfFrSwCngcMonP3Level3Percent, wfFrSwSigInReleasePkts=wfFrSwSigInReleasePkts, wfFrSwCctReportedStatus=wfFrSwCctReportedStatus, wfFrSwDlcmiSvcBillingEnable=wfFrSwDlcmiSvcBillingEnable, wfFrSwDlcmiMonitoredEvents=wfFrSwDlcmiMonitoredEvents, wfFrSwVcCallReqCalledAddr=wfFrSwVcCallReqCalledAddr, wfFrSwSigT308=wfFrSwSigT308, wfFrSwVcCircuit=wfFrSwVcCircuit, wfFrSwBaseIpAddr=wfFrSwBaseIpAddr, wfFrSwVcDlci=wfFrSwVcDlci, wfFrSwDlcmiPollingInterval=wfFrSwDlcmiPollingInterval, wfFrSwGlobalE164AddrTable=wfFrSwGlobalE164AddrTable, wfFrSwCngcMonP3Level1Percent=wfFrSwCngcMonP3Level1Percent, wfFrSwUsageCurFilePrefix=wfFrSwUsageCurFilePrefix, wfFrSwCctLocalDropDEOctets=wfFrSwCctLocalDropDEOctets, wfFrSwUsageLocalTimeZone=wfFrSwUsageLocalTimeZone, wfFrSwVcOutBc=wfFrSwVcOutBc, wfFrSwVcAtmIwfVCI=wfFrSwVcAtmIwfVCI, wfFrSwVcCfgInBe=wfFrSwVcCfgInBe, wfFrSwVcDropNonDeOctets=wfFrSwVcDropNonDeOctets, wfFrSwVcInBcOctets=wfFrSwVcInBcOctets, wfFrSwSigCircuit=wfFrSwSigCircuit, wfFrSwVcRecentNonDeOctets=wfFrSwVcRecentNonDeOctets, wfFrSwVcCrossNetStatus=wfFrSwVcCrossNetStatus, wfFrSwTupleEntry=wfFrSwTupleEntry, wfFrSwExtFileSysSlot=wfFrSwExtFileSysSlot, wfFrSwSvcUsageCurDirectory=wfFrSwSvcUsageCurDirectory, wfFrSwUsage=wfFrSwUsage, wfFrSwTupleDlciB=wfFrSwTupleDlciB, wfFrSwUsageDebug=wfFrSwUsageDebug, wfFrSwLocalX121AddrCUG=wfFrSwLocalX121AddrCUG, wfFrSwIsdnUniIndex=wfFrSwIsdnUniIndex, wfFrSwCctLocalSentDEFrames=wfFrSwCctLocalSentDEFrames, wfFrSwSvcUsageDirectory=wfFrSwSvcUsageDirectory, wfFrSwDlcmiErrorThreshold=wfFrSwDlcmiErrorThreshold, wfFrSwDlcmiFormatErrors=wfFrSwDlcmiFormatErrors, wfFrSwDlcmiDtePollingInterval=wfFrSwDlcmiDtePollingInterval, wfFrSwCctLocalRecvNonDEOctets=wfFrSwCctLocalRecvNonDEOctets, wfFrSwSigMaxOutThroughputPerSvc=wfFrSwSigMaxOutThroughputPerSvc, wfFrSwVcInThroughput=wfFrSwVcInThroughput, wfFrSwCctXNetErrors=wfFrSwCctXNetErrors, wfFrSwMcastEntry=wfFrSwMcastEntry, wfFrSwCctStateSet=wfFrSwCctStateSet, 
wfFrSwCctLocalSetBECNOctets=wfFrSwCctLocalSetBECNOctets, wfFrSwCctLocalRecvNonDEFrames=wfFrSwCctLocalRecvNonDEFrames, wfFrSwVcTxDeOctets=wfFrSwVcTxDeOctets, wfFrSwSvcUsageCleanupInterval=wfFrSwSvcUsageCleanupInterval, wfFrSwUsageEndTimeStampHigh=wfFrSwUsageEndTimeStampHigh, wfFrSwSigDefaultBe=wfFrSwSigDefaultBe, wfFrSwVcSpvcCallState=wfFrSwVcSpvcCallState, wfFrSwVcDropExcessBurstOctets=wfFrSwVcDropExcessBurstOctets, wfFrSwGlobalE164AddrHigh=wfFrSwGlobalE164AddrHigh, wfFrSwTupleDelete=wfFrSwTupleDelete, wfFrSwCctRemoteDropDEOctets=wfFrSwCctRemoteDropDEOctets, wfFrSwSigOutSetupPkts=wfFrSwSigOutSetupPkts, wfFrSwIsdnScrnIndex=wfFrSwIsdnScrnIndex, wfFrSwCctRemoteRecvDEFrames=wfFrSwCctRemoteRecvDEFrames, wfFrSwTupleTable=wfFrSwTupleTable, wfFrSwUsageLastDEOctetsLow=wfFrSwUsageLastDEOctetsLow, wfFrSwIsdnBaseAssocType=wfFrSwIsdnBaseAssocType, wfFrSwLocalX121AddrLocalFlag=wfFrSwLocalX121AddrLocalFlag, wfFrSwVcDropDeOctets=wfFrSwVcDropDeOctets, wfFrSwCctDelete=wfFrSwCctDelete, wfFrSwDlcmiFullEnquiryInterval=wfFrSwDlcmiFullEnquiryInterval, wfFrSwIsdnScrnDelete=wfFrSwIsdnScrnDelete, wfFrSwIsdnBaseEntry=wfFrSwIsdnBaseEntry, wfFrSwVcAtmIwfEfciPolicy=wfFrSwVcAtmIwfEfciPolicy, wfFrSwVcStateSet=wfFrSwVcStateSet, wfFrSwDlcmiEntry=wfFrSwDlcmiEntry, wfFrSwVcTrfPriority=wfFrSwVcTrfPriority, wfFrSwDlcmiActiveReceived=wfFrSwDlcmiActiveReceived, wfFrSwDlcmiProtocolErrors=wfFrSwDlcmiProtocolErrors, wfFrSwSigDlciAssign=wfFrSwSigDlciAssign, wfFrSwExtFileSysSize=wfFrSwExtFileSysSize, wfFrSwSvcUsageInterimRecordEnable=wfFrSwSvcUsageInterimRecordEnable, wfFrSwDlcmiNewVCs=wfFrSwDlcmiNewVCs, wfFrSwUsageLastNonDEOctetsLow=wfFrSwUsageLastNonDEOctetsLow, wfFrSwDlcmiDelete=wfFrSwDlcmiDelete, wfFrSwUsageCurUpdateInterval=wfFrSwUsageCurUpdateInterval, wfFrSwCngcMonP0Level3Percent=wfFrSwCngcMonP0Level3Percent, wfFrSwVcSetBecnFrames=wfFrSwVcSetBecnFrames, wfFrSwUsageRemoteDlci=wfFrSwUsageRemoteDlci, wfFrSwUsageCurTimerInterval=wfFrSwUsageCurTimerInterval, wfFrSwIsdnAssocDelete=wfFrSwIsdnAssocDelete, 
wfFrSwSigTotalOutCurrentThroughput=wfFrSwSigTotalOutCurrentThroughput, wfFrSwDlcmiIwfMode=wfFrSwDlcmiIwfMode, wfFrSwSigDefaultBc=wfFrSwSigDefaultBc, wfFrSwDlcmiRecoveryCounts=wfFrSwDlcmiRecoveryCounts, wfFrSwUsageLastDEOctetsHigh=wfFrSwUsageLastDEOctetsHigh, wfFrSwVcSetFecnOctets=wfFrSwVcSetFecnOctets, wfFrSwVcDelete=wfFrSwVcDelete, wfFrSwVcRecvBecnFrames=wfFrSwVcRecvBecnFrames, wfFrSwExtFileSysEntry=wfFrSwExtFileSysEntry, wfFrSwCngcMonReset=wfFrSwCngcMonReset, wfFrSwSigMaximumBe=wfFrSwSigMaximumBe, wfFrSwSigT305=wfFrSwSigT305, wfFrSwSvcUsageEnable=wfFrSwSvcUsageEnable, wfFrSwSigT322=wfFrSwSigT322, wfFrSwSvcUsageVolume=wfFrSwSvcUsageVolume, wfFrSwDlcmiIllegalDlciErrors=wfFrSwDlcmiIllegalDlciErrors, wfFrSwIsdnAssocTable=wfFrSwIsdnAssocTable, wfFrSwCctRemoteRecvNonDEFrames=wfFrSwCctRemoteRecvNonDEFrames, wfFrSwDlcmiCrossNetPollingInterval=wfFrSwDlcmiCrossNetPollingInterval, wfFrSwLocalE164Address=wfFrSwLocalE164Address, wfFrSwUsageStoreInterval=wfFrSwUsageStoreInterval, wfFrSwSigInSetupPkts=wfFrSwSigInSetupPkts, wfFrSwUsageSentNonDEOctetsLow=wfFrSwUsageSentNonDEOctetsLow, wfFrSwSigSvcDlciHigh=wfFrSwSigSvcDlciHigh, wfFrSwDlcmiL2AddrType=wfFrSwDlcmiL2AddrType, wfFrSwMcastIndex=wfFrSwMcastIndex, wfFrSwDlcmiUnknownRPTErrors=wfFrSwDlcmiUnknownRPTErrors, wfFrSwUsageSentNonDEFramesHigh=wfFrSwUsageSentNonDEFramesHigh, wfFrSwDlcmiLastReceived=wfFrSwDlcmiLastReceived, wfFrSwCctLocalSentNonDEOctets=wfFrSwCctLocalSentNonDEOctets, wfFrSwSigInCallProceedingPkts=wfFrSwSigInCallProceedingPkts, wfFrSwSvcUsageFileCleanup=wfFrSwSvcUsageFileCleanup, wfFrSwSigEntry=wfFrSwSigEntry, wfFrSwMcastTable=wfFrSwMcastTable, wfFrSwSigL2Resets=wfFrSwSigL2Resets, wfFrSwDlcmiOtherErrors=wfFrSwDlcmiOtherErrors, wfFrSwErrTime=wfFrSwErrTime, wfFrSwUsageFlushInterval=wfFrSwUsageFlushInterval, wfFrSwVcTxNonDeFrames=wfFrSwVcTxNonDeFrames, wfFrSwUsageCleanupInterval=wfFrSwUsageCleanupInterval, wfFrSwIsdnScrnEntry=wfFrSwIsdnScrnEntry, wfFrSwUsageRemoteIPAddress=wfFrSwUsageRemoteIPAddress, 
wfFrSwSigInStatusEnquiryPkts=wfFrSwSigInStatusEnquiryPkts, wfFrSwVirtualIntfTable=wfFrSwVirtualIntfTable, wfFrSwCngcMonP3Level2Percent=wfFrSwCngcMonP3Level2Percent, wfFrSwUsageLastNonDEFramesHigh=wfFrSwUsageLastNonDEFramesHigh, wfFrSwCctLocalSetFECNOctets=wfFrSwCctLocalSetFECNOctets, wfFrSwVcAtmIwfMode=wfFrSwVcAtmIwfMode, wfFrSwVcRedirectType=wfFrSwVcRedirectType, wfFrSwSigT310=wfFrSwSigT310, wfFrSwCctLocalRecvFECNOctets=wfFrSwCctLocalRecvFECNOctets, wfFrSwGlobalE164AddrIPAddr=wfFrSwGlobalE164AddrIPAddr, wfFrSwDlcmiFrameTooShortErrors=wfFrSwDlcmiFrameTooShortErrors, wfFrSwVcMulticast=wfFrSwVcMulticast, wfFrSwUsageIPAddress=wfFrSwUsageIPAddress, wfFrSwSigDefaultThroughput=wfFrSwSigDefaultThroughput, wfFrSwCctLocalRecentNonDEOctets=wfFrSwCctLocalRecentNonDEOctets, wfFrSwUsageSentDEFramesLow=wfFrSwUsageSentDEFramesLow, wfFrSwSvcUsageCleanupTimeStamp=wfFrSwSvcUsageCleanupTimeStamp, wfFrSwSigInUnknownPkts=wfFrSwSigInUnknownPkts, wfFrSwCctInactiveVCDropOctets=wfFrSwCctInactiveVCDropOctets, wfFrSwDlcmiEscapeCircuit=wfFrSwDlcmiEscapeCircuit, wfFrSwUsageDelete=wfFrSwUsageDelete, wfFrSwCctNumber=wfFrSwCctNumber, wfFrSwMcastDelete=wfFrSwMcastDelete, wfFrSwSigTotalOutNegotiableThroughput=wfFrSwSigTotalOutNegotiableThroughput, wfFrSwVcRecvNonDeOctets=wfFrSwVcRecvNonDeOctets, wfFrSwCngcMonP2Level1Percent=wfFrSwCngcMonP2Level1Percent, wfFrSwVcRecvFecnFrames=wfFrSwVcRecvFecnFrames, wfFrSwCctInBe=wfFrSwCctInBe, wfFrSwCctLocalDropDEFrames=wfFrSwCctLocalDropDEFrames, wfFrSwCctLocalSetBECNFrames=wfFrSwCctLocalSetBECNFrames, wfFrSwDlcmiUnknownDlciErrors=wfFrSwDlcmiUnknownDlciErrors, wfFrSwCctLocalRecvDEFrames=wfFrSwCctLocalRecvDEFrames, wfFrSwSvcUsageCurFlushInterval=wfFrSwSvcUsageCurFlushInterval, wfFrSwGlobalE164AddrEntry=wfFrSwGlobalE164AddrEntry, wfFrSwErrData=wfFrSwErrData, wfFrSwVcSetFecnFrames=wfFrSwVcSetFecnFrames, wfFrSwDlcmiCallAccDlciSelectionType=wfFrSwDlcmiCallAccDlciSelectionType, wfFrSwTupleIpAddrA=wfFrSwTupleIpAddrA, 
wfFrSwCctRemoteSetFECNOctets=wfFrSwCctRemoteSetFECNOctets, wfFrSwVcXNetErrors=wfFrSwVcXNetErrors)
| true | true |
f7253028cdd82bf123f765e9eee0f96a6ac55fad | 6,819 | py | Python | src/walax/metadata.py | hazelmollusk/django-walax | 60cd05483e155bdd817df60a0c9fc7922f80c500 | [
"MIT"
] | null | null | null | src/walax/metadata.py | hazelmollusk/django-walax | 60cd05483e155bdd817df60a0c9fc7922f80c500 | [
"MIT"
] | null | null | null | src/walax/metadata.py | hazelmollusk/django-walax | 60cd05483e155bdd817df60a0c9fc7922f80c500 | [
"MIT"
] | null | null | null | from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.utils.encoding import force_str
from rest_framework import exceptions, serializers
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.request import clone_request
from collections import OrderedDict
from rest_framework.metadata import BaseMetadata
from rest_framework.utils.field_mapping import ClassLookupDict
class WalaxModelMetadata(BaseMetadata):
    """Metadata implementation for Walax model viewsets.

    Returns an ad-hoc set of information about the view in response to
    ``OPTIONS`` requests.  There are not any formalized standards for
    ``OPTIONS`` responses for us to base this on, so the payload is
    Walax-specific: renderers/parsers, the backing model name, extra
    callable model actions, and per-field metadata for writable methods.
    """

    # Maps serializer field classes to the human-readable "type" labels
    # reported for each field in the metadata payload.
    label_lookup = ClassLookupDict(
        {
            serializers.Field: "field",
            serializers.PrimaryKeyRelatedField: "related",
            serializers.RelatedField: "related",
            serializers.BooleanField: "boolean",
            serializers.NullBooleanField: "boolean",
            serializers.CharField: "string",
            serializers.UUIDField: "string",
            serializers.URLField: "url",
            serializers.EmailField: "email",
            serializers.RegexField: "regex",
            serializers.SlugField: "slug",
            serializers.IntegerField: "integer",
            serializers.FloatField: "float",
            serializers.DecimalField: "decimal",
            serializers.DateField: "date",
            serializers.DateTimeField: "datetime",
            serializers.TimeField: "time",
            serializers.ChoiceField: "choice",
            serializers.MultipleChoiceField: "multiple choice",
            serializers.FileField: "file upload",
            serializers.ImageField: "image upload",
            serializers.ListField: "list",
            serializers.DictField: "nested object",
            serializers.Serializer: "nested object",
        }
    )

    def determine_metadata(self, request, view):
        """Build the top-level metadata dict returned for an OPTIONS request.

        NOTE(review): assumes ``view.queryset`` is set -- a viewset that only
        overrides ``get_queryset()`` would raise AttributeError here; confirm
        against the Walax view classes.
        """
        metadata = OrderedDict()
        metadata["name"] = view.get_view_name()
        metadata["description"] = view.get_view_description()
        metadata["renders"] = [
            renderer.media_type for renderer in view.renderer_classes
        ]
        metadata["parses"] = [
            parser.media_type for parser in view.parser_classes]
        metadata["model"] = view.queryset.model.__name__
        # Remember the model so the field helpers below can consult it.
        self.model = view.queryset.model
        metadata['extra_actions'] = self.determine_extra_actions(request, view)
        if hasattr(view, "get_serializer"):
            actions = self.determine_actions(request, view)
            if actions:
                metadata["actions"] = actions
        return metadata

    def determine_extra_actions(self, request, view):
        """Return the list of extra callable actions exposed by the model.

        Any function on the model carrying a truthy ``walax_action``
        attribute is reported as a POST-able instance action.
        """
        import inspect

        return [
            {
                'method': 'post',
                'type': 'instance',
                'name': name,
            }
            for name, func in inspect.getmembers(self.model)
            if inspect.isfunction(func) and getattr(func, 'walax_action', False)
        ]

    def determine_actions(self, request, view):
        """Describe the fields accepted for 'PUT' and 'POST' methods.

        Only methods the requesting user is actually permitted to perform
        are included.
        """
        actions = {}
        for method in {"PUT", "POST"} & set(view.allowed_methods):
            view.request = clone_request(request, method)
            try:
                # Test global permissions.
                if hasattr(view, "check_permissions"):
                    view.check_permissions(view.request)
                # Test object permissions (PUT operates on an instance).
                if method == "PUT" and hasattr(view, "get_object"):
                    view.get_object()
            except (exceptions.APIException, PermissionDenied, Http404):
                # Not permitted for this method: simply omit it.
                pass
            else:
                # If the user has appropriate permissions for the view,
                # include metadata about the fields that should be supplied.
                serializer = view.get_serializer()
                actions[method] = self.get_serializer_info(serializer)
            finally:
                view.request = request
        return actions

    def get_serializer_info(self, serializer):
        """Return an ordered mapping of field name -> field metadata."""
        if hasattr(serializer, "child"):
            # A `ListSerializer` wraps the serializer we actually want to
            # examine; unwrap it.
            serializer = serializer.child
        return OrderedDict(
            [
                (field_name, self.get_field_info(field, field_name))
                for field_name, field in serializer.fields.items()
                if not isinstance(field, serializers.HiddenField)
            ]
        )

    def get_field_info(self, field, field_name):
        """Return a dictionary of metadata about a single serializer field."""
        from django.core.exceptions import FieldDoesNotExist

        field_info = OrderedDict()
        field_info["type"] = self.label_lookup[field]
        field_info["required"] = getattr(field, "required", False)
        if field_info["type"].startswith("related"):
            # NOTE(review): `field.queryset` is None for read-only related
            # fields -- confirm Walax never exposes those here.
            field_info["model"] = field.queryset.model.__name__
            field_info["related_name"] = getattr(field, "related_name", None)
        attrs = [
            "read_only",
            "label",
            "help_text",
            "min_length",
            "max_length",
            "min_value",
            "max_value",
            "related_name",
        ]
        # Serializer-only fields (e.g. SerializerMethodField) have no backing
        # model field; previously FieldDoesNotExist propagated and broke the
        # whole OPTIONS response.
        try:
            model_field = self.model._meta.get_field(field_name)
        except FieldDoesNotExist:
            model_field = None
        if model_field is not None and getattr(model_field, "primary_key", False):
            field_info["primary_key"] = "true"
        for attr in attrs:
            value = getattr(field, attr, None)
            if value is not None and value != "":
                field_info[attr] = force_str(value, strings_only=True)
        if getattr(field, "child", None):
            # Describe the element type of list-like fields.  The recursive
            # call previously omitted ``field_name`` and raised TypeError for
            # any field with a child.
            field_info["child"] = self.get_field_info(field.child, field_name)
        elif getattr(field, "fields", None):
            field_info["children"] = self.get_serializer_info(field)
        if (
            not field_info.get("read_only")
            and not isinstance(
                field, (serializers.RelatedField, serializers.ManyRelatedField)
            )
            and hasattr(field, "choices")
        ):
            field_info["choices"] = [
                {
                    "value": choice_value,
                    "display_name": force_str(choice_name, strings_only=True),
                }
                for choice_value, choice_name in field.choices.items()
            ]
        return field_info
| 38.308989 | 140 | 0.592169 | from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.utils.encoding import force_str
from rest_framework import exceptions, serializers
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.request import clone_request
from collections import OrderedDict
from rest_framework.metadata import BaseMetadata
from rest_framework.utils.field_mapping import ClassLookupDict
class WalaxModelMetadata(BaseMetadata):
    """Metadata implementation for Walax model viewsets.

    Returns an ad-hoc set of information about the view in response to
    ``OPTIONS`` requests.  There are not any formalized standards for
    ``OPTIONS`` responses for us to base this on, so the payload is
    Walax-specific: renderers/parsers, the backing model name, extra
    callable model actions, and per-field metadata for writable methods.
    """

    # Maps serializer field classes to the human-readable "type" labels
    # reported for each field in the metadata payload.
    label_lookup = ClassLookupDict(
        {
            serializers.Field: "field",
            serializers.PrimaryKeyRelatedField: "related",
            serializers.RelatedField: "related",
            serializers.BooleanField: "boolean",
            serializers.NullBooleanField: "boolean",
            serializers.CharField: "string",
            serializers.UUIDField: "string",
            serializers.URLField: "url",
            serializers.EmailField: "email",
            serializers.RegexField: "regex",
            serializers.SlugField: "slug",
            serializers.IntegerField: "integer",
            serializers.FloatField: "float",
            serializers.DecimalField: "decimal",
            serializers.DateField: "date",
            serializers.DateTimeField: "datetime",
            serializers.TimeField: "time",
            serializers.ChoiceField: "choice",
            serializers.MultipleChoiceField: "multiple choice",
            serializers.FileField: "file upload",
            serializers.ImageField: "image upload",
            serializers.ListField: "list",
            serializers.DictField: "nested object",
            serializers.Serializer: "nested object",
        }
    )

    def determine_metadata(self, request, view):
        """Build the top-level metadata dict returned for an OPTIONS request.

        NOTE(review): assumes ``view.queryset`` is set -- a viewset that only
        overrides ``get_queryset()`` would raise AttributeError here; confirm
        against the Walax view classes.
        """
        metadata = OrderedDict()
        metadata["name"] = view.get_view_name()
        metadata["description"] = view.get_view_description()
        metadata["renders"] = [
            renderer.media_type for renderer in view.renderer_classes
        ]
        metadata["parses"] = [
            parser.media_type for parser in view.parser_classes]
        metadata["model"] = view.queryset.model.__name__
        # Remember the model so the field helpers below can consult it.
        self.model = view.queryset.model
        metadata['extra_actions'] = self.determine_extra_actions(request, view)
        if hasattr(view, "get_serializer"):
            actions = self.determine_actions(request, view)
            if actions:
                metadata["actions"] = actions
        return metadata

    def determine_extra_actions(self, request, view):
        """Return the list of extra callable actions exposed by the model.

        Any function on the model carrying a truthy ``walax_action``
        attribute is reported as a POST-able instance action.
        """
        import inspect

        return [
            {
                'method': 'post',
                'type': 'instance',
                'name': name,
            }
            for name, func in inspect.getmembers(self.model)
            if inspect.isfunction(func) and getattr(func, 'walax_action', False)
        ]

    def determine_actions(self, request, view):
        """Describe the fields accepted for 'PUT' and 'POST' methods.

        Only methods the requesting user is actually permitted to perform
        are included.
        """
        actions = {}
        for method in {"PUT", "POST"} & set(view.allowed_methods):
            view.request = clone_request(request, method)
            try:
                # Test global permissions.
                if hasattr(view, "check_permissions"):
                    view.check_permissions(view.request)
                # Test object permissions (PUT operates on an instance).
                if method == "PUT" and hasattr(view, "get_object"):
                    view.get_object()
            except (exceptions.APIException, PermissionDenied, Http404):
                # Not permitted for this method: simply omit it.
                pass
            else:
                # If the user has appropriate permissions for the view,
                # include metadata about the fields that should be supplied.
                serializer = view.get_serializer()
                actions[method] = self.get_serializer_info(serializer)
            finally:
                view.request = request
        return actions

    def get_serializer_info(self, serializer):
        """Return an ordered mapping of field name -> field metadata."""
        if hasattr(serializer, "child"):
            # A `ListSerializer` wraps the serializer we actually want to
            # examine; unwrap it.
            serializer = serializer.child
        return OrderedDict(
            [
                (field_name, self.get_field_info(field, field_name))
                for field_name, field in serializer.fields.items()
                if not isinstance(field, serializers.HiddenField)
            ]
        )

    def get_field_info(self, field, field_name):
        """Return a dictionary of metadata about a single serializer field."""
        from django.core.exceptions import FieldDoesNotExist

        field_info = OrderedDict()
        field_info["type"] = self.label_lookup[field]
        field_info["required"] = getattr(field, "required", False)
        if field_info["type"].startswith("related"):
            # NOTE(review): `field.queryset` is None for read-only related
            # fields -- confirm Walax never exposes those here.
            field_info["model"] = field.queryset.model.__name__
            field_info["related_name"] = getattr(field, "related_name", None)
        attrs = [
            "read_only",
            "label",
            "help_text",
            "min_length",
            "max_length",
            "min_value",
            "max_value",
            "related_name",
        ]
        # Serializer-only fields (e.g. SerializerMethodField) have no backing
        # model field; previously FieldDoesNotExist propagated and broke the
        # whole OPTIONS response.
        try:
            model_field = self.model._meta.get_field(field_name)
        except FieldDoesNotExist:
            model_field = None
        if model_field is not None and getattr(model_field, "primary_key", False):
            field_info["primary_key"] = "true"
        for attr in attrs:
            value = getattr(field, attr, None)
            if value is not None and value != "":
                field_info[attr] = force_str(value, strings_only=True)
        if getattr(field, "child", None):
            # Describe the element type of list-like fields.  The recursive
            # call previously omitted ``field_name`` and raised TypeError for
            # any field with a child.
            field_info["child"] = self.get_field_info(field.child, field_name)
        elif getattr(field, "fields", None):
            field_info["children"] = self.get_serializer_info(field)
        if (
            not field_info.get("read_only")
            and not isinstance(
                field, (serializers.RelatedField, serializers.ManyRelatedField)
            )
            and hasattr(field, "choices")
        ):
            field_info["choices"] = [
                {
                    "value": choice_value,
                    "display_name": force_str(choice_name, strings_only=True),
                }
                for choice_value, choice_name in field.choices.items()
            ]
        return field_info
| true | true |
f725309690ad014e3b8fcbe2e6561e01b841f7ec | 574 | py | Python | subframe/datatables.py | joshbode/subframe | af035ee75f2c9a0d11f538dd88b9491c92389b65 | [
"MIT"
] | 2 | 2015-08-24T02:19:01.000Z | 2015-08-24T03:31:06.000Z | subframe/datatables.py | joshbode/subframe | af035ee75f2c9a0d11f538dd88b9491c92389b65 | [
"MIT"
] | null | null | null | subframe/datatables.py | joshbode/subframe | af035ee75f2c9a0d11f538dd88b9491c92389b65 | [
"MIT"
] | null | null | null | """
DataTable display.
"""
from .subframe import SubFrame
from .plugin import plugins
class DataTable(SubFrame):
    """Render a DataFrame as an interactive DataTables table."""

    _plugins = [plugins.datatables]

    def _js(self, data):
        """Return the Javascript callback body that builds the table."""
        records = data.to_records()
        titles = [
            {'title': name}
            for name in self._map_columns(records.dtype.names)
        ]
        payload = self._json({'data': records.tolist(), 'columns': titles})
        return "element.append('<table />').find('table').DataTable({});".format(payload)
| 22.076923 | 86 | 0.562718 |
from .subframe import SubFrame
from .plugin import plugins
class DataTable(SubFrame):
    """Render a DataFrame as an interactive DataTables table."""

    _plugins = [plugins.datatables]

    def _js(self, data):
        """Return the Javascript callback body that builds the table."""
        records = data.to_records()
        titles = [
            {'title': name}
            for name in self._map_columns(records.dtype.names)
        ]
        payload = self._json({'data': records.tolist(), 'columns': titles})
        return "element.append('<table />').find('table').DataTable({});".format(payload)
| true | true |
f72530ff5351dea5d4081b0e5aac7da571510f0f | 346 | py | Python | setup.py | Qman11010101/blogen_neo | a56bd5e7a0622488e4f3a4dd87c6e4cf126ed2cb | [
"MIT"
] | null | null | null | setup.py | Qman11010101/blogen_neo | a56bd5e7a0622488e4f3a4dd87c6e4cf126ed2cb | [
"MIT"
] | null | null | null | setup.py | Qman11010101/blogen_neo | a56bd5e7a0622488e4f3a4dd87c6e4cf126ed2cb | [
"MIT"
] | null | null | null | from setuptools import setup
# Packaging metadata for blogen_neo, a simple static blog site generator.
setup(
    name="blogen_neo",
    version="0.0.1",
    description="Simple static site generator for blog",
    author="Kjuman Enobikto",
    author_email="qmanenobikto@gmail.com",
    install_requires=["jinja2", "fire"],
    entry_points={
        "console_scripts": [
            # console_scripts entries must use the "name = module:callable"
            # form; the bare "blogen = main" spec is rejected by setuptools,
            # so no working `blogen` command was ever installed.
            # NOTE(review): assumes the CLI entry point is main.main() --
            # confirm the callable name in main.py.
            "blogen = main:main"
        ]
    },
)
| 21.625 | 56 | 0.612717 | from setuptools import setup
# Packaging metadata for blogen_neo, a simple static blog site generator.
setup(
    name="blogen_neo",
    version="0.0.1",
    description="Simple static site generator for blog",
    author="Kjuman Enobikto",
    author_email="qmanenobikto@gmail.com",
    install_requires=["jinja2", "fire"],
    entry_points={
        "console_scripts": [
            # console_scripts entries must use the "name = module:callable"
            # form; the bare "blogen = main" spec is rejected by setuptools,
            # so no working `blogen` command was ever installed.
            # NOTE(review): assumes the CLI entry point is main.main() --
            # confirm the callable name in main.py.
            "blogen = main:main"
        ]
    },
)
| true | true |
f7253166d60df35b6e5baa2e8773dc05e8fdf3db | 4,335 | py | Python | src/runners/episode_runner.py | dennismalmgren/marl | baa846dc4144cf6f53e51d8cf1e2fcf5800c9f95 | [
"Apache-2.0"
] | null | null | null | src/runners/episode_runner.py | dennismalmgren/marl | baa846dc4144cf6f53e51d8cf1e2fcf5800c9f95 | [
"Apache-2.0"
] | null | null | null | src/runners/episode_runner.py | dennismalmgren/marl | baa846dc4144cf6f53e51d8cf1e2fcf5800c9f95 | [
"Apache-2.0"
] | null | null | null | from envs import REGISTRY as env_REGISTRY
from functools import partial
from components.episode_buffer import EpisodeBatch
import numpy as np
class EpisodeRunner:
    """Runs environment episodes one at a time (batch size 1) and records
    each transition into an ``EpisodeBatch`` for training, while tracking
    per-episode returns and environment stats for logging."""

    def __init__(self, args, logger):
        self.args = args
        self.logger = logger
        # This runner only supports a single environment instance.
        self.batch_size = self.args.batch_size_run
        assert self.batch_size == 1

        self.env = env_REGISTRY[self.args.env](**self.args.env_args)
        self.episode_limit = self.env.episode_limit
        # t: timestep within the current episode; t_env: total env steps seen.
        self.t = 0

        self.t_env = 0

        # Per-mode return and stat accumulators, flushed on each _log call.
        self.train_returns = []
        self.test_returns = []
        self.train_stats = {}
        self.test_stats = {}

        # Large negative sentinel so the very first run triggers logging.
        self.log_train_stats_t = -1000000

    def setup(self, scheme, groups, preprocess, mac):
        """Bind the batch factory (scheme/groups/preprocessing) and the
        multi-agent controller used to select actions."""
        # episode_limit + 1 leaves room for the terminal state entry.
        self.new_batch = partial(EpisodeBatch, scheme, groups, self.batch_size, self.episode_limit + 1,
                                 preprocess=preprocess, device=self.args.device)
        self.mac = mac

    def get_env_info(self):
        """Return the environment's info dict (state/obs shapes, n_actions, ...)."""
        return self.env.get_env_info()

    def save_replay(self):
        """Ask the environment to save a replay of recent episodes."""
        self.env.save_replay()

    def close_env(self):
        """Shut down the underlying environment."""
        self.env.close()

    def reset(self):
        """Start a fresh episode: new batch, reset env, zero the episode clock."""
        self.batch = self.new_batch()
        self.env.reset()
        self.t = 0

    def run(self, test_mode=False):
        """Roll out one full episode and return the filled EpisodeBatch.

        In test mode, stats/returns go to the test accumulators and t_env is
        not advanced.
        """
        self.reset()

        terminated = False
        episode_return = 0
        self.mac.init_hidden(batch_size=self.batch_size)

        while not terminated:

            # Record the pre-action view of the environment at timestep t.
            pre_transition_data = {
                "state": [self.env.get_state()],
                "avail_actions": [self.env.get_avail_actions()],
                "obs": [self.env.get_obs()]
            }

            self.batch.update(pre_transition_data, ts=self.t)

            # Pass the entire batch of experiences up till now to the agents
            # Receive the actions for each agent at this timestep in a batch of size 1
            actions = self.mac.select_actions(self.batch, t_ep=self.t, t_env=self.t_env, test_mode=test_mode)

            reward, terminated, env_info = self.env.step(actions[0])
            episode_return += reward

            post_transition_data = {
                "actions": actions,
                "reward": [(reward,)],
                # Flag true termination only; hitting the episode limit is a
                # timeout, not a terminal state, for bootstrapping purposes.
                "terminated": [(terminated != env_info.get("episode_limit", False),)],
            }

            self.batch.update(post_transition_data, ts=self.t)

            self.t += 1

        # Store the final (terminal) observation at the last timestep.
        last_data = {
            "state": [self.env.get_state()],
            "avail_actions": [self.env.get_avail_actions()],
            "obs": [self.env.get_obs()]
        }
        self.batch.update(last_data, ts=self.t)

        # Select actions in the last stored state
        actions = self.mac.select_actions(self.batch, t_ep=self.t, t_env=self.t_env, test_mode=test_mode)
        self.batch.update({"actions": actions}, ts=self.t)

        # Accumulate episode stats into the train or test buckets.
        cur_stats = self.test_stats if test_mode else self.train_stats
        cur_returns = self.test_returns if test_mode else self.train_returns
        log_prefix = "test_" if test_mode else ""
        cur_stats.update({k: cur_stats.get(k, 0) + env_info.get(k, 0) for k in set(cur_stats) | set(env_info)})
        cur_stats["n_episodes"] = 1 + cur_stats.get("n_episodes", 0)
        cur_stats["ep_length"] = self.t + cur_stats.get("ep_length", 0)

        if not test_mode:
            self.t_env += self.t

        cur_returns.append(episode_return)

        # Flush logs after a full test sweep, or periodically during training.
        if test_mode and (len(self.test_returns) == self.args.test_nepisode):
            self._log(cur_returns, cur_stats, log_prefix)
        elif self.t_env - self.log_train_stats_t >= self.args.runner_log_interval:
            self._log(cur_returns, cur_stats, log_prefix)
            if hasattr(self.mac.action_selector, "epsilon"):
                self.logger.log_stat("epsilon", self.mac.action_selector.epsilon, self.t_env)
            self.log_train_stats_t = self.t_env

        return self.batch

    def _log(self, returns, stats, prefix):
        """Emit mean/std of accumulated returns and per-episode-mean stats,
        then clear both accumulators (they are mutated in place)."""
        self.logger.log_stat(prefix + "return_mean", np.mean(returns), self.t_env)
        self.logger.log_stat(prefix + "return_std", np.std(returns), self.t_env)
        returns.clear()

        for k, v in stats.items():
            if k != "n_episodes":
                self.logger.log_stat(prefix + k + "_mean" , v/stats["n_episodes"], self.t_env)
        stats.clear()
| 34.959677 | 111 | 0.608074 | from envs import REGISTRY as env_REGISTRY
from functools import partial
from components.episode_buffer import EpisodeBatch
import numpy as np
class EpisodeRunner:
    def __init__(self, args, logger):
        """Create a single-environment episode runner."""
        self.args = args
        self.logger = logger
        # This runner only supports a single environment instance.
        self.batch_size = self.args.batch_size_run
        assert self.batch_size == 1
        self.env = env_REGISTRY[self.args.env](**self.args.env_args)
        self.episode_limit = self.env.episode_limit
        # t: timestep within the current episode; t_env: total env steps seen.
        self.t = 0
        self.t_env = 0
        # Per-mode return and stat accumulators, flushed on each _log call.
        self.train_returns = []
        self.test_returns = []
        self.train_stats = {}
        self.test_stats = {}
        # Large negative sentinel so the very first run triggers logging.
        self.log_train_stats_t = -1000000
    def setup(self, scheme, groups, preprocess, mac):
        """Bind the batch factory and the multi-agent controller."""
        # episode_limit + 1 leaves room for the terminal state entry.
        self.new_batch = partial(EpisodeBatch, scheme, groups, self.batch_size, self.episode_limit + 1,
                                 preprocess=preprocess, device=self.args.device)
        self.mac = mac
    def get_env_info(self):
        """Return the environment's info dict (state/obs shapes, n_actions, ...)."""
        return self.env.get_env_info()
    def save_replay(self):
        """Ask the environment to save a replay of recent episodes."""
        self.env.save_replay()
    def close_env(self):
        """Shut down the underlying environment."""
        self.env.close()
def reset(self):
self.batch = self.new_batch()
self.env.reset()
self.t = 0
def run(self, test_mode=False):
self.reset()
terminated = False
episode_return = 0
self.mac.init_hidden(batch_size=self.batch_size)
while not terminated:
pre_transition_data = {
"state": [self.env.get_state()],
"avail_actions": [self.env.get_avail_actions()],
"obs": [self.env.get_obs()]
}
self.batch.update(pre_transition_data, ts=self.t)
actions = self.mac.select_actions(self.batch, t_ep=self.t, t_env=self.t_env, test_mode=test_mode)
reward, terminated, env_info = self.env.step(actions[0])
episode_return += reward
post_transition_data = {
"actions": actions,
"reward": [(reward,)],
"terminated": [(terminated != env_info.get("episode_limit", False),)],
}
self.batch.update(post_transition_data, ts=self.t)
self.t += 1
last_data = {
"state": [self.env.get_state()],
"avail_actions": [self.env.get_avail_actions()],
"obs": [self.env.get_obs()]
}
self.batch.update(last_data, ts=self.t)
actions = self.mac.select_actions(self.batch, t_ep=self.t, t_env=self.t_env, test_mode=test_mode)
self.batch.update({"actions": actions}, ts=self.t)
cur_stats = self.test_stats if test_mode else self.train_stats
cur_returns = self.test_returns if test_mode else self.train_returns
log_prefix = "test_" if test_mode else ""
cur_stats.update({k: cur_stats.get(k, 0) + env_info.get(k, 0) for k in set(cur_stats) | set(env_info)})
cur_stats["n_episodes"] = 1 + cur_stats.get("n_episodes", 0)
cur_stats["ep_length"] = self.t + cur_stats.get("ep_length", 0)
if not test_mode:
self.t_env += self.t
cur_returns.append(episode_return)
if test_mode and (len(self.test_returns) == self.args.test_nepisode):
self._log(cur_returns, cur_stats, log_prefix)
elif self.t_env - self.log_train_stats_t >= self.args.runner_log_interval:
self._log(cur_returns, cur_stats, log_prefix)
if hasattr(self.mac.action_selector, "epsilon"):
self.logger.log_stat("epsilon", self.mac.action_selector.epsilon, self.t_env)
self.log_train_stats_t = self.t_env
return self.batch
def _log(self, returns, stats, prefix):
self.logger.log_stat(prefix + "return_mean", np.mean(returns), self.t_env)
self.logger.log_stat(prefix + "return_std", np.std(returns), self.t_env)
returns.clear()
for k, v in stats.items():
if k != "n_episodes":
self.logger.log_stat(prefix + k + "_mean" , v/stats["n_episodes"], self.t_env)
stats.clear()
| true | true |
f72532605be0861ccc1b4e26456972b2f7cf7351 | 997 | py | Python | pynsot/serializers.py | dropbox/pynsot | 3a0ff2f6994860beaea147486d914fc0e7e37080 | [
"Apache-2.0"
] | 41 | 2015-01-17T02:59:44.000Z | 2021-12-01T16:16:25.000Z | pynsot/serializers.py | dropbox/pynsot | 3a0ff2f6994860beaea147486d914fc0e7e37080 | [
"Apache-2.0"
] | 88 | 2015-03-09T21:30:53.000Z | 2021-05-21T14:58:23.000Z | pynsot/serializers.py | dropbox/pynsot | 3a0ff2f6994860beaea147486d914fc0e7e37080 | [
"Apache-2.0"
] | 30 | 2015-01-17T02:59:15.000Z | 2021-04-19T22:32:57.000Z | # -*- coding: utf-8 -*-
"""
Specialized serializers for NSoT API client.
This is an example of how you would use this with the Client object, to make it
return objects instead of dicts::
>>> serializer = ModelSerializer()
>>> api = Client(url, serializer=serializer)
>>> obj = api.sites(1).get()
>>> obj
<Site(id=1, description=u'Foo site', name=u'Foo')>
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from slumber.serialize import JsonSerializer
from .import models
__author__ = 'Jathan McCollum'
__maintainer__ = 'Jathan McCollum'
__email__ = 'jathan@dropbox.com'
__copyright__ = 'Copyright (c) 2015-2016 Dropbox, Inc.'
class ModelSerializer(JsonSerializer):
    """JSON serializer variant that deserializes API responses into
    ``models.ApiModel`` instances instead of plain dicts."""
    key = 'model'
    def get_serializer(self, *args, **kwargs):
        """Always hand back this serializer instance, whatever is requested."""
        return self
    def loads(self, data):
        """Decode *data* as JSON, then wrap the result in an ApiModel."""
        decoded = super(ModelSerializer, self).loads(data)
        return models.ApiModel(decoded)
| 25.564103 | 79 | 0.697091 |
from __future__ import unicode_literals
from __future__ import absolute_import
from slumber.serialize import JsonSerializer
from .import models
__author__ = 'Jathan McCollum'
__maintainer__ = 'Jathan McCollum'
__email__ = 'jathan@dropbox.com'
__copyright__ = 'Copyright (c) 2015-2016 Dropbox, Inc.'
class ModelSerializer(JsonSerializer):
key = 'model'
def get_serializer(self, *args, **kwargs):
return self
def loads(self, data):
obj_data = super(ModelSerializer, self).loads(data)
return models.ApiModel(obj_data)
| true | true |
f72532c82404f85f49d2567399b1b9784361b2ba | 779 | py | Python | app/Model/Model_facedetection.py | Renanrbsc/System_Face_Recognition | 45a6778e18325245bea27abe41d3a646fa09863c | [
"MIT"
] | null | null | null | app/Model/Model_facedetection.py | Renanrbsc/System_Face_Recognition | 45a6778e18325245bea27abe41d3a646fa09863c | [
"MIT"
] | null | null | null | app/Model/Model_facedetection.py | Renanrbsc/System_Face_Recognition | 45a6778e18325245bea27abe41d3a646fa09863c | [
"MIT"
] | null | null | null | import cv2
from app.Model.Model_cascades import Cascades
class FaceDetection:
    """Thin wrapper around OpenCV face-detection helpers."""
    def __init__(self):
        # Haar cascade used for face detection (project-defined constant).
        self.type_cascade = Cascades.FACECASCADE
    def get_type_cascade(self):
        """Return the cascade classifier configured for faces."""
        return self.type_cascade
    def detection_rectangle_dimensions(self):
        """Return [scaleFactor, minNeighbors, minSize] for detectMultiScale."""
        return [1.3, 5, (30, 30)]
    def format_rectangle(self, image, x, y, w, h):
        """Draw a blue 2px rectangle for the detection box onto *image*."""
        top_left = (x, y)
        bottom_right = (x + w, y + h)
        cv2.rectangle(image, top_left, bottom_right, (255, 0, 0), 2)
    def detection_rectangle(self, rectangle: list, x, y, w, h):
        """Return the sub-array of *rectangle* covered by the detection box."""
        return rectangle[y:y + h, x:x + w]
    def detection_color(self, image):
        """Return *image* converted from BGR to grayscale."""
        return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
| 27.821429 | 68 | 0.640565 | import cv2
from app.Model.Model_cascades import Cascades
class FaceDetection:
def __init__(self):
self.type_cascade = Cascades.FACECASCADE
def get_type_cascade(self):
return self.type_cascade
def detection_rectangle_dimensions(self):
scaleFactor = 1.3
minNeighbors = 5
minSize = (30, 30)
return [scaleFactor, minNeighbors, minSize]
def format_rectangle(self, image, x, y, w, h):
cv2.rectangle(image, (x, y), (x + w, y + h), (255, 0, 0), 2)
def detection_rectangle(self, rectangle: list, x, y, w, h):
new_rectangle = rectangle[y:y + h, x:x + w]
return new_rectangle
def detection_color(self, image):
color = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
return color
| true | true |
f72534dd903118747d36c4ba1c73abaee618366c | 825 | py | Python | setup.py | elliotnunn/macresources | cc7c6aacec7d241c945d925c3a2473c3917ef4e0 | [
"MIT"
] | 5 | 2019-09-25T01:09:07.000Z | 2021-11-03T02:39:42.000Z | setup.py | elliotnunn/macresources | cc7c6aacec7d241c945d925c3a2473c3917ef4e0 | [
"MIT"
] | null | null | null | setup.py | elliotnunn/macresources | cc7c6aacec7d241c945d925c3a2473c3917ef4e0 | [
"MIT"
] | null | null | null | from setuptools import setup
setup(
name='macresources',
version='1.2',
author='Elliot Nunn',
author_email='elliotnunn@me.com',
description='Library for working with legacy Macintosh resource forks',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license='MIT',
url='https://github.com/elliotnunn/macresources',
classifiers=[
'Programming Language :: Python :: 3 :: Only',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Topic :: System :: Filesystems',
'Development Status :: 3 - Alpha',
],
packages=['macresources'],
scripts=['bin/SimpleRez', 'bin/SimpleDeRez', 'bin/hexrez', 'bin/rezhex', 'bin/sortrez', 'bin/rfx', 'bin/greggybits', 'bin/instacomp'],
)
| 35.869565 | 138 | 0.647273 | from setuptools import setup
setup(
name='macresources',
version='1.2',
author='Elliot Nunn',
author_email='elliotnunn@me.com',
description='Library for working with legacy Macintosh resource forks',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license='MIT',
url='https://github.com/elliotnunn/macresources',
classifiers=[
'Programming Language :: Python :: 3 :: Only',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Topic :: System :: Filesystems',
'Development Status :: 3 - Alpha',
],
packages=['macresources'],
scripts=['bin/SimpleRez', 'bin/SimpleDeRez', 'bin/hexrez', 'bin/rezhex', 'bin/sortrez', 'bin/rfx', 'bin/greggybits', 'bin/instacomp'],
)
| true | true |
f72535000dbea0756b1c0b1ca77caaa8aa396926 | 3,406 | py | Python | main.py | chanhee0222/feed2resp | 16dc7071f17af56cbf019eeabcd12a5dbd0693e7 | [
"MIT"
] | null | null | null | main.py | chanhee0222/feed2resp | 16dc7071f17af56cbf019eeabcd12a5dbd0693e7 | [
"MIT"
] | null | null | null | main.py | chanhee0222/feed2resp | 16dc7071f17af56cbf019eeabcd12a5dbd0693e7 | [
"MIT"
] | null | null | null | import argparse
import datetime
import glob
import logging
import os
import time
import torch
from logging_helper import init_logger
from models import Discriminator, BartSystem
from train import train
from transformer_base import add_generic_args, generic_train
class Config():
    """Hyper-parameter and path configuration for the style-transfer run.

    Plain class attributes are used as a simple namespace; ``main()`` also
    mirrors a few values onto the argparse namespace at startup.
    """
    # --- paths / device -------------------------------------------------
    # data_path = './data/chatbot/'
    # log_dir = 'runs/exp'
    save_path = './save'
    # pretrained_embed_path = './embedding/'
    device = torch.device('cuda' if True and torch.cuda.is_available() else 'cpu')
    # device = torch.device('cpu')
    # --- model shape ----------------------------------------------------
    discriminator_method = 'Multi' # 'Multi' or 'Cond'
    load_pretrained_embed = False
    min_freq = 3
    max_length = 1024 # max_source_length
    # embed_size = 256
    d_model = 256
    h = 4  # number of attention heads
    num_styles = 2
    # 'Multi' adds one extra class on top of the style classes.
    num_classes = num_styles + 1 if discriminator_method == 'Multi' else 2
    num_layers = 4
    # batch_size = 64
    # --- optimisation ---------------------------------------------------
    lr_F = 5e-6   # generator (BART) learning rate
    lr_D = 1e-4   # discriminator learning rate
    L2 = 0
    iter_D = 10   # discriminator steps per round
    iter_F = 5    # generator steps per round
    F_pretrain_iter = 1
    log_steps = 5
    eval_steps = 25
    learned_pos_embed = True
    dropout = 0
    drop_rate_config = [(1, 0)]
    temperature_config = [(1, 0)]
    # --- loss weights ---------------------------------------------------
    slf_factor = 0.25  # self-reconstruction
    cyc_factor = 0.5   # cycle consistency
    adv_factor = 1     # adversarial
    # --- input noising (all disabled) -----------------------------------
    inp_shuffle_len = 0
    inp_unk_drop_fac = 0
    inp_rand_drop_fac = 0
    inp_drop_prob = 0
    ### Bart system
    output_dir='feedback_sum'
    do_predict=True
    max_source_length=1024
    max_target_length=56
    data_dir="feedback"
def get_n_params(model):
    """Return the total number of scalar parameters in *model*.

    Equivalent to the hand-rolled loop it replaces (summing the product of
    each parameter's ``size()``), but uses ``Tensor.numel()``, which torch
    parameters provide directly.
    """
    return sum(p.numel() for p in model.parameters())
def main():
    """Parse CLI args, build the BART generator and discriminator, and train."""
    config = Config()
    parser = argparse.ArgumentParser()
    add_generic_args(parser, os.getcwd())
    parser = BartSystem.add_model_specific_args(parser, os.getcwd())
    args = parser.parse_args()

    # Some values must be mirrored between Config and args because the two
    # configuration objects are used side by side throughout training.
    setattr(config, "num_train_epochs", args.num_train_epochs)
    setattr(config, "save_path", args.output_dir)
    setattr(args, "learning_rate", config.lr_F)

    # Create a timestamped output directory for checkpoints and logs.
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
    setattr(config, "save_folder", os.path.join(config.save_path, timestamp))
    os.makedirs(os.path.join(config.save_folder, 'ckpts'))
    init_logger(config.save_folder)
    logger = logging.getLogger(__name__)

    model_F = BartSystem(args).to(config.device)
    # Don't use the trainer to fit the model
    args.do_train = False
    if args.output_dir:
        try:
            checkpoints = sorted(glob.glob(os.path.join(args.output_dir, "checkpointepoch=*.ckpt"), recursive=True))
            if checkpoints[-1]:
                BartSystem.load_from_checkpoint(checkpoints[-1])
                logger.info("Load checkpoint sucessfully!")
        except Exception:
            # No checkpoint present (IndexError on an empty list) or loading
            # failed; continue from the freshly initialised model. Narrowed
            # from a bare ``except:`` which also swallowed KeyboardInterrupt.
            logger.info("Failed to load checkpoint!")

    # train_iters, dev_iters, test_iters, vocab = load_dataset(config)
    train_iters, dev_iters, test_iters = model_F.train_dataloader(), model_F.val_dataloader(), model_F.test_dataloader()

    model_D = Discriminator(config, model_F.tokenizer).to(config.device)
    logger.info(config.discriminator_method)
    logger.info(model_D)

    train(config, model_F, model_D, train_iters, dev_iters, test_iters)


if __name__ == '__main__':
    main()
| 28.383333 | 122 | 0.668526 | import argparse
import datetime
import glob
import logging
import os
import time
import torch
from logging_helper import init_logger
from models import Discriminator, BartSystem
from train import train
from transformer_base import add_generic_args, generic_train
class Config():
save_path = './save'
device = torch.device('cuda' if True and torch.cuda.is_available() else 'cpu')
discriminator_method = 'Multi'
load_pretrained_embed = False
min_freq = 3
max_length = 1024
d_model = 256
h = 4
num_styles = 2
num_classes = num_styles + 1 if discriminator_method == 'Multi' else 2
num_layers = 4
lr_F = 5e-6
lr_D = 1e-4
L2 = 0
iter_D = 10
iter_F = 5
F_pretrain_iter = 1
log_steps = 5
eval_steps = 25
learned_pos_embed = True
dropout = 0
drop_rate_config = [(1, 0)]
temperature_config = [(1, 0)]
slf_factor = 0.25
cyc_factor = 0.5
adv_factor = 1
inp_shuffle_len = 0
inp_unk_drop_fac = 0
inp_rand_drop_fac = 0
inp_drop_prob = 0
um'
do_predict=True
max_source_length=1024
max_target_length=56
data_dir="feedback"
def get_n_params(model):
pp=0
for p in list(model.parameters()):
nn=1
for s in list(p.size()):
nn = nn*s
pp += nn
return pp
def main():
config = Config()
parser = argparse.ArgumentParser()
add_generic_args(parser, os.getcwd())
parser = BartSystem.add_model_specific_args(parser, os.getcwd())
args = parser.parse_args()
setattr(config, "num_train_epochs", args.num_train_epochs)
setattr(config, "save_path", args.output_dir)
setattr(args, "learning_rate", config.lr_F)
timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
setattr(config, "save_folder", os.path.join(config.save_path, timestamp))
os.makedirs(os.path.join(config.save_folder, 'ckpts'))
init_logger(config.save_folder)
logger = logging.getLogger(__name__)
model_F = BartSystem(args).to(config.device)
args.do_train = False
# trainer = generic_train(model_F, args)
if args.output_dir:
try:
checkpoints = list(sorted(glob.glob(os.path.join(args.output_dir, "checkpointepoch=*.ckpt"), recursive=True)))
if checkpoints[-1]:
BartSystem.load_from_checkpoint(checkpoints[-1])
logger.info("Load checkpoint sucessfully!")
except:
logger.info("Failed to load checkpoint!")
# train_iters, dev_iters, test_iters, vocab = load_dataset(config)
train_iters, dev_iters, test_iters = model_F.train_dataloader(), model_F.val_dataloader(), model_F.test_dataloader()
model_D = Discriminator(config, model_F.tokenizer).to(config.device)
logger.info(config.discriminator_method)
# import pdb
# pdb.set_trace()
logger.info(model_D)
train(config, model_F, model_D, train_iters, dev_iters, test_iters)
if __name__ == '__main__':
main()
| true | true |
f7253541b69cfbfcfbec4e8411f5009f4337f8ed | 6,853 | py | Python | homeassistant/components/mqtt/number.py | Kiskae/core | f538e07902b5370fdf448627798444df43a32085 | [
"Apache-2.0"
] | 3 | 2021-03-31T12:56:27.000Z | 2021-05-25T15:26:01.000Z | homeassistant/components/mqtt/number.py | Kiskae/core | f538e07902b5370fdf448627798444df43a32085 | [
"Apache-2.0"
] | 65 | 2018-10-14T08:59:06.000Z | 2022-03-31T06:04:07.000Z | homeassistant/components/mqtt/number.py | Kiskae/core | f538e07902b5370fdf448627798444df43a32085 | [
"Apache-2.0"
] | 1 | 2021-03-29T18:56:52.000Z | 2021-03-29T18:56:52.000Z | """Configure number in a device through MQTT topic."""
import functools
import logging
import voluptuous as vol
from homeassistant.components import number
from homeassistant.components.number import (
DEFAULT_MAX_VALUE,
DEFAULT_MIN_VALUE,
DEFAULT_STEP,
NumberEntity,
)
from homeassistant.const import CONF_NAME, CONF_OPTIMISTIC, CONF_VALUE_TEMPLATE
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from . import (
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_STATE_TOPIC,
DOMAIN,
PLATFORMS,
subscription,
)
from .. import mqtt
from .const import CONF_RETAIN
from .debug_info import log_messages
from .mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, async_setup_entry_helper
_LOGGER = logging.getLogger(__name__)

# Configuration keys for the numeric range/step exposed by the entity.
CONF_MIN = "min"
CONF_MAX = "max"
CONF_STEP = "step"

# Defaults applied when the user omits these options.
DEFAULT_NAME = "MQTT Number"
DEFAULT_OPTIMISTIC = False
def validate_config(config):
    """Ensure the configured minimum is strictly below the maximum.

    Raises ``vol.Invalid`` on violation; returns *config* unchanged otherwise.
    """
    minimum = config.get(CONF_MIN)
    maximum = config.get(CONF_MAX)
    if minimum >= maximum:
        raise vol.Invalid(f"'{CONF_MAX}' must be > '{CONF_MIN}'")
    return config
# Schema for YAML/discovery configuration: extends the shared MQTT
# read/write platform schema with number-specific options, then applies the
# cross-field check that CONF_MIN < CONF_MAX.
PLATFORM_SCHEMA = vol.All(
    mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
        {
            vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): vol.Coerce(float),
            vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): vol.Coerce(float),
            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
            vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
            vol.Optional(CONF_STEP, default=DEFAULT_STEP): vol.All(
                vol.Coerce(float), vol.Range(min=1e-3)  # step must be positive
            ),
            vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
        },
    ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema),
    validate_config,
)
async def async_setup_platform(
    hass: HomeAssistant, config: ConfigType, async_add_entities, discovery_info=None
):
    """Set up MQTT number through configuration.yaml."""
    # Register the reload service for this integration's platforms, then
    # create the entity from the static YAML config.
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up MQTT number dynamically through MQTT discovery."""
    # Bind the entity factory so the discovery helper can invoke it for each
    # discovered config payload, validated against PLATFORM_SCHEMA.
    setup = functools.partial(
        _async_setup_entity, hass, async_add_entities, config_entry=config_entry
    )
    await async_setup_entry_helper(hass, number.DOMAIN, setup, PLATFORM_SCHEMA)
async def _async_setup_entity(
    hass, async_add_entities, config, config_entry=None, discovery_data=None
):
    """Set up the MQTT number.

    Shared by both the YAML path (no config_entry/discovery_data) and the
    MQTT-discovery path.
    """
    async_add_entities([MqttNumber(hass, config, config_entry, discovery_data)])
class MqttNumber(MqttEntity, NumberEntity, RestoreEntity):
    """Representation of an MQTT number."""
    def __init__(self, hass, config, config_entry, discovery_data):
        """Initialize the MQTT Number."""
        self._config = config
        self._optimistic = False
        self._sub_state = None
        # Last value received from MQTT (or restored); None until known.
        self._current_number = None
        NumberEntity.__init__(self)
        MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
    @staticmethod
    def config_schema():
        """Return the config schema."""
        return PLATFORM_SCHEMA
    def _setup_from_config(self, config):
        """(Re)Setup the entity."""
        self._optimistic = config[CONF_OPTIMISTIC]
        value_template = self._config.get(CONF_VALUE_TEMPLATE)
        if value_template is not None:
            # Templates need the hass object to render.
            value_template.hass = self.hass
    async def _subscribe_topics(self):
        """(Re)Subscribe to topics."""
        @callback
        @log_messages(self.hass, self.entity_id)
        def message_received(msg):
            """Handle new MQTT messages."""
            payload = msg.payload
            value_template = self._config.get(CONF_VALUE_TEMPLATE)
            if value_template is not None:
                payload = value_template.async_render_with_possible_json_value(payload)
            try:
                # isnumeric() is True only for non-negative integer strings;
                # negatives and decimals fall through to the float() branch.
                if payload.isnumeric():
                    num_value = int(payload)
                else:
                    num_value = float(payload)
            except ValueError:
                _LOGGER.warning("Payload '%s' is not a Number", msg.payload)
                return
            # Reject values outside the configured min/max range.
            if num_value < self.min_value or num_value > self.max_value:
                _LOGGER.error(
                    "Invalid value for %s: %s (range %s - %s)",
                    self.entity_id,
                    num_value,
                    self.min_value,
                    self.max_value,
                )
                return
            self._current_number = num_value
            self.async_write_ha_state()
        if self._config.get(CONF_STATE_TOPIC) is None:
            # Force into optimistic mode.
            self._optimistic = True
        else:
            self._sub_state = await subscription.async_subscribe_topics(
                self.hass,
                self._sub_state,
                {
                    "state_topic": {
                        "topic": self._config.get(CONF_STATE_TOPIC),
                        "msg_callback": message_received,
                        "qos": self._config[CONF_QOS],
                    }
                },
            )
        if self._optimistic:
            # Restore the last known value across restarts.
            # NOTE(review): last_state.state is the stored state string, not
            # re-parsed into a number here — confirm downstream handling.
            last_state = await self.async_get_last_state()
            if last_state:
                self._current_number = last_state.state
    @property
    def min_value(self) -> float:
        """Return the minimum value."""
        return self._config[CONF_MIN]
    @property
    def max_value(self) -> float:
        """Return the maximum value."""
        return self._config[CONF_MAX]
    @property
    def step(self) -> float:
        """Return the increment/decrement step."""
        return self._config[CONF_STEP]
    @property
    def value(self):
        """Return the current value."""
        return self._current_number
    async def async_set_value(self, value: float) -> None:
        """Update the current value."""
        # Publish whole numbers without a trailing ".0".
        current_number = value
        if value.is_integer():
            current_number = int(value)
        if self._optimistic:
            # No state topic to confirm against: assume the write succeeded.
            self._current_number = current_number
            self.async_write_ha_state()
        mqtt.async_publish(
            self.hass,
            self._config[CONF_COMMAND_TOPIC],
            current_number,
            self._config[CONF_QOS],
            self._config[CONF_RETAIN],
        )
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
| 31.726852 | 87 | 0.637531 | import functools
import logging
import voluptuous as vol
from homeassistant.components import number
from homeassistant.components.number import (
DEFAULT_MAX_VALUE,
DEFAULT_MIN_VALUE,
DEFAULT_STEP,
NumberEntity,
)
from homeassistant.const import CONF_NAME, CONF_OPTIMISTIC, CONF_VALUE_TEMPLATE
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from . import (
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_STATE_TOPIC,
DOMAIN,
PLATFORMS,
subscription,
)
from .. import mqtt
from .const import CONF_RETAIN
from .debug_info import log_messages
from .mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, async_setup_entry_helper
_LOGGER = logging.getLogger(__name__)
CONF_MIN = "min"
CONF_MAX = "max"
CONF_STEP = "step"
DEFAULT_NAME = "MQTT Number"
DEFAULT_OPTIMISTIC = False
def validate_config(config):
if config.get(CONF_MIN) >= config.get(CONF_MAX):
raise vol.Invalid(f"'{CONF_MAX}' must be > '{CONF_MIN}'")
return config
PLATFORM_SCHEMA = vol.All(
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): vol.Coerce(float),
vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_STEP, default=DEFAULT_STEP): vol.All(
vol.Coerce(float), vol.Range(min=1e-3)
),
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
},
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema),
validate_config,
)
async def async_setup_platform(
hass: HomeAssistant, config: ConfigType, async_add_entities, discovery_info=None
):
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(hass, config_entry, async_add_entities):
setup = functools.partial(
_async_setup_entity, hass, async_add_entities, config_entry=config_entry
)
await async_setup_entry_helper(hass, number.DOMAIN, setup, PLATFORM_SCHEMA)
async def _async_setup_entity(
hass, async_add_entities, config, config_entry=None, discovery_data=None
):
async_add_entities([MqttNumber(hass, config, config_entry, discovery_data)])
class MqttNumber(MqttEntity, NumberEntity, RestoreEntity):
def __init__(self, hass, config, config_entry, discovery_data):
self._config = config
self._optimistic = False
self._sub_state = None
self._current_number = None
NumberEntity.__init__(self)
MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
@staticmethod
def config_schema():
return PLATFORM_SCHEMA
def _setup_from_config(self, config):
self._optimistic = config[CONF_OPTIMISTIC]
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = self.hass
async def _subscribe_topics(self):
@callback
@log_messages(self.hass, self.entity_id)
def message_received(msg):
payload = msg.payload
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
payload = value_template.async_render_with_possible_json_value(payload)
try:
if payload.isnumeric():
num_value = int(payload)
else:
num_value = float(payload)
except ValueError:
_LOGGER.warning("Payload '%s' is not a Number", msg.payload)
return
if num_value < self.min_value or num_value > self.max_value:
_LOGGER.error(
"Invalid value for %s: %s (range %s - %s)",
self.entity_id,
num_value,
self.min_value,
self.max_value,
)
return
self._current_number = num_value
self.async_write_ha_state()
if self._config.get(CONF_STATE_TOPIC) is None:
self._optimistic = True
else:
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"state_topic": {
"topic": self._config.get(CONF_STATE_TOPIC),
"msg_callback": message_received,
"qos": self._config[CONF_QOS],
}
},
)
if self._optimistic:
last_state = await self.async_get_last_state()
if last_state:
self._current_number = last_state.state
@property
def min_value(self) -> float:
return self._config[CONF_MIN]
@property
def max_value(self) -> float:
return self._config[CONF_MAX]
@property
def step(self) -> float:
return self._config[CONF_STEP]
@property
def value(self):
return self._current_number
async def async_set_value(self, value: float) -> None:
current_number = value
if value.is_integer():
current_number = int(value)
if self._optimistic:
self._current_number = current_number
self.async_write_ha_state()
mqtt.async_publish(
self.hass,
self._config[CONF_COMMAND_TOPIC],
current_number,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
@property
def assumed_state(self):
return self._optimistic
| true | true |
f72536cc54c5ab45ec1ef476582c6f686e353776 | 9,038 | py | Python | netapp/santricity/models/v2/discovery_start_request.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 5 | 2016-08-23T17:52:22.000Z | 2019-05-16T08:45:30.000Z | netapp/santricity/models/v2/discovery_start_request.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 2 | 2016-11-10T05:30:21.000Z | 2019-04-05T15:03:37.000Z | netapp/santricity/models/v2/discovery_start_request.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 7 | 2016-08-25T16:11:44.000Z | 2021-02-22T05:31:25.000Z | # coding: utf-8
"""
DiscoveryStartRequest.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from pprint import pformat
from six import iteritems
class DiscoveryStartRequest(object):
    """Parameters for starting a storage-system discovery scan over an IP range.

    NOTE: This class was auto generated by the swagger code generator program
    and then hand-fixed: ``__eq__`` previously returned ``None`` instead of a
    boolean for a ``None`` operand (and raised ``AttributeError`` for operands
    without ``__dict__``), and ``__repr__``/``__eq__`` carried unreachable
    ``self is None`` checks.
    """

    def __init__(self):
        """Initialize all attributes to None and record swagger metadata.

        ``swagger_types`` maps attribute name -> swagger type;
        ``attribute_map`` maps attribute name -> JSON key in the API payload.
        """
        self.swagger_types = {
            'start_ip': 'str',  # (required parameter)
            'end_ip': 'str',  # (required parameter)
            'use_agents': 'bool',
            'connection_timeout': 'int',
            'max_ports_to_use': 'int'
        }

        self.attribute_map = {
            'start_ip': 'startIP',  # (required parameter)
            'end_ip': 'endIP',  # (required parameter)
            'use_agents': 'useAgents',
            'connection_timeout': 'connectionTimeout',
            'max_ports_to_use': 'maxPortsToUse'
        }

        self._start_ip = None
        self._end_ip = None
        self._use_agents = None
        self._connection_timeout = None
        self._max_ports_to_use = None

    @property
    def start_ip(self):
        """Starting IP address of the discovery range (required)."""
        return self._start_ip

    @start_ip.setter
    def start_ip(self, start_ip):
        """Set the starting IP address."""
        self._start_ip = start_ip

    @property
    def end_ip(self):
        """Ending IP address of the discovery range (required)."""
        return self._end_ip

    @end_ip.setter
    def end_ip(self, end_ip):
        """Set the ending IP address."""
        self._end_ip = end_ip

    @property
    def use_agents(self):
        """Whether in-band management agents are queried (optional)."""
        return self._use_agents

    @use_agents.setter
    def use_agents(self, use_agents):
        """Set whether in-band management agents are queried."""
        self._use_agents = use_agents

    @property
    def connection_timeout(self):
        """Initial TCP connection timeout in seconds (optional; server default 30).

        Longer times may be needed for WAN discoveries but slow the scan.
        """
        return self._connection_timeout

    @connection_timeout.setter
    def connection_timeout(self, connection_timeout):
        """Set the initial TCP connection timeout in seconds."""
        self._connection_timeout = connection_timeout

    @property
    def max_ports_to_use(self):
        """Maximum number of ports (file handles) used for discovery (optional).

        More ports speed up discovery but consume OS file handles; the
        process adjusts down if handles are exhausted.
        """
        return self._max_ports_to_use

    @max_ports_to_use.setter
    def max_ports_to_use(self, max_ports_to_use):
        """Set the maximum number of ports used for discovery."""
        self._max_ports_to_use = max_ports_to_use

    def to_dict(self):
        """Return the model properties as a dict, recursing into nested models."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True when *other* is the same type with equal state."""
        # Fixed: previously returned None for a None operand and raised
        # AttributeError for operands without __dict__.
        if not isinstance(other, DiscoveryStartRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
| 39.295652 | 844 | 0.645829 |
from pprint import pformat
from six import iteritems
class DiscoveryStartRequest(object):
    """Request model for starting a discovery run (swagger-generated shape).

    A plain value object: five properties backed by private attributes,
    plus dict/str helpers used by the API client for serialization.
    Improvements over the generated original: Python-3-native
    ``dict.items()`` instead of ``six.iteritems``, dead ``self is None``
    guards removed from ``__repr__``/``__eq__``, and documentation added.
    """
    def __init__(self):
        # attribute name -> swagger type, consumed by to_dict().
        self.swagger_types = {
            'start_ip': 'str',
            'end_ip': 'str',
            'use_agents': 'bool',
            'connection_timeout': 'int',
            'max_ports_to_use': 'int'
        }
        # attribute name -> JSON key used on the wire.
        self.attribute_map = {
            'start_ip': 'startIP',
            'end_ip': 'endIP',
            'use_agents': 'useAgents',
            'connection_timeout': 'connectionTimeout',
            'max_ports_to_use': 'maxPortsToUse'
        }
        self._start_ip = None
        self._end_ip = None
        self._use_agents = None
        self._connection_timeout = None
        self._max_ports_to_use = None
    @property
    def start_ip(self):
        """First IP address of the discovery range."""
        return self._start_ip
    @start_ip.setter
    def start_ip(self, start_ip):
        self._start_ip = start_ip
    @property
    def end_ip(self):
        """Last IP address of the discovery range."""
        return self._end_ip
    @end_ip.setter
    def end_ip(self, end_ip):
        self._end_ip = end_ip
    @property
    def use_agents(self):
        """Whether in-band management agents are queried during discovery."""
        return self._use_agents
    @use_agents.setter
    def use_agents(self, use_agents):
        self._use_agents = use_agents
    @property
    def connection_timeout(self):
        """Initial TCP connection timeout in seconds."""
        return self._connection_timeout
    @connection_timeout.setter
    def connection_timeout(self, connection_timeout):
        self._connection_timeout = connection_timeout
    @property
    def max_ports_to_use(self):
        """Number of ports (file handles) discovery may open at once."""
        return self._max_ports_to_use
    @max_ports_to_use.setter
    def max_ports_to_use(self, max_ports_to_use):
        self._max_ports_to_use = max_ports_to_use
    def to_dict(self):
        """Return the model properties as a dict, recursing into nested models."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [v.to_dict() if hasattr(v, "to_dict") else v
                                for v in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v
                                for k, v in value.items()}
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Return the pretty-printed string form of the model."""
        return pformat(self.to_dict())
    def __repr__(self):
        """Representation used by `print` and `pprint`."""
        return self.to_str()
    def __eq__(self, other):
        """True when *other* has an identical attribute dict (False for None)."""
        if other is None:
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """True when the two objects are not equal."""
        return not self == other
| true | true |
f72536e7634a6d0146e21e210682f4ef9a21937b | 1,108 | py | Python | tests/test_async_manager.py | yehonadav/qaviton_io | 936c444db06d5c0a9fa75a56742c70a0deebee65 | [
"Apache-2.0"
] | null | null | null | tests/test_async_manager.py | yehonadav/qaviton_io | 936c444db06d5c0a9fa75a56742c70a0deebee65 | [
"Apache-2.0"
] | null | null | null | tests/test_async_manager.py | yehonadav/qaviton_io | 936c444db06d5c0a9fa75a56742c70a0deebee65 | [
"Apache-2.0"
] | null | null | null | from time import time
from requests import get
from qaviton_io.async_manager import AsyncManager
from tests.utils import server
def test_simple_requests():
def execute_tasks(number_of_tasks: int):
errors = {}
rs = []
def task():
try:
with server() as (host, port):
r = get(f'http://{host}:{port}')
r.raise_for_status()
rs.append(r)
except Exception as e:
name = f'{e.__traceback__}{e}'
if name in errors:
errors[name] += 1
else:
errors[name] = 1
tasks = [task for _ in range(number_of_tasks)]
manager = AsyncManager()
manager.log.clear()
t = time()
manager.run(tasks)
t = time() - t
print(f'took {round(t, 3)}s')
for e, n in errors.items():
print(f'{e} this error occurred {n} times')
assert not errors
return t
print("")
t1 = execute_tasks(1)
t2 = execute_tasks(20)
assert t2 < t1 * 2
| 26.380952 | 55 | 0.50361 | from time import time
from requests import get
from qaviton_io.async_manager import AsyncManager
from tests.utils import server
def test_simple_requests():
    """Smoke-test AsyncManager: 20 concurrent requests should take well
    under twice the time of a single one, with no task failures."""
    def run_batch(task_count: int):
        failures = {}
        responses = []
        def task():
            try:
                with server() as (host, port):
                    resp = get(f'http://{host}:{port}')
                    resp.raise_for_status()
                    responses.append(resp)
            except Exception as e:
                key = f'{e.__traceback__}{e}'
                failures[key] = failures.get(key, 0) + 1
        manager = AsyncManager()
        manager.log.clear()
        started = time()
        manager.run([task for _ in range(task_count)])
        elapsed = time() - started
        print(f'took {round(elapsed, 3)}s')
        for err, count in failures.items():
            print(f'{err} this error occurred {count} times')
        assert not failures
        return elapsed
    print("")
    single = run_batch(1)
    batch = run_batch(20)
    assert batch < single * 2
| true | true |
f72537a956b381dcd8b8b8636b46fb369b4e987b | 4,514 | py | Python | pymare/stats.py | tsalo/PyMARE | 7eb950fb137b6221f2ea5d381ca91d16eb4b8a35 | [
"MIT"
] | null | null | null | pymare/stats.py | tsalo/PyMARE | 7eb950fb137b6221f2ea5d381ca91d16eb4b8a35 | [
"MIT"
] | null | null | null | pymare/stats.py | tsalo/PyMARE | 7eb950fb137b6221f2ea5d381ca91d16eb4b8a35 | [
"MIT"
] | null | null | null | """Miscellaneous statistical functions."""
import numpy as np
import scipy.stats as ss
from scipy.optimize import Bounds, minimize
def weighted_least_squares(y, v, X, tau2=0.0, return_cov=False):
    """2-D weighted least squares.

    Args:
        y (NDArray): 2-d array of estimates (studies x parallel datasets)
        v (NDArray): 2-d array of sampling variances
        X (NDArray): Fixed effect design matrix
        tau2 (float): tau^2 estimate to use for weights
        return_cov (bool): Whether or not to return the inverse cov matrix

    Returns:
        If return_cov is True, a (beta, precision) pair of fixed parameter
        estimates and the inverse covariance matrix; otherwise beta alone.
    """
    weights = 1.0 / (v + tau2)
    # Einsum index convention: k = studies, p = predictors, i = parallel iterates.
    weighted_X = np.einsum("kp,ki->ipk", X, weights)
    fixed_cov = weighted_X.dot(X)
    # numpy >= 1.8 inverts stacked matrices along the leading dims, so the
    # pseudo-inverse is vectorized across the parallel datasets.
    precision = np.linalg.pinv(fixed_cov).T
    projection = np.einsum("ipk,qpi->iqk", weighted_X, precision)
    betas = np.einsum("ipk,ik->ip", projection, y.T).T
    if return_cov:
        return betas, precision
    return betas
def ensure_2d(arr):
    """Return *arr* coerced to a 2-d numpy array.

    ``None`` passes through unchanged, as do inputs numpy cannot convert.
    1-d inputs gain a trailing axis (column vector).
    """
    if arr is None:
        return arr
    try:
        arr = np.array(arr)
    except Exception:
        # A bare ``except:`` here would also swallow KeyboardInterrupt and
        # SystemExit; only conversion failures should fall through.
        return arr
    if arr.ndim == 1:
        arr = arr[:, None]
    return arr
def q_profile(y, v, X, alpha=0.05):
    """Confidence interval for tau^2 via the Q-Profile method (Viechtbauer, 2007).

    Args:
        y (ndarray): 1d array of study-level estimates
        v (ndarray): 1d array of study-level variances
        X (ndarray): 1d or 2d array of study-level predictors (including
            intercept); K x P for K studies and P predictors.
        alpha (float, optional): alpha level defining CI coverage, where
            width(CI) = 1 - alpha. Defaults to 0.05.

    Returns:
        dict with keys 'ci_l' and 'ci_u' — the lower and upper bounds of the
        tau^2 confidence interval.

    Notes:
        As in the Viechtbauer implementation, the interval places equal
        probability mass in both tails, rather than being the narrowest
        interval with the requested coverage.

    References:
        Viechtbauer, W. (2007). Confidence intervals for the amount of
        heterogeneity in meta-analysis. Statistics in Medicine, 26(1), 37-52.
    """
    k, p = X.shape
    dof = k - p
    crit_lower = ss.chi2.ppf(1 - alpha / 2, dof)
    crit_upper = ss.chi2.ppf(alpha / 2, dof)
    q_args = (ensure_2d(y), ensure_2d(v), X)
    bounds = Bounds([0], [np.inf], keep_feasible=True)
    # Start from the D-L tau^2 estimate; with a fixed starting value,
    # minimize() sometimes fails to stay within the bounds.
    from .estimators import DerSimonianLaird
    start = 2 * DerSimonianLaird().fit(y, v, X).params_["tau2"]
    lower = minimize(lambda t: (q_gen(*q_args, t) - crit_lower) ** 2, [0],
                     bounds=bounds).x[0]
    upper = minimize(lambda t: (q_gen(*q_args, t) - crit_upper) ** 2, [start],
                     bounds=bounds).x[0]
    return {"ci_l": lower, "ci_u": upper}
def q_gen(y, v, X, tau2):
    """Generalized form of Cochran's Q-statistic.

    Args:
        y (ndarray): 1d array of study-level estimates
        v (ndarray): 1d array of study-level variances
        X (ndarray): 1d or 2d array of study-level predictors (including
            intercept); K x P for K studies and P predictors.
        tau2 (float): Between-study variance; must be >= 0.

    Returns:
        The value of Cochran's Q-statistic (one per parallel dataset).

    References:
        Veroniki et al. (2016). Methods to estimate the between-study
        variance and its uncertainty in meta-analysis. Research Synthesis
        Methods, 7(1), 55-79. https://doi.org/10.1002/jrsm.1164
    """
    if np.any(tau2 < 0):
        raise ValueError("Value of tau^2 must be >= 0.")
    estimates = weighted_least_squares(y, v, X, tau2)
    weights = 1.0 / (v + tau2)
    residuals = y - X.dot(estimates)
    return (weights * residuals ** 2).sum(0)
| 36.699187 | 89 | 0.634692 |
import numpy as np
import scipy.stats as ss
from scipy.optimize import Bounds, minimize
def weighted_least_squares(y, v, X, tau2=0.0, return_cov=False):
    """2-D weighted least squares across parallel datasets.

    Args:
        y: 2-d array of estimates (studies x parallel datasets).
        v: 2-d array of sampling variances, same shape as y.
        X: fixed-effect design matrix (studies x predictors).
        tau2: between-study variance folded into the weights.
        return_cov: if True, also return the precision (inverse covariance)
            matrix alongside the estimates.

    Returns:
        beta, or (beta, precision) when return_cov is True.
    """
    w = 1.0 / (v + tau2)
    # Einsum indices: k = studies, p = predictors, i = parallel iterates.
    wX = np.einsum("kp,ki->ipk", X, w)
    cov = wX.dot(X)
    # np.linalg.pinv inverts stacked matrices along the leading dims, so the
    # solve is vectorized over the parallel datasets.
    precision = np.linalg.pinv(cov).T
    pwX = np.einsum("ipk,qpi->iqk", wX, precision)
    beta = np.einsum("ipk,ik->ip", pwX, y.T).T
    return (beta, precision) if return_cov else beta
def ensure_2d(arr):
    """Return *arr* coerced to a 2-d numpy array.

    ``None`` passes through unchanged, as do inputs numpy cannot convert.
    1-d inputs gain a trailing axis (column vector).
    """
    if arr is None:
        return arr
    try:
        arr = np.array(arr)
    except Exception:
        # A bare ``except:`` here would also swallow KeyboardInterrupt and
        # SystemExit; only conversion failures should fall through.
        return arr
    if arr.ndim == 1:
        arr = arr[:, None]
    return arr
def q_profile(y, v, X, alpha=0.05):
    """Q-Profile confidence interval for tau^2 (Viechtbauer, 2007).

    Args:
        y: 1d array of study-level estimates.
        v: 1d array of study-level variances.
        X: study-level predictors (including intercept), K x P.
        alpha: CI coverage is 1 - alpha (equal mass in both tails).

    Returns:
        dict with 'ci_l' and 'ci_u' bounds for tau^2.
    """
    k, p = X.shape
    df = k - p
    # Chi-square critical values for the two tails.
    l_crit = ss.chi2.ppf(1 - alpha / 2, df)
    u_crit = ss.chi2.ppf(alpha / 2, df)
    args = (ensure_2d(y), ensure_2d(v), X)
    bds = Bounds([0], [np.inf], keep_feasible=True)
    # Start the upper-bound search from the DerSimonian-Laird tau^2 estimate;
    # a fixed starting value sometimes leaves the feasible region.
    from .estimators import DerSimonianLaird
    ub_start = 2 * DerSimonianLaird().fit(y, v, X).params_["tau2"]
    # Each bound is where the generalized Q-statistic crosses its critical value.
    lb = minimize(lambda x: (q_gen(*args, x) - l_crit) ** 2, [0], bounds=bds).x[0]
    ub = minimize(lambda x: (q_gen(*args, x) - u_crit) ** 2, [ub_start], bounds=bds).x[0]
    return {"ci_l": lb, "ci_u": ub}
def q_gen(y, v, X, tau2):
    """Generalized Cochran's Q-statistic.

    Args:
        y: 1d array of study-level estimates.
        v: 1d array of study-level variances.
        X: study-level predictors (including intercept), K x P.
        tau2: between-study variance; must be >= 0.

    Returns:
        Q-statistic value(s), one per parallel dataset.
    """
    if np.any(tau2 < 0):
        raise ValueError("Value of tau^2 must be >= 0.")
    # Weighted residual sum of squares around the WLS fixed-effect fit.
    beta = weighted_least_squares(y, v, X, tau2)
    w = 1.0 / (v + tau2)
    return (w * (y - X.dot(beta)) ** 2).sum(0)
| true | true |
f72538c76f49ccac9bcd5d18c35fad8c0e5bdbe6 | 691 | py | Python | backtracking/0216_combination_sum_3.py | MartinMa28/Algorithms_review | 3f2297038c00f5a560941360ca702e6868530f34 | [
"MIT"
] | null | null | null | backtracking/0216_combination_sum_3.py | MartinMa28/Algorithms_review | 3f2297038c00f5a560941360ca702e6868530f34 | [
"MIT"
] | null | null | null | backtracking/0216_combination_sum_3.py | MartinMa28/Algorithms_review | 3f2297038c00f5a560941360ca702e6868530f34 | [
"MIT"
] | null | null | null | class Solution:
    def __init__(self):
        # Accumulates every valid combination found by _backtrack.
        self.combs = []
def _backtrack(self, candidates, cur, target, k):
if len(cur) == k and sum(cur) == target:
self.combs.append(cur[:])
return
if sum(cur) > target:
return
elif len(cur) < k:
for idx, candi in enumerate(candidates):
cur.append(candi)
self._backtrack(candidates[idx + 1:], cur, target, k)
# backtracking
cur.pop()
def combinationSum3(self, k: int, n: int) -> list:
self._backtrack(range(1, 10), [], n, k)
return self.combs | 30.043478 | 69 | 0.474674 | class Solution:
    def __init__(self):
        # Accumulates every valid combination found by _backtrack.
        self.combs = []
    def _backtrack(self, candidates, cur, target, k):
        """DFS over strictly increasing candidates; records k-element
        combinations whose sum hits target in ``self.combs``."""
        if len(cur) == k and sum(cur) == target:
            self.combs.append(cur[:])  # store a copy, cur keeps mutating
            return
        if sum(cur) > target:
            # Prune: candidates are positive, the sum can only grow.
            return
        elif len(cur) < k:
            for idx, candi in enumerate(candidates):
                cur.append(candi)
                # Recurse on the remaining (later) candidates only.
                self._backtrack(candidates[idx + 1:], cur, target, k)
                cur.pop()  # backtrack
def combinationSum3(self, k: int, n: int) -> list:
self._backtrack(range(1, 10), [], n, k)
return self.combs | true | true |
f72538f645e6c0711034952f80384b2e12169de0 | 1,211 | py | Python | src/Aula19ex94UneDicLista.py | maberf/python | 0d36f1586c5f52081c2b27d42a1d37cee13116b0 | [
"MIT"
] | null | null | null | src/Aula19ex94UneDicLista.py | maberf/python | 0d36f1586c5f52081c2b27d42a1d37cee13116b0 | [
"MIT"
] | null | null | null | src/Aula19ex94UneDicLista.py | maberf/python | 0d36f1586c5f52081c2b27d42a1d37cee13116b0 | [
"MIT"
] | null | null | null | #CADASTRO DE PESSOAS em dicionário - AULA 19 EXERCÍCIO 94
#dados das pessos: nome, sexo e idade
#todos os dicionários numa lista
#Informar quantos cadastrados, média de idade, lista de mulheres e nomes de pessoas de idade acima da média
#
# People registry: one dict per person (name, sex, age) collected into a list,
# then summary output: total registered, mean age, women, people above the mean.
pessoa = dict()
grupo = list()
somaidades = media = 0
while True:
    # Reset the working dict; otherwise every list entry would alias one object.
    pessoa.clear()
    pessoa["nome"] = str(input('Nome: ')).strip()
    pessoa["sexo"] = str(input('Sexo: [M/F] ')).strip().upper()
    pessoa["idade"] = int(input('Idade: '))
    grupo.append(pessoa.copy())  # store an independent copy in the list
    cont = str(input('Continuar? [S/N] ')).strip().lower()
    somaidades += pessoa["idade"]
    if cont == 'n':
        break
media = somaidades / len(grupo)
print('-'*50)
print(f'A) Pessoas cadastradas: {len(grupo)}')
print(f'B) Média de idade: {media:.2f} anos')
print(f'C) Mulheres cadastradas: ', end='')
for cadastro in grupo:
    if cadastro["sexo"] == 'F':
        print(f'{cadastro["nome"]} ', end='')
print()
print(f'D) Acima da média: ', end='')
for cadastro in grupo:
    if cadastro["idade"] > media:
        print(f'{cadastro["nome"]} {cadastro["idade"]} anos ', end='')
print()
print('-'*50)
| 35.617647 | 107 | 0.630058 |
# People registry: one dict per person (name, sex, age) collected into a list,
# then summary output: total registered, mean age, women, people above the mean.
pessoa = dict()
grupo = list()
somaidades = media = 0
while True:
    # Reset the working dict so each iteration starts fresh.
    pessoa.clear()
    pessoa["nome"] = str(input('Nome: ')).strip()
    pessoa["sexo"] = str(input('Sexo: [M/F] ')).strip().upper()
    pessoa["idade"] = int(input('Idade: '))
    # Append a copy; appending ``pessoa`` itself would alias one shared dict.
    grupo.append(pessoa.copy())
    cont = str(input('Continuar? [S/N] ')).strip().lower()
    somaidades += pessoa["idade"]
    if cont == 'n':
        break
# Mean age over everyone registered.
media = somaidades/len(grupo)
print('-'*50)
print(f'A) Pessoas cadastradas: {len(grupo)}')
print(f'B) Média de idade: {media:.2f} anos')
print(f'C) Mulheres cadastradas: ', end='')
for i in range(len(grupo)):
    if grupo[i]["sexo"] == 'F':
        print(f'{grupo[i]["nome"]} ', end='')
print()
print(f'D) Acima da média: ', end='')
for i in range(len(grupo)):
    if grupo[i]["idade"] > media:
        print(f'{grupo[i]["nome"]} {grupo[i]["idade"]} anos ', end='')
print()
print('-'*50)
| true | true |
f7253987a63d4e5b5b25935dff1c1a6614e9432f | 804 | py | Python | app.py | computercavemen/web-scraping-challenge | 3a68fd3bd6ddfcfc3fbd033d6f1d472ab6a76a10 | [
"ADSL"
] | null | null | null | app.py | computercavemen/web-scraping-challenge | 3a68fd3bd6ddfcfc3fbd033d6f1d472ab6a76a10 | [
"ADSL"
] | null | null | null | app.py | computercavemen/web-scraping-challenge | 3a68fd3bd6ddfcfc3fbd033d6f1d472ab6a76a10 | [
"ADSL"
] | null | null | null | from flask import Flask, render_template, redirect
from jinja2 import Template
from splinter import browser
from flask_pymongo import PyMongo
import scrape_mars
# Create an instance of our Flask app.
app = Flask(__name__)
# Use flask_pymongo to set up the MongoDB connection (local mars_app database).
app.config["MONGO_URI"] = "mongodb://localhost:27017/mars_app"
mongo = PyMongo(app)
# Start clean: drop any document left over from a previous scrape run.
mongo.db.mars_page.drop()
# Route: landing page showing the most recently scraped Mars data.
@app.route("/")
def home():
    # Single-document collection; None until /scrape has been visited once.
    latest = mongo.db.mars_page.find_one()
    return render_template("index.html", mars_page=latest)
# Route: trigger a fresh scrape, persist it, then bounce back to the landing page.
@app.route("/scrape")
def scraper():
    collection = mongo.db.mars_page
    scraped = scrape_mars.scrape()
    # NOTE(review): Collection.update() is deprecated in PyMongo 3+;
    # update_one(..., upsert=True) is the modern equivalent - confirm driver version.
    collection.update({}, scraped, upsert=True)
    return redirect("/", code=302)
if __name__ == "__main__":
    # Development entry point: run Flask's debug server (auto-reload enabled).
    app.run(debug=True)
| 23.647059 | 63 | 0.732587 | from flask import Flask, render_template, redirect
from jinja2 import Template
from splinter import browser
from flask_pymongo import PyMongo
import scrape_mars
app = Flask(__name__)
app.config["MONGO_URI"] = "mongodb://localhost:27017/mars_app"
mongo = PyMongo(app)
mongo.db.mars_page.drop()
@app.route("/")
def home():
mars_page = mongo.db.mars_page.find_one()
return render_template("index.html", mars_page = mars_page)
@app.route("/scrape")
def scraper():
mars_page = mongo.db.mars_page
mars_page_data = scrape_mars.scrape()
mars_page.update({}, mars_page_data, upsert=True)
return redirect("/", code=302)
if __name__ == "__main__":
app.run(debug=True)
| true | true |
f72539f2e8f0879fdd0b4a7738c5479966731504 | 14,082 | py | Python | python/ccxt/async_support/base/exchange.py | Beaxy/ccxt | 4ade917da0f202dfbe614240223ab74832a3fc0d | [
"MIT"
] | null | null | null | python/ccxt/async_support/base/exchange.py | Beaxy/ccxt | 4ade917da0f202dfbe614240223ab74832a3fc0d | [
"MIT"
] | null | null | null | python/ccxt/async_support/base/exchange.py | Beaxy/ccxt | 4ade917da0f202dfbe614240223ab74832a3fc0d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
__version__ = '1.40.88'
# -----------------------------------------------------------------------------
import asyncio
import concurrent.futures
import socket
import certifi
import aiohttp
import ssl
import sys
import yarl
# -----------------------------------------------------------------------------
from ccxt.async_support.base.throttle import throttle
# -----------------------------------------------------------------------------
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import NotSupported
# -----------------------------------------------------------------------------
from ccxt.base.exchange import Exchange as BaseExchange
# -----------------------------------------------------------------------------
__all__ = [
'BaseExchange',
'Exchange',
]
# -----------------------------------------------------------------------------
class Exchange(BaseExchange):
    """Asynchronous (aiohttp-based) counterpart of the base ccxt Exchange.

    Adds an asyncio-aware token-bucket rate limiter, aiohttp
    session/connector lifecycle management, and async variants of the
    market/order/OHLCV loaders. The HTTP layer maps transport failures
    onto ccxt's exception hierarchy.
    """
    def __init__(self, config={}):
        # NOTE(review): ``config={}`` is a mutable default; it appears to be
        # only read here, but confirm BaseExchange.__init__ never mutates it.
        if 'asyncio_loop' in config:
            self.asyncio_loop = config['asyncio_loop']
        self.asyncio_loop = self.asyncio_loop or asyncio.get_event_loop()
        self.aiohttp_trust_env = config.get('aiohttp_trust_env', self.aiohttp_trust_env)
        self.verify = config.get('verify', self.verify)
        # If the caller injected a session we must never close it ourselves.
        self.own_session = 'session' not in config
        self.cafile = config.get('cafile', certifi.where())
        super(Exchange, self).__init__(config)
        self.init_rest_rate_limiter()
        self.markets_loading = None
        self.reloading_markets = False
    def init_rest_rate_limiter(self):
        # Token-bucket throttle bound to this instance's event loop.
        self.throttle = throttle(self.extend({
            'loop': self.asyncio_loop,
        }, self.tokenBucket))
    def __del__(self):
        # A garbage-collected exchange cannot await session.close(), so the
        # best we can do is warn the user to call close() explicitly.
        if self.session is not None:
            self.logger.warning(self.id + " requires to release all resources with an explicit call to the .close() coroutine. If you are using the exchange instance with async coroutines, add exchange.close() to your code into a place when you're done with the exchange and don't need the exchange instance anymore (at the end of your async coroutine).")
    # Async context-manager protocol (``async with exchange: ...``).
    if sys.version_info >= (3, 5):
        async def __aenter__(self):
            self.open()
            return self
        async def __aexit__(self, exc_type, exc, tb):
            await self.close()
    def open(self):
        """Lazily create the owned aiohttp session (no-op for injected sessions)."""
        if self.own_session and self.session is None:
            # Create our SSL context object with our CA cert file
            context = ssl.create_default_context(cafile=self.cafile) if self.verify else self.verify
            # Pass this SSL context to aiohttp and create a TCPConnector
            connector = aiohttp.TCPConnector(ssl=context, loop=self.asyncio_loop, enable_cleanup_closed=True)
            self.session = aiohttp.ClientSession(loop=self.asyncio_loop, connector=connector, trust_env=self.aiohttp_trust_env)
    async def close(self):
        """Close the owned session; injected sessions are only detached."""
        if self.session is not None:
            if self.own_session:
                await self.session.close()
            self.session = None
    async def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """A better wrapper over request for deferred signing"""
        if self.enableRateLimit:
            await self.throttle(self.rateLimit)
        self.lastRestRequestTimestamp = self.milliseconds()
        request = self.sign(path, api, method, params, headers, body)
        return await self.fetch(request['url'], request['method'], request['headers'], request['body'])
    async def fetch(self, url, method='GET', headers=None, body=None):
        """Perform a HTTP request and return decoded JSON data"""
        request_headers = self.prepare_request_headers(headers)
        url = self.proxy + url
        if self.verbose:
            self.print("\nRequest:", method, url, headers, body)
        self.logger.debug("%s %s, Request: %s %s", method, url, headers, body)
        request_body = body
        encoded_body = body.encode() if body else None
        self.open()
        session_method = getattr(self.session, method.lower())
        http_response = None
        http_status_code = None
        http_status_text = None
        json_response = None
        try:
            async with session_method(yarl.URL(url, encoded=True),
                                      data=encoded_body,
                                      headers=request_headers,
                                      timeout=(self.timeout / 1000),
                                      proxy=self.aiohttp_proxy) as response:
                http_response = await response.text()
                http_response = http_response.strip()
                http_status_code = response.status
                http_status_text = response.reason
                json_response = self.parse_json(http_response)
                headers = response.headers
                # Optionally retain the last raw/parsed response for debugging.
                if self.enableLastHttpResponse:
                    self.last_http_response = http_response
                if self.enableLastResponseHeaders:
                    self.last_response_headers = headers
                if self.enableLastJsonResponse:
                    self.last_json_response = json_response
                if self.verbose:
                    self.print("\nResponse:", method, url, http_status_code, headers, http_response)
                self.logger.debug("%s %s, Response: %s %s %s", method, url, http_status_code, headers, http_response)
        # Map transport-level failures onto ccxt's exception hierarchy,
        # chaining the original exception as the cause.
        except socket.gaierror as e:
            details = ' '.join([self.id, method, url])
            raise ExchangeNotAvailable(details) from e
        except (concurrent.futures.TimeoutError, asyncio.TimeoutError) as e:
            details = ' '.join([self.id, method, url])
            raise RequestTimeout(details) from e
        except aiohttp.ClientConnectionError as e:
            details = ' '.join([self.id, method, url])
            raise ExchangeNotAvailable(details) from e
        except aiohttp.ClientError as e:  # base exception class
            details = ' '.join([self.id, method, url])
            raise ExchangeError(details) from e
        self.handle_errors(http_status_code, http_status_text, url, method, headers, http_response, json_response, request_headers, request_body)
        self.handle_http_status_code(http_status_code, http_status_text, url, method, http_response)
        if json_response is not None:
            return json_response
        if self.is_text_response(headers):
            return http_response
        # Binary payload: hand back the raw aiohttp stream.
        # NOTE(review): the response context is already exited here — confirm
        # callers can still consume ``response.content`` at this point.
        return response.content
    async def load_markets_helper(self, reload=False, params={}):
        """Fetch (or reuse cached) markets and currencies, then index them."""
        if not reload:
            if self.markets:
                if not self.markets_by_id:
                    return self.set_markets(self.markets)
                return self.markets
        currencies = None
        if self.has['fetchCurrencies']:
            currencies = await self.fetch_currencies()
        markets = await self.fetch_markets(params)
        return self.set_markets(markets, currencies)
    async def load_markets(self, reload=False, params={}):
        """Concurrency-safe wrapper: concurrent callers await one shared task."""
        if (reload and not self.reloading_markets) or not self.markets_loading:
            self.reloading_markets = True
            coroutine = self.load_markets_helper(reload, params)
            # coroutines can only be awaited once so we wrap it in a task
            self.markets_loading = asyncio.ensure_future(coroutine)
        try:
            result = await self.markets_loading
        except Exception as e:
            # Reset so a later call retries instead of re-raising a cached failure.
            self.reloading_markets = False
            self.markets_loading = None
            raise e
        self.reloading_markets = False
        return result
    async def fetch_fees(self):
        """Collect trading/funding fees where the exchange supports them."""
        trading = {}
        funding = {}
        if self.has['fetchTradingFees']:
            trading = await self.fetch_trading_fees()
        if self.has['fetchFundingFees']:
            funding = await self.fetch_funding_fees()
        return {
            'trading': trading,
            'funding': funding,
        }
    async def load_fees(self, reload=False):
        if not reload:
            # Anything other than the class-level default means fees were loaded.
            if self.loaded_fees != Exchange.loaded_fees:
                return self.loaded_fees
        self.loaded_fees = self.deep_extend(self.loaded_fees, await self.fetch_fees())
        return self.loaded_fees
    async def fetch_markets(self, params={}):
        # markets are returned as a list
        # currencies are returned as a dict
        # this is for historical reasons
        # and may be changed for consistency later
        return self.to_array(self.markets)
    async def fetch_currencies(self, params={}):
        # markets are returned as a list
        # currencies are returned as a dict
        # this is for historical reasons
        # and may be changed for consistency later
        return self.currencies
    async def fetch_status(self, params={}):
        # Refresh the 'updated' timestamp when the exchange exposes fetch_time.
        if self.has['fetchTime']:
            updated = await self.fetch_time(params)
            self.status['updated'] = updated
        return self.status
    async def fetch_order_status(self, id, symbol=None, params={}):
        """Return just the 'status' field of a fetched order."""
        order = await self.fetch_order(id, symbol, params)
        return order['status']
    async def fetch_partial_balance(self, part, params={}):
        balance = await self.fetch_balance(params)
        return balance[part]
    async def fetch_l2_order_book(self, symbol, limit=None, params={}):
        """Level-2 book: per-price aggregation of the raw order book."""
        orderbook = await self.fetch_order_book(symbol, limit, params)
        return self.extend(orderbook, {
            'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
            'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
        })
    async def perform_order_book_request(self, market, limit=None, params={}):
        # Subclasses must override with the exchange-specific request.
        raise NotSupported(self.id + ' performOrderBookRequest not supported yet')
    async def fetch_order_book(self, symbol, limit=None, params={}):
        await self.load_markets()
        market = self.market(symbol)
        orderbook = await self.perform_order_book_request(market, limit, params)
        return self.parse_order_book(orderbook, market, limit, params)
    async def fetch_ohlcvc(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Build OHLCV(+count) candles from the public trades feed."""
        if not self.has['fetchTrades']:
            raise NotSupported('fetch_ohlcv() not implemented yet')
        await self.load_markets()
        trades = await self.fetch_trades(symbol, since, limit, params)
        return self.build_ohlcvc(trades, timeframe, since, limit)
    async def fetchOHLCVC(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        return await self.fetch_ohlcvc(symbol, timeframe, since, limit, params)
    async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        # Drop the trailing trade-count column from each candle.
        ohlcvs = await self.fetch_ohlcvc(symbol, timeframe, since, limit, params)
        return [ohlcv[0:-1] for ohlcv in ohlcvs]
    async def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        return await self.fetch_ohlcv(symbol, timeframe, since, limit, params)
    async def fetch_full_tickers(self, symbols=None, params={}):
        return await self.fetch_tickers(symbols, params)
    async def edit_order(self, id, symbol, *args):
        # Emulated as cancel+create, which requires the rate limiter enabled.
        if not self.enableRateLimit:
            raise ExchangeError('updateOrder() requires enableRateLimit = true')
        await self.cancel_order(id, symbol)
        return await self.create_order(symbol, *args)
    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        raise NotSupported('create_order() not supported yet')
    async def cancel_order(self, id, symbol=None, params={}):
        raise NotSupported('cancel_order() not supported yet')
    async def fetch_trading_fees(self, params={}):
        raise NotSupported('fetch_trading_fees() not supported yet')
    async def fetch_trading_fee(self, symbol, params={}):
        if not self.has['fetchTradingFees']:
            raise NotSupported('fetch_trading_fee() not supported yet')
        return await self.fetch_trading_fees(params)
    async def load_trading_limits(self, symbols=None, reload=False, params={}):
        """Merge per-symbol trading limits into self.markets (cached via options)."""
        if self.has['fetchTradingLimits']:
            if reload or not('limitsLoaded' in list(self.options.keys())):
                response = await self.fetch_trading_limits(symbols)
                for i in range(0, len(symbols)):
                    symbol = symbols[i]
                    self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
                self.options['limitsLoaded'] = self.milliseconds()
        return self.markets
    async def load_accounts(self, reload=False, params={}):
        # Cached unless reload is requested; rebuilds the by-id index on refresh.
        if reload:
            self.accounts = await self.fetch_accounts(params)
        else:
            if self.accounts:
                return self.accounts
            else:
                self.accounts = await self.fetch_accounts(params)
        self.accountsById = self.index_by(self.accounts, 'id')
        return self.accounts
    async def fetch_ticker(self, symbol, params={}):
        raise NotSupported('fetch_ticker() not supported yet')
    async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
        raise NotSupported('fetch_transactions() is not supported yet')
    async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        raise NotSupported('fetch_deposits() is not supported yet')
    async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
        raise NotSupported('fetch_withdrawals() is not supported yet')
    async def fetch_deposit_address(self, code=None, since=None, limit=None, params={}):
        raise NotSupported('fetch_deposit_address() is not supported yet')
    async def sleep(self, milliseconds):
        # ccxt's public API uses milliseconds; asyncio.sleep takes seconds.
        return await asyncio.sleep(milliseconds / 1000)
| 42.802432 | 355 | 0.619656 |
__version__ = '1.40.88'
import asyncio
import concurrent.futures
import socket
import certifi
import aiohttp
import ssl
import sys
import yarl
from ccxt.async_support.base.throttle import throttle
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import NotSupported
from ccxt.base.exchange import Exchange as BaseExchange
__all__ = [
'BaseExchange',
'Exchange',
]
class Exchange(BaseExchange):
def __init__(self, config={}):
if 'asyncio_loop' in config:
self.asyncio_loop = config['asyncio_loop']
self.asyncio_loop = self.asyncio_loop or asyncio.get_event_loop()
self.aiohttp_trust_env = config.get('aiohttp_trust_env', self.aiohttp_trust_env)
self.verify = config.get('verify', self.verify)
self.own_session = 'session' not in config
self.cafile = config.get('cafile', certifi.where())
super(Exchange, self).__init__(config)
self.init_rest_rate_limiter()
self.markets_loading = None
self.reloading_markets = False
def init_rest_rate_limiter(self):
self.throttle = throttle(self.extend({
'loop': self.asyncio_loop,
}, self.tokenBucket))
def __del__(self):
if self.session is not None:
self.logger.warning(self.id + " requires to release all resources with an explicit call to the .close() coroutine. If you are using the exchange instance with async coroutines, add exchange.close() to your code into a place when you're done with the exchange and don't need the exchange instance anymore (at the end of your async coroutine).")
if sys.version_info >= (3, 5):
async def __aenter__(self):
self.open()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
def open(self):
if self.own_session and self.session is None:
context = ssl.create_default_context(cafile=self.cafile) if self.verify else self.verify
connector = aiohttp.TCPConnector(ssl=context, loop=self.asyncio_loop, enable_cleanup_closed=True)
self.session = aiohttp.ClientSession(loop=self.asyncio_loop, connector=connector, trust_env=self.aiohttp_trust_env)
async def close(self):
if self.session is not None:
if self.own_session:
await self.session.close()
self.session = None
async def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
if self.enableRateLimit:
await self.throttle(self.rateLimit)
self.lastRestRequestTimestamp = self.milliseconds()
request = self.sign(path, api, method, params, headers, body)
return await self.fetch(request['url'], request['method'], request['headers'], request['body'])
    async def fetch(self, url, method='GET', headers=None, body=None):
        """Perform an HTTP request and return the parsed response.

        Returns decoded JSON when the body parses as JSON, the raw text for
        textual responses, otherwise the response content.  Network/aiohttp
        errors are translated into the library's exception types.
        """
        request_headers = self.prepare_request_headers(headers)
        url = self.proxy + url
        if self.verbose:
            self.print("\nRequest:", method, url, headers, body)
        self.logger.debug("%s %s, Request: %s %s", method, url, headers, body)
        request_body = body
        encoded_body = body.encode() if body else None
        # make sure a ClientSession exists before using it
        self.open()
        session_method = getattr(self.session, method.lower())
        http_response = None
        http_status_code = None
        http_status_text = None
        json_response = None
        try:
            # encoded=True stops yarl from re-quoting an already-encoded url
            async with session_method(yarl.URL(url, encoded=True),
                                      data=encoded_body,
                                      headers=request_headers,
                                      timeout=(self.timeout / 1000),
                                      proxy=self.aiohttp_proxy) as response:
                http_response = await response.text()
                http_response = http_response.strip()
                http_status_code = response.status
                http_status_text = response.reason
                json_response = self.parse_json(http_response)
                # NOTE: 'headers' is rebound from the *request* headers to
                # the *response* headers from this point on
                headers = response.headers
                if self.enableLastHttpResponse:
                    self.last_http_response = http_response
                if self.enableLastResponseHeaders:
                    self.last_response_headers = headers
                if self.enableLastJsonResponse:
                    self.last_json_response = json_response
                if self.verbose:
                    self.print("\nResponse:", method, url, http_status_code, headers, http_response)
                self.logger.debug("%s %s, Response: %s %s %s", method, url, http_status_code, headers, http_response)
        except socket.gaierror as e:
            # DNS resolution failed
            details = ' '.join([self.id, method, url])
            raise ExchangeNotAvailable(details) from e
        except (concurrent.futures.TimeoutError, asyncio.TimeoutError) as e:
            details = ' '.join([self.id, method, url])
            raise RequestTimeout(details) from e
        except aiohttp.ClientConnectionError as e:
            details = ' '.join([self.id, method, url])
            raise ExchangeNotAvailable(details) from e
        except aiohttp.ClientError as e:
            # catch-all for the remaining aiohttp client errors
            details = ' '.join([self.id, method, url])
            raise ExchangeError(details) from e
        self.handle_errors(http_status_code, http_status_text, url, method, headers, http_response, json_response, request_headers, request_body)
        self.handle_http_status_code(http_status_code, http_status_text, url, method, http_response)
        if json_response is not None:
            return json_response
        if self.is_text_response(headers):
            return http_response
        # NOTE(review): the response is already closed here, so this returns
        # the aiohttp stream object rather than raw bytes - confirm intended
        return response.content
    async def load_markets_helper(self, reload=False, params={}):
        """Fetch (or reuse cached) markets and currencies, then index them."""
        if not reload:
            if self.markets:
                if not self.markets_by_id:
                    # markets were set but never indexed by id - index now
                    return self.set_markets(self.markets)
                return self.markets
        currencies = None
        if self.has['fetchCurrencies']:
            currencies = await self.fetch_currencies()
        markets = await self.fetch_markets(params)
        return self.set_markets(markets, currencies)
async def load_markets(self, reload=False, params={}):
if (reload and not self.reloading_markets) or not self.markets_loading:
self.reloading_markets = True
coroutine = self.load_markets_helper(reload, params)
self.markets_loading = asyncio.ensure_future(coroutine)
try:
result = await self.markets_loading
except Exception as e:
self.reloading_markets = False
self.markets_loading = None
raise e
self.reloading_markets = False
return result
    async def fetch_fees(self):
        """Fetch trading and funding fees where supported, defaulting to {}."""
        trading = {}
        funding = {}
        if self.has['fetchTradingFees']:
            trading = await self.fetch_trading_fees()
        if self.has['fetchFundingFees']:
            funding = await self.fetch_funding_fees()
        return {
            'trading': trading,
            'funding': funding,
        }
    async def load_fees(self, reload=False):
        """Fetch fees once and cache them in self.loaded_fees."""
        if not reload:
            # if the fees differ from the class-level defaults they have
            # already been fetched once - reuse them
            if self.loaded_fees != Exchange.loaded_fees:
                return self.loaded_fees
        self.loaded_fees = self.deep_extend(self.loaded_fees, await self.fetch_fees())
        return self.loaded_fees
    async def fetch_markets(self, params={}):
        """Fallback: return the statically-described markets as a list."""
        return self.to_array(self.markets)
    async def fetch_currencies(self, params={}):
        """Fallback: return the statically-described currencies."""
        return self.currencies
    async def fetch_status(self, params={}):
        """Return the exchange status, refreshing its timestamp if possible."""
        if self.has['fetchTime']:
            updated = await self.fetch_time(params)
            self.status['updated'] = updated
        return self.status
    async def fetch_order_status(self, id, symbol=None, params={}):
        """Fetch a single order and return just its 'status' field."""
        order = await self.fetch_order(id, symbol, params)
        return order['status']
    async def fetch_partial_balance(self, part, params={}):
        """Return the *part* section of the full balance structure."""
        balance = await self.fetch_balance(params)
        return balance[part]
    async def fetch_l2_order_book(self, symbol, limit=None, params={}):
        """Return the order book with price levels aggregated (L2)."""
        orderbook = await self.fetch_order_book(symbol, limit, params)
        return self.extend(orderbook, {
            # bids sorted by price descending, asks ascending
            'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
            'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
        })
    async def perform_order_book_request(self, market, limit=None, params={}):
        """Exchange-specific hook; concrete subclasses must override this."""
        raise NotSupported(self.id + ' performOrderBookRequest not supported yet')
    async def fetch_order_book(self, symbol, limit=None, params={}):
        """Fetch and parse the order book for *symbol*."""
        await self.load_markets()
        market = self.market(symbol)
        orderbook = await self.perform_order_book_request(market, limit, params)
        return self.parse_order_book(orderbook, market, limit, params)
    async def fetch_ohlcvc(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Build OHLCV(+count) candles from public trades as a fallback."""
        if not self.has['fetchTrades']:
            raise NotSupported('fetch_ohlcv() not implemented yet')
        await self.load_markets()
        trades = await self.fetch_trades(symbol, since, limit, params)
        return self.build_ohlcvc(trades, timeframe, since, limit)
    async def fetchOHLCVC(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """camelCase alias of fetch_ohlcvc."""
        return await self.fetch_ohlcvc(symbol, timeframe, since, limit, params)
    async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Like fetch_ohlcvc, but with the trailing count column dropped."""
        ohlcvs = await self.fetch_ohlcvc(symbol, timeframe, since, limit, params)
        return [ohlcv[0:-1] for ohlcv in ohlcvs]
    async def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """camelCase alias of fetch_ohlcv."""
        return await self.fetch_ohlcv(symbol, timeframe, since, limit, params)
    async def fetch_full_tickers(self, symbols=None, params={}):
        """Alias of fetch_tickers."""
        return await self.fetch_tickers(symbols, params)
async def edit_order(self, id, symbol, *args):
if not self.enableRateLimit:
raise ExchangeError('updateOrder() requires enableRateLimit = true')
await self.cancel_order(id, symbol)
return await self.create_order(symbol, *args)
    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Abstract; implemented by concrete exchange subclasses."""
        raise NotSupported('create_order() not supported yet')
    async def cancel_order(self, id, symbol=None, params={}):
        """Abstract; implemented by concrete exchange subclasses."""
        raise NotSupported('cancel_order() not supported yet')
    async def fetch_trading_fees(self, params={}):
        """Abstract; implemented by concrete exchange subclasses."""
        raise NotSupported('fetch_trading_fees() not supported yet')
    async def fetch_trading_fee(self, symbol, params={}):
        """Delegate to fetch_trading_fees when the exchange supports it."""
        if not self.has['fetchTradingFees']:
            raise NotSupported('fetch_trading_fee() not supported yet')
        return await self.fetch_trading_fees(params)
    async def load_trading_limits(self, symbols=None, reload=False, params={}):
        """Fetch per-symbol trading limits once and merge them into self.markets."""
        if self.has['fetchTradingLimits']:
            if reload or not('limitsLoaded' in list(self.options.keys())):
                response = await self.fetch_trading_limits(symbols)
                for i in range(0, len(symbols)):
                    symbol = symbols[i]
                    self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
                # remember when limits were loaded to avoid refetching
                self.options['limitsLoaded'] = self.milliseconds()
        return self.markets
    async def load_accounts(self, reload=False, params={}):
        """Fetch sub-accounts once, cache them, and index them by id."""
        if reload:
            self.accounts = await self.fetch_accounts(params)
        else:
            if self.accounts:
                # cached - skip the network round trip
                return self.accounts
            else:
                self.accounts = await self.fetch_accounts(params)
        self.accountsById = self.index_by(self.accounts, 'id')
        return self.accounts
    async def fetch_ticker(self, symbol, params={}):
        """Abstract; implemented by concrete exchange subclasses."""
        raise NotSupported('fetch_ticker() not supported yet')
    async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
        """Abstract; implemented by concrete exchange subclasses."""
        raise NotSupported('fetch_transactions() is not supported yet')
    async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        """Abstract; implemented by concrete exchange subclasses."""
        raise NotSupported('fetch_deposits() is not supported yet')
    async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
        """Abstract; implemented by concrete exchange subclasses."""
        raise NotSupported('fetch_withdrawals() is not supported yet')
    async def fetch_deposit_address(self, code=None, since=None, limit=None, params={}):
        """Abstract; implemented by concrete exchange subclasses."""
        raise NotSupported('fetch_deposit_address() is not supported yet')
async def sleep(self, milliseconds):
return await asyncio.sleep(milliseconds / 1000)
| true | true |
f7253a302858e90ec0abb748061c7a21bef6c41b | 844 | py | Python | casbin/model/assertion.py | goodrain/pycasbin | 1a481ba1af7619e1cc7e83896581d14976927d80 | [
"Apache-2.0"
] | null | null | null | casbin/model/assertion.py | goodrain/pycasbin | 1a481ba1af7619e1cc7e83896581d14976927d80 | [
"Apache-2.0"
] | null | null | null | casbin/model/assertion.py | goodrain/pycasbin | 1a481ba1af7619e1cc7e83896581d14976927d80 | [
"Apache-2.0"
] | null | null | null | from casbin import log
class Assertion:
    """One section (r, p, g, e, m) parsed from a casbin model file."""

    def __init__(self):
        # Previously these were mutable *class* attributes, so 'tokens' and
        # 'policy' were shared across every Assertion instance - a latent
        # aliasing bug.  Initialize per-instance state instead.
        self.key = ""
        self.value = ""
        self.tokens = []
        self.policy = []
        self.rm = None

    def build_role_links(self, rm):
        """Register every grouping-policy rule with the role manager *rm*.

        The number of "_" placeholders in the assertion value determines
        how many fields of each rule form the role link (2 to 4).
        """
        self.rm = rm
        count = self.value.count("_")
        for rule in self.policy:
            if count < 2:
                raise RuntimeError('the number of "_" in role definition should be at least 2')
            if len(rule) < count:
                raise RuntimeError("grouping policy elements do not meet role definition")
            if count == 2:
                self.rm.add_link(rule[0], rule[1])
            elif count == 3:
                self.rm.add_link(rule[0], rule[1], rule[2])
            elif count == 4:
                self.rm.add_link(rule[0], rule[1], rule[2], rule[3])
        log.log_print("Role links for: " + self.key)
        self.rm.print_roles()
| 27.225806 | 95 | 0.520142 | from casbin import log
class Assertion:
key = ""
value = ""
tokens = []
policy = []
rm = None
def build_role_links(self, rm):
self.rm = rm
count = self.value.count("_")
for rule in self.policy:
if count < 2:
raise RuntimeError('the number of "_" in role definition should be at least 2')
if len(rule) < count:
raise RuntimeError("grouping policy elements do not meet role definition")
if count == 2:
self.rm.add_link(rule[0], rule[1])
elif count == 3:
self.rm.add_link(rule[0], rule[1], rule[2])
elif count == 4:
self.rm.add_link(rule[0], rule[1], rule[2], rule[3])
log.log_print("Role links for: " + self.key)
self.rm.print_roles()
| true | true |
f7253a8356e22c76da232f80805a7d9f35a574cc | 292 | py | Python | src/cli.py | nlitz88/ipmifan | 0c479298d8e7e8c9cd2a439e96dc182eca4866af | [
"MIT"
] | null | null | null | src/cli.py | nlitz88/ipmifan | 0c479298d8e7e8c9cd2a439e96dc182eca4866af | [
"MIT"
] | null | null | null | src/cli.py | nlitz88/ipmifan | 0c479298d8e7e8c9cd2a439e96dc182eca4866af | [
"MIT"
] | null | null | null |
# This file is meant to hold the functions that can be invoked from the command line to interact with the service.
# In other words, these functions will make up the service itself; they may eventually move into controller.py.
# Created for planning purposes.
f7253ad485c45ab64e6260d07fb70431869f4c85 | 4,179 | py | Python | third_party/rust_crates/vendor/ct-logs/build.py | zhangpf/fuchsia-rs | 903568f28ddf45f09157ead36d61b50322c9cf49 | [
"BSD-3-Clause"
] | 14 | 2020-10-25T05:48:36.000Z | 2021-09-20T02:46:20.000Z | third_party/rust_crates/vendor/ct-logs/build.py | zhangpf/fuchsia-rs | 903568f28ddf45f09157ead36d61b50322c9cf49 | [
"BSD-3-Clause"
] | 16 | 2020-09-04T19:01:11.000Z | 2021-05-28T03:23:09.000Z | third_party/rust_crates/vendor/ct-logs/build.py | ZVNexus/fuchsia | c5610ad15208208c98693618a79c705af935270c | [
"BSD-3-Clause"
] | 4 | 2020-12-28T17:04:45.000Z | 2022-03-12T03:20:44.000Z | # -*- coding: utf-8 -*-
import subprocess
import sys
import json
import hashlib
import time
import base64
from binascii import hexlify
from collections import namedtuple
HEADER = """//!
//! This library is automatically generated from Google's list of known CT
//! logs. Don't edit it.
//!
//! The generation is done deterministically so you can verify it
//! yourself by inspecting and re-running the generation process.
//!
#![forbid(unsafe_code,
unstable_features)]
#![deny(trivial_casts,
trivial_numeric_casts,
unused_import_braces,
unused_extern_crates,
unused_qualifications)]
pub static LOGS: [&sct::Log; %d] = ["""
FOOTER = """];"""
# One parsed CT log entry; 'json' keeps the raw source record.
Log = namedtuple('Log', 'name url mmd operator key keyid json'.split())
# Google's published CT log list and its detached signature.
LOG_LIST = 'https://www.gstatic.com/ct/log_list/log_list.json'
LOG_LIST_SIG = 'https://www.gstatic.com/ct/log_list/log_list.sig'
def fetch_and_check_sig():
    """Download the log list plus its signature, verify it, and parse it.

    Raises subprocess.CalledProcessError if a download or the OpenSSL
    signature verification fails.
    """
    for cmd in (['curl', '-o', 'log_list.sig', LOG_LIST_SIG],
                ['curl', '-o', 'log_list.json', LOG_LIST],
                ['openssl', 'dgst', '-sha256', '-verify',
                 'log_list_pubkey.pem', '-signature', 'log_list.sig', 'log_list.json'],
                ):
        subprocess.check_call(cmd, stdout=subprocess.PIPE)
    # context manager closes the handle deterministically
    # (json.load(open(...)) leaked the file object)
    with open('log_list.json') as f:
        return json.load(f)
def convert_json(json):
    """Yield a Log namedtuple for each entry in the parsed log-list *json*.

    Entries whose 'disqualified_at' timestamp lies in the past are skipped.
    """
    names_by_id = {op['id']: op['name'] for op in json['operators']}
    for entry in json['logs']:
        operated_by = ', '.join(names_by_id[op] for op in entry['operated_by'])
        pubkey = base64.b64decode(entry['key'])
        digest = hashlib.sha256(pubkey).digest()
        dq = entry.get('disqualified_at', None)
        if dq and time.time() > dq:
            # the log is already disqualified - leave it out
            continue
        yield Log(operator=operated_by,
                  key=pubkey,
                  keyid=digest,
                  name=entry['description'],
                  url=entry['url'],
                  mmd=entry['maximum_merge_delay'],
                  json=entry)
def commentify(cert):
    """Wrap *cert* in a C-style block comment, dropping any '# ' prefixes."""
    stripped = []
    for line in cert.splitlines():
        stripped.append(line[2:] if line.startswith('# ') else line)
    body = '\n * '.join(stripped)
    return '/*\n * ' + body + '\n */'
def convert_bytes(bb):
    """Render the byte sequence *bb* as a run of \\xNN escape sequences."""
    escaped = []
    for byte in bb:
        escaped.append('\\x%02x' % byte)
    return ''.join(escaped)
def raw_public_key(spki):
    """Extract the raw public-key bytes from a DER SubjectPublicKeyInfo.

    SPKI ::= SEQUENCE { algorithm SEQUENCE, subjectPublicKey BIT STRING }
    Returns the BIT STRING contents (without the leading unused-bits octet,
    which must be zero).
    """
    def take_byte(b):
        return b[0], b[1:]
    def take_len(b):
        # Short form: one byte < 0x80.  Long form: low bits give the number
        # of subsequent length octets (up to 3 supported here).
        v, b = take_byte(b)
        if v & 0x80:
            r = 0
            for _ in range(v & 3):
                x, b = take_byte(b)
                r <<= 8
                r |= x
            return r, b
        return v, b
    def take_seq(b):
        tag, b = take_byte(b)
        ll, b = take_len(b)
        assert tag == 0x30
        return b[:ll], b[ll:]
    def take_bitstring(b):
        tag, b = take_byte(b)
        ll, b = take_len(b)
        bits, b = take_byte(b)
        assert tag == 0x03
        assert bits == 0  # no partial trailing byte expected in an SPKI
        return b[:ll-1], b[ll-1:]
    # Debug artifact kept for parity with previous runs, but written via a
    # context manager so the file handle is not leaked.
    with open('key.bin', 'wb') as f:
        f.write(spki)
    spki, rest = take_seq(spki)
    assert len(rest) == 0
    id, data = take_seq(spki)
    keydata, rest = take_bitstring(data)
    assert len(rest) == 0
    return keydata
def print_log(log):
    """Print one Rust sct::Log struct literal for *log* to stdout.

    The raw source record is emitted above the struct as a block comment so
    the generated file documents its own provenance.
    """
    comment = commentify(
            json.dumps(log.json,
                indent = 2,
                separators = (',', ': '),
                sort_keys = True)
            )
    # (removed unused local 'id_up' - it was computed but never referenced)
    description = log.name
    url = log.url
    operator = log.operator
    key = convert_bytes(raw_public_key(log.key))
    keyid_hex = ', '.join('0x{:02x}'.format(x) for x in log.keyid)
    mmd = log.mmd
    # the template is filled from the local variables defined above
    print(""" %(comment)s
    &sct::Log {
        description: "%(description)s",
        url: "%(url)s",
        operated_by: "%(operator)s",
        key: b"%(key)s",
        id: [ %(keyid_hex)s ],
        max_merge_delay: %(mmd)d,
    },
""" % locals())
if __name__ == '__main__':
    if sys.platform == "win32":
        # emit \n (not \r\n) so the generated file is byte-stable on Windows
        import os, msvcrt
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
    data = fetch_and_check_sig()
    logs = {}
    # index logs by hex key id; the dict also deduplicates entries
    for log in convert_json(data):
        logs[hexlify(log.keyid)] = log
    print(HEADER % len(list(logs.keys())))
    # sort by key id so the output is deterministic across runs
    for id in sorted(logs.keys()):
        print_log(logs[id])
    print(FOOTER)
| 26.96129 | 87 | 0.554439 |
import subprocess
import sys
import json
import hashlib
import time
import base64
from binascii import hexlify
from collections import namedtuple
HEADER = """//!
//! This library is automatically generated from Google's list of known CT
//! logs. Don't edit it.
//!
//! The generation is done deterministically so you can verify it
//! yourself by inspecting and re-running the generation process.
//!
#![forbid(unsafe_code,
unstable_features)]
#![deny(trivial_casts,
trivial_numeric_casts,
unused_import_braces,
unused_extern_crates,
unused_qualifications)]
pub static LOGS: [&sct::Log; %d] = ["""
FOOTER = """];"""
Log = namedtuple('Log', 'name url mmd operator key keyid json'.split())
LOG_LIST = 'https://www.gstatic.com/ct/log_list/log_list.json'
LOG_LIST_SIG = 'https://www.gstatic.com/ct/log_list/log_list.sig'
def fetch_and_check_sig():
for cmd in (['curl', '-o', 'log_list.sig', LOG_LIST_SIG],
['curl', '-o', 'log_list.json', LOG_LIST],
['openssl', 'dgst', '-sha256', '-verify',
'log_list_pubkey.pem', '-signature', 'log_list.sig', 'log_list.json'],
):
subprocess.check_call(cmd, stdout = subprocess.PIPE)
return json.load(open('log_list.json'))
def convert_json(json):
operators = { v['id']: v['name'] for v in json['operators'] }
for lj in json['logs']:
operator = ', '.join(operators[op] for op in lj['operated_by'])
key = base64.b64decode(lj['key'])
keyid = hashlib.sha256(key).digest()
disqualification = lj.get('disqualified_at', None)
if disqualification and time.time() > disqualification:
continue
log = Log(lj['description'],
lj['url'],
lj['maximum_merge_delay'],
operator,
key,
keyid,
lj)
yield log
def commentify(cert):
lines = cert.splitlines()
lines = [ll[2:] if ll.startswith('# ') else ll for ll in lines]
return '/*\n * ' + ('\n * '.join(lines)) + '\n */'
def convert_bytes(bb):
return ''.join('\\x{:02x}'.format(b) for b in bb)
def raw_public_key(spki):
def take_byte(b):
return b[0], b[1:]
def take_len(b):
v, b = take_byte(b)
if v & 0x80:
r = 0
for _ in range(v & 3):
x, b = take_byte(b)
r <<= 8
r |= x
return r, b
return v, b
def take_seq(b):
tag, b = take_byte(b)
ll, b = take_len(b)
assert tag == 0x30
return b[:ll], b[ll:]
def take_bitstring(b):
tag, b = take_byte(b)
ll, b = take_len(b)
bits, b = take_byte(b)
assert tag == 0x03
assert bits == 0
return b[:ll-1], b[ll-1:]
open('key.bin', 'wb').write(spki)
spki, rest = take_seq(spki)
assert len(rest) == 0
id, data = take_seq(spki)
keydata, rest = take_bitstring(data)
assert len(rest) == 0
return keydata
def print_log(log):
comment = commentify(
json.dumps(log.json,
indent = 2,
separators = (',', ': '),
sort_keys = True)
)
id_up = hexlify(log.key).upper()[:16]
description = log.name
url = log.url
operator = log.operator
key = convert_bytes(raw_public_key(log.key))
keyid_hex = ', '.join('0x{:02x}'.format(x) for x in log.keyid)
mmd = log.mmd
print(""" %(comment)s
&sct::Log {
description: "%(description)s",
url: "%(url)s",
operated_by: "%(operator)s",
key: b"%(key)s",
id: [ %(keyid_hex)s ],
max_merge_delay: %(mmd)d,
},
""" % locals())
if __name__ == '__main__':
if sys.platform == "win32":
import os, msvcrt
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
data = fetch_and_check_sig()
logs = {}
for log in convert_json(data):
logs[hexlify(log.keyid)] = log
print(HEADER % len(list(logs.keys())))
for id in sorted(logs.keys()):
print_log(logs[id])
print(FOOTER)
| true | true |
f7253effa72e1fa6a01a0b839772bc4261a78df2 | 5,904 | py | Python | fixture/contact.py | talareq/jenkins | 6371b1faedf1990b8d1de57392dff5f57d239246 | [
"Apache-2.0"
] | null | null | null | fixture/contact.py | talareq/jenkins | 6371b1faedf1990b8d1de57392dff5f57d239246 | [
"Apache-2.0"
] | null | null | null | fixture/contact.py | talareq/jenkins | 6371b1faedf1990b8d1de57392dff5f57d239246 | [
"Apache-2.0"
] | null | null | null | from model.formfiller import Contact
import re
class ContactHelper:
    """Page-object helper driving the address-book web UI through Selenium.

    All navigation goes through self.app.wd (the shared WebDriver); any
    operation that changes the contact list invalidates contact_cache.
    """
    def __init__(self, app):
        self.app = app
    def add_new_contact(self, contact):
        """Open the new-entry form, fill it from *contact* and submit."""
        wd = self.app.wd
        if not len(wd.find_elements_by_name("searchstring")) > 0:
            self.app.open_home_page()
        # open the new-entry form ("nowy wpis" = "new entry")
        wd.find_element_by_xpath("//div[@id='nav']//a[.='nowy wpis']").click()
        self.fill_contact_form(contact)
        # submit the form
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
        self.contact_cache = None
    def delete_first_contact(self):
        """Delete the topmost contact in the list."""
        wd = self.app.wd
        self.delete_contact_by_index(0)
    def select_contact_by_index(self, index):
        """Tick the selection checkbox of the contact at *index*."""
        wd = self.app.wd
        wd.find_elements_by_name("selected[]")[index].click()
    def delete_contact_by_index(self, index):
        """Select the contact at *index*, delete it and confirm the alert."""
        wd = self.app.wd
        if not len(wd.find_elements_by_name("searchstring")) > 0:
            self.app.open_home_page()
        self.select_contact_by_index(index)
        # submit deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        wd.switch_to_alert().accept()
        self.contact_cache = None
    def modify_first_contact(self):
        # NOTE(review): calls modify_contact_by_index without the required
        # 'contact' argument - would raise TypeError if invoked; confirm.
        wd = self.app.wd
        self.modify_contact_by_index(0)
    def modify_contact_by_index(self, index, contact):
        """Open the edit form of the contact at *index* and overwrite its fields."""
        wd = self.app.wd
        if not len(wd.find_elements_by_name("searchstring")) > 0:
            self.app.open_home_page()
        row = wd.find_elements_by_name("entry")[index]
        cells = row.find_elements_by_tag_name("td")
        # 8th column holds the edit link/icon
        cells[7].click()
        self.fill_contact_form(contact)
        # accept changes
        wd.find_element_by_xpath("//div[@id='content']/form[1]/input[22]").click()
        self.contact_cache = None
    def count(self):
        """Return the number of contacts on the home page."""
        wd = self.app.wd
        self.app.open_home_page()
        return len(wd.find_elements_by_name("selected[]"))
    def fill_contact_form(self, contact):
        """Type every non-None field of *contact* into the open form."""
        wd = self.app.wd
        self.change_field_value("firstname", contact.firstname)
        self.change_field_value("lastname", contact.lastname)
        self.change_field_value("homephone", contact.homephone)
        self.change_field_value("mobilephone", contact.mobilephone)
        self.change_field_value("workphone", contact.workphone)
        self.change_field_value("secondaryphone", contact.secondaryphone)
    def change_field_value(self, field_name, text):
        """Replace the contents of input *field_name* with *text* (skip if None)."""
        wd = self.app.wd
        if text is not None:
            wd.find_element_by_name(field_name).click()
            wd.find_element_by_name(field_name).clear()
            wd.find_element_by_name(field_name).send_keys(text)
    # cached result of get_contact_list; reset to None by any mutation
    contact_cache = None
    def get_contact_list(self):
        """Scrape the home-page table into Contact objects (cached)."""
        if self.contact_cache is None:
            wd = self.app.wd
            self.app.open_home_page()
            self.contact_cache = []
            for row in wd.find_elements_by_name("entry"):
                cells = row.find_elements_by_tag_name("td")
                firstname = cells[2].text
                lastname = cells[1].text
                id = cells[0].find_element_by_tag_name("input").get_attribute("value")
                all_phones = cells[5].text
                adress = cells[3].text
                all_emails = cells[4].text
                self.contact_cache.append(Contact(firstname=firstname, lastname=lastname,
                                                  id=id, all_phones_from_home_page=all_phones, all_emails_from_home_page=all_emails, adress=adress))
        # return a copy so callers cannot mutate the cache
        return list(self.contact_cache)
    def open_contact_to_edit_by_index(self, index):
        """Click the edit link (8th column) of the contact at *index*."""
        wd = self.app.wd
        self.app.open_home_page()
        row = wd.find_elements_by_name("entry")[index]
        cells = row.find_elements_by_tag_name("td")[7]
        cells.find_element_by_tag_name("a").click()
    def open_contact_to_view_by_index(self, index):
        """Click the view link (7th column) of the contact at *index*."""
        wd = self.app.wd
        self.app.open_home_page()
        row = wd.find_elements_by_name("entry")[index]
        cells = row.find_elements_by_tag_name("td")[6]
        cells.find_element_by_tag_name("a").click()
    def get_contact_info_from_edit_page(self, index):
        """Read the full contact record from the edit form at *index*."""
        wd = self.app.wd
        self.open_contact_to_edit_by_index(index)
        firstname = wd.find_element_by_name("firstname").get_attribute("value")
        lastname = wd.find_element_by_name("lastname").get_attribute("value")
        id = wd.find_element_by_name("id").get_attribute("value")
        homephone = wd.find_element_by_name("home").get_attribute("value")
        workphone = wd.find_element_by_name("work").get_attribute("value")
        mobilephone = wd.find_element_by_name("mobile").get_attribute("value")
        secondaryphone = wd.find_element_by_name("phone2").get_attribute("value")
        email = wd.find_element_by_name("email").get_attribute("value")
        email2 = wd.find_element_by_name("email2").get_attribute("value")
        email3 = wd.find_element_by_name("email3").get_attribute("value")
        adress=wd.find_element_by_name("address").get_attribute("value")
        return Contact(firstname=firstname, lastname=lastname, id=id,
                       homephone=homephone, mobilephone=mobilephone, workphone=workphone, secondaryphone=secondaryphone,
                       email=email, email2=email2, email3=email3, adress=adress)
    def get_contact_view_page(self, index):
        """Parse the phone numbers out of the read-only view page at *index*."""
        wd = self.app.wd
        self.open_contact_to_view_by_index(index)
        text = wd.find_element_by_id("content").text
        # the view page labels phones as "H:", "W:", "M:", "P:"
        homephone = re.search("H: (.*)", text).group(1)
        workphone = re.search("W: (.*)", text).group(1)
        mobilephone = re.search("M: (.*)", text).group(1)
        secondaryphone = re.search("P: (.*)", text).group(1)
        return Contact(homephone=homephone, mobilephone=mobilephone, workphone=workphone, secondaryphone=secondaryphone)
| 39.624161 | 148 | 0.646172 | from model.formfiller import Contact
import re
class ContactHelper:
def __init__(self, app):
self.app = app
def add_new_contact(self, contact):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
wd.find_element_by_xpath("//div[@id='nav']//a[.='nowy wpis']").click()
self.fill_contact_form(contact)
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
self.contact_cache = None
def delete_first_contact(self):
wd = self.app.wd
self.delete_contact_by_index(0)
def select_contact_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def delete_contact_by_index(self, index):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
self.select_contact_by_index(index)
wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
wd.switch_to_alert().accept()
self.contact_cache = None
def modify_first_contact(self):
wd = self.app.wd
self.modify_contact_by_index(0)
def modify_contact_by_index(self, index, contact):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")
cells[7].click()
self.fill_contact_form(contact)
wd.find_element_by_xpath("//div[@id='content']/form[1]/input[22]").click()
self.contact_cache = None
def count(self):
wd = self.app.wd
self.app.open_home_page()
return len(wd.find_elements_by_name("selected[]"))
def fill_contact_form(self, contact):
wd = self.app.wd
self.change_field_value("firstname", contact.firstname)
self.change_field_value("lastname", contact.lastname)
self.change_field_value("homephone", contact.homephone)
self.change_field_value("mobilephone", contact.mobilephone)
self.change_field_value("workphone", contact.workphone)
self.change_field_value("secondaryphone", contact.secondaryphone)
def change_field_value(self, field_name, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(text)
contact_cache = None
def get_contact_list(self):
if self.contact_cache is None:
wd = self.app.wd
self.app.open_home_page()
self.contact_cache = []
for row in wd.find_elements_by_name("entry"):
cells = row.find_elements_by_tag_name("td")
firstname = cells[2].text
lastname = cells[1].text
id = cells[0].find_element_by_tag_name("input").get_attribute("value")
all_phones = cells[5].text
adress = cells[3].text
all_emails = cells[4].text
self.contact_cache.append(Contact(firstname=firstname, lastname=lastname,
id=id, all_phones_from_home_page=all_phones, all_emails_from_home_page=all_emails, adress=adress))
return list(self.contact_cache)
def open_contact_to_edit_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")[7]
cells.find_element_by_tag_name("a").click()
def open_contact_to_view_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")[6]
cells.find_element_by_tag_name("a").click()
def get_contact_info_from_edit_page(self, index):
wd = self.app.wd
self.open_contact_to_edit_by_index(index)
firstname = wd.find_element_by_name("firstname").get_attribute("value")
lastname = wd.find_element_by_name("lastname").get_attribute("value")
id = wd.find_element_by_name("id").get_attribute("value")
homephone = wd.find_element_by_name("home").get_attribute("value")
workphone = wd.find_element_by_name("work").get_attribute("value")
mobilephone = wd.find_element_by_name("mobile").get_attribute("value")
secondaryphone = wd.find_element_by_name("phone2").get_attribute("value")
email = wd.find_element_by_name("email").get_attribute("value")
email2 = wd.find_element_by_name("email2").get_attribute("value")
email3 = wd.find_element_by_name("email3").get_attribute("value")
adress=wd.find_element_by_name("address").get_attribute("value")
return Contact(firstname=firstname, lastname=lastname, id=id,
homephone=homephone, mobilephone=mobilephone, workphone=workphone, secondaryphone=secondaryphone,
email=email, email2=email2, email3=email3, adress=adress)
def get_contact_view_page(self, index):
wd = self.app.wd
self.open_contact_to_view_by_index(index)
text = wd.find_element_by_id("content").text
homephone = re.search("H: (.*)", text).group(1)
workphone = re.search("W: (.*)", text).group(1)
mobilephone = re.search("M: (.*)", text).group(1)
secondaryphone = re.search("P: (.*)", text).group(1)
return Contact(homephone=homephone, mobilephone=mobilephone, workphone=workphone, secondaryphone=secondaryphone)
| true | true |
f7253f2da62ae21d3ff22ab8085cb80e2d6cdb84 | 2,260 | py | Python | ryu/app/openstate/playground/start_many_to_1_ctrl.py | Tesi-Luca-Davide/ryu | f4b74d55d594dab0938bae0656d5143e284e0846 | [
"Apache-2.0"
] | null | null | null | ryu/app/openstate/playground/start_many_to_1_ctrl.py | Tesi-Luca-Davide/ryu | f4b74d55d594dab0938bae0656d5143e284e0846 | [
"Apache-2.0"
] | null | null | null | ryu/app/openstate/playground/start_many_to_1_ctrl.py | Tesi-Luca-Davide/ryu | f4b74d55d594dab0938bae0656d5143e284e0846 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
from mininet.net import Mininet
from mininet.topo import Topo
from mininet.cli import CLI
from mininet.node import UserSwitch,RemoteController
from mininet.term import makeTerm
import os, time
class MyTopo( Topo ):
    "Two hosts joined through s1, three parallel paths (s2/s3/s4) and s5."
    def __init__( self):
        "Build the custom topology."
        # Initialize the Topo base class first.
        Topo.__init__(self)
        # Add nodes: two hosts on the same /24, five switches.
        Host1=self.addHost('h1', ip='10.0.0.1/24')
        Host2=self.addHost('h2', ip='10.0.0.2/24')
        switch1=self.addSwitch('s1')
        switch2=self.addSwitch('s2')
        switch3=self.addSwitch('s3')
        switch4=self.addSwitch('s4')
        switch5=self.addSwitch('s5')
        # Add edges (node, node, port-on-first, port-on-second):
        # h1 - s1 - {s2,s3,s4} - s5 - h2 gives three disjoint middle paths.
        self.addLink( Host1, switch1, 1, 1)
        self.addLink( switch1, switch2, 2, 1)
        self.addLink( switch1, switch3, 3, 1)
        self.addLink( switch1, switch4, 4, 1)
        self.addLink( switch2, switch5, 2, 1)
        self.addLink( switch3, switch5, 2, 2)
        self.addLink( switch4, switch5, 2, 3)
        self.addLink( switch5, Host2, 4, 1)
# Start the Ryu controller in its own xterm (backgrounded).
os.system("xterm -e 'ryu-manager ~/ryu/ryu/app/openstate/playground/forwarding_consistency_many_to_1_ctrl.py'&")
# Start Mininet with the custom topology.
topos = { 'mytopo': ( lambda: MyTopo() ) }
mytopo=MyTopo()
# give the controller a moment to come up before launching the network
time.sleep(1)
print("\n********************************** HELP *********************************************")
print("Type \"python ~/ryu/ryu/app/openstate/echo_server.py 200\" in h2's xterm")
print("Type \"nc 10.0.0.2 200\" in h1's xterm")
print("Watching the tcpdump results, it is possible to see that forwarding consistency is guaranteed\n"
    "In order to test new path selection, close and reopen netcat")
print("\nTo exit type \"ctrl+D\" or exit")
print("*************************************************************************************")
net = Mininet(topo=mytopo,switch=UserSwitch,controller=RemoteController,cleanup=True,autoSetMacs=True,autoStaticArp=True,listenPort=6634)
net.start()
# one tcpdump window per middle path to observe which path traffic takes
os.system("xterm -e 'tcpdump -i s2-eth1'&")
os.system("xterm -e 'tcpdump -i s3-eth1'&")
os.system("xterm -e 'tcpdump -i s4-eth1'&")
h1,h2 = net.hosts[0], net.hosts[1]
makeTerm(h1)
makeTerm(h2)
CLI(net)
# tear everything down: mininet state and the backgrounded controller
net.stop()
os.system("sudo mn -c")
os.system("kill -9 $(pidof -x ryu-manager)")
| 32.753623 | 137 | 0.626991 |
from mininet.net import Mininet
from mininet.topo import Topo
from mininet.cli import CLI
from mininet.node import UserSwitch,RemoteController
from mininet.term import makeTerm
import os, time
class MyTopo( Topo ):
def __init__( self):
Topo.__init__(self)
Host1=self.addHost('h1', ip='10.0.0.1/24')
Host2=self.addHost('h2', ip='10.0.0.2/24')
switch1=self.addSwitch('s1')
switch2=self.addSwitch('s2')
switch3=self.addSwitch('s3')
switch4=self.addSwitch('s4')
switch5=self.addSwitch('s5')
self.addLink( Host1, switch1, 1, 1)
self.addLink( switch1, switch2, 2, 1)
self.addLink( switch1, switch3, 3, 1)
self.addLink( switch1, switch4, 4, 1)
self.addLink( switch2, switch5, 2, 1)
self.addLink( switch3, switch5, 2, 2)
self.addLink( switch4, switch5, 2, 3)
self.addLink( switch5, Host2, 4, 1)
y'&")
********************** HELP *********************************************")
print("Type \"python ~/ryu/ryu/app/openstate/echo_server.py 200\" in h2's xterm")
print("Type \"nc 10.0.0.2 200\" in h1's xterm")
print("Watching the tcpdump results, it is possible to see that forwarding consistency is guaranteed\n"
"In order to test new path selection, close and reopen netcat")
print("\nTo exit type \"ctrl+D\" or exit")
print("*************************************************************************************")
net = Mininet(topo=mytopo,switch=UserSwitch,controller=RemoteController,cleanup=True,autoSetMacs=True,autoStaticArp=True,listenPort=6634)
net.start()
os.system("xterm -e 'tcpdump -i s2-eth1'&")
os.system("xterm -e 'tcpdump -i s3-eth1'&")
os.system("xterm -e 'tcpdump -i s4-eth1'&")
h1,h2 = net.hosts[0], net.hosts[1]
makeTerm(h1)
makeTerm(h2)
CLI(net)
net.stop()
os.system("sudo mn -c")
os.system("kill -9 $(pidof -x ryu-manager)")
| true | true |
f7254068f9b6b41261cccdb04ae15cf40d62dba9 | 8,954 | py | Python | tseries_crossval.py | Yashgh7076/CU-Thesis | 59a7c6e8009395b5773b1ee47c38ca287ed6c189 | [
"MIT"
] | 1 | 2021-03-03T22:11:29.000Z | 2021-03-03T22:11:29.000Z | tseries_crossval.py | Yashgh7076/CU-Thesis | 59a7c6e8009395b5773b1ee47c38ca287ed6c189 | [
"MIT"
] | null | null | null | tseries_crossval.py | Yashgh7076/CU-Thesis | 59a7c6e8009395b5773b1ee47c38ca287ed6c189 | [
"MIT"
] | null | null | null | import numpy as np
import sys
import os
import math
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # Suppress TF info
import tensorflow as tf
#import matplotlib.pyplot as plt
# Define constants
stride = 15        # 1 second @ 15 Hz sampling
window = 30*15     # 30 second window considered (450 samples per window)

# Folder holding the per-recording "outputN.txt" files, given on the command line.
folder = sys.argv[1]
if not os.path.exists(folder):
    print("Unable to open folder containing data, check that folder exists \n")
    exit(0)

total_files = 488  # number of recordings expected in the folder

# First pass over every file: count the total number of windows (rows) so the
# big arrays below can be pre-allocated in one shot.
total_sum = 0
for i in range(1,total_files + 1):
    file_no = 'output' + str(i) + '.txt'
    full_path = os.path.join(folder, file_no)
    #print(full_path)
    f = open(full_path,'r')
    d=[[float(x) for x in line.split()] for line in f]
    f.close()
    N = len(d)
    total_sum = total_sum + N

# NOTE: `d` here is the *last* file read above; every row is assumed to share
# the same layout: `window` label values followed by 6 channel values per
# measurement sample.
M = len(d[0])
measurements = int((M-window)/6)

dataset = np.zeros((total_sum,measurements,6))                    # all sensor windows
vectors = np.zeros((total_sum,window),dtype=np.uint8)             # per-sample labels (0..5)
windows_in_recording = np.zeros((total_files), dtype=np.uint32)   # cumulative window counts
total_windows = 0
# Second pass: load every recording, min-max normalise it per channel and
# copy its windows into the pre-allocated `dataset` / `vectors` arrays.
for i in range(1, total_files + 1):
    file_no = 'output' + str(i) + '.txt'
    full_path = os.path.join(folder, file_no)
    with open(full_path, 'r') as f:
        d = [[float(x) for x in line.split()] for line in f]

    # Need to recalculate the number of windows for each file.
    N = len(d)
    # Each sample is labeled 0..5, hence np.uint8.
    labels = np.zeros(shape=(N, window), dtype=np.uint8)
    data = np.zeros(shape=(N, measurements, 6))

    for j in range(N):
        temp = d[j]
        # First `window` values of a row are the labels ...
        labels[j, :] = temp[0:window]
        # ... the rest are `measurements` samples of 6 sensor channels each.
        temp_2 = temp[window:M]
        for k in range(measurements):
            for l in range(6):
                data[j, k, l] = temp_2[(6 * k) + l]

    # Per-channel extrema over the whole recording (all windows, all samples).
    # BUGFIX: the original running-extrema loop seeded data_max/data_min when
    # j == 1 instead of j == 0, so window 0 never contributed and recordings
    # whose minima are all positive were compared against zero-initialised
    # placeholders.  Computing the extrema directly over axes (0, 1) gives
    # the intended result.
    data_max = np.amax(data, axis=(0, 1))
    data_min = np.amin(data, axis=(0, 1))

    # Normalize each recording (meal) to [0, 1] per channel.  Broadcasting
    # over windows and samples is equivalent to the original per-window loop.
    data = (data - data_min) / (data_max - data_min)

    dataset[total_windows:total_windows + N, :, :] = data
    vectors[total_windows:total_windows + N, :] = labels
    total_windows = total_windows + N
    # Calculates all windows up to and including this meal -> that is what we want!
    windows_in_recording[i-1] = total_windows

    # Clear the per-file buffers from memory before the next iteration.
    del data, labels, d
# Print out to verify
#f = open('segments_data.txt','w')
#for j in range(measurements):
# for k in range(6):
# f.write("%f " % (dataset[0,j,k]))
# f.write("\n") # --> correct way of newline in Python!
#f.close()
#f = open('segments_labels.txt','w')
#for j in range(total_windows):
# for k in range(window):
# f.write("%u " % (vectors[j,k]))
# f.write("\n")
#f.close()
# Cross-validation starts here: the windows are split into five folds, using
# the cumulative window counts at 20/40/60/80 % of the recordings as fold
# boundaries (keras' validation_split is used for the train/val split inside
# each fold, for simplicity).
part_1, part_2, part_3, part_4 = (
    windows_in_recording[math.floor(frac * total_files) - 1]
    for frac in (0.2, 0.4, 0.6, 0.8)
)
# 5-fold cross-validation: each fold holds out one fifth of the windows
# (delimited by part_1..part_4) for testing and trains a fresh U-Net-style
# encoder/decoder on the remaining four fifths.
for iter in range(5):
    # Select the held-out slice for this fold; training data is everything
    # else, concatenated across the gap for the middle folds.
    if(iter == 0):
        tst_data = dataset[0:part_1,:,:]
        trn_data = dataset[part_1:total_windows,:,:]
        tst_vcts = vectors[0:part_1,:]
        trn_vcts = vectors[part_1:total_windows,:]
    elif(iter == 1):
        tst_data = dataset[part_1:part_2,:,:]
        temp_1 = dataset[0:part_1,:,:]
        temp_2 = dataset[part_2:total_windows,:,:]
        trn_data = np.concatenate((temp_1, temp_2), axis=0)
        tst_vcts = vectors[part_1:part_2,:]
        temp_3 = vectors[0:part_1,:]
        temp_4 = vectors[part_2:total_windows,:]
        trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
    elif(iter == 2):
        tst_data = dataset[part_2:part_3,:,:]
        temp_1 = dataset[0:part_2,:,:]
        temp_2 = dataset[part_3:total_windows,:,:]
        trn_data = np.concatenate((temp_1, temp_2), axis=0)
        tst_vcts = vectors[part_2:part_3,:]
        temp_3 = vectors[0:part_2,:]
        temp_4 = vectors[part_3:total_windows,:]
        trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
    elif(iter == 3):
        tst_data = dataset[part_3:part_4,:,:]
        temp_1 = dataset[0:part_3,:,:]
        temp_2 = dataset[part_4:total_windows,:,:]
        trn_data = np.concatenate((temp_1, temp_2), axis=0)
        tst_vcts = vectors[part_3:part_4,:]
        temp_3 = vectors[0:part_3,:]
        temp_4 = vectors[part_4:total_windows,:]
        trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
    elif(iter == 4):
        tst_data = dataset[part_4:total_windows,:,:]
        trn_data = dataset[0:part_4,:,:]
        tst_vcts = vectors[part_4:total_windows,:]
        trn_vcts = vectors[0:part_4,:]

    # Reshape labels -> needed for keras compatibility
    trn_size = trn_data.shape[0]
    trn_vcts = np.reshape(trn_vcts, newshape=(trn_size, 1, window)) # Each vector is of size 1 x training_window => 1 x N image of labels

    # Neural network training starts here
    print("Creating model", iter, "here")
    inputs = tf.keras.layers.Input(shape=(measurements, 6))
    reshape = tf.keras.layers.Reshape((1, measurements, 6))(inputs) # Data is a 1 x 450 'image' of 6 channels

    # Downstream --> Encoder: three conv + batch-norm + ReLU stages, each
    # halving the time axis with 1x2 max-pooling.
    conv_1 = tf.keras.layers.Conv2D(filters=8, kernel_size=(1,15), strides=1, padding='same', activation='linear')(reshape)
    bn_1 = tf.keras.layers.BatchNormalization(axis=3)(conv_1)
    act_1 = tf.keras.layers.ReLU()(bn_1)
    pool_1 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_1)
    conv_2 = tf.keras.layers.Conv2D(filters=16, kernel_size=(1,7), strides=1, padding='same', activation='linear')(pool_1)
    bn_2 = tf.keras.layers.BatchNormalization(axis=3)(conv_2)
    act_2 = tf.keras.layers.ReLU()(bn_2)
    pool_2 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_2)
    conv_3 = tf.keras.layers.Conv2D(filters=32, kernel_size=(1,5), strides=1, padding='same', activation='linear')(pool_2)
    bn_3 = tf.keras.layers.BatchNormalization(axis=3)(conv_3)
    act_3 = tf.keras.layers.ReLU()(bn_3)
    pool_3 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_3)

    # Upstream --> Decoder: transposed convolutions upsample the time axis,
    # with skip connections (Concatenate) from the matching encoder stages.
    up_conv1 = tf.keras.layers.Conv2DTranspose(filters=32, kernel_size=(1,5),padding='same',strides=(1,2),activation='linear')(pool_3)
    bn_4 = tf.keras.layers.BatchNormalization(axis=3)(up_conv1)
    act_4 = tf.keras.layers.ReLU()(bn_4)
    concat = tf.keras.layers.Concatenate()
    cc_1 = concat([act_4, pool_2])
    up_conv2 = tf.keras.layers.Conv2DTranspose(filters=16, kernel_size=(1,7),padding='same',strides=(1,2),activation='linear')(cc_1)
    bn_5 = tf.keras.layers.BatchNormalization(axis=3)(up_conv2)
    act_5 = tf.keras.layers.ReLU()(bn_5)
    # Zero-pad one column so the upsampled width matches pool_1 for the skip.
    pad_1 = tf.keras.layers.ZeroPadding2D(padding=((0,0),(0,1)))(act_5)
    cc_2 = concat([pad_1, pool_1])

    # Final Layer: per-sample softmax over the 6 classes, then crop the one
    # extra column produced by the stride-2 transpose.
    pen_ult = tf.keras.layers.Conv2DTranspose(filters=6,kernel_size=(1,3),strides=(1,2),activation='softmax')(cc_2)
    outputs = tf.keras.layers.Cropping2D(cropping=((0,0),(0,1)))(pen_ult)
    model = tf.keras.Model(inputs=inputs, outputs=outputs)
    # NOTE(review): the last layer already applies softmax, yet the loss is
    # built with from_logits='True' -- a truthy *string*, not the boolean
    # True -- and the same loss object doubles as the metric.  This looks
    # unintentional; confirm before relying on the reported values.
    model.compile(optimizer = 'adam', loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits='True'), metrics=[tf.keras.losses.SparseCategoricalCrossentropy(from_logits='True')])
    if(iter == 0):
        model.summary()

    # Store training sequence to .txt file (one CSV log per fold).
    training_log = 'crossval_fold_' + str(iter) + '.txt'
    csv_logger = tf.keras.callbacks.CSVLogger(training_log, append = True, separator=' ')
    print("Training for fold", iter)
    metrics = model.fit(trn_data, trn_vcts, epochs=200, validation_split= 0.2, verbose=2, callbacks=[csv_logger])
    print("Saving model for fold", iter)
    model_ID = 'crossval_modelID_' + str(iter) + '.h5'
    tf.keras.models.save_model(model,model_ID)
    #del model -> Most likely not needed....

##print("Predict")
##op = model.predict(dataset[0:10,:,:])
##print(op.shape)
##temp = op[0,:,:,:]
##temp = np.reshape(temp,(window, 6))
##for i in range(window):
##    print(temp[i,:], np.argmax(temp[i,:]))
| 39.619469 | 189 | 0.615814 | import numpy as np
import sys
import os
import math
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
stride = 15
window = 30*15
folder = sys.argv[1]
if not os.path.exists(folder):
print("Unable to open folder containing data, check that folder exists \n")
exit(0)
total_files = 488
total_sum = 0
for i in range(1,total_files + 1):
file_no = 'output' + str(i) + '.txt'
full_path = os.path.join(folder, file_no)
f = open(full_path,'r')
d=[[float(x) for x in line.split()] for line in f]
f.close()
N = len(d)
total_sum = total_sum + N
M = len(d[0])
measurements = int((M-window)/6)
dataset = np.zeros((total_sum,measurements,6))
vectors = np.zeros((total_sum,window),dtype=np.uint8)
windows_in_recording = np.zeros((total_files), dtype=np.uint32)
total_windows = 0
for i in range(1,total_files + 1):
file_no = 'output' + str(i) + '.txt'
full_path = os.path.join(folder, file_no)
f = open(full_path,'r')
d=[[float(x) for x in line.split()] for line in f]
f.close()
N = len(d)
labels = np.zeros(shape = (N,window), dtype=np.uint8)
data = np.zeros(shape = (N,measurements,6))
data_max = np.zeros((6))
data_min = np.zeros((6))
temp_3 = np.zeros((6))
temp_4 = np.zeros((6))
for j in range(N):
temp = d[j]
temp_1 = temp[0:window]
temp_2 = temp[window:M]
labels[j,:] = temp_1
for k in range(measurements):
for l in range(6):
data[j,k,l] = temp_2[(6*k) + l]
for j in range(N):
if(j == 1):
data_max = np.amax(data[j,:,:], axis=0)
data_min = np.amin(data[j,:,:], axis=0)
else:
temp_3 = np.amax(data[j,:,:], axis=0)
temp_4 = np.amin(data[j,:,:], axis=0)
for k in range(6):
if(temp_3[k] >= data_max[k]):
data_max[k] = temp_3[k]
if(temp_4[k] <= data_min[k]):
data_min[k] = temp_4[k]
for j in range(N):
for k in range(measurements):
data[j,k,:] = data[j,k,:] - data_min
data[j,k,:] = data[j,k,:]/(data_max - data_min)
dataset[total_windows:total_windows + N, :, :] = data
vectors[total_windows:total_windows + N,:] = labels
total_windows = total_windows + N
windows_in_recording[i-1] = total_windows
del data, labels, d, temp_1, temp_2, temp_3, temp_4
ng[math.floor((0.2*total_files)) -1]
part_2 = windows_in_recording[math.floor((0.4*total_files)) -1]
part_3 = windows_in_recording[math.floor((0.6*total_files)) -1]
part_4 = windows_in_recording[math.floor((0.8*total_files)) -1]
for iter in range(5):
if(iter == 0):
tst_data = dataset[0:part_1,:,:]
trn_data = dataset[part_1:total_windows,:,:]
tst_vcts = vectors[0:part_1,:]
trn_vcts = vectors[part_1:total_windows,:]
elif(iter == 1):
tst_data = dataset[part_1:part_2,:,:]
temp_1 = dataset[0:part_1,:,:]
temp_2 = dataset[part_2:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_1:part_2,:]
temp_3 = vectors[0:part_1,:]
temp_4 = vectors[part_2:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 2):
tst_data = dataset[part_2:part_3,:,:]
temp_1 = dataset[0:part_2,:,:]
temp_2 = dataset[part_3:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_2:part_3,:]
temp_3 = vectors[0:part_2,:]
temp_4 = vectors[part_3:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 3):
tst_data = dataset[part_3:part_4,:,:]
temp_1 = dataset[0:part_3,:,:]
temp_2 = dataset[part_4:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_3:part_4,:]
temp_3 = vectors[0:part_3,:]
temp_4 = vectors[part_4:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 4):
tst_data = dataset[part_4:total_windows,:,:]
trn_data = dataset[0:part_4,:,:]
tst_vcts = vectors[part_4:total_windows,:]
trn_vcts = vectors[0:part_4,:]
trn_size = trn_data.shape[0]
trn_vcts = np.reshape(trn_vcts, newshape=(trn_size, 1, window))
print("Creating model", iter, "here")
inputs = tf.keras.layers.Input(shape=(measurements, 6))
reshape = tf.keras.layers.Reshape((1, measurements, 6))(inputs)
conv_1 = tf.keras.layers.Conv2D(filters=8, kernel_size=(1,15), strides=1, padding='same', activation='linear')(reshape)
bn_1 = tf.keras.layers.BatchNormalization(axis=3)(conv_1)
act_1 = tf.keras.layers.ReLU()(bn_1)
pool_1 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_1)
conv_2 = tf.keras.layers.Conv2D(filters=16, kernel_size=(1,7), strides=1, padding='same', activation='linear')(pool_1)
bn_2 = tf.keras.layers.BatchNormalization(axis=3)(conv_2)
act_2 = tf.keras.layers.ReLU()(bn_2)
pool_2 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_2)
conv_3 = tf.keras.layers.Conv2D(filters=32, kernel_size=(1,5), strides=1, padding='same', activation='linear')(pool_2)
bn_3 = tf.keras.layers.BatchNormalization(axis=3)(conv_3)
act_3 = tf.keras.layers.ReLU()(bn_3)
pool_3 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_3)
up_conv1 = tf.keras.layers.Conv2DTranspose(filters=32, kernel_size=(1,5),padding='same',strides=(1,2),activation='linear')(pool_3)
bn_4 = tf.keras.layers.BatchNormalization(axis=3)(up_conv1)
act_4 = tf.keras.layers.ReLU()(bn_4)
concat = tf.keras.layers.Concatenate()
cc_1 = concat([act_4, pool_2])
up_conv2 = tf.keras.layers.Conv2DTranspose(filters=16, kernel_size=(1,7),padding='same',strides=(1,2),activation='linear')(cc_1)
bn_5 = tf.keras.layers.BatchNormalization(axis=3)(up_conv2)
act_5 = tf.keras.layers.ReLU()(bn_5)
pad_1 = tf.keras.layers.ZeroPadding2D(padding=((0,0),(0,1)))(act_5)
cc_2 = concat([pad_1, pool_1])
pen_ult = tf.keras.layers.Conv2DTranspose(filters=6,kernel_size=(1,3),strides=(1,2),activation='softmax')(cc_2)
outputs = tf.keras.layers.Cropping2D(cropping=((0,0),(0,1)))(pen_ult)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
model.compile(optimizer = 'adam', loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits='True'), metrics=[tf.keras.losses.SparseCategoricalCrossentropy(from_logits='True')])
if(iter == 0):
model.summary()
training_log = 'crossval_fold_' + str(iter) + '.txt'
csv_logger = tf.keras.callbacks.CSVLogger(training_log, append = True, separator=' ')
print("Training for fold", iter)
metrics = model.fit(trn_data, trn_vcts, epochs=200, validation_split= 0.2, verbose=2, callbacks=[csv_logger])
print("Saving model for fold", iter)
model_ID = 'crossval_modelID_' + str(iter) + '.h5'
tf.keras.models.save_model(model,model_ID)
| true | true |
f725440da7378c0c7c83b48d5b2433930ec3d062 | 5,063 | py | Python | catalog/bindings/csw/animate_color_type.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | catalog/bindings/csw/animate_color_type.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | catalog/bindings/csw/animate_color_type.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | from dataclasses import dataclass, field
from decimal import Decimal
from typing import Dict, List, Optional, Union
from bindings.csw.anim_mode_attrs_calc_mode import AnimModeAttrsCalcMode
from bindings.csw.animate_color_prototype import AnimateColorPrototype
from bindings.csw.fill_default_type import FillDefaultType
from bindings.csw.fill_timing_attrs_type import FillTimingAttrsType
from bindings.csw.lang_value import LangValue
from bindings.csw.restart_default_type import RestartDefaultType
from bindings.csw.restart_timing_type import RestartTimingType
from bindings.csw.sync_behavior_default_type import SyncBehaviorDefaultType
from bindings.csw.sync_behavior_type import SyncBehaviorType
__NAMESPACE__ = "http://www.w3.org/2001/SMIL20/Language"
@dataclass
class AnimateColorType(AnimateColorPrototype):
    """Dataclass binding for the SMIL 2.0 Language ``animateColor`` element.

    Generated-binding style (xsdata-like): each XML attribute is mirrored by
    a field whose ``metadata`` mapping tells the (de)serializer its XML name,
    kind ("Attribute" / "Wildcard" / "Attributes") and namespace.  Defaults
    mirror the schema defaults; do not reorder fields or hand-edit metadata
    unless the schema changes.
    """

    class Meta:
        # Local name of the XML element this class binds to.
        name = "animateColorType"

    # Wildcard slot collecting child elements from any foreign namespace.
    other_element: List[object] = field(
        default_factory=list,
        metadata={
            "type": "Wildcard",
            "namespace": "##other",
        },
    )
    # --- core attributes -------------------------------------------------
    id: Optional[str] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    # Serialised under the reserved XML attribute name "class".
    class_value: Optional[str] = field(
        default=None,
        metadata={
            "name": "class",
            "type": "Attribute",
        },
    )
    # xml:lang -- note the XML namespace; value is a language code or LangValue.
    lang: Optional[Union[str, LangValue]] = field(
        default=None,
        metadata={
            "type": "Attribute",
            "namespace": "http://www.w3.org/XML/1998/namespace",
        },
    )
    alt: Optional[str] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    longdesc: Optional[str] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    # --- timing attributes (begin/end/duration/repetition) ---------------
    begin: Optional[str] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    end: Optional[str] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    dur: Optional[str] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    repeat_dur: Optional[str] = field(
        default=None,
        metadata={
            "name": "repeatDur",
            "type": "Attribute",
        },
    )
    # Schema facet: repeatCount must be >= 0.0 when present.
    repeat_count: Optional[Decimal] = field(
        default=None,
        metadata={
            "name": "repeatCount",
            "type": "Attribute",
            "min_inclusive": Decimal("0.0"),
        },
    )
    repeat: Optional[int] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    min: Optional[str] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    max: Optional[str] = field(
        default=None,
        metadata={
            "type": "Attribute",
        },
    )
    # --- synchronisation behaviour ----------------------------------------
    sync_behavior: SyncBehaviorType = field(
        default=SyncBehaviorType.DEFAULT,
        metadata={
            "name": "syncBehavior",
            "type": "Attribute",
        },
    )
    sync_tolerance: Optional[str] = field(
        default=None,
        metadata={
            "name": "syncTolerance",
            "type": "Attribute",
        },
    )
    sync_behavior_default: SyncBehaviorDefaultType = field(
        default=SyncBehaviorDefaultType.INHERIT,
        metadata={
            "name": "syncBehaviorDefault",
            "type": "Attribute",
        },
    )
    sync_tolerance_default: str = field(
        default="inherit",
        metadata={
            "name": "syncToleranceDefault",
            "type": "Attribute",
        },
    )
    # --- restart and fill semantics ---------------------------------------
    restart: RestartTimingType = field(
        default=RestartTimingType.DEFAULT,
        metadata={
            "type": "Attribute",
        },
    )
    restart_default: RestartDefaultType = field(
        default=RestartDefaultType.INHERIT,
        metadata={
            "name": "restartDefault",
            "type": "Attribute",
        },
    )
    fill: FillTimingAttrsType = field(
        default=FillTimingAttrsType.DEFAULT,
        metadata={
            "type": "Attribute",
        },
    )
    fill_default: FillDefaultType = field(
        default=FillDefaultType.INHERIT,
        metadata={
            "name": "fillDefault",
            "type": "Attribute",
        },
    )
    # --- animation-specific attributes -------------------------------------
    target_element: Optional[str] = field(
        default=None,
        metadata={
            "name": "targetElement",
            "type": "Attribute",
        },
    )
    calc_mode: AnimModeAttrsCalcMode = field(
        default=AnimModeAttrsCalcMode.LINEAR,
        metadata={
            "name": "calcMode",
            "type": "Attribute",
        },
    )
    skip_content: bool = field(
        default=True,
        metadata={
            "name": "skip-content",
            "type": "Attribute",
        },
    )
    # Catch-all for XML attributes from any namespace not mapped above.
    any_attributes: Dict[str, str] = field(
        default_factory=dict,
        metadata={
            "type": "Attributes",
            "namespace": "##any",
        },
    )
from decimal import Decimal
from typing import Dict, List, Optional, Union
from bindings.csw.anim_mode_attrs_calc_mode import AnimModeAttrsCalcMode
from bindings.csw.animate_color_prototype import AnimateColorPrototype
from bindings.csw.fill_default_type import FillDefaultType
from bindings.csw.fill_timing_attrs_type import FillTimingAttrsType
from bindings.csw.lang_value import LangValue
from bindings.csw.restart_default_type import RestartDefaultType
from bindings.csw.restart_timing_type import RestartTimingType
from bindings.csw.sync_behavior_default_type import SyncBehaviorDefaultType
from bindings.csw.sync_behavior_type import SyncBehaviorType
__NAMESPACE__ = "http://www.w3.org/2001/SMIL20/Language"
@dataclass
class AnimateColorType(AnimateColorPrototype):
class Meta:
name = "animateColorType"
other_element: List[object] = field(
default_factory=list,
metadata={
"type": "Wildcard",
"namespace": "##other",
},
)
id: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
class_value: Optional[str] = field(
default=None,
metadata={
"name": "class",
"type": "Attribute",
},
)
lang: Optional[Union[str, LangValue]] = field(
default=None,
metadata={
"type": "Attribute",
"namespace": "http://www.w3.org/XML/1998/namespace",
},
)
alt: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
longdesc: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
begin: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
end: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
dur: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
repeat_dur: Optional[str] = field(
default=None,
metadata={
"name": "repeatDur",
"type": "Attribute",
},
)
repeat_count: Optional[Decimal] = field(
default=None,
metadata={
"name": "repeatCount",
"type": "Attribute",
"min_inclusive": Decimal("0.0"),
},
)
repeat: Optional[int] = field(
default=None,
metadata={
"type": "Attribute",
},
)
min: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
max: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
sync_behavior: SyncBehaviorType = field(
default=SyncBehaviorType.DEFAULT,
metadata={
"name": "syncBehavior",
"type": "Attribute",
},
)
sync_tolerance: Optional[str] = field(
default=None,
metadata={
"name": "syncTolerance",
"type": "Attribute",
},
)
sync_behavior_default: SyncBehaviorDefaultType = field(
default=SyncBehaviorDefaultType.INHERIT,
metadata={
"name": "syncBehaviorDefault",
"type": "Attribute",
},
)
sync_tolerance_default: str = field(
default="inherit",
metadata={
"name": "syncToleranceDefault",
"type": "Attribute",
},
)
restart: RestartTimingType = field(
default=RestartTimingType.DEFAULT,
metadata={
"type": "Attribute",
},
)
restart_default: RestartDefaultType = field(
default=RestartDefaultType.INHERIT,
metadata={
"name": "restartDefault",
"type": "Attribute",
},
)
fill: FillTimingAttrsType = field(
default=FillTimingAttrsType.DEFAULT,
metadata={
"type": "Attribute",
},
)
fill_default: FillDefaultType = field(
default=FillDefaultType.INHERIT,
metadata={
"name": "fillDefault",
"type": "Attribute",
},
)
target_element: Optional[str] = field(
default=None,
metadata={
"name": "targetElement",
"type": "Attribute",
},
)
calc_mode: AnimModeAttrsCalcMode = field(
default=AnimModeAttrsCalcMode.LINEAR,
metadata={
"name": "calcMode",
"type": "Attribute",
},
)
skip_content: bool = field(
default=True,
metadata={
"name": "skip-content",
"type": "Attribute",
},
)
any_attributes: Dict[str, str] = field(
default_factory=dict,
metadata={
"type": "Attributes",
"namespace": "##any",
},
)
| true | true |
f7254419547fb3e85242fc0a78ab9478810397a4 | 4,518 | py | Python | contrib/testgen/gen_base58_test_vectors.py | artiqox/artiqox | 782d58837ec8a8a84a41f0508a71b060af7ed9fc | [
"MIT"
] | 9 | 2018-04-01T23:21:15.000Z | 2018-08-10T20:59:16.000Z | contrib/testgen/gen_base58_test_vectors.py | artiqox/artiqox | 782d58837ec8a8a84a41f0508a71b060af7ed9fc | [
"MIT"
] | 1 | 2019-06-16T00:58:29.000Z | 2019-06-23T23:46:33.000Z | contrib/testgen/gen_base58_test_vectors.py | artiqox/artiqox | 782d58837ec8a8a84a41f0508a71b060af7ed9fc | [
"MIT"
] | 5 | 2018-03-27T09:26:45.000Z | 2019-10-23T00:15:00.000Z | #!/usr/bin/env python
# Copyright (c) 2012-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json
gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
# key types: version-byte prefixes used when base58check-encoding each kind
# of payload (main-net and test-net address/script/privkey variants).
PUBKEY_ADDRESS = 23
SCRIPT_ADDRESS = 82
PUBKEY_ADDRESS_TEST = 113
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 158
PRIVKEY_TEST = 241

# Keys of the JSON metadata object emitted alongside each valid vector.
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates for valid sequences
templates = [
    # prefix, payload_size, suffix, metadata
    # None = N/A
    # Addresses: 20-byte payload, no suffix.
    ((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
    ((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
    ((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
    ((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
    # Private keys: 32-byte payload; suffix byte 0x01 marks a compressed key.
    ((PRIVKEY,), 32, (), (True, False, None, False)),
    ((PRIVKEY,), 32, (1,), (True, False, None, True)),
    ((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
    ((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
    '''Return True when v base58check-decodes and matches a known template.'''
    decoded = b58decode_chk(v)
    if decoded is None:
        return False
    for prefix_bytes, payload_len, suffix_bytes, _meta in templates:
        prefix = str(bytearray(prefix_bytes))
        suffix = str(bytearray(suffix_bytes))
        if not (decoded.startswith(prefix) and decoded.endswith(suffix)):
            continue
        # Prefix and suffix match; the remaining body must have the
        # template's exact payload size.
        if (len(decoded) - len(prefix) - len(suffix)) == payload_len:
            return True
    return False
def gen_valid_vectors():
    '''Yield (encoded, payload_hex, metadata) triples for valid vectors, forever.'''
    while True:
        for prefix_bytes, payload_len, suffix_bytes, meta in templates:
            prefix = str(bytearray(prefix_bytes))
            suffix = str(bytearray(suffix_bytes))
            payload = os.urandom(payload_len)
            rv = b58encode_chk(prefix + payload + suffix)
            # Sanity check: everything we emit as "valid" must round-trip.
            assert is_valid(rv)
            metadata = {key: value
                        for key, value in zip(metadata_keys, meta)
                        if value is not None}
            yield (rv, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
    '''Encode a template with the requested corruptions; result may be invalid.'''
    # Either a random prefix byte or the template's real one.
    prefix = os.urandom(1) if corrupt_prefix else str(bytearray(template[0]))
    # Either a random (exponentially distributed, >= 50) length or the real one.
    if randomize_payload_size:
        payload_len = max(int(random.expovariate(0.5)), 50)
    else:
        payload_len = template[1]
    payload = os.urandom(payload_len)
    # Either random bytes of the right length or the template's real suffix.
    suffix = os.urandom(len(template[2])) if corrupt_suffix else str(bytearray(template[2]))
    return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
    '''Return True with probability p (p <= 0 -> never, p >= 1 -> always).'''
    draw = random.random()
    return draw < p
def gen_invalid_vectors():
    '''Yield 1-tuples of invalid base58 strings, forever.'''
    # A couple of manual edge cases first.
    yield "",
    yield "x",
    while True:
        # Corruption kinds: bad prefix byte, bad payload length, randomized
        # suffix -- each applied independently with probability 0.2 -- plus
        # occasional single-character line corruption below.
        for template in templates:
            candidate = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
            if random.randint(0,10) < 1:  # ~1 in 11: corrupt the encoded line itself
                if randbool():
                    # Append one random base58 character.
                    candidate += random.choice(b58chars)
                else:
                    # Replace one character somewhere in the string.
                    pos = random.randint(0, len(candidate))
                    candidate = candidate[0:pos] + random.choice(b58chars) + candidate[pos+1:]
            # Only emit strings that really fail validation.
            if not is_valid(candidate):
                yield candidate,
if __name__ == '__main__':
    import sys, json
    # Mode name -> generator; see the module docstring for usage.
    iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
    try:
        uiter = iters[sys.argv[1]]
    except IndexError:
        # No mode argument: default to valid vectors.  NOTE(review): an
        # *unrecognised* mode raises an uncaught KeyError instead of falling
        # back here -- confirm whether that is intended.
        uiter = gen_valid_vectors
    try:
        count = int(sys.argv[2])
    except IndexError:
        count = 0  # no count argument -> emit an empty list
    # Take `count` vectors from the (infinite) generator and dump as JSON.
    data = list(islice(uiter(), count))
    json.dump(data, sys.stdout, sort_keys=True, indent=4)
    sys.stdout.write('\n')
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
PUBKEY_ADDRESS = 23
SCRIPT_ADDRESS = 82
PUBKEY_ADDRESS_TEST = 113
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 158
PRIVKEY_TEST = 241
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
templates = [
((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
((PRIVKEY,), 32, (), (True, False, None, False)),
((PRIVKEY,), 32, (1,), (True, False, None, True)),
((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
result = b58decode_chk(v)
if result is None:
return False
for template in templates:
prefix = str(bytearray(template[0]))
suffix = str(bytearray(template[2]))
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return False
def gen_valid_vectors():
while True:
for template in templates:
prefix = str(bytearray(template[0]))
payload = os.urandom(template[1])
suffix = str(bytearray(template[2]))
rv = b58encode_chk(prefix + payload + suffix)
assert is_valid(rv)
metadata = dict([(x,y) for (x,y) in zip(metadata_keys,template[3]) if y is not None])
yield (rv, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = str(bytearray(template[0]))
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = str(bytearray(template[2]))
return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
return random.random() < p
def gen_invalid_vectors():
yield "",
yield "x",
while True:
for template in templates:
val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
if random.randint(0,10)<1:
if randbool():
val += random.choice(b58chars)
else:
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys, json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
| true | true |
f7254560c04c87549cd65488408ce3ddfcd4bf5f | 104,573 | py | Python | youtube_dl/YoutubeDL.py | 404NotFoundJ/ytubr | 7c4aa6fd6fd6fadf1cf1942c279cd5c0ff5ae498 | [
"Unlicense"
] | null | null | null | youtube_dl/YoutubeDL.py | 404NotFoundJ/ytubr | 7c4aa6fd6fd6fadf1cf1942c279cd5c0ff5ae498 | [
"Unlicense"
] | null | null | null | youtube_dl/YoutubeDL.py | 404NotFoundJ/ytubr | 7c4aa6fd6fd6fadf1cf1942c279cd5c0ff5ae498 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
from __future__ import absolute_import, unicode_literals
import collections
import contextlib
import copy
import datetime
import errno
import fileinput
import io
import itertools
import json
import locale
import operator
import os
import platform
import re
import shutil
import subprocess
import socket
import sys
import time
import tokenize
import traceback
import random
from .compat import (
compat_basestring,
compat_cookiejar,
compat_expanduser,
compat_get_terminal_size,
compat_http_client,
compat_kwargs,
compat_numeric_types,
compat_os_name,
compat_str,
compat_tokenize_tokenize,
compat_urllib_error,
compat_urllib_request,
compat_urllib_request_DataHandler,
)
from .utils import (
age_restricted,
args_to_str,
ContentTooShortError,
date_from_str,
DateRange,
DEFAULT_OUTTMPL,
determine_ext,
determine_protocol,
DownloadError,
encode_compat_str,
encodeFilename,
error_to_compat_str,
ExtractorError,
format_bytes,
formatSeconds,
GeoRestrictedError,
ISO3166Utils,
locked_file,
make_HTTPS_handler,
MaxDownloadsReached,
PagedList,
parse_filesize,
PerRequestProxyHandler,
platform_name,
PostProcessingError,
preferredencoding,
prepend_extension,
register_socks_protocols,
render_table,
replace_extension,
SameFileError,
sanitize_filename,
sanitize_path,
sanitize_url,
sanitized_Request,
std_headers,
subtitles_filename,
UnavailableVideoError,
url_basename,
version_tuple,
write_json_file,
write_string,
YoutubeDLCookieProcessor,
YoutubeDLHandler,
)
from .cache import Cache
from .extractor import get_info_extractor, gen_extractor_classes, _LAZY_LOADER
from .downloader import get_suitable_downloader
from .downloader.rtmp import rtmpdump_version
from .postprocessor import (
FFmpegFixupM3u8PP,
FFmpegFixupM4aPP,
FFmpegFixupStretchedPP,
FFmpegMergerPP,
FFmpegPostProcessor,
get_postprocessor,
)
from .version import __version__
if compat_os_name == 'nt':
import ctypes
class YoutubeDL(object):
"""YoutubeDL class.
YoutubeDL objects are the ones responsible of downloading the
actual video file and writing it to disk if the user has requested
it, among some other tasks. In most cases there should be one per
program. As, given a video URL, the downloader doesn't know how to
extract all the needed information, task that InfoExtractors do, it
has to pass the URL to one of them.
For this, YoutubeDL objects have a method that allows
InfoExtractors to be registered in a given order. When it is passed
a URL, the YoutubeDL object handles it to the first InfoExtractor it
finds that reports being able to handle it. The InfoExtractor extracts
all the information about the video or videos the URL refers to, and
YoutubeDL process the extracted information, possibly using a File
Downloader to download the video.
YoutubeDL objects accept a lot of parameters. In order not to saturate
the object constructor with arguments, it receives a dictionary of
options instead. These options are available through the params
attribute for the InfoExtractors to use. The YoutubeDL also
registers itself as the downloader in charge for the InfoExtractors
that are added to it, so this is a "mutual registration".
Available options:
username: Username for authentication purposes.
password: Password for authentication purposes.
videopassword: Password for accessing a video.
ap_mso: Adobe Pass multiple-system operator identifier.
ap_username: Multiple-system operator account username.
ap_password: Multiple-system operator account password.
usenetrc: Use netrc for authentication instead.
verbose: Print additional info to stdout.
quiet: Do not print messages to stdout.
no_warnings: Do not print out anything for warnings.
forceurl: Force printing final URL.
forcetitle: Force printing title.
forceid: Force printing ID.
forcethumbnail: Force printing thumbnail URL.
forcedescription: Force printing description.
forcefilename: Force printing final filename.
forceduration: Force printing duration.
forcejson: Force printing info_dict as JSON.
dump_single_json: Force printing the info_dict of the whole playlist
(or video) as a single JSON line.
simulate: Do not download the video files.
format: Video format code. See options.py for more information.
outtmpl: Template for output names.
restrictfilenames: Do not allow "&" and spaces in file names
ignoreerrors: Do not stop on download errors.
force_generic_extractor: Force downloader to use the generic extractor
nooverwrites: Prevent overwriting files.
playliststart: Playlist item to start at.
playlistend: Playlist item to end at.
playlist_items: Specific indices of playlist to download.
playlistreverse: Download playlist items in reverse order.
playlistrandom: Download playlist items in random order.
matchtitle: Download only matching titles.
rejecttitle: Reject downloads for matching titles.
logger: Log messages to a logging.Logger instance.
logtostderr: Log messages to stderr instead of stdout.
writedescription: Write the video description to a .description file
writeinfojson: Write the video description to a .info.json file
writeannotations: Write the video annotations to a .annotations.xml file
writethumbnail: Write the thumbnail image to a file
write_all_thumbnails: Write all thumbnail formats to files
writesubtitles: Write the video subtitles to a file
writeautomaticsub: Write the automatically generated subtitles to a file
allsubtitles: Downloads all the subtitles of the video
(requires writesubtitles or writeautomaticsub)
listsubtitles: Lists all available subtitles for the video
subtitlesformat: The format code for subtitles
subtitleslangs: List of languages of the subtitles to download
keepvideo: Keep the video file after post-processing
daterange: A DateRange object, download only if the upload_date is in the range.
skip_download: Skip the actual download of the video file
cachedir: Location of the cache files in the filesystem.
False to disable filesystem cache.
noplaylist: Download single video instead of a playlist if in doubt.
age_limit: An integer representing the user's age in years.
Unsuitable videos for the given age are skipped.
min_views: An integer representing the minimum view count the video
must have in order to not be skipped.
Videos without view count information are always
downloaded. None for no limit.
max_views: An integer representing the maximum view count.
Videos that are more popular than that are not
downloaded.
Videos without view count information are always
downloaded. None for no limit.
download_archive: File name of a file where all downloads are recorded.
Videos already present in the file are not downloaded
again.
cookiefile: File name where cookies should be read from and dumped to.
nocheckcertificate:Do not verify SSL certificates
prefer_insecure: Use HTTP instead of HTTPS to retrieve information.
At the moment, this is only supported by YouTube.
proxy: URL of the proxy server to use
geo_verification_proxy: URL of the proxy to use for IP address verification
on geo-restricted sites. (Experimental)
socket_timeout: Time to wait for unresponsive hosts, in seconds
bidi_workaround: Work around buggy terminals without bidirectional text
support, using fridibi
debug_printtraffic:Print out sent and received HTTP traffic
include_ads: Download ads as well
default_search: Prepend this string if an input url is not valid.
'auto' for elaborate guessing
encoding: Use this encoding instead of the system-specified.
extract_flat: Do not resolve URLs, return the immediate result.
Pass in 'in_playlist' to only show this behavior for
playlist items.
postprocessors: A list of dictionaries, each with an entry
* key: The name of the postprocessor. See
youtube_dl/postprocessor/__init__.py for a list.
as well as any further keyword arguments for the
postprocessor.
progress_hooks: A list of functions that get called on download
progress, with a dictionary with the entries
* status: One of "downloading", "error", or "finished".
Check this first and ignore unknown values.
If status is one of "downloading", or "finished", the
following properties may also be present:
* filename: The final filename (always present)
* tmpfilename: The filename we're currently writing to
* downloaded_bytes: Bytes on disk
* total_bytes: Size of the whole file, None if unknown
* total_bytes_estimate: Guess of the eventual file size,
None if unavailable.
* elapsed: The number of seconds since download started.
* eta: The estimated time in seconds, None if unknown
* speed: The download speed in bytes/second, None if
unknown
* fragment_index: The counter of the currently
downloaded video fragment.
* fragment_count: The number of fragments (= individual
files that will be merged)
Progress hooks are guaranteed to be called at least once
(with status "finished") if the download is successful.
merge_output_format: Extension to use when merging formats.
fixup: Automatically correct known faults of the file.
One of:
- "never": do nothing
- "warn": only emit a warning
- "detect_or_warn": check whether we can do anything
about it, warn otherwise (default)
source_address: (Experimental) Client-side IP address to bind to.
call_home: Boolean, true iff we are allowed to contact the
youtube-dl servers for debugging.
sleep_interval: Number of seconds to sleep before each download when
used alone or a lower bound of a range for randomized
sleep before each download (minimum possible number
of seconds to sleep) when used along with
max_sleep_interval.
max_sleep_interval:Upper bound of a range for randomized sleep before each
download (maximum possible number of seconds to sleep).
Must only be used along with sleep_interval.
Actual sleep time will be a random float from range
[sleep_interval; max_sleep_interval].
listformats: Print an overview of available video formats and exit.
list_thumbnails: Print a table of all thumbnails and exit.
match_filter: A function that gets called with the info_dict of
every video.
If it returns a message, the video is ignored.
If it returns None, the video is downloaded.
match_filter_func in utils.py is one example for this.
no_color: Do not emit color codes in output.
geo_bypass: Bypass geographic restriction via faking X-Forwarded-For
HTTP header (experimental)
geo_bypass_country:
Two-letter ISO 3166-2 country code that will be used for
explicit geographic restriction bypassing via faking
X-Forwarded-For HTTP header (experimental)
The following options determine which downloader is picked:
external_downloader: Executable of the external downloader to call.
None or unset for standard (built-in) downloader.
hls_prefer_native: Use the native HLS downloader instead of ffmpeg/avconv
if True, otherwise use ffmpeg/avconv if False, otherwise
use downloader suggested by extractor if None.
The following parameters are not used by YoutubeDL itself, they are used by
the downloader (see youtube_dl/downloader/common.py):
nopart, updatetime, buffersize, ratelimit, min_filesize, max_filesize, test,
noresizebuffer, retries, continuedl, noprogress, consoletitle,
xattr_set_filesize, external_downloader_args, hls_use_mpegts.
The following options are used by the post processors:
prefer_ffmpeg: If True, use ffmpeg instead of avconv if both are available,
otherwise prefer avconv.
postprocessor_args: A list of additional command-line arguments for the
postprocessor.
"""
params = None
_ies = []
_pps = []
_download_retcode = None
_num_downloads = None
_screen_file = None
    def __init__(self, params=None, auto_init=True):
        """Create a FileDownloader object with the given options."""
        if params is None:
            params = {}
        self._ies = []
        self._ies_instances = {}
        self._pps = []
        self._progress_hooks = []
        self._download_retcode = 0
        self._num_downloads = 0
        # With logtostderr all screen output goes to stderr instead of stdout.
        self._screen_file = [sys.stdout, sys.stderr][params.get('logtostderr', False)]
        self._err_file = sys.stderr
        self.params = {
            # Default parameters
            'nocheckcertificate': False,
        }
        self.params.update(params)
        self.cache = Cache(self)
        # Warn about an option superseded by a newer equivalent; returns
        # whether the deprecated option was actually set.
        def check_deprecated(param, option, suggestion):
            if self.params.get(param) is not None:
                self.report_warning(
                    '%s is deprecated. Use %s instead.' % (option, suggestion))
                return True
            return False
        if check_deprecated('cn_verification_proxy', '--cn-verification-proxy', '--geo-verification-proxy'):
            if self.params.get('geo_verification_proxy') is None:
                self.params['geo_verification_proxy'] = self.params['cn_verification_proxy']
        check_deprecated('autonumber_size', '--autonumber-size', 'output template with %(autonumber)0Nd, where N in the number of digits')
        check_deprecated('autonumber', '--auto-number', '-o "%(autonumber)s-%(title)s.%(ext)s"')
        check_deprecated('usetitle', '--title', '-o "%(title)s-%(id)s.%(ext)s"')
        if params.get('bidi_workaround', False):
            # Route output through an external bidi filter (bidiv or fribidi)
            # via a pty so right-to-left text renders correctly.
            try:
                import pty
                master, slave = pty.openpty()
                width = compat_get_terminal_size().columns
                if width is None:
                    width_args = []
                else:
                    width_args = ['-w', str(width)]
                sp_kwargs = dict(
                    stdin=subprocess.PIPE,
                    stdout=slave,
                    stderr=self._err_file)
                try:
                    self._output_process = subprocess.Popen(
                        ['bidiv'] + width_args, **sp_kwargs
                    )
                except OSError:
                    # bidiv not found; fall back to fribidi
                    self._output_process = subprocess.Popen(
                        ['fribidi', '-c', 'UTF-8'] + width_args, **sp_kwargs)
                self._output_channel = os.fdopen(master, 'rb')
            except OSError as ose:
                if ose.errno == errno.ENOENT:
                    self.report_warning('Could not find fribidi executable, ignoring --bidi-workaround . Make sure that fribidi is an executable file in one of the directories in your $PATH.')
                else:
                    raise
        if (sys.version_info >= (3,) and sys.platform != 'win32' and
                sys.getfilesystemencoding() in ['ascii', 'ANSI_X3.4-1968'] and
                not params.get('restrictfilenames', False)):
            # On Python 3, the Unicode filesystem API will throw errors (#1474)
            self.report_warning(
                'Assuming --restrict-filenames since file system encoding '
                'cannot encode all characters. '
                'Set the LC_ALL environment variable to fix this.')
            self.params['restrictfilenames'] = True
        if isinstance(params.get('outtmpl'), bytes):
            self.report_warning(
                'Parameter outtmpl is bytes, but should be a unicode string. '
                'Put from __future__ import unicode_literals at the top of your code file or consider switching to Python 3.x.')
        self._setup_opener()
        if auto_init:
            self.print_debug_header()
            self.add_default_info_extractors()
        # Instantiate configured postprocessors; 'key' names the class, the
        # remaining entries become its keyword arguments.
        for pp_def_raw in self.params.get('postprocessors', []):
            pp_class = get_postprocessor(pp_def_raw['key'])
            pp_def = dict(pp_def_raw)
            del pp_def['key']
            pp = pp_class(self, **compat_kwargs(pp_def))
            self.add_post_processor(pp)
        for ph in self.params.get('progress_hooks', []):
            self.add_progress_hook(ph)
        register_socks_protocols()
def warn_if_short_id(self, argv):
# short YouTube ID starting with dash?
idxs = [
i for i, a in enumerate(argv)
if re.match(r'^-[0-9A-Za-z_-]{10}$', a)]
if idxs:
correct_argv = (
['youtube-dl'] +
[a for i, a in enumerate(argv) if i not in idxs] +
['--'] + [argv[i] for i in idxs]
)
self.report_warning(
'Long argument string detected. '
'Use -- to separate parameters and URLs, like this:\n%s\n' %
args_to_str(correct_argv))
def add_info_extractor(self, ie):
"""Add an InfoExtractor object to the end of the list."""
self._ies.append(ie)
if not isinstance(ie, type):
self._ies_instances[ie.ie_key()] = ie
ie.set_downloader(self)
def get_info_extractor(self, ie_key):
"""
Get an instance of an IE with name ie_key, it will try to get one from
the _ies list, if there's no instance it will create a new one and add
it to the extractor list.
"""
ie = self._ies_instances.get(ie_key)
if ie is None:
ie = get_info_extractor(ie_key)()
self.add_info_extractor(ie)
return ie
def add_default_info_extractors(self):
"""
Add the InfoExtractors returned by gen_extractors to the end of the list
"""
for ie in gen_extractor_classes():
self.add_info_extractor(ie)
def add_post_processor(self, pp):
"""Add a PostProcessor object to the end of the chain."""
self._pps.append(pp)
pp.set_downloader(self)
def add_progress_hook(self, ph):
"""Add the progress hook (currently only for the file downloader)"""
self._progress_hooks.append(ph)
    def _bidi_workaround(self, message):
        # Round-trip *message* through the external bidi filter started in
        # __init__ (bidiv/fribidi) so RTL text displays correctly. When the
        # workaround is not active the message is returned unchanged.
        if not hasattr(self, '_output_channel'):
            return message
        assert hasattr(self, '_output_process')
        assert isinstance(message, compat_str)
        line_count = message.count('\n') + 1
        self._output_process.stdin.write((message + '\n').encode('utf-8'))
        self._output_process.stdin.flush()
        # Read back exactly as many lines as were written; the filter is
        # line-oriented, so this keeps writer and reader in lockstep.
        res = ''.join(self._output_channel.readline().decode('utf-8')
                      for _ in range(line_count))
        # Strip the trailing '\n' that was appended before writing.
        return res[:-len('\n')]
def to_screen(self, message, skip_eol=False):
"""Print message to stdout if not in quiet mode."""
return self.to_stdout(message, skip_eol, check_quiet=True)
def _write_string(self, s, out=None):
write_string(s, out=out, encoding=self.params.get('encoding'))
def to_stdout(self, message, skip_eol=False, check_quiet=False):
"""Print message to stdout if not in quiet mode."""
if self.params.get('logger'):
self.params['logger'].debug(message)
elif not check_quiet or not self.params.get('quiet', False):
message = self._bidi_workaround(message)
terminator = ['\n', ''][skip_eol]
output = message + terminator
self._write_string(output, self._screen_file)
def to_stderr(self, message):
"""Print message to stderr."""
assert isinstance(message, compat_str)
if self.params.get('logger'):
self.params['logger'].error(message)
else:
message = self._bidi_workaround(message)
output = message + '\n'
self._write_string(output, self._err_file)
    def to_console_title(self, message):
        # Set the terminal/console window title to *message*; no-op unless
        # the 'consoletitle' option is enabled.
        if not self.params.get('consoletitle', False):
            return
        if compat_os_name == 'nt' and ctypes.windll.kernel32.GetConsoleWindow():
            # c_wchar_p() might not be necessary if `message` is
            # already of type unicode()
            ctypes.windll.kernel32.SetConsoleTitleW(ctypes.c_wchar_p(message))
        elif 'TERM' in os.environ:
            # xterm-style escape: OSC 0 sets both icon name and window title.
            self._write_string('\033]0;%s\007' % message, self._screen_file)
def save_console_title(self):
if not self.params.get('consoletitle', False):
return
if 'TERM' in os.environ:
# Save the title on stack
self._write_string('\033[22;0t', self._screen_file)
def restore_console_title(self):
if not self.params.get('consoletitle', False):
return
if 'TERM' in os.environ:
# Restore the title from stack
self._write_string('\033[23;0t', self._screen_file)
def __enter__(self):
self.save_console_title()
return self
def __exit__(self, *args):
self.restore_console_title()
if self.params.get('cookiefile') is not None:
self.cookiejar.save()
    def trouble(self, message=None, tb=None):
        """Determine action to take when a download problem appears.
        Depending on if the downloader has been configured to ignore
        download errors or not, this method may throw an exception or
        not when errors are found, after printing the message.
        tb, if given, is additional traceback information.
        """
        if message is not None:
            self.to_stderr(message)
        if self.params.get('verbose'):
            if tb is None:
                if sys.exc_info()[0]: # if .trouble has been called from an except block
                    tb = ''
                    # ExtractorError-style exceptions carry the original
                    # exc_info of the error they wrap; include that first.
                    if hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
                        tb += ''.join(traceback.format_exception(*sys.exc_info()[1].exc_info))
                    tb += encode_compat_str(traceback.format_exc())
                else:
                    # Not inside an except block: show the current call stack.
                    tb_data = traceback.format_list(traceback.extract_stack())
                    tb = ''.join(tb_data)
            self.to_stderr(tb)
        if not self.params.get('ignoreerrors', False):
            # Prefer the wrapped exception's exc_info (if any) so DownloadError
            # points at the root cause rather than the wrapper.
            if sys.exc_info()[0] and hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
                exc_info = sys.exc_info()[1].exc_info
            else:
                exc_info = sys.exc_info()
            raise DownloadError(message, exc_info)
        self._download_retcode = 1
def report_warning(self, message):
'''
Print the message to stderr, it will be prefixed with 'WARNING:'
If stderr is a tty file the 'WARNING:' will be colored
'''
if self.params.get('logger') is not None:
self.params['logger'].warning(message)
else:
if self.params.get('no_warnings'):
return
if not self.params.get('no_color') and self._err_file.isatty() and compat_os_name != 'nt':
_msg_header = '\033[0;33mWARNING:\033[0m'
else:
_msg_header = 'WARNING:'
warning_message = '%s %s' % (_msg_header, message)
self.to_stderr(warning_message)
def report_error(self, message, tb=None):
'''
Do the same as trouble, but prefixes the message with 'ERROR:', colored
in red if stderr is a tty file.
'''
if not self.params.get('no_color') and self._err_file.isatty() and compat_os_name != 'nt':
_msg_header = '\033[0;31mERROR:\033[0m'
else:
_msg_header = 'ERROR:'
error_message = '%s %s' % (_msg_header, message)
self.trouble(error_message, tb)
def report_file_already_downloaded(self, file_name):
"""Report file has already been fully downloaded."""
try:
self.to_screen('[download] %s has already been downloaded' % file_name)
except UnicodeEncodeError:
self.to_screen('[download] The file has already been downloaded')
    def prepare_filename(self, info_dict):
        """Generate the output filename."""
        try:
            template_dict = dict(info_dict)
            template_dict['epoch'] = int(time.time())
            autonumber_size = self.params.get('autonumber_size')
            if autonumber_size is None:
                autonumber_size = 5
            # autonumber counts from autonumber_start and advances with each
            # completed download in this session.
            template_dict['autonumber'] = self.params.get('autonumber_start', 1) - 1 + self._num_downloads
            if template_dict.get('resolution') is None:
                # Derive a human-readable resolution from width/height when
                # the extractor did not provide one.
                if template_dict.get('width') and template_dict.get('height'):
                    template_dict['resolution'] = '%dx%d' % (template_dict['width'], template_dict['height'])
                elif template_dict.get('height'):
                    template_dict['resolution'] = '%sp' % template_dict['height']
                elif template_dict.get('width'):
                    template_dict['resolution'] = '%dx?' % template_dict['width']
            sanitize = lambda k, v: sanitize_filename(
                compat_str(v),
                restricted=self.params.get('restrictfilenames'),
                is_id=(k == 'id'))
            # Sanitize every non-numeric scalar value; drop None and
            # container values outright. Missing keys render as 'NA'.
            template_dict = dict((k, v if isinstance(v, compat_numeric_types) else sanitize(k, v))
                                 for k, v in template_dict.items()
                                 if v is not None and not isinstance(v, (list, tuple, dict)))
            template_dict = collections.defaultdict(lambda: 'NA', template_dict)
            outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)
            # For fields playlist_index and autonumber convert all occurrences
            # of %(field)s to %(field)0Nd for backward compatibility
            field_size_compat_map = {
                'playlist_index': len(str(template_dict['n_entries'])),
                'autonumber': autonumber_size,
            }
            FIELD_SIZE_COMPAT_RE = r'(?<!%)%\((?P<field>autonumber|playlist_index)\)s'
            mobj = re.search(FIELD_SIZE_COMPAT_RE, outtmpl)
            if mobj:
                outtmpl = re.sub(
                    FIELD_SIZE_COMPAT_RE,
                    r'%%(\1)0%dd' % field_size_compat_map[mobj.group('field')],
                    outtmpl)
            NUMERIC_FIELDS = set((
                'width', 'height', 'tbr', 'abr', 'asr', 'vbr', 'fps', 'filesize', 'filesize_approx',
                'upload_year', 'upload_month', 'upload_day',
                'duration', 'view_count', 'like_count', 'dislike_count', 'repost_count',
                'average_rating', 'comment_count', 'age_limit',
                'start_time', 'end_time',
                'chapter_number', 'season_number', 'episode_number',
                'track_number', 'disc_number', 'release_year',
                'playlist_index',
            ))
            # Missing numeric fields used together with integer presentation types
            # in format specification will break the argument substitution since
            # string 'NA' is returned for missing fields. We will patch output
            # template for missing fields to meet string presentation type.
            for numeric_field in NUMERIC_FIELDS:
                if numeric_field not in template_dict:
                    # As of [1] format syntax is:
                    # %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
                    # 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
                    FORMAT_RE = r'''(?x)
                        (?<!%)
                        %
                        \({0}\) # mapping key
                        (?:[#0\-+ ]+)? # conversion flags (optional)
                        (?:\d+)? # minimum field width (optional)
                        (?:\.\d+)? # precision (optional)
                        [hlL]? # length modifier (optional)
                        [diouxXeEfFgGcrs%] # conversion type
                    '''
                    outtmpl = re.sub(
                        FORMAT_RE.format(numeric_field),
                        r'%({0})s'.format(numeric_field), outtmpl)
            tmpl = compat_expanduser(outtmpl)
            filename = tmpl % template_dict
            # Temporary fix for #4787
            # 'Treat' all problem characters by passing filename through preferredencoding
            # to workaround encoding issues with subprocess on python2 @ Windows
            if sys.version_info < (3, 0) and sys.platform == 'win32':
                filename = encodeFilename(filename, True).decode(preferredencoding())
            return sanitize_path(filename)
        except ValueError as err:
            self.report_error('Error in output template: ' + str(err) + ' (encoding: ' + repr(preferredencoding()) + ')')
            return None
    def _match_entry(self, info_dict, incomplete):
        """ Returns None iff the file should be downloaded """
        # Any non-None return value is the human-readable reason for skipping.
        video_title = info_dict.get('title', info_dict.get('id', 'video'))
        if 'title' in info_dict:
            # This can happen when we're just evaluating the playlist
            title = info_dict['title']
            matchtitle = self.params.get('matchtitle', False)
            if matchtitle:
                if not re.search(matchtitle, title, re.IGNORECASE):
                    return '"' + title + '" title did not match pattern "' + matchtitle + '"'
            rejecttitle = self.params.get('rejecttitle', False)
            if rejecttitle:
                if re.search(rejecttitle, title, re.IGNORECASE):
                    return '"' + title + '" title matched reject pattern "' + rejecttitle + '"'
        date = info_dict.get('upload_date')
        if date is not None:
            dateRange = self.params.get('daterange', DateRange())
            if date not in dateRange:
                return '%s upload date is not in range %s' % (date_from_str(date).isoformat(), dateRange)
        view_count = info_dict.get('view_count')
        if view_count is not None:
            min_views = self.params.get('min_views')
            if min_views is not None and view_count < min_views:
                return 'Skipping %s, because it has not reached minimum view count (%d/%d)' % (video_title, view_count, min_views)
            max_views = self.params.get('max_views')
            if max_views is not None and view_count > max_views:
                return 'Skipping %s, because it has exceeded the maximum view count (%d/%d)' % (video_title, view_count, max_views)
        if age_restricted(info_dict.get('age_limit'), self.params.get('age_limit')):
            return 'Skipping "%s" because it is age restricted' % video_title
        if self.in_download_archive(info_dict):
            return '%s has already been recorded in archive' % video_title
        # The user-supplied match_filter is only consulted for complete
        # entries; incomplete (flat playlist) entries lack most fields.
        if not incomplete:
            match_filter = self.params.get('match_filter')
            if match_filter is not None:
                ret = match_filter(info_dict)
                if ret is not None:
                    return ret
        return None
@staticmethod
def add_extra_info(info_dict, extra_info):
'''Set the keys from extra_info in info dict if they are missing'''
for key, value in extra_info.items():
info_dict.setdefault(key, value)
    def extract_info(self, url, download=True, ie_key=None, extra_info={},
                     process=True, force_generic_extractor=False):
        '''
        Returns a list with a dictionary for each video we find.
        If 'download', also downloads the videos.
        extra_info is a dict containing the extra values to add to each result
        '''
        if not ie_key and force_generic_extractor:
            ie_key = 'Generic'
        if ie_key:
            ies = [self.get_info_extractor(ie_key)]
        else:
            ies = self._ies
        for ie in ies:
            if not ie.suitable(url):
                continue
            # _ies may hold classes; fetch (or create) the concrete instance.
            ie = self.get_info_extractor(ie.ie_key())
            if not ie.working():
                self.report_warning('The program functionality for this site has been marked as broken, '
                                    'and will probably not work.')
            try:
                ie_result = ie.extract(url)
                if ie_result is None: # Finished already (backwards compatibility; listformats and friends should be moved here)
                    break
                if isinstance(ie_result, list):
                    # Backwards compatibility: old IE result format
                    ie_result = {
                        '_type': 'compat_list',
                        'entries': ie_result,
                    }
                self.add_default_extra_info(ie_result, ie, url)
                if process:
                    return self.process_ie_result(ie_result, download, extra_info)
                else:
                    return ie_result
            except GeoRestrictedError as e:
                msg = e.msg
                if e.countries:
                    msg += '\nThis video is available in %s.' % ', '.join(
                        map(ISO3166Utils.short2full, e.countries))
                msg += '\nYou might want to use a VPN or a proxy server (with --proxy) to workaround.'
                self.report_error(msg)
                break
            except ExtractorError as e: # An error we somewhat expected
                self.report_error(compat_str(e), e.format_traceback())
                break
            except MaxDownloadsReached:
                # Propagate: this is the signal to stop the whole run.
                raise
            except Exception as e:
                if self.params.get('ignoreerrors', False):
                    self.report_error(error_to_compat_str(e), tb=encode_compat_str(traceback.format_exc()))
                    break
                else:
                    raise
        else:
            # for/else: no extractor claimed the URL.
            self.report_error('no suitable InfoExtractor for URL %s' % url)
def add_default_extra_info(self, ie_result, ie, url):
self.add_extra_info(ie_result, {
'extractor': ie.IE_NAME,
'webpage_url': url,
'webpage_url_basename': url_basename(url),
'extractor_key': ie.ie_key(),
})
    def process_ie_result(self, ie_result, download=True, extra_info={}):
        """
        Take the result of the ie(may be modified) and resolve all unresolved
        references (URLs, playlist items).
        It will also download the videos if 'download'.
        Returns the resolved ie_result.
        """
        # Dispatch on the result's '_type': video, url, url_transparent,
        # playlist/multi_video, or the legacy compat_list.
        result_type = ie_result.get('_type', 'video')
        if result_type in ('url', 'url_transparent'):
            ie_result['url'] = sanitize_url(ie_result['url'])
            extract_flat = self.params.get('extract_flat', False)
            # In flat mode, return the unresolved reference as-is instead of
            # recursing into the target URL.
            if ((extract_flat == 'in_playlist' and 'playlist' in extra_info) or
                    extract_flat is True):
                if self.params.get('forcejson', False):
                    self.to_stdout(json.dumps(ie_result))
                return ie_result
        if result_type == 'video':
            self.add_extra_info(ie_result, extra_info)
            return self.process_video_result(ie_result, download=download)
        elif result_type == 'url':
            # We have to add extra_info to the results because it may be
            # contained in a playlist
            return self.extract_info(ie_result['url'],
                                     download,
                                     ie_key=ie_result.get('ie_key'),
                                     extra_info=extra_info)
        elif result_type == 'url_transparent':
            # Use the information from the embedding page
            info = self.extract_info(
                ie_result['url'], ie_key=ie_result.get('ie_key'),
                extra_info=extra_info, download=False, process=False)
            # Fields from the embedding page override the target's, except
            # the routing fields that describe the reference itself.
            force_properties = dict(
                (k, v) for k, v in ie_result.items() if v is not None)
            for f in ('_type', 'url', 'ie_key'):
                if f in force_properties:
                    del force_properties[f]
            new_result = info.copy()
            new_result.update(force_properties)
            assert new_result.get('_type') != 'url_transparent'
            return self.process_ie_result(
                new_result, download=download, extra_info=extra_info)
        elif result_type == 'playlist' or result_type == 'multi_video':
            # We process each entry in the playlist
            playlist = ie_result.get('title') or ie_result.get('id')
            self.to_screen('[download] Downloading playlist: %s' % playlist)
            playlist_results = []
            playliststart = self.params.get('playliststart', 1) - 1
            playlistend = self.params.get('playlistend')
            # For backwards compatibility, interpret -1 as whole list
            if playlistend == -1:
                playlistend = None
            playlistitems_str = self.params.get('playlist_items')
            playlistitems = None
            if playlistitems_str is not None:
                # Expand a spec like '1-3,7' into 1-based indices.
                def iter_playlistitems(format):
                    for string_segment in format.split(','):
                        if '-' in string_segment:
                            start, end = string_segment.split('-')
                            for item in range(int(start), int(end) + 1):
                                yield int(item)
                        else:
                            yield int(string_segment)
                playlistitems = iter_playlistitems(playlistitems_str)
            ie_entries = ie_result['entries']
            # Entries may be a concrete list, a lazily paged list, or any
            # other iterable; each shape is sliced differently below.
            if isinstance(ie_entries, list):
                n_all_entries = len(ie_entries)
                if playlistitems:
                    entries = [
                        ie_entries[i - 1] for i in playlistitems
                        if -n_all_entries <= i - 1 < n_all_entries]
                else:
                    entries = ie_entries[playliststart:playlistend]
                n_entries = len(entries)
                self.to_screen(
                    '[%s] playlist %s: Collected %d video ids (downloading %d of them)' %
                    (ie_result['extractor'], playlist, n_all_entries, n_entries))
            elif isinstance(ie_entries, PagedList):
                if playlistitems:
                    entries = []
                    for item in playlistitems:
                        entries.extend(ie_entries.getslice(
                            item - 1, item
                        ))
                else:
                    entries = ie_entries.getslice(
                        playliststart, playlistend)
                n_entries = len(entries)
                self.to_screen(
                    '[%s] playlist %s: Downloading %d videos' %
                    (ie_result['extractor'], playlist, n_entries))
            else: # iterable
                if playlistitems:
                    entry_list = list(ie_entries)
                    entries = [entry_list[i - 1] for i in playlistitems]
                else:
                    entries = list(itertools.islice(
                        ie_entries, playliststart, playlistend))
                n_entries = len(entries)
                self.to_screen(
                    '[%s] playlist %s: Downloading %d videos' %
                    (ie_result['extractor'], playlist, n_entries))
            if self.params.get('playlistreverse', False):
                entries = entries[::-1]
            if self.params.get('playlistrandom', False):
                random.shuffle(entries)
            x_forwarded_for = ie_result.get('__x_forwarded_for_ip')
            for i, entry in enumerate(entries, 1):
                self.to_screen('[download] Downloading video %s of %s' % (i, n_entries))
                # This __x_forwarded_for_ip thing is a bit ugly but requires
                # minimal changes
                if x_forwarded_for:
                    entry['__x_forwarded_for_ip'] = x_forwarded_for
                extra = {
                    'n_entries': n_entries,
                    'playlist': playlist,
                    'playlist_id': ie_result.get('id'),
                    'playlist_title': ie_result.get('title'),
                    'playlist_index': i + playliststart,
                    'extractor': ie_result['extractor'],
                    'webpage_url': ie_result['webpage_url'],
                    'webpage_url_basename': url_basename(ie_result['webpage_url']),
                    'extractor_key': ie_result['extractor_key'],
                }
                # incomplete=True: flat entries may lack most metadata fields.
                reason = self._match_entry(entry, incomplete=True)
                if reason is not None:
                    self.to_screen('[download] ' + reason)
                    continue
                entry_result = self.process_ie_result(entry,
                                                      download=download,
                                                      extra_info=extra)
                playlist_results.append(entry_result)
            ie_result['entries'] = playlist_results
            self.to_screen('[download] Finished downloading playlist: %s' % playlist)
            return ie_result
        elif result_type == 'compat_list':
            self.report_warning(
                'Extractor %s returned a compat_list result. '
                'It needs to be updated.' % ie_result.get('extractor'))
            def _fixup(r):
                self.add_extra_info(
                    r,
                    {
                        'extractor': ie_result['extractor'],
                        'webpage_url': ie_result['webpage_url'],
                        'webpage_url_basename': url_basename(ie_result['webpage_url']),
                        'extractor_key': ie_result['extractor_key'],
                    }
                )
                return r
            ie_result['entries'] = [
                self.process_ie_result(_fixup(r), download, extra_info)
                for r in ie_result['entries']
            ]
            return ie_result
        else:
            raise Exception('Invalid result type: %s' % result_type)
def _build_format_filter(self, filter_spec):
" Returns a function to filter the formats according to the filter_spec "
OPERATORS = {
'<': operator.lt,
'<=': operator.le,
'>': operator.gt,
'>=': operator.ge,
'=': operator.eq,
'!=': operator.ne,
}
operator_rex = re.compile(r'''(?x)\s*
(?P<key>width|height|tbr|abr|vbr|asr|filesize|fps)
\s*(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
(?P<value>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)
$
''' % '|'.join(map(re.escape, OPERATORS.keys())))
m = operator_rex.search(filter_spec)
if m:
try:
comparison_value = int(m.group('value'))
except ValueError:
comparison_value = parse_filesize(m.group('value'))
if comparison_value is None:
comparison_value = parse_filesize(m.group('value') + 'B')
if comparison_value is None:
raise ValueError(
'Invalid value %r in format specification %r' % (
m.group('value'), filter_spec))
op = OPERATORS[m.group('op')]
if not m:
STR_OPERATORS = {
'=': operator.eq,
'!=': operator.ne,
'^=': lambda attr, value: attr.startswith(value),
'$=': lambda attr, value: attr.endswith(value),
'*=': lambda attr, value: value in attr,
}
str_operator_rex = re.compile(r'''(?x)
\s*(?P<key>ext|acodec|vcodec|container|protocol|format_id)
\s*(?P<op>%s)(?P<none_inclusive>\s*\?)?
\s*(?P<value>[a-zA-Z0-9._-]+)
\s*$
''' % '|'.join(map(re.escape, STR_OPERATORS.keys())))
m = str_operator_rex.search(filter_spec)
if m:
comparison_value = m.group('value')
op = STR_OPERATORS[m.group('op')]
if not m:
raise ValueError('Invalid filter specification %r' % filter_spec)
def _filter(f):
actual_value = f.get(m.group('key'))
if actual_value is None:
return m.group('none_inclusive')
return op(actual_value, comparison_value)
return _filter
def build_format_selector(self, format_spec):
    """Compile the --format specification string into a selector function.

    The returned callable takes a ctx dict ({'formats': [...],
    'incomplete_formats': bool}) and yields the selected format dicts
    (or merged video+audio pseudo-formats for 'a+b' selectors).
    Raises SyntaxError on an invalid specification.
    """
    def syntax_error(note, start):
        # Builds (does not raise) a SyntaxError with a caret under
        # column start[1] of the offending spec
        message = (
            'Invalid format specification: '
            '{0}\n\t{1}\n\t{2}^'.format(note, format_spec, ' ' * start[1]))
        return SyntaxError(message)

    # Node types of the parsed selector tree
    PICKFIRST = 'PICKFIRST'  # 'a/b': first alternative that yields formats
    MERGE = 'MERGE'          # 'a+b': video+audio merge
    SINGLE = 'SINGLE'        # plain name such as 'best' or a format_id
    GROUP = 'GROUP'          # '(...)': parenthesized sub-expression
    FormatSelector = collections.namedtuple('FormatSelector', ['type', 'selector', 'filters'])

    def _parse_filter(tokens):
        # Consume tokens up to the matching ']' and return the raw filter
        # string; it is compiled later by self._build_format_filter
        filter_parts = []
        for type, string, start, _, _ in tokens:
            if type == tokenize.OP and string == ']':
                return ''.join(filter_parts)
            else:
                filter_parts.append(string)

    def _remove_unused_ops(tokens):
        # Remove operators that we don't use and join them with the surrounding strings
        # for example: 'mp4' '-' 'baseline' '-' '16x9' is converted to 'mp4-baseline-16x9'
        ALLOWED_OPS = ('/', '+', ',', '(', ')')
        last_string, last_start, last_end, last_line = None, None, None, None
        for type, string, start, end, line in tokens:
            if type == tokenize.OP and string == '[':
                if last_string:
                    yield tokenize.NAME, last_string, last_start, last_end, last_line
                    last_string = None
                yield type, string, start, end, line
                # everything inside brackets will be handled by _parse_filter
                for type, string, start, end, line in tokens:
                    yield type, string, start, end, line
                    if type == tokenize.OP and string == ']':
                        break
            elif type == tokenize.OP and string in ALLOWED_OPS:
                if last_string:
                    yield tokenize.NAME, last_string, last_start, last_end, last_line
                    last_string = None
                yield type, string, start, end, line
            elif type in [tokenize.NAME, tokenize.NUMBER, tokenize.OP]:
                # Accumulate adjacent name/number/op tokens into one NAME
                if not last_string:
                    last_string = string
                    last_start = start
                    last_end = end
                else:
                    last_string += string
        if last_string:
            yield tokenize.NAME, last_string, last_start, last_end, last_line

    def _parse_format_selection(tokens, inside_merge=False, inside_choice=False, inside_group=False):
        # Recursive-descent parse of a (possibly nested) selector list;
        # the inside_* flags tell it which delimiters end the current level
        selectors = []
        current_selector = None
        for type, string, start, _, _ in tokens:
            # ENCODING is only defined in python 3.x
            if type == getattr(tokenize, 'ENCODING', None):
                continue
            elif type in [tokenize.NAME, tokenize.NUMBER]:
                current_selector = FormatSelector(SINGLE, string, [])
            elif type == tokenize.OP:
                if string == ')':
                    if not inside_group:
                        # ')' will be handled by the parentheses group
                        tokens.restore_last_token()
                    break
                elif inside_merge and string in ['/', ',']:
                    tokens.restore_last_token()
                    break
                elif inside_choice and string == ',':
                    tokens.restore_last_token()
                    break
                elif string == ',':
                    if not current_selector:
                        raise syntax_error('"," must follow a format selector', start)
                    selectors.append(current_selector)
                    current_selector = None
                elif string == '/':
                    if not current_selector:
                        raise syntax_error('"/" must follow a format selector', start)
                    first_choice = current_selector
                    second_choice = _parse_format_selection(tokens, inside_choice=True)
                    current_selector = FormatSelector(PICKFIRST, (first_choice, second_choice), [])
                elif string == '[':
                    # A bare '[filter]' implicitly filters 'best'
                    if not current_selector:
                        current_selector = FormatSelector(SINGLE, 'best', [])
                    format_filter = _parse_filter(tokens)
                    current_selector.filters.append(format_filter)
                elif string == '(':
                    if current_selector:
                        raise syntax_error('Unexpected "("', start)
                    group = _parse_format_selection(tokens, inside_group=True)
                    current_selector = FormatSelector(GROUP, group, [])
                elif string == '+':
                    video_selector = current_selector
                    audio_selector = _parse_format_selection(tokens, inside_merge=True)
                    if not video_selector or not audio_selector:
                        raise syntax_error('"+" must be between two format selectors', start)
                    current_selector = FormatSelector(MERGE, (video_selector, audio_selector), [])
                else:
                    raise syntax_error('Operator not recognized: "{0}"'.format(string), start)
            elif type == tokenize.ENDMARKER:
                break
        if current_selector:
            selectors.append(current_selector)
        return selectors

    def _build_selector_function(selector):
        # Turn a selector tree (or a list of trees) into a generator
        # function over ctx
        if isinstance(selector, list):
            fs = [_build_selector_function(s) for s in selector]

            def selector_function(ctx):
                for f in fs:
                    for format in f(ctx):
                        yield format
            return selector_function
        elif selector.type == GROUP:
            selector_function = _build_selector_function(selector.selector)
        elif selector.type == PICKFIRST:
            fs = [_build_selector_function(s) for s in selector.selector]

            def selector_function(ctx):
                for f in fs:
                    picked_formats = list(f(ctx))
                    if picked_formats:
                        return picked_formats
                return []
        elif selector.type == SINGLE:
            format_spec = selector.selector

            def selector_function(ctx):
                formats = list(ctx['formats'])
                if not formats:
                    return
                if format_spec == 'all':
                    for f in formats:
                        yield f
                elif format_spec in ['best', 'worst', None]:
                    # Formats list is assumed sorted worst-to-best
                    format_idx = 0 if format_spec == 'worst' else -1
                    audiovideo_formats = [
                        f for f in formats
                        if f.get('vcodec') != 'none' and f.get('acodec') != 'none']
                    if audiovideo_formats:
                        yield audiovideo_formats[format_idx]
                    # for extractors with incomplete formats (audio only (soundcloud)
                    # or video only (imgur)) we will fallback to best/worst
                    # {video,audio}-only format
                    elif ctx['incomplete_formats']:
                        yield formats[format_idx]
                elif format_spec == 'bestaudio':
                    audio_formats = [
                        f for f in formats
                        if f.get('vcodec') == 'none']
                    if audio_formats:
                        yield audio_formats[-1]
                elif format_spec == 'worstaudio':
                    audio_formats = [
                        f for f in formats
                        if f.get('vcodec') == 'none']
                    if audio_formats:
                        yield audio_formats[0]
                elif format_spec == 'bestvideo':
                    video_formats = [
                        f for f in formats
                        if f.get('acodec') == 'none']
                    if video_formats:
                        yield video_formats[-1]
                elif format_spec == 'worstvideo':
                    video_formats = [
                        f for f in formats
                        if f.get('acodec') == 'none']
                    if video_formats:
                        yield video_formats[0]
                else:
                    # Either a known extension or an explicit format_id
                    extensions = ['mp4', 'flv', 'webm', '3gp', 'm4a', 'mp3', 'ogg', 'aac', 'wav']
                    if format_spec in extensions:
                        filter_f = lambda f: f['ext'] == format_spec
                    else:
                        filter_f = lambda f: f['format_id'] == format_spec
                    matches = list(filter(filter_f, formats))
                    if matches:
                        yield matches[-1]
        elif selector.type == MERGE:
            def _merge(formats_info):
                format_1, format_2 = [f['format_id'] for f in formats_info]
                # The first format must contain the video and the
                # second the audio
                if formats_info[0].get('vcodec') == 'none':
                    self.report_error('The first format must '
                                      'contain the video, try using '
                                      '"-f %s+%s"' % (format_2, format_1))
                    return
                # Formats must be opposite (video+audio)
                if formats_info[0].get('acodec') == 'none' and formats_info[1].get('acodec') == 'none':
                    self.report_error(
                        'Both formats %s and %s are video-only, you must specify "-f video+audio"'
                        % (format_1, format_2))
                    return
                output_ext = (
                    formats_info[0]['ext']
                    if self.params.get('merge_output_format') is None
                    else self.params['merge_output_format'])
                # Merged pseudo-format: video attributes from the first
                # entry, audio attributes from the second
                return {
                    'requested_formats': formats_info,
                    'format': '%s+%s' % (formats_info[0].get('format'),
                                         formats_info[1].get('format')),
                    'format_id': '%s+%s' % (formats_info[0].get('format_id'),
                                            formats_info[1].get('format_id')),
                    'width': formats_info[0].get('width'),
                    'height': formats_info[0].get('height'),
                    'resolution': formats_info[0].get('resolution'),
                    'fps': formats_info[0].get('fps'),
                    'vcodec': formats_info[0].get('vcodec'),
                    'vbr': formats_info[0].get('vbr'),
                    'stretched_ratio': formats_info[0].get('stretched_ratio'),
                    'acodec': formats_info[1].get('acodec'),
                    'abr': formats_info[1].get('abr'),
                    'ext': output_ext,
                }
            video_selector, audio_selector = map(_build_selector_function, selector.selector)

            def selector_function(ctx):
                # Deep-copy ctx so each sub-selector sees pristine formats
                for pair in itertools.product(
                        video_selector(copy.deepcopy(ctx)), audio_selector(copy.deepcopy(ctx))):
                    yield _merge(pair)

        # Wrap the node's selector with its '[...]' filters (if any)
        filters = [self._build_format_filter(f) for f in selector.filters]

        def final_selector(ctx):
            ctx_copy = copy.deepcopy(ctx)
            for _filter in filters:
                ctx_copy['formats'] = list(filter(_filter, ctx_copy['formats']))
            return selector_function(ctx_copy)
        return final_selector

    stream = io.BytesIO(format_spec.encode('utf-8'))
    try:
        tokens = list(_remove_unused_ops(compat_tokenize_tokenize(stream.readline)))
    except tokenize.TokenError:
        raise syntax_error('Missing closing/opening brackets or parenthesis', (0, len(format_spec)))

    class TokenIterator(object):
        # Token stream supporting one-token pushback (restore_last_token)
        def __init__(self, tokens):
            self.tokens = tokens
            self.counter = 0

        def __iter__(self):
            return self

        def __next__(self):
            if self.counter >= len(self.tokens):
                raise StopIteration()
            value = self.tokens[self.counter]
            self.counter += 1
            return value

        next = __next__  # Python 2 compatibility

        def restore_last_token(self):
            self.counter -= 1

    parsed_selector = _parse_format_selection(iter(TokenIterator(tokens)))
    return _build_selector_function(parsed_selector)
def _calc_headers(self, info_dict):
    """Assemble the HTTP headers for this video: the global defaults,
    any extractor-supplied 'http_headers', the matching cookies, and
    the geo-bypass X-Forwarded-For IP (if not already set)."""
    headers = std_headers.copy()

    extra_headers = info_dict.get('http_headers')
    if extra_headers:
        headers.update(extra_headers)

    cookie_header = self._calc_cookies(info_dict)
    if cookie_header:
        headers['Cookie'] = cookie_header

    if 'X-Forwarded-For' not in headers:
        forwarded_ip = info_dict.get('__x_forwarded_for_ip')
        if forwarded_ip:
            headers['X-Forwarded-For'] = forwarded_ip

    return headers
def _calc_cookies(self, info_dict):
    """Return the Cookie header value the cookiejar would send for this
    video's URL (None if no cookies apply)."""
    request = sanitized_Request(info_dict['url'])
    # Let the jar fill in the Cookie header for this URL
    self.cookiejar.add_cookie_header(request)
    return request.get_header('Cookie')
def process_video_result(self, info_dict, download=True):
    """Fill in missing metadata of a single video result (mutating
    info_dict in place), pick the requested formats and, when
    *download* is true, download them via process_info.

    Returns the updated info_dict; returns early (None) for the
    pure listing modes (--list-thumbnails, --list-subs, -F).
    Raises ExtractorError on missing mandatory fields or when no
    format matches the request.
    """
    assert info_dict.get('_type', 'video') == 'video'

    if 'id' not in info_dict:
        raise ExtractorError('Missing "id" field in extractor result')
    if 'title' not in info_dict:
        raise ExtractorError('Missing "title" field in extractor result')

    if not isinstance(info_dict['id'], compat_str):
        self.report_warning('"id" field is not a string - forcing string conversion')
        info_dict['id'] = compat_str(info_dict['id'])

    if 'playlist' not in info_dict:
        # It isn't part of a playlist
        info_dict['playlist'] = None
        info_dict['playlist_index'] = None

    thumbnails = info_dict.get('thumbnails')
    if thumbnails is None:
        # Promote a single 'thumbnail' field into the 'thumbnails' list
        thumbnail = info_dict.get('thumbnail')
        if thumbnail:
            info_dict['thumbnails'] = thumbnails = [{'url': thumbnail}]
    if thumbnails:
        # Sort worst-to-best by preference, then size, then id/url
        thumbnails.sort(key=lambda t: (
            t.get('preference') if t.get('preference') is not None else -1,
            t.get('width') if t.get('width') is not None else -1,
            t.get('height') if t.get('height') is not None else -1,
            t.get('id') if t.get('id') is not None else '', t.get('url')))
        for i, t in enumerate(thumbnails):
            t['url'] = sanitize_url(t['url'])
            if t.get('width') and t.get('height'):
                t['resolution'] = '%dx%d' % (t['width'], t['height'])
            if t.get('id') is None:
                t['id'] = '%d' % i

    if self.params.get('list_thumbnails'):
        self.list_thumbnails(info_dict)
        return

    thumbnail = info_dict.get('thumbnail')
    if thumbnail:
        info_dict['thumbnail'] = sanitize_url(thumbnail)
    elif thumbnails:
        # List is sorted worst-to-best, so the last entry is the best
        info_dict['thumbnail'] = thumbnails[-1]['url']

    if 'display_id' not in info_dict and 'id' in info_dict:
        info_dict['display_id'] = info_dict['id']

    if info_dict.get('upload_date') is None and info_dict.get('timestamp') is not None:
        # Working around out-of-range timestamp values (e.g. negative ones on Windows,
        # see http://bugs.python.org/issue1646728)
        try:
            upload_date = datetime.datetime.utcfromtimestamp(info_dict['timestamp'])
            info_dict['upload_date'] = upload_date.strftime('%Y%m%d')
        except (ValueError, OverflowError, OSError):
            pass

    # Auto generate title fields corresponding to the *_number fields when missing
    # in order to always have clean titles. This is very common for TV series.
    for field in ('chapter', 'season', 'episode'):
        if info_dict.get('%s_number' % field) is not None and not info_dict.get(field):
            info_dict[field] = '%s %d' % (field.capitalize(), info_dict['%s_number' % field])

    subtitles = info_dict.get('subtitles')
    if subtitles:
        for _, subtitle in subtitles.items():
            for subtitle_format in subtitle:
                if subtitle_format.get('url'):
                    subtitle_format['url'] = sanitize_url(subtitle_format['url'])
                if subtitle_format.get('ext') is None:
                    # Derive the ext from the subtitle URL when missing
                    subtitle_format['ext'] = determine_ext(subtitle_format['url']).lower()

    if self.params.get('listsubtitles', False):
        if 'automatic_captions' in info_dict:
            self.list_subtitles(info_dict['id'], info_dict.get('automatic_captions'), 'automatic captions')
        self.list_subtitles(info_dict['id'], subtitles, 'subtitles')
        return
    info_dict['requested_subtitles'] = self.process_subtitles(
        info_dict['id'], subtitles,
        info_dict.get('automatic_captions'))

    # We now pick which formats have to be downloaded
    if info_dict.get('formats') is None:
        # There's only one format available
        formats = [info_dict]
    else:
        formats = info_dict['formats']

    if not formats:
        raise ExtractorError('No video formats found!')

    formats_dict = {}

    # We check that all the formats have the format and format_id fields
    for i, format in enumerate(formats):
        if 'url' not in format:
            raise ExtractorError('Missing "url" key in result (index %d)' % i)
        format['url'] = sanitize_url(format['url'])
        if format.get('format_id') is None:
            format['format_id'] = compat_str(i)
        else:
            # Sanitize format_id from characters used in format selector expression
            format['format_id'] = re.sub(r'[\s,/+\[\]()]', '_', format['format_id'])
        format_id = format['format_id']
        if format_id not in formats_dict:
            formats_dict[format_id] = []
        formats_dict[format_id].append(format)

    # Make sure all formats have unique format_id
    for format_id, ambiguous_formats in formats_dict.items():
        if len(ambiguous_formats) > 1:
            for i, format in enumerate(ambiguous_formats):
                format['format_id'] = '%s-%d' % (format_id, i)

    for i, format in enumerate(formats):
        if format.get('format') is None:
            format['format'] = '{id} - {res}{note}'.format(
                id=format['format_id'],
                res=self.format_resolution(format),
                note=' ({0})'.format(format['format_note']) if format.get('format_note') is not None else '',
            )
        # Automatically determine file extension if missing
        if format.get('ext') is None:
            format['ext'] = determine_ext(format['url']).lower()
        # Automatically determine protocol if missing (useful for format
        # selection purposes)
        if format.get('protocol') is None:
            format['protocol'] = determine_protocol(format)
        # Add HTTP headers, so that external programs can use them from the
        # json output
        full_format_info = info_dict.copy()
        full_format_info.update(format)
        format['http_headers'] = self._calc_headers(full_format_info)
    # Remove private housekeeping stuff
    if '__x_forwarded_for_ip' in info_dict:
        del info_dict['__x_forwarded_for_ip']

    # TODO Central sorting goes here

    if formats[0] is not info_dict:
        # only set the 'formats' fields if the original info_dict list them
        # otherwise we end up with a circular reference, the first (and unique)
        # element in the 'formats' field in info_dict is info_dict itself,
        # which can't be exported to json
        info_dict['formats'] = formats
    if self.params.get('listformats'):
        self.list_formats(info_dict)
        return

    req_format = self.params.get('format')
    if req_format is None:
        # Default: 'bestvideo+bestaudio/best' when we can merge (not
        # streaming to stdout, not live, ffmpeg/avconv usable)
        req_format_list = []
        if (self.params.get('outtmpl', DEFAULT_OUTTMPL) != '-' and
                not info_dict.get('is_live')):
            merger = FFmpegMergerPP(self)
            if merger.available and merger.can_merge():
                req_format_list.append('bestvideo+bestaudio')
        req_format_list.append('best')
        req_format = '/'.join(req_format_list)
    format_selector = self.build_format_selector(req_format)

    # While in format selection we may need to have an access to the original
    # format set in order to calculate some metrics or do some processing.
    # For now we need to be able to guess whether original formats provided
    # by extractor are incomplete or not (i.e. whether extractor provides only
    # video-only or audio-only formats) for proper formats selection for
    # extractors with such incomplete formats (see
    # https://github.com/rg3/youtube-dl/pull/5556).
    # Since formats may be filtered during format selection and may not match
    # the original formats the results may be incorrect. Thus original formats
    # or pre-calculated metrics should be passed to format selection routines
    # as well.
    # We will pass a context object containing all necessary additional data
    # instead of just formats.
    # This fixes incorrect format selection issue (see
    # https://github.com/rg3/youtube-dl/issues/10083).
    incomplete_formats = (
        # All formats are video-only or
        all(f.get('vcodec') != 'none' and f.get('acodec') == 'none' for f in formats) or
        # all formats are audio-only
        all(f.get('vcodec') == 'none' and f.get('acodec') != 'none' for f in formats))

    ctx = {
        'formats': formats,
        'incomplete_formats': incomplete_formats,
    }

    formats_to_download = list(format_selector(ctx))
    if not formats_to_download:
        raise ExtractorError('requested format not available',
                             expected=True)

    if download:
        if len(formats_to_download) > 1:
            self.to_screen('[info] %s: downloading video in %s formats' % (info_dict['id'], len(formats_to_download)))
        for format in formats_to_download:
            new_info = dict(info_dict)
            new_info.update(format)
            self.process_info(new_info)
    # We update the info dict with the best quality format (backwards compatibility)
    info_dict.update(formats_to_download[-1])
    return info_dict
def process_subtitles(self, video_id, normal_subtitles, automatic_captions):
    """Select the requested subtitles and their format"""
    # Pool of candidate subtitles; real subtitles take precedence
    # over automatic captions for the same language
    pool = {}
    if normal_subtitles and self.params.get('writesubtitles'):
        pool.update(normal_subtitles)
    if automatic_captions and self.params.get('writeautomaticsub'):
        for lang, cap_info in automatic_captions.items():
            pool.setdefault(lang, cap_info)

    wants_subs = (self.params.get('writesubtitles') or
                  self.params.get('writeautomaticsub'))
    if not wants_subs or not pool:
        return None

    # Which languages to keep
    if self.params.get('allsubtitles', False):
        wanted_langs = pool.keys()
    elif self.params.get('subtitleslangs', False):
        wanted_langs = self.params.get('subtitleslangs')
    elif 'en' in pool:
        wanted_langs = ['en']
    else:
        wanted_langs = [list(pool.keys())[0]]

    query = self.params.get('subtitlesformat', 'best')
    preference = query.split('/') if query else []

    def _pick(formats):
        # Honour the '/'-separated format preference list; returns the
        # chosen format and whether any preference actually matched
        for ext in preference:
            if ext == 'best':
                return formats[-1], True
            candidates = [f for f in formats if f['ext'] == ext]
            if candidates:
                return candidates[-1], True
        return formats[-1], False

    selected = {}
    for lang in wanted_langs:
        formats = pool.get(lang)
        if formats is None:
            self.report_warning('%s subtitles not available for %s' % (lang, video_id))
            continue
        chosen, matched = _pick(formats)
        if not matched:
            self.report_warning(
                'No subtitle format found matching "%s" for language %s, '
                'using %s' % (query, lang, chosen['ext']))
        selected[lang] = chosen
    return selected
def process_info(self, info_dict):
    """Process a single resolved IE result."""

    assert info_dict.get('_type', 'video') == 'video'

    max_downloads = self.params.get('max_downloads')
    if max_downloads is not None:
        if self._num_downloads >= int(max_downloads):
            raise MaxDownloadsReached()

    # Keep the untruncated title around; 'title' itself may be shortened
    info_dict['fulltitle'] = info_dict['title']
    if len(info_dict['title']) > 200:
        info_dict['title'] = info_dict['title'][:197] + '...'

    if 'format' not in info_dict:
        info_dict['format'] = info_dict['ext']

    reason = self._match_entry(info_dict, incomplete=False)
    if reason is not None:
        self.to_screen('[download] ' + reason)
        return

    self._num_downloads += 1

    info_dict['_filename'] = filename = self.prepare_filename(info_dict)

    # Forced printings
    if self.params.get('forcetitle', False):
        self.to_stdout(info_dict['fulltitle'])
    if self.params.get('forceid', False):
        self.to_stdout(info_dict['id'])
    if self.params.get('forceurl', False):
        if info_dict.get('requested_formats') is not None:
            for f in info_dict['requested_formats']:
                self.to_stdout(f['url'] + f.get('play_path', ''))
        else:
            # For RTMP URLs, also include the playpath
            self.to_stdout(info_dict['url'] + info_dict.get('play_path', ''))
    if self.params.get('forcethumbnail', False) and info_dict.get('thumbnail') is not None:
        self.to_stdout(info_dict['thumbnail'])
    if self.params.get('forcedescription', False) and info_dict.get('description') is not None:
        self.to_stdout(info_dict['description'])
    if self.params.get('forcefilename', False) and filename is not None:
        self.to_stdout(filename)
    if self.params.get('forceduration', False) and info_dict.get('duration') is not None:
        self.to_stdout(formatSeconds(info_dict['duration']))
    if self.params.get('forceformat', False):
        self.to_stdout(info_dict['format'])
    if self.params.get('forcejson', False):
        self.to_stdout(json.dumps(info_dict))

    # Do nothing else if in simulate mode
    if self.params.get('simulate', False):
        return

    if filename is None:
        return

    # Create the target directory (if any) before writing anything
    try:
        dn = os.path.dirname(sanitize_path(encodeFilename(filename)))
        if dn and not os.path.exists(dn):
            os.makedirs(dn)
    except (OSError, IOError) as err:
        self.report_error('unable to create directory ' + error_to_compat_str(err))
        return

    if self.params.get('writedescription', False):
        descfn = replace_extension(filename, 'description', info_dict.get('ext'))
        if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(descfn)):
            self.to_screen('[info] Video description is already present')
        elif info_dict.get('description') is None:
            self.report_warning('There\'s no description to write.')
        else:
            try:
                self.to_screen('[info] Writing video description to: ' + descfn)
                with io.open(encodeFilename(descfn), 'w', encoding='utf-8') as descfile:
                    descfile.write(info_dict['description'])
            except (OSError, IOError):
                self.report_error('Cannot write description file ' + descfn)
                return

    if self.params.get('writeannotations', False):
        annofn = replace_extension(filename, 'annotations.xml', info_dict.get('ext'))
        if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(annofn)):
            self.to_screen('[info] Video annotations are already present')
        else:
            try:
                self.to_screen('[info] Writing video annotations to: ' + annofn)
                with io.open(encodeFilename(annofn), 'w', encoding='utf-8') as annofile:
                    annofile.write(info_dict['annotations'])
            except (KeyError, TypeError):
                # Missing or non-string 'annotations' entry
                self.report_warning('There are no annotations to write.')
            except (OSError, IOError):
                self.report_error('Cannot write annotations file: ' + annofn)
                return

    subtitles_are_requested = any([self.params.get('writesubtitles', False),
                                   self.params.get('writeautomaticsub')])

    if subtitles_are_requested and info_dict.get('requested_subtitles'):
        # subtitles download errors are already managed as troubles in relevant IE
        # that way it will silently go on when used with unsupporting IE
        subtitles = info_dict['requested_subtitles']
        ie = self.get_info_extractor(info_dict['extractor_key'])
        for sub_lang, sub_info in subtitles.items():
            sub_format = sub_info['ext']
            if sub_info.get('data') is not None:
                sub_data = sub_info['data']
            else:
                try:
                    sub_data = ie._download_webpage(
                        sub_info['url'], info_dict['id'], note=False)
                except ExtractorError as err:
                    self.report_warning('Unable to download subtitle for "%s": %s' %
                                        (sub_lang, error_to_compat_str(err.cause)))
                    continue
            try:
                sub_filename = subtitles_filename(filename, sub_lang, sub_format)
                if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(sub_filename)):
                    self.to_screen('[info] Video subtitle %s.%s is already_present' % (sub_lang, sub_format))
                else:
                    self.to_screen('[info] Writing video subtitles to: ' + sub_filename)
                    # Use newline='' to prevent conversion of newline characters
                    # See https://github.com/rg3/youtube-dl/issues/10268
                    with io.open(encodeFilename(sub_filename), 'w', encoding='utf-8', newline='') as subfile:
                        subfile.write(sub_data)
            except (OSError, IOError):
                self.report_error('Cannot write subtitles file ' + sub_filename)
                return

    if self.params.get('writeinfojson', False):
        infofn = replace_extension(filename, 'info.json', info_dict.get('ext'))
        if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(infofn)):
            self.to_screen('[info] Video description metadata is already present')
        else:
            self.to_screen('[info] Writing video description metadata as JSON to: ' + infofn)
            try:
                write_json_file(self.filter_requested_info(info_dict), infofn)
            except (OSError, IOError):
                self.report_error('Cannot write metadata to JSON file ' + infofn)
                return

    self._write_thumbnails(info_dict, filename)

    if not self.params.get('skip_download', False):
        try:
            def dl(name, info):
                # Hand off to the matching FileDownloader implementation
                fd = get_suitable_downloader(info, self.params)(self, self.params)
                for ph in self._progress_hooks:
                    fd.add_progress_hook(ph)
                if self.params.get('verbose'):
                    self.to_stdout('[debug] Invoking downloader on %r' % info.get('url'))
                return fd.download(name, info)

            if info_dict.get('requested_formats') is not None:
                # Merged video+audio download: fetch each part, then merge
                downloaded = []
                success = True
                merger = FFmpegMergerPP(self)
                if not merger.available:
                    postprocessors = []
                    self.report_warning('You have requested multiple '
                                        'formats but ffmpeg or avconv are not installed.'
                                        ' The formats won\'t be merged.')
                else:
                    postprocessors = [merger]

                def compatible_formats(formats):
                    video, audio = formats
                    # Check extension
                    # NOTE(review): the operands look swapped (video_ext
                    # reads audio's ext and vice versa) - harmless for this
                    # symmetric same-group membership test, but confirm.
                    video_ext, audio_ext = audio.get('ext'), video.get('ext')
                    if video_ext and audio_ext:
                        # NOTE(review): ('webm') is a plain string, not a
                        # 1-tuple; 'in' then does substring matching, which
                        # still accepts exactly 'webm' - confirm intended.
                        COMPATIBLE_EXTS = (
                            ('mp3', 'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'ismv', 'isma'),
                            ('webm')
                        )
                        for exts in COMPATIBLE_EXTS:
                            if video_ext in exts and audio_ext in exts:
                                return True
                    # TODO: Check acodec/vcodec
                    return False

                filename_real_ext = os.path.splitext(filename)[1][1:]
                filename_wo_ext = (
                    os.path.splitext(filename)[0]
                    if filename_real_ext == info_dict['ext']
                    else filename)
                requested_formats = info_dict['requested_formats']
                if self.params.get('merge_output_format') is None and not compatible_formats(requested_formats):
                    info_dict['ext'] = 'mkv'
                    self.report_warning(
                        'Requested formats are incompatible for merge and will be merged into mkv.')
                # Ensure filename always has a correct extension for successful merge
                filename = '%s.%s' % (filename_wo_ext, info_dict['ext'])
                if os.path.exists(encodeFilename(filename)):
                    self.to_screen(
                        '[download] %s has already been downloaded and '
                        'merged' % filename)
                else:
                    for f in requested_formats:
                        new_info = dict(info_dict)
                        new_info.update(f)
                        fname = self.prepare_filename(new_info)
                        fname = prepend_extension(fname, 'f%s' % f['format_id'], new_info['ext'])
                        downloaded.append(fname)
                        partial_success = dl(fname, new_info)
                        success = success and partial_success
                    info_dict['__postprocessors'] = postprocessors
                    info_dict['__files_to_merge'] = downloaded
            else:
                # Just a single file
                success = dl(filename, info_dict)
        except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
            self.report_error('unable to download video data: %s' % error_to_compat_str(err))
            return
        except (OSError, IOError) as err:
            raise UnavailableVideoError(err)
        except (ContentTooShortError, ) as err:
            self.report_error('content too short (expected %s bytes and served %s)' % (err.expected, err.downloaded))
            return

        if success and filename != '-':
            # Fixup content
            fixup_policy = self.params.get('fixup')
            if fixup_policy is None:
                fixup_policy = 'detect_or_warn'

            INSTALL_FFMPEG_MESSAGE = 'Install ffmpeg or avconv to fix this automatically.'

            stretched_ratio = info_dict.get('stretched_ratio')
            if stretched_ratio is not None and stretched_ratio != 1:
                if fixup_policy == 'warn':
                    self.report_warning('%s: Non-uniform pixel ratio (%s)' % (
                        info_dict['id'], stretched_ratio))
                elif fixup_policy == 'detect_or_warn':
                    stretched_pp = FFmpegFixupStretchedPP(self)
                    if stretched_pp.available:
                        info_dict.setdefault('__postprocessors', [])
                        info_dict['__postprocessors'].append(stretched_pp)
                    else:
                        self.report_warning(
                            '%s: Non-uniform pixel ratio (%s). %s'
                            % (info_dict['id'], stretched_ratio, INSTALL_FFMPEG_MESSAGE))
                else:
                    assert fixup_policy in ('ignore', 'never')

            if (info_dict.get('requested_formats') is None and
                    info_dict.get('container') == 'm4a_dash'):
                if fixup_policy == 'warn':
                    self.report_warning(
                        '%s: writing DASH m4a. '
                        'Only some players support this container.'
                        % info_dict['id'])
                elif fixup_policy == 'detect_or_warn':
                    fixup_pp = FFmpegFixupM4aPP(self)
                    if fixup_pp.available:
                        info_dict.setdefault('__postprocessors', [])
                        info_dict['__postprocessors'].append(fixup_pp)
                    else:
                        self.report_warning(
                            '%s: writing DASH m4a. '
                            'Only some players support this container. %s'
                            % (info_dict['id'], INSTALL_FFMPEG_MESSAGE))
                else:
                    assert fixup_policy in ('ignore', 'never')

            if (info_dict.get('protocol') == 'm3u8_native' or
                    info_dict.get('protocol') == 'm3u8' and
                    self.params.get('hls_prefer_native')):
                if fixup_policy == 'warn':
                    self.report_warning('%s: malformated aac bitstream.' % (
                        info_dict['id']))
                elif fixup_policy == 'detect_or_warn':
                    fixup_pp = FFmpegFixupM3u8PP(self)
                    if fixup_pp.available:
                        info_dict.setdefault('__postprocessors', [])
                        info_dict['__postprocessors'].append(fixup_pp)
                    else:
                        self.report_warning(
                            '%s: malformated aac bitstream. %s'
                            % (info_dict['id'], INSTALL_FFMPEG_MESSAGE))
                else:
                    assert fixup_policy in ('ignore', 'never')

            try:
                self.post_process(filename, info_dict)
            except (PostProcessingError) as err:
                self.report_error('postprocessing: %s' % str(err))
                return

    self.record_download_archive(info_dict)
def download(self, url_list):
    """Download a given list of URLs."""
    outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)
    multiple_urls = len(url_list) > 1
    if (multiple_urls and '%' not in outtmpl and
            self.params.get('max_downloads') != 1):
        # A fixed output template cannot hold more than one download
        raise SameFileError(outtmpl)

    force_generic = self.params.get('force_generic_extractor', False)
    for url in url_list:
        try:
            # extract_info also performs the actual download
            result = self.extract_info(url, force_generic_extractor=force_generic)
        except UnavailableVideoError:
            self.report_error('unable to download video')
        except MaxDownloadsReached:
            self.to_screen('[info] Maximum number of downloaded files reached.')
            raise
        else:
            if self.params.get('dump_single_json', False):
                self.to_stdout(json.dumps(result))

    return self._download_retcode
def download_with_info_file(self, info_filename):
    """Download using the metadata from a --write-info-json dump."""
    with contextlib.closing(fileinput.FileInput(
            [info_filename], mode='r',
            openhook=fileinput.hook_encoded('utf-8'))) as f:
        # FileInput doesn't have a read method, we can't call json.load
        info = self.filter_requested_info(json.loads('\n'.join(f)))
    try:
        self.process_ie_result(info, download=True)
    except DownloadError:
        webpage_url = info.get('webpage_url')
        if webpage_url is None:
            raise
        # The dumped info may be stale; retry from the original page URL
        self.report_warning('The info failed to download, trying with "%s"' % webpage_url)
        return self.download([webpage_url])
    return self._download_retcode
@staticmethod
def filter_requested_info(info_dict):
    """Return a copy of *info_dict* without the transient
    'requested_formats'/'requested_subtitles' bookkeeping entries."""
    private_keys = ('requested_formats', 'requested_subtitles')
    return {k: v for k, v in info_dict.items() if k not in private_keys}
def post_process(self, filename, ie_info):
    """Run all the postprocessors on the given file."""
    info = dict(ie_info)
    info['filepath'] = filename

    # Video-specific postprocessors (e.g. the merger) run before the
    # globally registered ones
    chain = []
    specific_pps = ie_info.get('__postprocessors')
    if specific_pps is not None:
        chain.extend(specific_pps)
    chain.extend(self._pps)

    for pp in chain:
        files_to_delete = []
        try:
            files_to_delete, info = pp.run(info)
        except PostProcessingError as e:
            self.report_error(e.msg)
        if not files_to_delete or self.params.get('keepvideo', False):
            continue
        for old_filename in files_to_delete:
            self.to_screen('Deleting original file %s (pass -k to keep)' % old_filename)
            try:
                os.remove(encodeFilename(old_filename))
            except (IOError, OSError):
                self.report_warning('Unable to remove downloaded original file')
def _make_archive_id(self, info_dict):
# Future-proof against any change in case
# and backwards compatibility with prior versions
extractor = info_dict.get('extractor_key')
if extractor is None:
if 'id' in info_dict:
extractor = info_dict.get('ie_key') # key in a playlist
if extractor is None:
return None # Incomplete video information
return extractor.lower() + ' ' + info_dict['id']
def in_download_archive(self, info_dict):
    """Return True if this video's archive id is listed in the download archive file.

    False when no archive is configured, the id cannot be built, or the
    archive file does not exist yet.
    """
    fn = self.params.get('download_archive')
    if fn is None:
        return False
    vid_id = self._make_archive_id(info_dict)
    if vid_id is None:
        return False  # Incomplete video information
    try:
        # One archive id per line; compare against each stripped line.
        with locked_file(fn, 'r', encoding='utf-8') as archive_file:
            for line in archive_file:
                if line.strip() == vid_id:
                    return True
    except IOError as ioe:
        # A missing archive file just means nothing was recorded yet.
        if ioe.errno != errno.ENOENT:
            raise
    return False
def record_download_archive(self, info_dict):
    """Append this video's archive id to the download archive file, if configured."""
    fn = self.params.get('download_archive')
    if fn is None:
        return
    vid_id = self._make_archive_id(info_dict)
    # Caller is expected to pass a complete info dict here.
    assert vid_id
    with locked_file(fn, 'a', encoding='utf-8') as archive_file:
        archive_file.write(vid_id + '\n')
@staticmethod
def format_resolution(format, default='unknown'):
if format.get('vcodec') == 'none':
return 'audio only'
if format.get('resolution') is not None:
return format['resolution']
if format.get('height') is not None:
if format.get('width') is not None:
res = '%sx%s' % (format['width'], format['height'])
else:
res = '%sp' % format['height']
elif format.get('width') is not None:
res = '%dx?' % format['width']
else:
res = default
return res
def _format_note(self, fdict):
    """Build the free-form 'note' column for --list-formats.

    Concatenates, in a fixed order: container/codec info, bitrates, fps,
    sample rate and (approximate) filesize.  Separator handling depends
    on whether anything was emitted before, so the statement order below
    is significant.
    """
    res = ''
    if fdict.get('ext') in ['f4f', 'f4m']:
        res += '(unsupported) '
    if fdict.get('language'):
        if res:
            res += ' '
        res += '[%s] ' % fdict['language']
    if fdict.get('format_note') is not None:
        res += fdict['format_note'] + ' '
    if fdict.get('tbr') is not None:
        res += '%4dk ' % fdict['tbr']
    if fdict.get('container') is not None:
        if res:
            res += ', '
        res += '%s container' % fdict['container']
    if (fdict.get('vcodec') is not None and
            fdict.get('vcodec') != 'none'):
        if res:
            res += ', '
        res += fdict['vcodec']
        # '@' joins the codec name with the video bitrate appended below.
        if fdict.get('vbr') is not None:
            res += '@'
    elif fdict.get('vbr') is not None and fdict.get('abr') is not None:
        # No codec name known, but both bitrates are — label the video one.
        res += 'video@'
    if fdict.get('vbr') is not None:
        res += '%4dk' % fdict['vbr']
    if fdict.get('fps') is not None:
        if res:
            res += ', '
        res += '%sfps' % fdict['fps']
    if fdict.get('acodec') is not None:
        if res:
            res += ', '
        if fdict['acodec'] == 'none':
            res += 'video only'
        else:
            res += '%-5s' % fdict['acodec']
    elif fdict.get('abr') is not None:
        if res:
            res += ', '
        res += 'audio'
    if fdict.get('abr') is not None:
        res += '@%3dk' % fdict['abr']
    if fdict.get('asr') is not None:
        res += ' (%5dHz)' % fdict['asr']
    if fdict.get('filesize') is not None:
        if res:
            res += ', '
        res += format_bytes(fdict['filesize'])
    elif fdict.get('filesize_approx') is not None:
        if res:
            res += ', '
        res += '~' + format_bytes(fdict['filesize_approx'])
    return res
def list_formats(self, info_dict):
    """Print a table of available formats for --list-formats."""
    formats = info_dict.get('formats', [info_dict])
    table = [
        [f['format_id'], f['ext'], self.format_resolution(f), self._format_note(f)]
        for f in formats
        # Formats with very low preference are hidden from the listing.
        if f.get('preference') is None or f['preference'] >= -1000]
    if len(formats) > 1:
        # Formats are assumed sorted worst-to-best; tag the last row.
        table[-1][-1] += (' ' if table[-1][-1] else '') + '(best)'
    header_line = ['format code', 'extension', 'resolution', 'note']
    self.to_screen(
        '[info] Available formats for %s:\n%s' %
        (info_dict['id'], render_table(header_line, table)))
def list_thumbnails(self, info_dict):
    """Print a table of available thumbnails for --list-thumbnails."""
    thumbnails = info_dict.get('thumbnails')
    if not thumbnails:
        self.to_screen('[info] No thumbnails present for %s' % info_dict['id'])
        return
    self.to_screen(
        '[info] Thumbnails for %s:' % info_dict['id'])
    self.to_screen(render_table(
        ['ID', 'width', 'height', 'URL'],
        [[t['id'], t.get('width', 'unknown'), t.get('height', 'unknown'), t['url']] for t in thumbnails]))
def list_subtitles(self, video_id, subtitles, name='subtitles'):
    """Print available subtitle languages and their formats.

    subtitles -- mapping of language code to a list of format dicts;
    name -- label used in output ('subtitles' or e.g. 'automatic captions').
    """
    if not subtitles:
        self.to_screen('%s has no %s' % (video_id, name))
        return
    self.to_screen(
        'Available %s for %s:' % (name, video_id))
    self.to_screen(render_table(
        ['Language', 'formats'],
        # Formats are listed best-first, hence reversed().
        [[lang, ', '.join(f['ext'] for f in reversed(formats))]
         for lang, formats in subtitles.items()]))
def urlopen(self, req):
    """ Start an HTTP download """
    # Accept either a plain URL string or a prepared Request object.
    request = sanitized_Request(req) if isinstance(req, compat_basestring) else req
    return self._opener.open(request, timeout=self._socket_timeout)
def print_debug_header(self):
    """Print the verbose-mode debug banner: encodings, version, git HEAD,
    helper-program versions, proxy map and (with call_home) public IP
    plus an update check.  No-op unless 'verbose' is set.
    """
    if not self.params.get('verbose'):
        return
    if type('') is not compat_str:
        # Python 2.6 on SLES11 SP1 (https://github.com/rg3/youtube-dl/issues/3326)
        self.report_warning(
            'Your Python is broken! Update to a newer and supported version')
    stdout_encoding = getattr(
        sys.stdout, 'encoding', 'missing (%s)' % type(sys.stdout).__name__)
    encoding_str = (
        '[debug] Encodings: locale %s, fs %s, out %s, pref %s\n' % (
            locale.getpreferredencoding(),
            sys.getfilesystemencoding(),
            stdout_encoding,
            self.get_encoding()))
    write_string(encoding_str, encoding=None)
    self._write_string('[debug] youtube-dl version ' + __version__ + '\n')
    if _LAZY_LOADER:
        self._write_string('[debug] Lazy loading extractors enabled' + '\n')
    try:
        # Best effort: report the git commit when running from a checkout.
        sp = subprocess.Popen(
            ['git', 'rev-parse', '--short', 'HEAD'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            cwd=os.path.dirname(os.path.abspath(__file__)))
        out, err = sp.communicate()
        out = out.decode().strip()
        if re.match('[0-9a-f]+', out):
            self._write_string('[debug] Git HEAD: ' + out + '\n')
    except Exception:
        try:
            # Python 2 only; clears the current exception state.
            sys.exc_clear()
        except Exception:
            pass
    self._write_string('[debug] Python version %s - %s\n' % (
        platform.python_version(), platform_name()))
    exe_versions = FFmpegPostProcessor.get_versions(self)
    exe_versions['rtmpdump'] = rtmpdump_version()
    exe_str = ', '.join(
        '%s %s' % (exe, v)
        for exe, v in sorted(exe_versions.items())
        if v
    )
    if not exe_str:
        exe_str = 'none'
    self._write_string('[debug] exe versions: %s\n' % exe_str)
    proxy_map = {}
    for handler in self._opener.handlers:
        if hasattr(handler, 'proxies'):
            proxy_map.update(handler.proxies)
    self._write_string('[debug] Proxy map: ' + compat_str(proxy_map) + '\n')
    if self.params.get('call_home', False):
        # Opt-in network calls: report public IP and check for a newer release.
        ipaddr = self.urlopen('https://yt-dl.org/ip').read().decode('utf-8')
        self._write_string('[debug] Public IP address: %s\n' % ipaddr)
        latest_version = self.urlopen(
            'https://yt-dl.org/latest/version').read().decode('utf-8')
        if version_tuple(latest_version) > version_tuple(__version__):
            self.report_warning(
                'You are using an outdated version (newest version: %s)! '
                'See https://yt-dl.org/update if you need help updating.' %
                latest_version)
def _setup_opener(self):
    """Build the urllib opener used for all HTTP(S) traffic.

    Configures socket timeout, cookie jar (optionally persisted via
    'cookiefile'), proxies, debug traffic level, data: URL support, and
    explicitly disables the file:// scheme.  Sets self._opener and
    self._socket_timeout.
    """
    timeout_val = self.params.get('socket_timeout')
    self._socket_timeout = 600 if timeout_val is None else float(timeout_val)
    opts_cookiefile = self.params.get('cookiefile')
    opts_proxy = self.params.get('proxy')
    if opts_cookiefile is None:
        self.cookiejar = compat_cookiejar.CookieJar()
    else:
        opts_cookiefile = compat_expanduser(opts_cookiefile)
        self.cookiejar = compat_cookiejar.MozillaCookieJar(
            opts_cookiefile)
        # Load existing cookies only if the file is readable.
        if os.access(opts_cookiefile, os.R_OK):
            self.cookiejar.load()
    cookie_processor = YoutubeDLCookieProcessor(self.cookiejar)
    if opts_proxy is not None:
        if opts_proxy == '':
            # Explicit empty --proxy disables all proxying.
            proxies = {}
        else:
            proxies = {'http': opts_proxy, 'https': opts_proxy}
    else:
        proxies = compat_urllib_request.getproxies()
        # Set HTTPS proxy to HTTP one if given (https://github.com/rg3/youtube-dl/issues/805)
        if 'http' in proxies and 'https' not in proxies:
            proxies['https'] = proxies['http']
    proxy_handler = PerRequestProxyHandler(proxies)
    debuglevel = 1 if self.params.get('debug_printtraffic') else 0
    https_handler = make_HTTPS_handler(self.params, debuglevel=debuglevel)
    ydlh = YoutubeDLHandler(self.params, debuglevel=debuglevel)
    data_handler = compat_urllib_request_DataHandler()
    # When passing our own FileHandler instance, build_opener won't add the
    # default FileHandler and allows us to disable the file protocol, which
    # can be used for malicious purposes (see
    # https://github.com/rg3/youtube-dl/issues/8227)
    file_handler = compat_urllib_request.FileHandler()

    def file_open(*args, **kwargs):
        raise compat_urllib_error.URLError('file:// scheme is explicitly disabled in youtube-dl for security reasons')
    file_handler.file_open = file_open
    opener = compat_urllib_request.build_opener(
        proxy_handler, https_handler, cookie_processor, ydlh, data_handler, file_handler)
    # Delete the default user-agent header, which would otherwise apply in
    # cases where our custom HTTP handler doesn't come into play
    # (See https://github.com/rg3/youtube-dl/issues/1309 for details)
    opener.addheaders = []
    self._opener = opener
def encode(self, s):
    """Encode *s* to bytes using the configured output encoding.

    Bytes pass through unchanged.  On failure, the exception's reason is
    augmented with a hint before re-raising.
    """
    if isinstance(s, bytes):
        return s  # Already encoded
    try:
        return s.encode(self.get_encoding())
    except UnicodeEncodeError as err:
        err.reason = err.reason + '. Check your system encoding configuration or use the --encoding option.'
        raise
def get_encoding(self):
    """Return the configured output encoding, or the system preferred one."""
    configured = self.params.get('encoding')
    # An explicit (even empty-string) setting wins; only None falls back.
    return preferredencoding() if configured is None else configured
def _write_thumbnails(self, info_dict, filename):
    """Download thumbnail(s) next to *filename* per the thumbnail options.

    'writethumbnail' saves only the last (presumably best — TODO confirm
    ordering contract with extractors) thumbnail; 'write_all_thumbnails'
    saves every one.  Records each saved path in t['filename'].
    """
    if self.params.get('writethumbnail', False):
        thumbnails = info_dict.get('thumbnails')
        if thumbnails:
            thumbnails = [thumbnails[-1]]
    elif self.params.get('write_all_thumbnails', False):
        thumbnails = info_dict.get('thumbnails')
    else:
        return
    if not thumbnails:
        # No thumbnails present, so return immediately
        return
    for t in thumbnails:
        thumb_ext = determine_ext(t['url'], 'jpg')
        # Only disambiguate filenames when saving more than one thumbnail.
        suffix = '_%s' % t['id'] if len(thumbnails) > 1 else ''
        thumb_display_id = '%s ' % t['id'] if len(thumbnails) > 1 else ''
        t['filename'] = thumb_filename = os.path.splitext(filename)[0] + suffix + '.' + thumb_ext
        if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(thumb_filename)):
            self.to_screen('[%s] %s: Thumbnail %sis already present' %
                           (info_dict['extractor'], info_dict['id'], thumb_display_id))
        else:
            self.to_screen('[%s] %s: Downloading thumbnail %s...' %
                           (info_dict['extractor'], info_dict['id'], thumb_display_id))
            try:
                uf = self.urlopen(t['url'])
                with open(encodeFilename(thumb_filename), 'wb') as thumbf:
                    shutil.copyfileobj(uf, thumbf)
                self.to_screen('[%s] %s: Writing thumbnail %sto: %s' %
                               (info_dict['extractor'], info_dict['id'], thumb_display_id, thumb_filename))
            except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
                # Thumbnail failure is non-fatal; warn and continue.
                self.report_warning('Unable to download thumbnail "%s": %s' %
                                    (t['url'], error_to_compat_str(err)))
| 46.14872 | 194 | 0.551653 |
from __future__ import absolute_import, unicode_literals
import collections
import contextlib
import copy
import datetime
import errno
import fileinput
import io
import itertools
import json
import locale
import operator
import os
import platform
import re
import shutil
import subprocess
import socket
import sys
import time
import tokenize
import traceback
import random
from .compat import (
compat_basestring,
compat_cookiejar,
compat_expanduser,
compat_get_terminal_size,
compat_http_client,
compat_kwargs,
compat_numeric_types,
compat_os_name,
compat_str,
compat_tokenize_tokenize,
compat_urllib_error,
compat_urllib_request,
compat_urllib_request_DataHandler,
)
from .utils import (
age_restricted,
args_to_str,
ContentTooShortError,
date_from_str,
DateRange,
DEFAULT_OUTTMPL,
determine_ext,
determine_protocol,
DownloadError,
encode_compat_str,
encodeFilename,
error_to_compat_str,
ExtractorError,
format_bytes,
formatSeconds,
GeoRestrictedError,
ISO3166Utils,
locked_file,
make_HTTPS_handler,
MaxDownloadsReached,
PagedList,
parse_filesize,
PerRequestProxyHandler,
platform_name,
PostProcessingError,
preferredencoding,
prepend_extension,
register_socks_protocols,
render_table,
replace_extension,
SameFileError,
sanitize_filename,
sanitize_path,
sanitize_url,
sanitized_Request,
std_headers,
subtitles_filename,
UnavailableVideoError,
url_basename,
version_tuple,
write_json_file,
write_string,
YoutubeDLCookieProcessor,
YoutubeDLHandler,
)
from .cache import Cache
from .extractor import get_info_extractor, gen_extractor_classes, _LAZY_LOADER
from .downloader import get_suitable_downloader
from .downloader.rtmp import rtmpdump_version
from .postprocessor import (
FFmpegFixupM3u8PP,
FFmpegFixupM4aPP,
FFmpegFixupStretchedPP,
FFmpegMergerPP,
FFmpegPostProcessor,
get_postprocessor,
)
from .version import __version__
if compat_os_name == 'nt':
import ctypes
class YoutubeDL(object):
params = None
_ies = []
_pps = []
_download_retcode = None
_num_downloads = None
_screen_file = None
def __init__(self, params=None, auto_init=True):
    """Create a FileDownloader object with the given options.

    params -- option dictionary (see the option definitions elsewhere);
    auto_init -- when True, print the debug header and register the
    default info extractors immediately.
    """
    if params is None:
        params = {}
    self._ies = []
    self._ies_instances = {}
    self._pps = []
    self._progress_hooks = []
    self._download_retcode = 0
    self._num_downloads = 0
    # 'logtostderr' picks stderr as the screen output channel.
    self._screen_file = [sys.stdout, sys.stderr][params.get('logtostderr', False)]
    self._err_file = sys.stderr
    self.params = {
        # Default parameters overridable by the caller's params below.
        'nocheckcertificate': False,
    }
    self.params.update(params)
    self.cache = Cache(self)

    def check_deprecated(param, option, suggestion):
        # Warn (once, here) when a deprecated option was supplied.
        if self.params.get(param) is not None:
            self.report_warning(
                '%s is deprecated. Use %s instead.' % (option, suggestion))
            return True
        return False

    if check_deprecated('cn_verification_proxy', '--cn-verification-proxy', '--geo-verification-proxy'):
        if self.params.get('geo_verification_proxy') is None:
            self.params['geo_verification_proxy'] = self.params['cn_verification_proxy']
    check_deprecated('autonumber_size', '--autonumber-size', 'output template with %(autonumber)0Nd, where N in the number of digits')
    check_deprecated('autonumber', '--auto-number', '-o "%(autonumber)s-%(title)s.%(ext)s"')
    check_deprecated('usetitle', '--title', '-o "%(title)s-%(id)s.%(ext)s"')
    if params.get('bidi_workaround', False):
        # Pipe output through bidiv/fribidi to fix RTL text rendering.
        try:
            import pty
            master, slave = pty.openpty()
            width = compat_get_terminal_size().columns
            if width is None:
                width_args = []
            else:
                width_args = ['-w', str(width)]
            sp_kwargs = dict(
                stdin=subprocess.PIPE,
                stdout=slave,
                stderr=self._err_file)
            try:
                self._output_process = subprocess.Popen(
                    ['bidiv'] + width_args, **sp_kwargs
                )
            except OSError:
                # Fall back to fribidi when bidiv is unavailable.
                self._output_process = subprocess.Popen(
                    ['fribidi', '-c', 'UTF-8'] + width_args, **sp_kwargs)
            self._output_channel = os.fdopen(master, 'rb')
        except OSError as ose:
            if ose.errno == errno.ENOENT:
                self.report_warning('Could not find fribidi executable, ignoring --bidi-workaround . Make sure that fribidi is an executable file in one of the directories in your $PATH.')
            else:
                raise
    if (sys.version_info >= (3,) and sys.platform != 'win32' and
            sys.getfilesystemencoding() in ['ascii', 'ANSI_X3.4-1968'] and
            not params.get('restrictfilenames', False)):
        # ASCII filesystems cannot store arbitrary titles; force safe names.
        self.report_warning(
            'Assuming --restrict-filenames since file system encoding '
            'cannot encode all characters. '
            'Set the LC_ALL environment variable to fix this.')
        self.params['restrictfilenames'] = True
    if isinstance(params.get('outtmpl'), bytes):
        self.report_warning(
            'Parameter outtmpl is bytes, but should be a unicode string. '
            'Put from __future__ import unicode_literals at the top of your code file or consider switching to Python 3.x.')
    self._setup_opener()
    if auto_init:
        self.print_debug_header()
        self.add_default_info_extractors()
    # Instantiate and register configured postprocessors.
    for pp_def_raw in self.params.get('postprocessors', []):
        pp_class = get_postprocessor(pp_def_raw['key'])
        pp_def = dict(pp_def_raw)
        del pp_def['key']
        pp = pp_class(self, **compat_kwargs(pp_def))
        self.add_post_processor(pp)
    for ph in self.params.get('progress_hooks', []):
        self.add_progress_hook(ph)
    register_socks_protocols()
def warn_if_short_id(self, argv):
    """Warn when argv contains tokens that look like video ids starting
    with '-', which the option parser would misread as flags; suggest a
    '--'-separated command line instead.
    """
    short_id_idxs = [
        i for i, arg in enumerate(argv)
        if re.match(r'^-[0-9A-Za-z_-]{10}$', arg)]
    if not short_id_idxs:
        return
    # Rebuild the command with all suspicious tokens moved after '--'.
    correct_argv = (
        ['youtube-dl'] +
        [arg for i, arg in enumerate(argv) if i not in short_id_idxs] +
        ['--'] + [argv[i] for i in short_id_idxs]
    )
    self.report_warning(
        'Long argument string detected. '
        'Use -- to separate parameters and URLs, like this:\n%s\n' %
        args_to_str(correct_argv))
def add_info_extractor(self, ie):
    """Add an InfoExtractor object (or class) to the end of the list."""
    self._ies.append(ie)
    if not isinstance(ie, type):
        # Instances are also cached by key and bound to this downloader.
        self._ies_instances[ie.ie_key()] = ie
        ie.set_downloader(self)
def get_info_extractor(self, ie_key):
    """Return the cached IE instance for *ie_key*, creating and
    registering a new one on demand.
    """
    instance = self._ies_instances.get(ie_key)
    if instance is not None:
        return instance
    instance = get_info_extractor(ie_key)()
    self.add_info_extractor(instance)
    return instance
def add_default_info_extractors(self):
    """Register every extractor shipped with youtube-dl, in default order."""
    for extractor_class in gen_extractor_classes():
        self.add_info_extractor(extractor_class)
def add_post_processor(self, pp):
    """Append a postprocessor to the chain and bind it to this downloader."""
    self._pps.append(pp)
    pp.set_downloader(self)
def add_progress_hook(self, ph):
    """Register a callable to be invoked with download progress updates."""
    self._progress_hooks.append(ph)
def _bidi_workaround(self, message):
    """Pass *message* through the bidi helper process (if configured).

    Writes the message line-by-line to the helper's stdin and reads back
    the same number of lines, stripping the trailing newline.  When no
    helper was started, the message is returned unchanged.
    """
    if not hasattr(self, '_output_channel'):
        return message
    assert hasattr(self, '_output_process')
    assert isinstance(message, compat_str)
    line_count = message.count('\n') + 1
    self._output_process.stdin.write((message + '\n').encode('utf-8'))
    self._output_process.stdin.flush()
    res = ''.join(self._output_channel.readline().decode('utf-8')
                  for _ in range(line_count))
    return res[:-len('\n')]
def to_screen(self, message, skip_eol=False):
    """Print message to screen unless quiet mode is on."""
    return self.to_stdout(message, skip_eol, check_quiet=True)
def _write_string(self, s, out=None):
    """Write *s* to *out* (default per write_string) using the configured encoding."""
    write_string(s, out=out, encoding=self.params.get('encoding'))
def to_stdout(self, message, skip_eol=False, check_quiet=False):
    """Print message to stdout (or the configured logger), honoring
    quiet mode when check_quiet is True.
    """
    if self.params.get('logger'):
        self.params['logger'].debug(message)
    elif not check_quiet or not self.params.get('quiet', False):
        message = self._bidi_workaround(message)
        terminator = '' if skip_eol else '\n'
        self._write_string(message + terminator, self._screen_file)
def to_stderr(self, message):
    """Print message to stderr (or the configured logger)."""
    assert isinstance(message, compat_str)
    logger = self.params.get('logger')
    if logger:
        logger.error(message)
    else:
        self._write_string(self._bidi_workaround(message) + '\n', self._err_file)
def to_console_title(self, message):
    """Set the terminal/console window title, if 'consoletitle' is enabled."""
    if not self.params.get('consoletitle', False):
        return
    if compat_os_name == 'nt' and ctypes.windll.kernel32.GetConsoleWindow():
        # Windows: only set the title when attached to a real console.
        ctypes.windll.kernel32.SetConsoleTitleW(ctypes.c_wchar_p(message))
    elif 'TERM' in os.environ:
        # xterm-compatible title escape sequence.
        self._write_string('\033]0;%s\007' % message, self._screen_file)
def save_console_title(self):
    """Push the current terminal title onto the xterm title stack."""
    if self.params.get('consoletitle', False) and 'TERM' in os.environ:
        # Save the title on the xterm title stack (restored on exit).
        self._write_string('\033[22;0t', self._screen_file)
def restore_console_title(self):
    """Pop the terminal title back off the xterm title stack."""
    if self.params.get('consoletitle', False) and 'TERM' in os.environ:
        # Restore the title previously saved on the xterm title stack.
        self._write_string('\033[23;0t', self._screen_file)
def __enter__(self):
    """Context-manager entry: save the console title, return self."""
    self.save_console_title()
    return self
def __exit__(self, *args):
    """Context-manager exit: restore the console title and persist cookies."""
    self.restore_console_title()
    if self.params.get('cookiefile') is not None:
        self.cookiejar.save()
def trouble(self, message=None, tb=None):
    """Determine action to take when a download problem appears.

    Prints *message* (and, in verbose mode, a traceback *tb* — built from
    the current exception context when not supplied).  Unless
    'ignoreerrors' is set, raises DownloadError carrying the exc_info;
    otherwise records a non-zero return code and continues.
    """
    if message is not None:
        self.to_stderr(message)
    if self.params.get('verbose'):
        if tb is None:
            if sys.exc_info()[0]:  # called from an except block
                tb = ''
                # Wrapped exceptions (with an .exc_info attribute) get their
                # original traceback prepended.
                if hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
                    tb += ''.join(traceback.format_exception(*sys.exc_info()[1].exc_info))
                tb += encode_compat_str(traceback.format_exc())
            else:
                tb_data = traceback.format_list(traceback.extract_stack())
                tb = ''.join(tb_data)
        self.to_stderr(tb)
    if not self.params.get('ignoreerrors', False):
        # Prefer the wrapped exception's exc_info when available.
        if sys.exc_info()[0] and hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
            exc_info = sys.exc_info()[1].exc_info
        else:
            exc_info = sys.exc_info()
        raise DownloadError(message, exc_info)
    self._download_retcode = 1
def report_warning(self, message):
    """Print a warning to stderr (yellow when the terminal supports ANSI),
    or route it to the configured logger.  Suppressed by 'no_warnings'.
    """
    if self.params.get('logger') is not None:
        self.params['logger'].warning(message)
    else:
        if self.params.get('no_warnings'):
            return
        # ANSI colors only for a tty on non-Windows, unless disabled.
        if not self.params.get('no_color') and self._err_file.isatty() and compat_os_name != 'nt':
            _msg_header = '\033[0;33mWARNING:\033[0m'
        else:
            _msg_header = 'WARNING:'
        warning_message = '%s %s' % (_msg_header, message)
        self.to_stderr(warning_message)
def report_error(self, message, tb=None):
    """Print an error (red on ANSI-capable terminals) and delegate to
    trouble(), which decides whether to abort or continue.
    """
    use_color = (not self.params.get('no_color') and
                 self._err_file.isatty() and compat_os_name != 'nt')
    _msg_header = '\033[0;31mERROR:\033[0m' if use_color else 'ERROR:'
    self.trouble('%s %s' % (_msg_header, message), tb)
def report_file_already_downloaded(self, file_name):
    """Report that a file has already been fully downloaded."""
    try:
        self.to_screen('[download] %s has already been downloaded' % file_name)
    except UnicodeEncodeError:
        # Filename not printable in the current encoding; use a generic message.
        self.to_screen('[download] The file has already been downloaded')
def prepare_filename(self, info_dict):
    """Generate the output filename by expanding the output template.

    Adds computed fields (epoch, autonumber, resolution), sanitizes
    values, substitutes 'NA' for missing fields, and normalizes numeric
    template specifiers whose field is absent.  Returns the sanitized
    path, or None on a template error.
    """
    try:
        template_dict = dict(info_dict)

        template_dict['epoch'] = int(time.time())
        autonumber_size = self.params.get('autonumber_size')
        if autonumber_size is None:
            autonumber_size = 5
        template_dict['autonumber'] = self.params.get('autonumber_start', 1) - 1 + self._num_downloads
        if template_dict.get('resolution') is None:
            # Derive a resolution string from width/height when absent.
            if template_dict.get('width') and template_dict.get('height'):
                template_dict['resolution'] = '%dx%d' % (template_dict['width'], template_dict['height'])
            elif template_dict.get('height'):
                template_dict['resolution'] = '%sp' % template_dict['height']
            elif template_dict.get('width'):
                template_dict['resolution'] = '%dx?' % template_dict['width']

        sanitize = lambda k, v: sanitize_filename(
            compat_str(v),
            restricted=self.params.get('restrictfilenames'),
            is_id=(k == 'id'))
        # Keep numbers as-is; sanitize everything else; drop containers/None.
        template_dict = dict((k, v if isinstance(v, compat_numeric_types) else sanitize(k, v))
                             for k, v in template_dict.items()
                             if v is not None and not isinstance(v, (list, tuple, dict)))
        # Unknown fields expand to the literal string 'NA'.
        template_dict = collections.defaultdict(lambda: 'NA', template_dict)

        outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)

        # For fields playlist_index and autonumber convert all occurrences
        # of %(field)s to %(field)0Nd for backward compatibility.
        field_size_compat_map = {
            'playlist_index': len(str(template_dict['n_entries'])),
            'autonumber': autonumber_size,
        }
        FIELD_SIZE_COMPAT_RE = r'(?<!%)%\((?P<field>autonumber|playlist_index)\)s'
        mobj = re.search(FIELD_SIZE_COMPAT_RE, outtmpl)
        if mobj:
            outtmpl = re.sub(
                FIELD_SIZE_COMPAT_RE,
                r'%%(\1)0%dd' % field_size_compat_map[mobj.group('field')],
                outtmpl)

        NUMERIC_FIELDS = set((
            'width', 'height', 'tbr', 'abr', 'asr', 'vbr', 'fps', 'filesize', 'filesize_approx',
            'upload_year', 'upload_month', 'upload_day',
            'duration', 'view_count', 'like_count', 'dislike_count', 'repost_count',
            'average_rating', 'comment_count', 'age_limit',
            'start_time', 'end_time',
            'chapter_number', 'season_number', 'episode_number',
            'track_number', 'disc_number', 'release_year',
            'playlist_index',
        ))

        # Missing numeric fields would make %d/%f formatting fail on the
        # 'NA' placeholder — rewrite their specifiers to plain %(field)s.
        for numeric_field in NUMERIC_FIELDS:
            if numeric_field not in template_dict:
                FORMAT_RE = r'''(?x)
                    (?<!%)
                    %
                    \({0}\)  # mapping key
                    (?:[#0\-+ ]+)?  # conversion flags (optional)
                    (?:\d+)?  # minimum field width (optional)
                    (?:\.\d+)?  # precision (optional)
                    [hlL]?  # length modifier (optional)
                    [diouxXeEfFgGcrs%]  # conversion type
                '''
                outtmpl = re.sub(
                    FORMAT_RE.format(numeric_field),
                    r'%({0})s'.format(numeric_field), outtmpl)

        tmpl = compat_expanduser(outtmpl)
        filename = tmpl % template_dict
        # Temporary fix for #4787 (Python 2 on Windows)
        if sys.version_info < (3, 0) and sys.platform == 'win32':
            filename = encodeFilename(filename, True).decode(preferredencoding())
        return sanitize_path(filename)
    except ValueError as err:
        self.report_error('Error in output template: ' + str(err) + ' (encoding: ' + repr(preferredencoding()) + ')')
        return None
def _match_entry(self, info_dict, incomplete):
    """Return a string describing why the video should be skipped, or None.

    Applies title match/reject patterns, upload-date range, view-count
    bounds, age restriction, the download archive, and (for complete
    entries only) the user-supplied match_filter callable.
    """
    video_title = info_dict.get('title', info_dict.get('id', 'video'))
    if 'title' in info_dict:
        title = info_dict['title']
        matchtitle = self.params.get('matchtitle', False)
        if matchtitle:
            if not re.search(matchtitle, title, re.IGNORECASE):
                return '"' + title + '" title did not match pattern "' + matchtitle + '"'
        rejecttitle = self.params.get('rejecttitle', False)
        if rejecttitle:
            if re.search(rejecttitle, title, re.IGNORECASE):
                return '"' + title + '" title matched reject pattern "' + rejecttitle + '"'
    date = info_dict.get('upload_date')
    if date is not None:
        dateRange = self.params.get('daterange', DateRange())
        if date not in dateRange:
            return '%s upload date is not in range %s' % (date_from_str(date).isoformat(), dateRange)
    view_count = info_dict.get('view_count')
    if view_count is not None:
        min_views = self.params.get('min_views')
        if min_views is not None and view_count < min_views:
            return 'Skipping %s, because it has not reached minimum view count (%d/%d)' % (video_title, view_count, min_views)
        max_views = self.params.get('max_views')
        if max_views is not None and view_count > max_views:
            return 'Skipping %s, because it has exceeded the maximum view count (%d/%d)' % (video_title, view_count, max_views)
    if age_restricted(info_dict.get('age_limit'), self.params.get('age_limit')):
        return 'Skipping "%s" because it is age restricted' % video_title
    if self.in_download_archive(info_dict):
        return '%s has already been recorded in archive' % video_title
    if not incomplete:
        # The custom filter only sees fully-extracted entries.
        match_filter = self.params.get('match_filter')
        if match_filter is not None:
            ret = match_filter(info_dict)
            if ret is not None:
                return ret
    return None
@staticmethod
def add_extra_info(info_dict, extra_info):
for key, value in extra_info.items():
info_dict.setdefault(key, value)
def extract_info(self, url, download=True, ie_key=None, extra_info={},
                 process=True, force_generic_extractor=False):
    """Extract video information for *url* using the first suitable IE.

    download -- also download when processing; ie_key -- force a specific
    extractor; process -- run the result through process_ie_result;
    force_generic_extractor -- use the Generic extractor.
    NOTE(review): extra_info={} is a mutable default; it is only read
    here, never mutated — safe, but worth keeping in mind.
    """
    if not ie_key and force_generic_extractor:
        ie_key = 'Generic'

    if ie_key:
        ies = [self.get_info_extractor(ie_key)]
    else:
        ies = self._ies

    for ie in ies:
        if not ie.suitable(url):
            continue

        # Lazy-loaded extractors may be classes; resolve to an instance.
        ie = self.get_info_extractor(ie.ie_key())
        if not ie.working():
            self.report_warning('The program functionality for this site has been marked as broken, '
                                'and will probably not work.')

        try:
            ie_result = ie.extract(url)
            if ie_result is None:  # Finished already (backwards compatibility; listformats and friends should be moved here)
                break
            if isinstance(ie_result, list):
                # Backwards compatibility: old IE result format
                ie_result = {
                    '_type': 'compat_list',
                    'entries': ie_result,
                }
            self.add_default_extra_info(ie_result, ie, url)
            if process:
                return self.process_ie_result(ie_result, download, extra_info)
            else:
                return ie_result
        except GeoRestrictedError as e:
            msg = e.msg
            if e.countries:
                msg += '\nThis video is available in %s.' % ', '.join(
                    map(ISO3166Utils.short2full, e.countries))
            msg += '\nYou might want to use a VPN or a proxy server (with --proxy) to workaround.'
            self.report_error(msg)
            break
        except ExtractorError as e:  # An error we somewhat expected
            self.report_error(compat_str(e), e.format_traceback())
            break
        except MaxDownloadsReached:
            # Propagate: the download limit aborts the whole run.
            raise
        except Exception as e:
            if self.params.get('ignoreerrors', False):
                self.report_error(error_to_compat_str(e), tb=encode_compat_str(traceback.format_exc()))
                break
            else:
                raise
    else:
        # No extractor matched the URL.
        self.report_error('no suitable InfoExtractor for URL %s' % url)
def add_default_extra_info(self, ie_result, ie, url):
    """Attach the standard extractor/URL metadata to an IE result
    (without overwriting values the extractor already set)."""
    self.add_extra_info(ie_result, {
        'extractor': ie.IE_NAME,
        'webpage_url': url,
        'webpage_url_basename': url_basename(url),
        'extractor_key': ie.ie_key(),
    })
def process_ie_result(self, ie_result, download=True, extra_info={}):
    """Take the result of an extractor and act on it: resolve URL
    references, recurse into playlists, or hand videos to
    process_video_result.  Returns the processed result.
    NOTE(review): extra_info={} is a mutable default, read-only here.
    """
    result_type = ie_result.get('_type', 'video')

    if result_type in ('url', 'url_transparent'):
        ie_result['url'] = sanitize_url(ie_result['url'])
        extract_flat = self.params.get('extract_flat', False)
        # --flat-playlist: return URL references without resolving them.
        if ((extract_flat == 'in_playlist' and 'playlist' in extra_info) or
                extract_flat is True):
            if self.params.get('forcejson', False):
                self.to_stdout(json.dumps(ie_result))
            return ie_result

    if result_type == 'video':
        self.add_extra_info(ie_result, extra_info)
        return self.process_video_result(ie_result, download=download)
    elif result_type == 'url':
        # We have to add extra_info to the results because it may be
        # contained in a playlist
        return self.extract_info(ie_result['url'],
                                 download,
                                 ie_key=ie_result.get('ie_key'),
                                 extra_info=extra_info)
    elif result_type == 'url_transparent':
        # Use the information from the embedding page
        info = self.extract_info(
            ie_result['url'], ie_key=ie_result.get('ie_key'),
            extra_info=extra_info, download=False, process=False)

        # Non-None fields of the reference override the resolved info,
        # except the routing fields themselves.
        force_properties = dict(
            (k, v) for k, v in ie_result.items() if v is not None)
        for f in ('_type', 'url', 'ie_key'):
            if f in force_properties:
                del force_properties[f]
        new_result = info.copy()
        new_result.update(force_properties)

        assert new_result.get('_type') != 'url_transparent'

        return self.process_ie_result(
            new_result, download=download, extra_info=extra_info)
    elif result_type == 'playlist' or result_type == 'multi_video':
        # We process each entry in the playlist
        playlist = ie_result.get('title') or ie_result.get('id')
        self.to_screen('[download] Downloading playlist: %s' % playlist)

        playlist_results = []

        playliststart = self.params.get('playliststart', 1) - 1
        playlistend = self.params.get('playlistend')
        # For backwards compatibility, interpret -1 as whole list
        if playlistend == -1:
            playlistend = None

        playlistitems_str = self.params.get('playlist_items')
        playlistitems = None
        if playlistitems_str is not None:
            def iter_playlistitems(format):
                # Expand '1-3,7' style specs into 1-based indices.
                for string_segment in format.split(','):
                    if '-' in string_segment:
                        start, end = string_segment.split('-')
                        for item in range(int(start), int(end) + 1):
                            yield int(item)
                    else:
                        yield int(string_segment)
            playlistitems = iter_playlistitems(playlistitems_str)

        ie_entries = ie_result['entries']
        if isinstance(ie_entries, list):
            n_all_entries = len(ie_entries)
            if playlistitems:
                entries = [
                    ie_entries[i - 1] for i in playlistitems
                    if -n_all_entries <= i - 1 < n_all_entries]
            else:
                entries = ie_entries[playliststart:playlistend]
            n_entries = len(entries)
            self.to_screen(
                '[%s] playlist %s: Collected %d video ids (downloading %d of them)' %
                (ie_result['extractor'], playlist, n_all_entries, n_entries))
        elif isinstance(ie_entries, PagedList):
            # Paged lists are sliced lazily, page by page.
            if playlistitems:
                entries = []
                for item in playlistitems:
                    entries.extend(ie_entries.getslice(
                        item - 1, item
                    ))
            else:
                entries = ie_entries.getslice(
                    playliststart, playlistend)
            n_entries = len(entries)
            self.to_screen(
                '[%s] playlist %s: Downloading %d videos' %
                (ie_result['extractor'], playlist, n_entries))
        else:  # iterable
            if playlistitems:
                entry_list = list(ie_entries)
                entries = [entry_list[i - 1] for i in playlistitems]
            else:
                entries = list(itertools.islice(
                    ie_entries, playliststart, playlistend))
            n_entries = len(entries)
            self.to_screen(
                '[%s] playlist %s: Downloading %d videos' %
                (ie_result['extractor'], playlist, n_entries))

        if self.params.get('playlistreverse', False):
            entries = entries[::-1]

        if self.params.get('playlistrandom', False):
            random.shuffle(entries)

        x_forwarded_for = ie_result.get('__x_forwarded_for_ip')

        for i, entry in enumerate(entries, 1):
            self.to_screen('[download] Downloading video %s of %s' % (i, n_entries))
            # This __x_forwarded_for_ip thing is a bit ugly but requires
            # minimal changes
            if x_forwarded_for:
                entry['__x_forwarded_for_ip'] = x_forwarded_for
            extra = {
                'n_entries': n_entries,
                'playlist': playlist,
                'playlist_id': ie_result.get('id'),
                'playlist_title': ie_result.get('title'),
                'playlist_index': i + playliststart,
                'extractor': ie_result['extractor'],
                'webpage_url': ie_result['webpage_url'],
                'webpage_url_basename': url_basename(ie_result['webpage_url']),
                'extractor_key': ie_result['extractor_key'],
            }

            reason = self._match_entry(entry, incomplete=True)
            if reason is not None:
                self.to_screen('[download] ' + reason)
                continue

            entry_result = self.process_ie_result(entry,
                                                  download=download,
                                                  extra_info=extra)
            playlist_results.append(entry_result)
        ie_result['entries'] = playlist_results
        self.to_screen('[download] Finished downloading playlist: %s' % playlist)
        return ie_result
    elif result_type == 'compat_list':
        self.report_warning(
            'Extractor %s returned a compat_list result. '
            'It needs to be updated.' % ie_result.get('extractor'))

        def _fixup(r):
            # Propagate the playlist-level metadata onto each legacy entry.
            self.add_extra_info(
                r,
                {
                    'extractor': ie_result['extractor'],
                    'webpage_url': ie_result['webpage_url'],
                    'webpage_url_basename': url_basename(ie_result['webpage_url']),
                    'extractor_key': ie_result['extractor_key'],
                }
            )
            return r
        ie_result['entries'] = [
            self.process_ie_result(_fixup(r), download, extra_info)
            for r in ie_result['entries']
        ]
        return ie_result
    else:
        raise Exception('Invalid result type: %s' % result_type)
def _build_format_filter(self, filter_spec):
OPERATORS = {
'<': operator.lt,
'<=': operator.le,
'>': operator.gt,
'>=': operator.ge,
'=': operator.eq,
'!=': operator.ne,
}
operator_rex = re.compile(r'''(?x)\s*
(?P<key>width|height|tbr|abr|vbr|asr|filesize|fps)
\s*(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
(?P<value>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)
$
''' % '|'.join(map(re.escape, OPERATORS.keys())))
m = operator_rex.search(filter_spec)
if m:
try:
comparison_value = int(m.group('value'))
except ValueError:
comparison_value = parse_filesize(m.group('value'))
if comparison_value is None:
comparison_value = parse_filesize(m.group('value') + 'B')
if comparison_value is None:
raise ValueError(
'Invalid value %r in format specification %r' % (
m.group('value'), filter_spec))
op = OPERATORS[m.group('op')]
if not m:
STR_OPERATORS = {
'=': operator.eq,
'!=': operator.ne,
'^=': lambda attr, value: attr.startswith(value),
'$=': lambda attr, value: attr.endswith(value),
'*=': lambda attr, value: value in attr,
}
str_operator_rex = re.compile(r'''(?x)
\s*(?P<key>ext|acodec|vcodec|container|protocol|format_id)
\s*(?P<op>%s)(?P<none_inclusive>\s*\?)?
\s*(?P<value>[a-zA-Z0-9._-]+)
\s*$
''' % '|'.join(map(re.escape, STR_OPERATORS.keys())))
m = str_operator_rex.search(filter_spec)
if m:
comparison_value = m.group('value')
op = STR_OPERATORS[m.group('op')]
if not m:
raise ValueError('Invalid filter specification %r' % filter_spec)
def _filter(f):
actual_value = f.get(m.group('key'))
if actual_value is None:
return m.group('none_inclusive')
return op(actual_value, comparison_value)
return _filter
    def build_format_selector(self, format_spec):
        """Compile *format_spec* (e.g. 'bestvideo[height<=720]+bestaudio/best')
        into a selector function.

        The returned callable maps a ctx dict with keys 'formats' and
        'incomplete_formats' to an iterable of chosen format dicts.
        Raises SyntaxError (via syntax_error) on a malformed specification.
        """
        def syntax_error(note, start):
            # Build (the caller raises) a SyntaxError with a caret pointing
            # at column start[1] of the original spec.
            message = (
                'Invalid format specification: '
                '{0}\n\t{1}\n\t{2}^'.format(note, format_spec, ' ' * start[1]))
            return SyntaxError(message)
        # Node kinds of the parsed selector tree.
        PICKFIRST = 'PICKFIRST'  # 'a/b' - first alternative yielding formats
        MERGE = 'MERGE'          # 'a+b' - video+audio merge
        SINGLE = 'SINGLE'        # a plain name such as 'best' or '22'
        GROUP = 'GROUP'          # a parenthesised sub-expression
        FormatSelector = collections.namedtuple('FormatSelector', ['type', 'selector', 'filters'])
        def _parse_filter(tokens):
            # Consume tokens up to the matching ']' and return the raw filter
            # text (later compiled by self._build_format_filter).
            filter_parts = []
            for type, string, start, _, _ in tokens:
                if type == tokenize.OP and string == ']':
                    return ''.join(filter_parts)
                else:
                    filter_parts.append(string)
        def _remove_unused_ops(tokens):
            # Remove operators that we don't use and join them with the surrounding strings
            # (e.g. 'mp4' '-' 'baseline' becomes the single NAME 'mp4-baseline').
            ALLOWED_OPS = ('/', '+', ',', '(', ')')
            last_string, last_start, last_end, last_line = None, None, None, None
            for type, string, start, end, line in tokens:
                if type == tokenize.OP and string == '[':
                    if last_string:
                        yield tokenize.NAME, last_string, last_start, last_end, last_line
                        last_string = None
                    yield type, string, start, end, line
                    # Everything inside '[...]' is a filter: pass it through untouched.
                    for type, string, start, end, line in tokens:
                        yield type, string, start, end, line
                        if type == tokenize.OP and string == ']':
                            break
                elif type == tokenize.OP and string in ALLOWED_OPS:
                    if last_string:
                        yield tokenize.NAME, last_string, last_start, last_end, last_line
                        last_string = None
                    yield type, string, start, end, line
                elif type in [tokenize.NAME, tokenize.NUMBER, tokenize.OP]:
                    # Accumulate adjacent tokens into one synthetic NAME.
                    if not last_string:
                        last_string = string
                        last_start = start
                        last_end = end
                    else:
                        last_string += string
            if last_string:
                yield tokenize.NAME, last_string, last_start, last_end, last_line
        def _parse_format_selection(tokens, inside_merge=False, inside_choice=False, inside_group=False):
            # Recursive-descent parser; returns a list of FormatSelector nodes.
            selectors = []
            current_selector = None
            for type, string, start, _, _ in tokens:
                # ENCODING may be absent on old interpreters, hence getattr.
                if type == getattr(tokenize, 'ENCODING', None):
                    continue
                elif type in [tokenize.NAME, tokenize.NUMBER]:
                    current_selector = FormatSelector(SINGLE, string, [])
                elif type == tokenize.OP:
                    if string == ')':
                        if not inside_group:
                            # ')' belongs to an enclosing '(' parse - hand it back.
                            tokens.restore_last_token()
                        break
                    elif inside_merge and string in ['/', ',']:
                        tokens.restore_last_token()
                        break
                    elif inside_choice and string == ',':
                        tokens.restore_last_token()
                        break
                    elif string == ',':
                        if not current_selector:
                            raise syntax_error('"," must follow a format selector', start)
                        selectors.append(current_selector)
                        current_selector = None
                    elif string == '/':
                        if not current_selector:
                            raise syntax_error('"/" must follow a format selector', start)
                        first_choice = current_selector
                        second_choice = _parse_format_selection(tokens, inside_choice=True)
                        current_selector = FormatSelector(PICKFIRST, (first_choice, second_choice), [])
                    elif string == '[':
                        # A bare '[filter]' implicitly filters 'best'.
                        if not current_selector:
                            current_selector = FormatSelector(SINGLE, 'best', [])
                        format_filter = _parse_filter(tokens)
                        current_selector.filters.append(format_filter)
                    elif string == '(':
                        if current_selector:
                            raise syntax_error('Unexpected "("', start)
                        group = _parse_format_selection(tokens, inside_group=True)
                        current_selector = FormatSelector(GROUP, group, [])
                    elif string == '+':
                        video_selector = current_selector
                        audio_selector = _parse_format_selection(tokens, inside_merge=True)
                        if not video_selector or not audio_selector:
                            raise syntax_error('"+" must be between two format selectors', start)
                        current_selector = FormatSelector(MERGE, (video_selector, audio_selector), [])
                    else:
                        raise syntax_error('Operator not recognized: "{0}"'.format(string), start)
                elif type == tokenize.ENDMARKER:
                    break
            if current_selector:
                selectors.append(current_selector)
            return selectors
        def _build_selector_function(selector):
            # Turn a FormatSelector tree (or list of trees) into a callable.
            if isinstance(selector, list):
                # Top level ','-separated selectors: concatenate their results.
                fs = [_build_selector_function(s) for s in selector]
                def selector_function(ctx):
                    for f in fs:
                        for format in f(ctx):
                            yield format
                return selector_function
            elif selector.type == GROUP:
                selector_function = _build_selector_function(selector.selector)
            elif selector.type == PICKFIRST:
                fs = [_build_selector_function(s) for s in selector.selector]
                def selector_function(ctx):
                    # First alternative that yields any formats wins.
                    for f in fs:
                        picked_formats = list(f(ctx))
                        if picked_formats:
                            return picked_formats
                    return []
            elif selector.type == SINGLE:
                format_spec = selector.selector
                def selector_function(ctx):
                    formats = list(ctx['formats'])
                    if not formats:
                        return
                    if format_spec == 'all':
                        for f in formats:
                            yield f
                    elif format_spec in ['best', 'worst', None]:
                        # NOTE(review): assumes formats are ordered worst-to-best
                        # (index -1 == best) - established by upstream sorting,
                        # not visible in this chunk.
                        format_idx = 0 if format_spec == 'worst' else -1
                        audiovideo_formats = [
                            f for f in formats
                            if f.get('vcodec') != 'none' and f.get('acodec') != 'none']
                        if audiovideo_formats:
                            yield audiovideo_formats[format_idx]
                        elif ctx['incomplete_formats']:
                            # Extractor provided only video-only or only audio-only
                            # formats; fall back to best/worst of those.
                            yield formats[format_idx]
                    elif format_spec == 'bestaudio':
                        audio_formats = [
                            f for f in formats
                            if f.get('vcodec') == 'none']
                        if audio_formats:
                            yield audio_formats[-1]
                    elif format_spec == 'worstaudio':
                        audio_formats = [
                            f for f in formats
                            if f.get('vcodec') == 'none']
                        if audio_formats:
                            yield audio_formats[0]
                    elif format_spec == 'bestvideo':
                        video_formats = [
                            f for f in formats
                            if f.get('acodec') == 'none']
                        if video_formats:
                            yield video_formats[-1]
                    elif format_spec == 'worstvideo':
                        video_formats = [
                            f for f in formats
                            if f.get('acodec') == 'none']
                        if video_formats:
                            yield video_formats[0]
                    else:
                        # Either an extension shortcut ('mp4') or an exact format_id.
                        extensions = ['mp4', 'flv', 'webm', '3gp', 'm4a', 'mp3', 'ogg', 'aac', 'wav']
                        if format_spec in extensions:
                            filter_f = lambda f: f['ext'] == format_spec
                        else:
                            filter_f = lambda f: f['format_id'] == format_spec
                        matches = list(filter(filter_f, formats))
                        if matches:
                            yield matches[-1]
            elif selector.type == MERGE:
                def _merge(formats_info):
                    # Combine a (video, audio) pair into one synthetic format dict.
                    format_1, format_2 = [f['format_id'] for f in formats_info]
                    if formats_info[0].get('vcodec') == 'none':
                        self.report_error('The first format must '
                                          'contain the video, try using '
                                          '"-f %s+%s"' % (format_2, format_1))
                        return
                    if formats_info[0].get('acodec') == 'none' and formats_info[1].get('acodec') == 'none':
                        self.report_error(
                            'Both formats %s and %s are video-only, you must specify "-f video+audio"'
                            % (format_1, format_2))
                        return
                    output_ext = (
                        formats_info[0]['ext']
                        if self.params.get('merge_output_format') is None
                        else self.params['merge_output_format'])
                    # Video metadata comes from the first format, audio from the second.
                    return {
                        'requested_formats': formats_info,
                        'format': '%s+%s' % (formats_info[0].get('format'),
                                             formats_info[1].get('format')),
                        'format_id': '%s+%s' % (formats_info[0].get('format_id'),
                                                formats_info[1].get('format_id')),
                        'width': formats_info[0].get('width'),
                        'height': formats_info[0].get('height'),
                        'resolution': formats_info[0].get('resolution'),
                        'fps': formats_info[0].get('fps'),
                        'vcodec': formats_info[0].get('vcodec'),
                        'vbr': formats_info[0].get('vbr'),
                        'stretched_ratio': formats_info[0].get('stretched_ratio'),
                        'acodec': formats_info[1].get('acodec'),
                        'abr': formats_info[1].get('abr'),
                        'ext': output_ext,
                    }
                video_selector, audio_selector = map(_build_selector_function, selector.selector)
                def selector_function(ctx):
                    # deepcopy so the two sub-selectors can't affect each other's ctx.
                    for pair in itertools.product(
                            video_selector(copy.deepcopy(ctx)), audio_selector(copy.deepcopy(ctx))):
                        yield _merge(pair)
            filters = [self._build_format_filter(f) for f in selector.filters]
            def final_selector(ctx):
                # Apply this node's '[...]' filters before delegating.
                ctx_copy = copy.deepcopy(ctx)
                for _filter in filters:
                    ctx_copy['formats'] = list(filter(_filter, ctx_copy['formats']))
                return selector_function(ctx_copy)
            return final_selector
        stream = io.BytesIO(format_spec.encode('utf-8'))
        try:
            tokens = list(_remove_unused_ops(compat_tokenize_tokenize(stream.readline)))
        except tokenize.TokenError:
            raise syntax_error('Missing closing/opening brackets or parenthesis', (0, len(format_spec)))
        class TokenIterator(object):
            # Token iterator with one-token pushback (restore_last_token).
            def __init__(self, tokens):
                self.tokens = tokens
                self.counter = 0
            def __iter__(self):
                return self
            def __next__(self):
                if self.counter >= len(self.tokens):
                    raise StopIteration()
                value = self.tokens[self.counter]
                self.counter += 1
                return value
            # Python 2 iterator protocol compatibility.
            next = __next__
            def restore_last_token(self):
                self.counter -= 1
        parsed_selector = _parse_format_selection(iter(TokenIterator(tokens)))
        return _build_selector_function(parsed_selector)
def _calc_headers(self, info_dict):
res = std_headers.copy()
add_headers = info_dict.get('http_headers')
if add_headers:
res.update(add_headers)
cookies = self._calc_cookies(info_dict)
if cookies:
res['Cookie'] = cookies
if 'X-Forwarded-For' not in res:
x_forwarded_for_ip = info_dict.get('__x_forwarded_for_ip')
if x_forwarded_for_ip:
res['X-Forwarded-For'] = x_forwarded_for_ip
return res
def _calc_cookies(self, info_dict):
pr = sanitized_Request(info_dict['url'])
self.cookiejar.add_cookie_header(pr)
return pr.get_header('Cookie')
    def process_video_result(self, info_dict, download=True):
        """Normalize a single-video info dict, select the requested formats
        and (if *download*) hand each selected format to process_info().

        Mutates and returns info_dict. Returns early (None) when only a
        listing (--list-thumbnails, --list-subs, --list-formats) was asked for.
        Raises ExtractorError when mandatory fields or formats are missing.
        """
        assert info_dict.get('_type', 'video') == 'video'
        # --- mandatory field checks -------------------------------------
        if 'id' not in info_dict:
            raise ExtractorError('Missing "id" field in extractor result')
        if 'title' not in info_dict:
            raise ExtractorError('Missing "title" field in extractor result')
        if not isinstance(info_dict['id'], compat_str):
            self.report_warning('"id" field is not a string - forcing string conversion')
            info_dict['id'] = compat_str(info_dict['id'])
        if 'playlist' not in info_dict:
            # It isn't part of a playlist
            info_dict['playlist'] = None
            info_dict['playlist_index'] = None
        # --- thumbnails: normalize single 'thumbnail' into 'thumbnails' --
        thumbnails = info_dict.get('thumbnails')
        if thumbnails is None:
            thumbnail = info_dict.get('thumbnail')
            if thumbnail:
                info_dict['thumbnails'] = thumbnails = [{'url': thumbnail}]
        if thumbnails:
            # Sort worst-to-best by preference, then size; missing values sort first.
            thumbnails.sort(key=lambda t: (
                t.get('preference') if t.get('preference') is not None else -1,
                t.get('width') if t.get('width') is not None else -1,
                t.get('height') if t.get('height') is not None else -1,
                t.get('id') if t.get('id') is not None else '', t.get('url')))
            for i, t in enumerate(thumbnails):
                t['url'] = sanitize_url(t['url'])
                if t.get('width') and t.get('height'):
                    t['resolution'] = '%dx%d' % (t['width'], t['height'])
                if t.get('id') is None:
                    t['id'] = '%d' % i
        if self.params.get('list_thumbnails'):
            self.list_thumbnails(info_dict)
            return
        thumbnail = info_dict.get('thumbnail')
        if thumbnail:
            info_dict['thumbnail'] = sanitize_url(thumbnail)
        elif thumbnails:
            # Last thumbnail is the best one after the sort above.
            info_dict['thumbnail'] = thumbnails[-1]['url']
        if 'display_id' not in info_dict and 'id' in info_dict:
            info_dict['display_id'] = info_dict['id']
        if info_dict.get('upload_date') is None and info_dict.get('timestamp') is not None:
            # Working around out-of-range timestamp values (e.g. negative ones on Windows,
            # see http://bugs.python.org/issue1646728)
            try:
                upload_date = datetime.datetime.utcfromtimestamp(info_dict['timestamp'])
                info_dict['upload_date'] = upload_date.strftime('%Y%m%d')
            except (ValueError, OverflowError, OSError):
                pass
        # Auto generate title fields corresponding to the *_number fields when missing
        # in order to always have clean titles. This is very common for TV series.
        for field in ('chapter', 'season', 'episode'):
            if info_dict.get('%s_number' % field) is not None and not info_dict.get(field):
                info_dict[field] = '%s %d' % (field.capitalize(), info_dict['%s_number' % field])
        # --- subtitles ---------------------------------------------------
        subtitles = info_dict.get('subtitles')
        if subtitles:
            for _, subtitle in subtitles.items():
                for subtitle_format in subtitle:
                    if subtitle_format.get('url'):
                        subtitle_format['url'] = sanitize_url(subtitle_format['url'])
                    if subtitle_format.get('ext') is None:
                        subtitle_format['ext'] = determine_ext(subtitle_format['url']).lower()
        if self.params.get('listsubtitles', False):
            if 'automatic_captions' in info_dict:
                self.list_subtitles(info_dict['id'], info_dict.get('automatic_captions'), 'automatic captions')
            self.list_subtitles(info_dict['id'], subtitles, 'subtitles')
            return
        info_dict['requested_subtitles'] = self.process_subtitles(
            info_dict['id'], subtitles,
            info_dict.get('automatic_captions'))
        # We now pick which formats have to be downloaded
        if info_dict.get('formats') is None:
            # There's only one format available
            formats = [info_dict]
        else:
            formats = info_dict['formats']
        if not formats:
            raise ExtractorError('No video formats found!')
        # --- format normalization: urls, unique format_ids ---------------
        formats_dict = {}
        for i, format in enumerate(formats):
            if 'url' not in format:
                raise ExtractorError('Missing "url" key in result (index %d)' % i)
            format['url'] = sanitize_url(format['url'])
            if format.get('format_id') is None:
                format['format_id'] = compat_str(i)
            else:
                # Sanitize format_id from characters used in format selector expression
                format['format_id'] = re.sub(r'[\s,/+\[\]()]', '_', format['format_id'])
            format_id = format['format_id']
            if format_id not in formats_dict:
                formats_dict[format_id] = []
            formats_dict[format_id].append(format)
        # Make sure all formats have unique format_id
        for format_id, ambiguous_formats in formats_dict.items():
            if len(ambiguous_formats) > 1:
                for i, format in enumerate(ambiguous_formats):
                    format['format_id'] = '%s-%d' % (format_id, i)
        for i, format in enumerate(formats):
            if format.get('format') is None:
                format['format'] = '{id} - {res}{note}'.format(
                    id=format['format_id'],
                    res=self.format_resolution(format),
                    note=' ({0})'.format(format['format_note']) if format.get('format_note') is not None else '',
                )
            # Automatically determine file extension if missing
            if format.get('ext') is None:
                format['ext'] = determine_ext(format['url']).lower()
            # Automatically determine protocol if missing (useful for format
            # selection purposes)
            if format.get('protocol') is None:
                format['protocol'] = determine_protocol(format)
            # Add HTTP headers, so that external programs can use them from the
            # json output
            full_format_info = info_dict.copy()
            full_format_info.update(format)
            format['http_headers'] = self._calc_headers(full_format_info)
        # Remove private housekeeping stuff
        if '__x_forwarded_for_ip' in info_dict:
            del info_dict['__x_forwarded_for_ip']
        # TODO Central sorting goes here
        if formats[0] is not info_dict:
            # only set the 'formats' field if the extractor gave distinct formats
            info_dict['formats'] = formats
        if self.params.get('listformats'):
            self.list_formats(info_dict)
            return
        # --- format selection --------------------------------------------
        req_format = self.params.get('format')
        if req_format is None:
            req_format_list = []
            # Default to bestvideo+bestaudio only when we can merge and are
            # writing to a real file (not stdout, not a live stream).
            if (self.params.get('outtmpl', DEFAULT_OUTTMPL) != '-' and
                    not info_dict.get('is_live')):
                merger = FFmpegMergerPP(self)
                if merger.available and merger.can_merge():
                    req_format_list.append('bestvideo+bestaudio')
            req_format_list.append('best')
            req_format = '/'.join(req_format_list)
        format_selector = self.build_format_selector(req_format)
        # While in format selection we may need to have an access to the original
        # format set in order to calculate some metrics or do some processing.
        # For now we need to be able to guess whether original formats provided
        # by extractor are incomplete or not (i.e. whether extractor provides only
        # video-only or audio-only formats) for proper formats selection for
        # extractors with such incomplete formats (see
        # https://github.com/rg3/youtube-dl/pull/5556).
        # Since formats may be filtered during format selection and may not match
        # the original formats the results may be incorrect. Thus original formats
        # or pre-calculated metrics should be passed to format selection routines
        # as well.
        # We will pass a context object containing all necessary additional data
        # instead of just formats.
        # This fixes incorrect format selection issue (see
        # https://github.com/rg3/youtube-dl/issues/10083).
        incomplete_formats = (
            # All formats are video-only or
            all(f.get('vcodec') != 'none' and f.get('acodec') == 'none' for f in formats) or
            # all formats are audio-only
            all(f.get('vcodec') == 'none' and f.get('acodec') != 'none' for f in formats))
        ctx = {
            'formats': formats,
            'incomplete_formats': incomplete_formats,
        }
        formats_to_download = list(format_selector(ctx))
        if not formats_to_download:
            raise ExtractorError('requested format not available',
                                 expected=True)
        if download:
            if len(formats_to_download) > 1:
                self.to_screen('[info] %s: downloading video in %s formats' % (info_dict['id'], len(formats_to_download)))
            for format in formats_to_download:
                new_info = dict(info_dict)
                new_info.update(format)
                self.process_info(new_info)
        # We update the info dict with the best quality format (backwards compatibility)
        info_dict.update(formats_to_download[-1])
        return info_dict
def process_subtitles(self, video_id, normal_subtitles, automatic_captions):
available_subs = {}
if normal_subtitles and self.params.get('writesubtitles'):
available_subs.update(normal_subtitles)
if automatic_captions and self.params.get('writeautomaticsub'):
for lang, cap_info in automatic_captions.items():
if lang not in available_subs:
available_subs[lang] = cap_info
if (not self.params.get('writesubtitles') and not
self.params.get('writeautomaticsub') or not
available_subs):
return None
if self.params.get('allsubtitles', False):
requested_langs = available_subs.keys()
else:
if self.params.get('subtitleslangs', False):
requested_langs = self.params.get('subtitleslangs')
elif 'en' in available_subs:
requested_langs = ['en']
else:
requested_langs = [list(available_subs.keys())[0]]
formats_query = self.params.get('subtitlesformat', 'best')
formats_preference = formats_query.split('/') if formats_query else []
subs = {}
for lang in requested_langs:
formats = available_subs.get(lang)
if formats is None:
self.report_warning('%s subtitles not available for %s' % (lang, video_id))
continue
for ext in formats_preference:
if ext == 'best':
f = formats[-1]
break
matches = list(filter(lambda f: f['ext'] == ext, formats))
if matches:
f = matches[-1]
break
else:
f = formats[-1]
self.report_warning(
'No subtitle format found matching "%s" for language %s, '
'using %s' % (formats_query, lang, f['ext']))
subs[lang] = f
return subs
def process_info(self, info_dict):
assert info_dict.get('_type', 'video') == 'video'
max_downloads = self.params.get('max_downloads')
if max_downloads is not None:
if self._num_downloads >= int(max_downloads):
raise MaxDownloadsReached()
info_dict['fulltitle'] = info_dict['title']
if len(info_dict['title']) > 200:
info_dict['title'] = info_dict['title'][:197] + '...'
if 'format' not in info_dict:
info_dict['format'] = info_dict['ext']
reason = self._match_entry(info_dict, incomplete=False)
if reason is not None:
self.to_screen('[download] ' + reason)
return
self._num_downloads += 1
info_dict['_filename'] = filename = self.prepare_filename(info_dict)
# Forced printings
if self.params.get('forcetitle', False):
self.to_stdout(info_dict['fulltitle'])
if self.params.get('forceid', False):
self.to_stdout(info_dict['id'])
if self.params.get('forceurl', False):
if info_dict.get('requested_formats') is not None:
for f in info_dict['requested_formats']:
self.to_stdout(f['url'] + f.get('play_path', ''))
else:
# For RTMP URLs, also include the playpath
self.to_stdout(info_dict['url'] + info_dict.get('play_path', ''))
if self.params.get('forcethumbnail', False) and info_dict.get('thumbnail') is not None:
self.to_stdout(info_dict['thumbnail'])
if self.params.get('forcedescription', False) and info_dict.get('description') is not None:
self.to_stdout(info_dict['description'])
if self.params.get('forcefilename', False) and filename is not None:
self.to_stdout(filename)
if self.params.get('forceduration', False) and info_dict.get('duration') is not None:
self.to_stdout(formatSeconds(info_dict['duration']))
if self.params.get('forceformat', False):
self.to_stdout(info_dict['format'])
if self.params.get('forcejson', False):
self.to_stdout(json.dumps(info_dict))
# Do nothing else if in simulate mode
if self.params.get('simulate', False):
return
if filename is None:
return
try:
dn = os.path.dirname(sanitize_path(encodeFilename(filename)))
if dn and not os.path.exists(dn):
os.makedirs(dn)
except (OSError, IOError) as err:
self.report_error('unable to create directory ' + error_to_compat_str(err))
return
if self.params.get('writedescription', False):
descfn = replace_extension(filename, 'description', info_dict.get('ext'))
if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(descfn)):
self.to_screen('[info] Video description is already present')
elif info_dict.get('description') is None:
self.report_warning('There\'s no description to write.')
else:
try:
self.to_screen('[info] Writing video description to: ' + descfn)
with io.open(encodeFilename(descfn), 'w', encoding='utf-8') as descfile:
descfile.write(info_dict['description'])
except (OSError, IOError):
self.report_error('Cannot write description file ' + descfn)
return
if self.params.get('writeannotations', False):
annofn = replace_extension(filename, 'annotations.xml', info_dict.get('ext'))
if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(annofn)):
self.to_screen('[info] Video annotations are already present')
else:
try:
self.to_screen('[info] Writing video annotations to: ' + annofn)
with io.open(encodeFilename(annofn), 'w', encoding='utf-8') as annofile:
annofile.write(info_dict['annotations'])
except (KeyError, TypeError):
self.report_warning('There are no annotations to write.')
except (OSError, IOError):
self.report_error('Cannot write annotations file: ' + annofn)
return
subtitles_are_requested = any([self.params.get('writesubtitles', False),
self.params.get('writeautomaticsub')])
if subtitles_are_requested and info_dict.get('requested_subtitles'):
subtitles = info_dict['requested_subtitles']
ie = self.get_info_extractor(info_dict['extractor_key'])
for sub_lang, sub_info in subtitles.items():
sub_format = sub_info['ext']
if sub_info.get('data') is not None:
sub_data = sub_info['data']
else:
try:
sub_data = ie._download_webpage(
sub_info['url'], info_dict['id'], note=False)
except ExtractorError as err:
self.report_warning('Unable to download subtitle for "%s": %s' %
(sub_lang, error_to_compat_str(err.cause)))
continue
try:
sub_filename = subtitles_filename(filename, sub_lang, sub_format)
if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(sub_filename)):
self.to_screen('[info] Video subtitle %s.%s is already_present' % (sub_lang, sub_format))
else:
self.to_screen('[info] Writing video subtitles to: ' + sub_filename)
with io.open(encodeFilename(sub_filename), 'w', encoding='utf-8', newline='') as subfile:
subfile.write(sub_data)
except (OSError, IOError):
self.report_error('Cannot write subtitles file ' + sub_filename)
return
if self.params.get('writeinfojson', False):
infofn = replace_extension(filename, 'info.json', info_dict.get('ext'))
if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(infofn)):
self.to_screen('[info] Video description metadata is already present')
else:
self.to_screen('[info] Writing video description metadata as JSON to: ' + infofn)
try:
write_json_file(self.filter_requested_info(info_dict), infofn)
except (OSError, IOError):
self.report_error('Cannot write metadata to JSON file ' + infofn)
return
self._write_thumbnails(info_dict, filename)
if not self.params.get('skip_download', False):
try:
def dl(name, info):
fd = get_suitable_downloader(info, self.params)(self, self.params)
for ph in self._progress_hooks:
fd.add_progress_hook(ph)
if self.params.get('verbose'):
self.to_stdout('[debug] Invoking downloader on %r' % info.get('url'))
return fd.download(name, info)
if info_dict.get('requested_formats') is not None:
downloaded = []
success = True
merger = FFmpegMergerPP(self)
if not merger.available:
postprocessors = []
self.report_warning('You have requested multiple '
'formats but ffmpeg or avconv are not installed.'
' The formats won\'t be merged.')
else:
postprocessors = [merger]
def compatible_formats(formats):
video, audio = formats
# Check extension
video_ext, audio_ext = audio.get('ext'), video.get('ext')
if video_ext and audio_ext:
COMPATIBLE_EXTS = (
('mp3', 'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'ismv', 'isma'),
('webm')
)
for exts in COMPATIBLE_EXTS:
if video_ext in exts and audio_ext in exts:
return True
# TODO: Check acodec/vcodec
return False
filename_real_ext = os.path.splitext(filename)[1][1:]
filename_wo_ext = (
os.path.splitext(filename)[0]
if filename_real_ext == info_dict['ext']
else filename)
requested_formats = info_dict['requested_formats']
if self.params.get('merge_output_format') is None and not compatible_formats(requested_formats):
info_dict['ext'] = 'mkv'
self.report_warning(
'Requested formats are incompatible for merge and will be merged into mkv.')
# Ensure filename always has a correct extension for successful merge
filename = '%s.%s' % (filename_wo_ext, info_dict['ext'])
if os.path.exists(encodeFilename(filename)):
self.to_screen(
'[download] %s has already been downloaded and '
'merged' % filename)
else:
for f in requested_formats:
new_info = dict(info_dict)
new_info.update(f)
fname = self.prepare_filename(new_info)
fname = prepend_extension(fname, 'f%s' % f['format_id'], new_info['ext'])
downloaded.append(fname)
partial_success = dl(fname, new_info)
success = success and partial_success
info_dict['__postprocessors'] = postprocessors
info_dict['__files_to_merge'] = downloaded
else:
# Just a single file
success = dl(filename, info_dict)
except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
self.report_error('unable to download video data: %s' % error_to_compat_str(err))
return
except (OSError, IOError) as err:
raise UnavailableVideoError(err)
except (ContentTooShortError, ) as err:
self.report_error('content too short (expected %s bytes and served %s)' % (err.expected, err.downloaded))
return
if success and filename != '-':
# Fixup content
fixup_policy = self.params.get('fixup')
if fixup_policy is None:
fixup_policy = 'detect_or_warn'
INSTALL_FFMPEG_MESSAGE = 'Install ffmpeg or avconv to fix this automatically.'
stretched_ratio = info_dict.get('stretched_ratio')
if stretched_ratio is not None and stretched_ratio != 1:
if fixup_policy == 'warn':
self.report_warning('%s: Non-uniform pixel ratio (%s)' % (
info_dict['id'], stretched_ratio))
elif fixup_policy == 'detect_or_warn':
stretched_pp = FFmpegFixupStretchedPP(self)
if stretched_pp.available:
info_dict.setdefault('__postprocessors', [])
info_dict['__postprocessors'].append(stretched_pp)
else:
self.report_warning(
'%s: Non-uniform pixel ratio (%s). %s'
% (info_dict['id'], stretched_ratio, INSTALL_FFMPEG_MESSAGE))
else:
assert fixup_policy in ('ignore', 'never')
if (info_dict.get('requested_formats') is None and
info_dict.get('container') == 'm4a_dash'):
if fixup_policy == 'warn':
self.report_warning(
'%s: writing DASH m4a. '
'Only some players support this container.'
% info_dict['id'])
elif fixup_policy == 'detect_or_warn':
fixup_pp = FFmpegFixupM4aPP(self)
if fixup_pp.available:
info_dict.setdefault('__postprocessors', [])
info_dict['__postprocessors'].append(fixup_pp)
else:
self.report_warning(
'%s: writing DASH m4a. '
'Only some players support this container. %s'
% (info_dict['id'], INSTALL_FFMPEG_MESSAGE))
else:
assert fixup_policy in ('ignore', 'never')
if (info_dict.get('protocol') == 'm3u8_native' or
info_dict.get('protocol') == 'm3u8' and
self.params.get('hls_prefer_native')):
if fixup_policy == 'warn':
self.report_warning('%s: malformated aac bitstream.' % (
info_dict['id']))
elif fixup_policy == 'detect_or_warn':
fixup_pp = FFmpegFixupM3u8PP(self)
if fixup_pp.available:
info_dict.setdefault('__postprocessors', [])
info_dict['__postprocessors'].append(fixup_pp)
else:
self.report_warning(
'%s: malformated aac bitstream. %s'
% (info_dict['id'], INSTALL_FFMPEG_MESSAGE))
else:
assert fixup_policy in ('ignore', 'never')
try:
self.post_process(filename, info_dict)
except (PostProcessingError) as err:
self.report_error('postprocessing: %s' % str(err))
return
self.record_download_archive(info_dict)
def download(self, url_list):
outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)
if (len(url_list) > 1 and
'%' not in outtmpl and
self.params.get('max_downloads') != 1):
raise SameFileError(outtmpl)
for url in url_list:
try:
# It also downloads the videos
res = self.extract_info(
url, force_generic_extractor=self.params.get('force_generic_extractor', False))
except UnavailableVideoError:
self.report_error('unable to download video')
except MaxDownloadsReached:
self.to_screen('[info] Maximum number of downloaded files reached.')
raise
else:
if self.params.get('dump_single_json', False):
self.to_stdout(json.dumps(res))
return self._download_retcode
def download_with_info_file(self, info_filename):
with contextlib.closing(fileinput.FileInput(
[info_filename], mode='r',
openhook=fileinput.hook_encoded('utf-8'))) as f:
# FileInput doesn't have a read method, we can't call json.load
info = self.filter_requested_info(json.loads('\n'.join(f)))
try:
self.process_ie_result(info, download=True)
except DownloadError:
webpage_url = info.get('webpage_url')
if webpage_url is not None:
self.report_warning('The info failed to download, trying with "%s"' % webpage_url)
return self.download([webpage_url])
else:
raise
return self._download_retcode
@staticmethod
def filter_requested_info(info_dict):
return dict(
(k, v) for k, v in info_dict.items()
if k not in ['requested_formats', 'requested_subtitles'])
def post_process(self, filename, ie_info):
info = dict(ie_info)
info['filepath'] = filename
pps_chain = []
if ie_info.get('__postprocessors') is not None:
pps_chain.extend(ie_info['__postprocessors'])
pps_chain.extend(self._pps)
for pp in pps_chain:
files_to_delete = []
try:
files_to_delete, info = pp.run(info)
except PostProcessingError as e:
self.report_error(e.msg)
if files_to_delete and not self.params.get('keepvideo', False):
for old_filename in files_to_delete:
self.to_screen('Deleting original file %s (pass -k to keep)' % old_filename)
try:
os.remove(encodeFilename(old_filename))
except (IOError, OSError):
self.report_warning('Unable to remove downloaded original file')
def _make_archive_id(self, info_dict):
# Future-proof against any change in case
# and backwards compatibility with prior versions
extractor = info_dict.get('extractor_key')
if extractor is None:
if 'id' in info_dict:
extractor = info_dict.get('ie_key') # key in a playlist
if extractor is None:
return None # Incomplete video information
return extractor.lower() + ' ' + info_dict['id']
def in_download_archive(self, info_dict):
fn = self.params.get('download_archive')
if fn is None:
return False
vid_id = self._make_archive_id(info_dict)
if vid_id is None:
return False # Incomplete video information
try:
with locked_file(fn, 'r', encoding='utf-8') as archive_file:
for line in archive_file:
if line.strip() == vid_id:
return True
except IOError as ioe:
if ioe.errno != errno.ENOENT:
raise
return False
def record_download_archive(self, info_dict):
fn = self.params.get('download_archive')
if fn is None:
return
vid_id = self._make_archive_id(info_dict)
assert vid_id
with locked_file(fn, 'a', encoding='utf-8') as archive_file:
archive_file.write(vid_id + '\n')
@staticmethod
def format_resolution(format, default='unknown'):
if format.get('vcodec') == 'none':
return 'audio only'
if format.get('resolution') is not None:
return format['resolution']
if format.get('height') is not None:
if format.get('width') is not None:
res = '%sx%s' % (format['width'], format['height'])
else:
res = '%sp' % format['height']
elif format.get('width') is not None:
res = '%dx?' % format['width']
else:
res = default
return res
    def _format_note(self, fdict):
        """Build the human-readable "note" column for a single format dict.

        Concatenates, in a fixed order: unsupported-container marker,
        language, free-form format_note, total bitrate, container, video
        codec/bitrate, fps, audio codec/bitrate, sample rate and the
        (approximate) file size. A separator is only inserted when
        something was already emitted.
        """
        res = ''
        if fdict.get('ext') in ['f4f', 'f4m']:
            res += '(unsupported) '
        if fdict.get('language'):
            if res:
                res += ' '
            res += '[%s] ' % fdict['language']
        if fdict.get('format_note') is not None:
            res += fdict['format_note'] + ' '
        if fdict.get('tbr') is not None:
            res += '%4dk ' % fdict['tbr']
        if fdict.get('container') is not None:
            if res:
                res += ', '
            res += '%s container' % fdict['container']
        if (fdict.get('vcodec') is not None and
                fdict.get('vcodec') != 'none'):
            if res:
                res += ', '
            res += fdict['vcodec']
            if fdict.get('vbr') is not None:
                res += '@'
        elif fdict.get('vbr') is not None and fdict.get('abr') is not None:
            # Codec unknown but both bitrates known: label the video bitrate
            res += 'video@'
        if fdict.get('vbr') is not None:
            res += '%4dk' % fdict['vbr']
        if fdict.get('fps') is not None:
            if res:
                res += ', '
            res += '%sfps' % fdict['fps']
        if fdict.get('acodec') is not None:
            if res:
                res += ', '
            if fdict['acodec'] == 'none':
                res += 'video only'
            else:
                # Pad to 5 chars so the audio bitrate column lines up
                res += '%-5s' % fdict['acodec']
        elif fdict.get('abr') is not None:
            if res:
                res += ', '
            res += 'audio'
        if fdict.get('abr') is not None:
            res += '@%3dk' % fdict['abr']
        if fdict.get('asr') is not None:
            res += ' (%5dHz)' % fdict['asr']
        if fdict.get('filesize') is not None:
            if res:
                res += ', '
            res += format_bytes(fdict['filesize'])
        elif fdict.get('filesize_approx') is not None:
            if res:
                res += ', '
            res += '~' + format_bytes(fdict['filesize_approx'])
        return res
    def list_formats(self, info_dict):
        """Print a table of all selectable formats for the given video."""
        formats = info_dict.get('formats', [info_dict])
        # Drop formats with a strongly negative preference (internal-only)
        table = [
            [f['format_id'], f['ext'], self.format_resolution(f), self._format_note(f)]
            for f in formats
            if f.get('preference') is None or f['preference'] >= -1000]
        if len(formats) > 1:
            # Formats are sorted worst-to-best, so the last row is the best
            table[-1][-1] += (' ' if table[-1][-1] else '') + '(best)'
        header_line = ['format code', 'extension', 'resolution', 'note']
        self.to_screen(
            '[info] Available formats for %s:\n%s' %
            (info_dict['id'], render_table(header_line, table)))
    def list_thumbnails(self, info_dict):
        """Print a table of the available thumbnails for the given video."""
        thumbnails = info_dict.get('thumbnails')
        if not thumbnails:
            self.to_screen('[info] No thumbnails present for %s' % info_dict['id'])
            return
        self.to_screen(
            '[info] Thumbnails for %s:' % info_dict['id'])
        self.to_screen(render_table(
            ['ID', 'width', 'height', 'URL'],
            [[t['id'], t.get('width', 'unknown'), t.get('height', 'unknown'), t['url']] for t in thumbnails]))
def list_subtitles(self, video_id, subtitles, name='subtitles'):
if not subtitles:
self.to_screen('%s has no %s' % (video_id, name))
return
self.to_screen(
'Available %s for %s:' % (name, video_id))
self.to_screen(render_table(
['Language', 'formats'],
[[lang, ', '.join(f['ext'] for f in reversed(formats))]
for lang, formats in subtitles.items()]))
    def urlopen(self, req):
        """Open *req* (a URL string or a Request object) through the
        internally configured opener (proxies, cookies, timeout)."""
        if isinstance(req, compat_basestring):
            # Wrap plain URLs so their headers get sanitized consistently
            req = sanitized_Request(req)
        return self._opener.open(req, timeout=self._socket_timeout)
    def print_debug_header(self):
        """Print debugging information (encodings, versions, proxies, ...)
        to the screen; only active when the 'verbose' option is set."""
        if not self.params.get('verbose'):
            return
        if type('') is not compat_str:
            # Python 2.6 on SLES11 SP1 (https://github.com/rg3/youtube-dl/issues/3326)
            self.report_warning(
                'Your Python is broken! Update to a newer and supported version')
        stdout_encoding = getattr(
            sys.stdout, 'encoding', 'missing (%s)' % type(sys.stdout).__name__)
        encoding_str = (
            '[debug] Encodings: locale %s, fs %s, out %s, pref %s\n' % (
                locale.getpreferredencoding(),
                sys.getfilesystemencoding(),
                stdout_encoding,
                self.get_encoding()))
        write_string(encoding_str, encoding=None)
        self._write_string('[debug] youtube-dl version ' + __version__ + '\n')
        if _LAZY_LOADER:
            self._write_string('[debug] Lazy loading extractors enabled' + '\n')
        # Best effort: report the git revision when running from a checkout
        try:
            sp = subprocess.Popen(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                cwd=os.path.dirname(os.path.abspath(__file__)))
            out, err = sp.communicate()
            out = out.decode().strip()
            if re.match('[0-9a-f]+', out):
                self._write_string('[debug] Git HEAD: ' + out + '\n')
        except Exception:
            try:
                # Python 2 only: clear the pending exception state
                sys.exc_clear()
            except Exception:
                pass
        self._write_string('[debug] Python version %s - %s\n' % (
            platform.python_version(), platform_name()))
        exe_versions = FFmpegPostProcessor.get_versions(self)
        exe_versions['rtmpdump'] = rtmpdump_version()
        exe_str = ', '.join(
            '%s %s' % (exe, v)
            for exe, v in sorted(exe_versions.items())
            if v
        )
        if not exe_str:
            exe_str = 'none'
        self._write_string('[debug] exe versions: %s\n' % exe_str)
        proxy_map = {}
        for handler in self._opener.handlers:
            if hasattr(handler, 'proxies'):
                proxy_map.update(handler.proxies)
        self._write_string('[debug] Proxy map: ' + compat_str(proxy_map) + '\n')
        if self.params.get('call_home', False):
            # Opt-in: contact yt-dl.org to report the public IP and check
            # whether a newer release is available
            ipaddr = self.urlopen('https://yt-dl.org/ip').read().decode('utf-8')
            self._write_string('[debug] Public IP address: %s\n' % ipaddr)
            latest_version = self.urlopen(
                'https://yt-dl.org/latest/version').read().decode('utf-8')
            if version_tuple(latest_version) > version_tuple(__version__):
                self.report_warning(
                    'You are using an outdated version (newest version: %s)! '
                    'See https://yt-dl.org/update if you need help updating.' %
                    latest_version)
    def _setup_opener(self):
        """Build the urllib opener used for all HTTP(S) requests, wiring up
        cookies, proxies, traffic debugging and the data: URL handler."""
        timeout_val = self.params.get('socket_timeout')
        # Default socket timeout: 10 minutes
        self._socket_timeout = 600 if timeout_val is None else float(timeout_val)
        opts_cookiefile = self.params.get('cookiefile')
        opts_proxy = self.params.get('proxy')
        if opts_cookiefile is None:
            self.cookiejar = compat_cookiejar.CookieJar()
        else:
            opts_cookiefile = compat_expanduser(opts_cookiefile)
            self.cookiejar = compat_cookiejar.MozillaCookieJar(
                opts_cookiefile)
            if os.access(opts_cookiefile, os.R_OK):
                self.cookiejar.load()
        cookie_processor = YoutubeDLCookieProcessor(self.cookiejar)
        if opts_proxy is not None:
            if opts_proxy == '':
                # An empty --proxy disables proxying entirely
                proxies = {}
            else:
                proxies = {'http': opts_proxy, 'https': opts_proxy}
        else:
            proxies = compat_urllib_request.getproxies()
            # Set HTTPS proxy to HTTP one if given (https://github.com/rg3/youtube-dl/issues/805)
            if 'http' in proxies and 'https' not in proxies:
                proxies['https'] = proxies['http']
        proxy_handler = PerRequestProxyHandler(proxies)
        debuglevel = 1 if self.params.get('debug_printtraffic') else 0
        https_handler = make_HTTPS_handler(self.params, debuglevel=debuglevel)
        ydlh = YoutubeDLHandler(self.params, debuglevel=debuglevel)
        data_handler = compat_urllib_request_DataHandler()
        # When passing our own FileHandler instance, build_opener won't add the
        # default FileHandler, which lets us disable the file:// scheme below
        # (it could otherwise be abused to read local files).
        file_handler = compat_urllib_request.FileHandler()
        def file_open(*args, **kwargs):
            raise compat_urllib_error.URLError('file:// scheme is explicitly disabled in youtube-dl for security reasons')
        file_handler.file_open = file_open
        opener = compat_urllib_request.build_opener(
            proxy_handler, https_handler, cookie_processor, ydlh, data_handler, file_handler)
        # Clear default headers added by build_opener (e.g. User-agent),
        # which would otherwise apply when our custom HTTP handler doesn't
        # come into play (See https://github.com/rg3/youtube-dl/issues/1309 for details)
        opener.addheaders = []
        self._opener = opener
def encode(self, s):
if isinstance(s, bytes):
return s # Already encoded
try:
return s.encode(self.get_encoding())
except UnicodeEncodeError as err:
err.reason = err.reason + '. Check your system encoding configuration or use the --encoding option.'
raise
def get_encoding(self):
encoding = self.params.get('encoding')
if encoding is None:
encoding = preferredencoding()
return encoding
    def _write_thumbnails(self, info_dict, filename):
        """Download thumbnail image(s) next to the media file.

        Writes only the best thumbnail with --write-thumbnail, or every
        available one with --write-all-thumbnails; does nothing otherwise.
        """
        if self.params.get('writethumbnail', False):
            thumbnails = info_dict.get('thumbnails')
            if thumbnails:
                # Thumbnails are sorted worst-to-best; keep only the best
                thumbnails = [thumbnails[-1]]
        elif self.params.get('write_all_thumbnails', False):
            thumbnails = info_dict.get('thumbnails')
        else:
            return
        if not thumbnails:
            # No thumbnails present, so return immediately
            return
        for t in thumbnails:
            thumb_ext = determine_ext(t['url'], 'jpg')
            # Only disambiguate file names when writing several thumbnails
            suffix = '_%s' % t['id'] if len(thumbnails) > 1 else ''
            thumb_display_id = '%s ' % t['id'] if len(thumbnails) > 1 else ''
            t['filename'] = thumb_filename = os.path.splitext(filename)[0] + suffix + '.' + thumb_ext
            if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(thumb_filename)):
                self.to_screen('[%s] %s: Thumbnail %sis already present' %
                               (info_dict['extractor'], info_dict['id'], thumb_display_id))
            else:
                self.to_screen('[%s] %s: Downloading thumbnail %s...' %
                               (info_dict['extractor'], info_dict['id'], thumb_display_id))
                try:
                    uf = self.urlopen(t['url'])
                    with open(encodeFilename(thumb_filename), 'wb') as thumbf:
                        shutil.copyfileobj(uf, thumbf)
                    self.to_screen('[%s] %s: Writing thumbnail %sto: %s' %
                                   (info_dict['extractor'], info_dict['id'], thumb_display_id, thumb_filename))
                except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
                    # Thumbnails are best-effort: warn, don't abort the download
                    self.report_warning('Unable to download thumbnail "%s": %s' %
                                        (t['url'], error_to_compat_str(err)))
| true | true |
f725461411704fbc79dc7ae2a9d1d39784ab8ad4 | 678 | py | Python | var/spack/repos/builtin/packages/shapelib/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11 | 2015-10-04T02:17:46.000Z | 2018-02-07T18:23:00.000Z | var/spack/repos/builtin/packages/shapelib/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 22 | 2017-08-01T22:45:10.000Z | 2022-03-10T07:46:31.000Z | var/spack/repos/builtin/packages/shapelib/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 4 | 2016-06-10T17:57:39.000Z | 2018-09-11T04:59:38.000Z | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Shapelib(CMakePackage):
    """The Shapefile C Library provides the ability to write simple C programs
    for reading, writing and updating (to a limited extent) ESRI Shapefiles,
    and the associated attribute file (.dbf).
    """

    homepage = "http://shapelib.maptools.org/"
    url = "https://github.com/OSGeo/shapelib/archive/v1.5.0.tar.gz"

    # Upstream release tarball pinned by its sha256 checksum
    version('1.5.0', sha256='48de3a6a8691b0b111b909c0b908af4627635c75322b3a501c0c0885f3558cad')
| 35.684211 | 95 | 0.738938 |
from spack import *
class Shapelib(CMakePackage):
    """Spack package for the Shapefile C Library (shapelib), a small C
    library for reading, writing and updating ESRI Shapefiles and the
    associated .dbf attribute files."""

    homepage = "http://shapelib.maptools.org/"
    url = "https://github.com/OSGeo/shapelib/archive/v1.5.0.tar.gz"

    # Upstream release tarball pinned by its sha256 checksum
    version('1.5.0', sha256='48de3a6a8691b0b111b909c0b908af4627635c75322b3a501c0c0885f3558cad')
| true | true |
f725468d7e8fa6ad83dd70dda266a65d8900285a | 4,522 | py | Python | tests/extension/thread_/stream_reduce_source_join/thread_stream_reduce_source_join.py | Pyverilog/veriloggen | 381ac8920088d986925cf87cb838366eb48a4889 | [
"Apache-2.0"
] | 232 | 2015-09-01T16:07:48.000Z | 2022-03-28T14:53:28.000Z | tests/extension/thread_/stream_reduce_source_join/thread_stream_reduce_source_join.py | Pyverilog/veriloggen | 381ac8920088d986925cf87cb838366eb48a4889 | [
"Apache-2.0"
] | 34 | 2015-08-21T09:13:03.000Z | 2022-03-21T23:52:44.000Z | tests/extension/thread_/stream_reduce_source_join/thread_stream_reduce_source_join.py | shtaxxx/veriloggen | 381ac8920088d986925cf87cb838366eb48a4889 | [
"Apache-2.0"
] | 46 | 2015-09-24T14:39:57.000Z | 2022-02-23T21:59:56.000Z | from __future__ import absolute_import
from __future__ import print_function
import sys
import os
# the next line can be removed after installation
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))))
from veriloggen import *
import veriloggen.thread as vthread
import veriloggen.types.axi as axi
def mkLed():
    """Build the 'blinkled' hardware module.

    The module contains an AXI master, two on-chip RAMs and a stream
    pipeline computing running sums (ReduceAddValid). A software thread
    drives three stream reductions, a sequential reference computation,
    and a self-check comparing both result sets.
    """
    m = Module('blinkled')
    clk = m.Input('CLK')
    rst = m.Input('RST')
    datawidth = 32
    addrwidth = 10
    myaxi = vthread.AXIM(m, 'myaxi', clk, rst, datawidth)
    ram_a = vthread.RAM(m, 'ram_a', clk, rst, datawidth, addrwidth)
    ram_b = vthread.RAM(m, 'ram_b', clk, rst, datawidth, addrwidth)
    # Stream definition: add 'size' consecutive values of source 'a' and
    # emit the total once per reduction window (sum_valid pulses then).
    strm = vthread.Stream(m, 'mystream', clk, rst)
    a = strm.source('a')
    size = strm.parameter('size')
    sum, sum_valid = strm.ReduceAddValid(a, size)
    strm.sink(sum, 'sum', when=sum_valid, when_name='sum_valid')
    def comp_stream(size, offset):
        # Three back-to-back reductions of growing length; source_join()
        # waits for the previous source transfer before reconfiguring.
        strm.set_source('a', ram_a, offset, size)
        strm.set_parameter('size', size)
        strm.set_sink('sum', ram_b, offset, 1)
        strm.run()
        strm.set_source('a', ram_a, offset + size, size + size)
        strm.set_parameter('size', size + size)
        strm.set_sink('sum', ram_b, offset + 1, 1)
        strm.source_join()
        strm.run()
        strm.set_source('a', ram_a, offset + size + size + size, size + size + size)
        strm.set_parameter('size', size + size + size)
        strm.set_sink('sum', ram_b, offset + 2, 1)
        strm.source_join()
        strm.run()
        strm.source_join()
        strm.join()
    def comp_sequential(size, offset):
        # Software reference: same three sums computed with plain loops
        sum = 0
        for i in range(size):
            a = ram_a.read(i + offset)
            sum += a
        ram_b.write(offset, sum)
        sum = 0
        for i in range(size + size):
            a = ram_a.read(i + offset + size)
            sum += a
        ram_b.write(offset + 1, sum)
        sum = 0
        for i in range(size + size + size):
            a = ram_a.read(i + offset + size + size + size)
            sum += a
        ram_b.write(offset + 2, sum)
    def check(size, offset_stream, offset_seq):
        # Compare stream results against the sequential reference
        all_ok = True
        for i in range(size):
            st = ram_b.read(i + offset_stream)
            sq = ram_b.read(i + offset_seq)
            if vthread.verilog.NotEql(st, sq):
                all_ok = False
        if all_ok:
            print('# verify: PASSED')
        else:
            print('# verify: FAILED')
    def comp(size):
        # stream
        offset = 0
        myaxi.dma_read(ram_a, offset, 0, size * 6)
        comp_stream(size, offset)
        myaxi.dma_write(ram_b, offset, 1024, 3)
        # sequential
        offset = size
        myaxi.dma_read(ram_a, offset, 0, size * 6)
        comp_sequential(size, offset)
        myaxi.dma_write(ram_b, offset, 1024 * 2, 3)
        # verification
        myaxi.dma_read(ram_b, 0, 1024, 3)
        myaxi.dma_read(ram_b, offset, 1024 * 2, 3)
        check(3, 0, offset)
        vthread.finish()
    th = vthread.Thread(m, 'th_comp', clk, rst, comp)
    fsm = th.start(32)
    return m
def mkTest(memimg_name=None):
    """Wrap mkLed() in a simulation testbench with clock/reset generation
    and an AXI memory model backed by *memimg_name*."""
    m = Module('test')
    # target instance
    led = mkLed()
    # copy paras and ports
    params = m.copy_params(led)
    ports = m.copy_sim_ports(led)
    clk = ports['CLK']
    rst = ports['RST']
    memory = axi.AxiMemoryModel(m, 'memory', clk, rst, memimg_name=memimg_name)
    memory.connect(ports, 'myaxi')
    uut = m.Instance(led, 'uut',
                     params=m.connect_params(led),
                     ports=m.connect_ports(led))
    # simulation.setup_waveform(m, uut)
    simulation.setup_clock(m, clk, hperiod=5)
    init = simulation.setup_reset(m, rst, m.make_reset(), period=100)
    # Hard stop so a hung design cannot run forever
    init.add(
        Delay(1000000),
        Systask('finish'),
    )
    return m
def run(filename='tmp.v', simtype='iverilog', outputfile=None):
    """Build the testbench, optionally dump Verilog, run the simulator
    and return its log output.

    Args:
        filename: Verilog dump path, or None to skip dumping.
        simtype: simulator backend name ('iverilog' or 'verilator').
        outputfile: simulator log file; defaults to '<script name>.out'.
    """
    if outputfile is None:
        outputfile = os.path.splitext(os.path.basename(__file__))[0] + '.out'
    memimg_name = 'memimg_' + outputfile
    test = mkTest(memimg_name=memimg_name)
    if filename is not None:
        test.to_verilog(filename)
    sim = simulation.Simulator(test, sim=simtype)
    rslt = sim.run(outputfile=outputfile)
    # Verilator appends a trailing dashed status line; strip it from the
    # log. Guard against empty output to avoid an IndexError.
    lines = rslt.splitlines()
    if simtype == 'verilator' and lines and lines[-1].startswith('-'):
        rslt = '\n'.join(lines[:-1])
    return rslt
if __name__ == '__main__':
    # Standalone entry point: emit tmp.v and print the simulation log
    rslt = run(filename='tmp.v')
    print(rslt)
| 27.406061 | 84 | 0.587351 | from __future__ import absolute_import
from __future__ import print_function
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))))
from veriloggen import *
import veriloggen.thread as vthread
import veriloggen.types.axi as axi
def mkLed():
m = Module('blinkled')
clk = m.Input('CLK')
rst = m.Input('RST')
datawidth = 32
addrwidth = 10
myaxi = vthread.AXIM(m, 'myaxi', clk, rst, datawidth)
ram_a = vthread.RAM(m, 'ram_a', clk, rst, datawidth, addrwidth)
ram_b = vthread.RAM(m, 'ram_b', clk, rst, datawidth, addrwidth)
strm = vthread.Stream(m, 'mystream', clk, rst)
a = strm.source('a')
size = strm.parameter('size')
sum, sum_valid = strm.ReduceAddValid(a, size)
strm.sink(sum, 'sum', when=sum_valid, when_name='sum_valid')
def comp_stream(size, offset):
strm.set_source('a', ram_a, offset, size)
strm.set_parameter('size', size)
strm.set_sink('sum', ram_b, offset, 1)
strm.run()
strm.set_source('a', ram_a, offset + size, size + size)
strm.set_parameter('size', size + size)
strm.set_sink('sum', ram_b, offset + 1, 1)
strm.source_join()
strm.run()
strm.set_source('a', ram_a, offset + size + size + size, size + size + size)
strm.set_parameter('size', size + size + size)
strm.set_sink('sum', ram_b, offset + 2, 1)
strm.source_join()
strm.run()
strm.source_join()
strm.join()
def comp_sequential(size, offset):
sum = 0
for i in range(size):
a = ram_a.read(i + offset)
sum += a
ram_b.write(offset, sum)
sum = 0
for i in range(size + size):
a = ram_a.read(i + offset + size)
sum += a
ram_b.write(offset + 1, sum)
sum = 0
for i in range(size + size + size):
a = ram_a.read(i + offset + size + size + size)
sum += a
ram_b.write(offset + 2, sum)
def check(size, offset_stream, offset_seq):
all_ok = True
for i in range(size):
st = ram_b.read(i + offset_stream)
sq = ram_b.read(i + offset_seq)
if vthread.verilog.NotEql(st, sq):
all_ok = False
if all_ok:
print('# verify: PASSED')
else:
print('# verify: FAILED')
def comp(size):
offset = 0
myaxi.dma_read(ram_a, offset, 0, size * 6)
comp_stream(size, offset)
myaxi.dma_write(ram_b, offset, 1024, 3)
offset = size
myaxi.dma_read(ram_a, offset, 0, size * 6)
comp_sequential(size, offset)
myaxi.dma_write(ram_b, offset, 1024 * 2, 3)
myaxi.dma_read(ram_b, 0, 1024, 3)
myaxi.dma_read(ram_b, offset, 1024 * 2, 3)
check(3, 0, offset)
vthread.finish()
th = vthread.Thread(m, 'th_comp', clk, rst, comp)
fsm = th.start(32)
return m
def mkTest(memimg_name=None):
m = Module('test')
led = mkLed()
params = m.copy_params(led)
ports = m.copy_sim_ports(led)
clk = ports['CLK']
rst = ports['RST']
memory = axi.AxiMemoryModel(m, 'memory', clk, rst, memimg_name=memimg_name)
memory.connect(ports, 'myaxi')
uut = m.Instance(led, 'uut',
params=m.connect_params(led),
ports=m.connect_ports(led))
simulation.setup_clock(m, clk, hperiod=5)
init = simulation.setup_reset(m, rst, m.make_reset(), period=100)
init.add(
Delay(1000000),
Systask('finish'),
)
return m
def run(filename='tmp.v', simtype='iverilog', outputfile=None):
if outputfile is None:
outputfile = os.path.splitext(os.path.basename(__file__))[0] + '.out'
memimg_name = 'memimg_' + outputfile
test = mkTest(memimg_name=memimg_name)
if filename is not None:
test.to_verilog(filename)
sim = simulation.Simulator(test, sim=simtype)
rslt = sim.run(outputfile=outputfile)
lines = rslt.splitlines()
if simtype == 'verilator' and lines[-1].startswith('-'):
rslt = '\n'.join(lines[:-1])
return rslt
if __name__ == '__main__':
rslt = run(filename='tmp.v')
print(rslt)
| true | true |
f72546b650f48e4a4338cf5ec715dc344a3257bd | 14,857 | py | Python | slack_sdk/socket_mode/aiohttp/__init__.py | jans-forks/python-slackclient | ff798cbe00ead477ce98efa8468cb2c1c99635f3 | [
"MIT"
] | null | null | null | slack_sdk/socket_mode/aiohttp/__init__.py | jans-forks/python-slackclient | ff798cbe00ead477ce98efa8468cb2c1c99635f3 | [
"MIT"
] | null | null | null | slack_sdk/socket_mode/aiohttp/__init__.py | jans-forks/python-slackclient | ff798cbe00ead477ce98efa8468cb2c1c99635f3 | [
"MIT"
] | null | null | null | """aiohttp based Socket Mode client
* https://api.slack.com/apis/connections/socket
* https://slack.dev/python-slack-sdk/socket-mode/
* https://pypi.org/project/aiohttp/
"""
import asyncio
import logging
import time
from asyncio import Future, Lock
from asyncio import Queue
from logging import Logger
from typing import Union, Optional, List, Callable, Awaitable
import aiohttp
from aiohttp import ClientWebSocketResponse, WSMessage, WSMsgType, ClientConnectionError
from slack_sdk.proxy_env_variable_loader import load_http_proxy_from_env
from slack_sdk.socket_mode.async_client import AsyncBaseSocketModeClient
from slack_sdk.socket_mode.async_listeners import (
AsyncWebSocketMessageListener,
AsyncSocketModeRequestListener,
)
from slack_sdk.socket_mode.request import SocketModeRequest
from slack_sdk.web.async_client import AsyncWebClient
class SocketModeClient(AsyncBaseSocketModeClient):
    """aiohttp-based Socket Mode client.

    Maintains a WebSocket connection to Slack's Socket Mode endpoint,
    enqueues incoming payloads for the message processor, and
    transparently reconnects when the session is closed or goes stale.
    """

    # Logger used for all diagnostics emitted by this client
    logger: Logger
    # Web API client used to issue new Socket Mode WebSocket URLs
    web_client: AsyncWebClient
    # App-level token used for establishing Socket Mode connections
    app_token: str
    # Most recently issued WebSocket URL (None until first connect)
    wss_uri: Optional[str]
    # Raw messages awaiting processing by the message processor task
    message_queue: Queue
    message_listeners: List[
        Union[
            AsyncWebSocketMessageListener,
            Callable[
                ["AsyncBaseSocketModeClient", dict, Optional[str]], Awaitable[None]
            ],
        ]
    ]
    socket_mode_request_listeners: List[
        Union[
            AsyncSocketModeRequestListener,
            Callable[["AsyncBaseSocketModeClient", SocketModeRequest], Awaitable[None]],
        ]
    ]
    # Background task receiving WebSocket frames (None until connected)
    message_receiver: Optional[Future]
    # Background task draining message_queue
    message_processor: Future
    proxy: Optional[str]
    # Interval (seconds) for the sdk-level ping-pong health check
    ping_interval: float
    trace_enabled: bool
    # Timestamp of the last matching pong; None when unknown/reset
    last_ping_pong_time: Optional[float]
    current_session: Optional[ClientWebSocketResponse]
    current_session_monitor: Optional[Future]
    # NOTE: this was annotated twice upstream; the duplicate is removed here
    auto_reconnect_enabled: bool
    default_auto_reconnect_enabled: bool
    closed: bool
    stale: bool
    # Serializes reconnect attempts so only one task re-establishes a session
    connect_operation_lock: Lock
    on_message_listeners: List[Callable[[WSMessage], Awaitable[None]]]
    on_error_listeners: List[Callable[[WSMessage], Awaitable[None]]]
    on_close_listeners: List[Callable[[WSMessage], Awaitable[None]]]
    def __init__(
        self,
        app_token: str,
        logger: Optional[Logger] = None,
        web_client: Optional[AsyncWebClient] = None,
        proxy: Optional[str] = None,
        auto_reconnect_enabled: bool = True,
        ping_interval: float = 5,
        trace_enabled: bool = False,
        on_message_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
        on_error_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
        on_close_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
    ):
        """Socket Mode client

        Args:
            app_token: App-level token
            logger: Custom logger
            web_client: Web API client
            auto_reconnect_enabled: True if automatic reconnection is enabled (default: True)
            ping_interval: interval for ping-pong with Slack servers (seconds)
            trace_enabled: True if more verbose logs to see what's happening under the hood
            proxy: the HTTP proxy URL
            on_message_listeners: listener functions for on_message
            on_error_listeners: listener functions for on_error
            on_close_listeners: listener functions for on_close
        """
        self.app_token = app_token
        self.logger = logger or logging.getLogger(__name__)
        self.web_client = web_client or AsyncWebClient()
        self.closed = False
        self.stale = False
        self.connect_operation_lock = Lock()
        self.proxy = proxy
        # Fall back to proxy environment variables when none was given
        if self.proxy is None or len(self.proxy.strip()) == 0:
            env_variable = load_http_proxy_from_env(self.logger)
            if env_variable is not None:
                self.proxy = env_variable
        self.default_auto_reconnect_enabled = auto_reconnect_enabled
        self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
        self.ping_interval = ping_interval
        self.trace_enabled = trace_enabled
        self.last_ping_pong_time = None
        self.wss_uri = None
        self.message_queue = Queue()
        self.message_listeners = []
        self.socket_mode_request_listeners = []
        self.current_session = None
        self.current_session_monitor = None
        # https://docs.aiohttp.org/en/stable/client_reference.html
        # Unless you are connecting to a large, unknown number of different servers
        # over the lifetime of your application,
        # it is suggested you use a single session for the lifetime of your application
        # to benefit from connection pooling.
        self.aiohttp_client_session = aiohttp.ClientSession()
        self.on_message_listeners = on_message_listeners or []
        self.on_error_listeners = on_error_listeners or []
        self.on_close_listeners = on_close_listeners or []
        self.message_receiver = None
        # Start draining the message queue immediately; the receiver task
        # itself is only started by connect().
        self.message_processor = asyncio.ensure_future(self.process_messages())
    async def monitor_current_session(self) -> None:
        """Background task: ping the server every ping_interval seconds and
        trigger a reconnect when the session is closed or looks stale
        (no matching pong for 4 ping intervals)."""
        try:
            while not self.closed:
                try:
                    await asyncio.sleep(self.ping_interval)
                    if self.current_session is not None:
                        t = time.time()
                        if self.last_ping_pong_time is None:
                            self.last_ping_pong_time = float(t)
                        # Tagged payload lets receive_messages() match the pong
                        await self.current_session.ping(f"sdk-ping-pong:{t}")
                        if self.auto_reconnect_enabled:
                            should_reconnect = False
                            if self.current_session is None or self.current_session.closed:
                                self.logger.info(
                                    "The session seems to be already closed. Reconnecting..."
                                )
                                should_reconnect = True
                            if self.last_ping_pong_time is not None:
                                disconnected_seconds = int(
                                    time.time() - self.last_ping_pong_time
                                )
                                if disconnected_seconds >= (self.ping_interval * 4):
                                    self.logger.info(
                                        "The connection seems to be stale. Reconnecting..."
                                        f" reason: disconnected for {disconnected_seconds}+ seconds)"
                                    )
                                    self.stale = True
                                    self.last_ping_pong_time = None
                                    should_reconnect = True
                            if should_reconnect is True or not await self.is_connected():
                                await self.connect_to_new_endpoint()
                except Exception as e:
                    # Keep the monitor alive on any error; it must outlive
                    # individual failed health checks.
                    self.logger.error(
                        "Failed to check the current session or reconnect to the server "
                        f"(error: {type(e).__name__}, message: {e})"
                    )
        except asyncio.CancelledError:
            if self.trace_enabled:
                self.logger.debug(
                    "The running monitor_current_session task is now cancelled"
                )
            raise
    async def receive_messages(self) -> None:
        """Background task: read WebSocket frames from the current session
        and dispatch them (enqueue TEXT payloads, answer PINGs, record
        matching PONGs, reconnect on CLOSE)."""
        try:
            consecutive_error_count = 0
            while not self.closed:
                try:
                    message: WSMessage = await self.current_session.receive()
                    if self.trace_enabled and self.logger.level <= logging.DEBUG:
                        # NOTE(review): 'type' shadows the builtin within
                        # this branch; kept as-is to preserve behavior.
                        type = WSMsgType(message.type)
                        message_type = type.name if type is not None else message.type
                        message_data = message.data
                        if isinstance(message_data, bytes):
                            message_data = message_data.decode("utf-8")
                        if len(message_data) > 0:
                            # To skip the empty message that Slack server-side often sends
                            self.logger.debug(
                                f"Received message (type: {message_type}, data: {message_data}, extra: {message.extra})"
                            )
                    if message is not None:
                        if message.type == WSMsgType.TEXT:
                            message_data = message.data
                            await self.enqueue_message(message_data)
                            for listener in self.on_message_listeners:
                                await listener(message)
                        elif message.type == WSMsgType.CLOSE:
                            if self.auto_reconnect_enabled:
                                self.logger.info(
                                    "Received CLOSE event. Reconnecting..."
                                )
                                await self.connect_to_new_endpoint()
                            for listener in self.on_close_listeners:
                                await listener(message)
                        elif message.type == WSMsgType.ERROR:
                            for listener in self.on_error_listeners:
                                await listener(message)
                        elif message.type == WSMsgType.CLOSED:
                            await asyncio.sleep(self.ping_interval)
                            continue
                        elif message.type == WSMsgType.PING:
                            await self.current_session.pong(message.data)
                            continue
                        elif message.type == WSMsgType.PONG:
                            if message.data is not None:
                                # Only pongs matching our tagged pings update
                                # the health-check timestamp.
                                str_message_data = message.data.decode("utf-8")
                                elements = str_message_data.split(":")
                                if (
                                    len(elements) == 2
                                    and elements[0] == "sdk-ping-pong"
                                ):
                                    try:
                                        self.last_ping_pong_time = float(elements[1])
                                    except Exception as e:
                                        self.logger.warning(
                                            f"Failed to parse the last_ping_pong_time value from {str_message_data}"
                                            f" - error : {e}"
                                        )
                            continue
                    consecutive_error_count = 0
                except Exception as e:
                    # Back off linearly with the error streak; connection
                    # errors instead wait one ping interval.
                    consecutive_error_count += 1
                    self.logger.error(
                        f"Failed to receive or enqueue a message: {type(e).__name__}, {e}"
                    )
                    if isinstance(e, ClientConnectionError):
                        await asyncio.sleep(self.ping_interval)
                    else:
                        await asyncio.sleep(consecutive_error_count)
        except asyncio.CancelledError:
            if self.trace_enabled:
                self.logger.debug("The running receive_messages task is now cancelled")
            raise
async def is_connected(self) -> bool:
return (
not self.closed
and not self.stale
and self.current_session is not None
and not self.current_session.closed
)
    async def connect(self):
        """Establish a (new) WebSocket session, start the monitor and
        receiver tasks, and close any previously active session."""
        old_session = None if self.current_session is None else self.current_session
        if self.wss_uri is None:
            self.wss_uri = await self.issue_new_wss_url()
        # autoping is disabled because we run our own tagged ping-pong
        # via monitor_current_session / receive_messages.
        self.current_session = await self.aiohttp_client_session.ws_connect(
            self.wss_uri,
            autoping=False,
            heartbeat=self.ping_interval,
            proxy=self.proxy,
        )
        self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
        self.stale = False
        self.logger.info("A new session has been established")
        # Restart the background tasks so they operate on the new session
        if self.current_session_monitor is not None:
            self.current_session_monitor.cancel()
        self.current_session_monitor = asyncio.ensure_future(
            self.monitor_current_session()
        )
        if self.message_receiver is not None:
            self.message_receiver.cancel()
        self.message_receiver = asyncio.ensure_future(self.receive_messages())
        if old_session is not None:
            await old_session.close()
            self.logger.info("The old session has been abandoned")
    async def disconnect(self):
        """Close the currently active WebSocket session, if any."""
        if self.current_session is not None:
            await self.current_session.close()
            self.logger.info("The session has been abandoned")
    async def send_message(self, message: str):
        """Send a text *message* over the current WebSocket session,
        retrying once under the connect lock if the underlying connection
        was replaced mid-send."""
        if self.logger.level <= logging.DEBUG:
            self.logger.debug(f"Sending a message: {message}")
        try:
            await self.current_session.send_str(message)
        except ConnectionError as e:
            # We rarely get this exception while replacing the underlying WebSocket connections.
            # We can do one more try here as the self.current_session should be ready now.
            if self.logger.level <= logging.DEBUG:
                self.logger.debug(
                    f"Failed to send a message (error: {e}, message: {message})"
                    " as the underlying connection was replaced. Retrying the same request only one time..."
                )
            # Although acquiring self.connect_operation_lock also for the first method call is the safest way,
            # we avoid synchronizing a lot for better performance. That's why we are doing a retry here.
            try:
                await self.connect_operation_lock.acquire()
                if await self.is_connected():
                    await self.current_session.send_str(message)
                else:
                    self.logger.warning(
                        "The current session is no longer active. Failed to send a message"
                    )
                    raise e
            finally:
                if self.connect_operation_lock.locked() is True:
                    self.connect_operation_lock.release()
    async def close(self):
        """Permanently shut the client down: disconnect, cancel all
        background tasks and release the shared aiohttp session."""
        self.closed = True
        self.auto_reconnect_enabled = False
        await self.disconnect()
        if self.message_processor is not None:
            self.message_processor.cancel()
        if self.current_session_monitor is not None:
            self.current_session_monitor.cancel()
        if self.message_receiver is not None:
            self.message_receiver.cancel()
        if self.aiohttp_client_session is not None:
            await self.aiohttp_client_session.close()
| 43.825959 | 120 | 0.568755 | import asyncio
import logging
import time
from asyncio import Future, Lock
from asyncio import Queue
from logging import Logger
from typing import Union, Optional, List, Callable, Awaitable
import aiohttp
from aiohttp import ClientWebSocketResponse, WSMessage, WSMsgType, ClientConnectionError
from slack_sdk.proxy_env_variable_loader import load_http_proxy_from_env
from slack_sdk.socket_mode.async_client import AsyncBaseSocketModeClient
from slack_sdk.socket_mode.async_listeners import (
AsyncWebSocketMessageListener,
AsyncSocketModeRequestListener,
)
from slack_sdk.socket_mode.request import SocketModeRequest
from slack_sdk.web.async_client import AsyncWebClient
class SocketModeClient(AsyncBaseSocketModeClient):
logger: Logger
web_client: AsyncWebClient
app_token: str
wss_uri: Optional[str]
auto_reconnect_enabled: bool
message_queue: Queue
message_listeners: List[
Union[
AsyncWebSocketMessageListener,
Callable[
["AsyncBaseSocketModeClient", dict, Optional[str]], Awaitable[None]
],
]
]
socket_mode_request_listeners: List[
Union[
AsyncSocketModeRequestListener,
Callable[["AsyncBaseSocketModeClient", SocketModeRequest], Awaitable[None]],
]
]
message_receiver: Optional[Future]
message_processor: Future
proxy: Optional[str]
ping_interval: float
trace_enabled: bool
last_ping_pong_time: Optional[float]
current_session: Optional[ClientWebSocketResponse]
current_session_monitor: Optional[Future]
auto_reconnect_enabled: bool
default_auto_reconnect_enabled: bool
closed: bool
stale: bool
connect_operation_lock: Lock
on_message_listeners: List[Callable[[WSMessage], Awaitable[None]]]
on_error_listeners: List[Callable[[WSMessage], Awaitable[None]]]
on_close_listeners: List[Callable[[WSMessage], Awaitable[None]]]
def __init__(
self,
app_token: str,
logger: Optional[Logger] = None,
web_client: Optional[AsyncWebClient] = None,
proxy: Optional[str] = None,
auto_reconnect_enabled: bool = True,
ping_interval: float = 5,
trace_enabled: bool = False,
on_message_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
on_error_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
on_close_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
):
self.app_token = app_token
self.logger = logger or logging.getLogger(__name__)
self.web_client = web_client or AsyncWebClient()
self.closed = False
self.stale = False
self.connect_operation_lock = Lock()
self.proxy = proxy
if self.proxy is None or len(self.proxy.strip()) == 0:
env_variable = load_http_proxy_from_env(self.logger)
if env_variable is not None:
self.proxy = env_variable
self.default_auto_reconnect_enabled = auto_reconnect_enabled
self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
self.ping_interval = ping_interval
self.trace_enabled = trace_enabled
self.last_ping_pong_time = None
self.wss_uri = None
self.message_queue = Queue()
self.message_listeners = []
self.socket_mode_request_listeners = []
self.current_session = None
self.current_session_monitor = None
self.aiohttp_client_session = aiohttp.ClientSession()
self.on_message_listeners = on_message_listeners or []
self.on_error_listeners = on_error_listeners or []
self.on_close_listeners = on_close_listeners or []
self.message_receiver = None
self.message_processor = asyncio.ensure_future(self.process_messages())
    async def monitor_current_session(self) -> None:
        """Background task: periodically ping the server and trigger a
        reconnect when the session is closed or looks stale.

        Runs every `ping_interval` seconds until close() sets self.closed.
        Task cancellation is re-raised after an optional trace log.
        """
        try:
            while not self.closed:
                try:
                    await asyncio.sleep(self.ping_interval)
                    if self.current_session is not None:
                        t = time.time()
                        # Seed the ping/pong timestamp so the staleness check
                        # below has a baseline before the first pong arrives.
                        if self.last_ping_pong_time is None:
                            self.last_ping_pong_time = float(t)
                        # The payload is echoed back in the PONG frame and
                        # parsed by receive_messages() to refresh
                        # last_ping_pong_time.
                        await self.current_session.ping(f"sdk-ping-pong:{t}")
                        if self.auto_reconnect_enabled:
                            should_reconnect = False
                            if self.current_session is None or self.current_session.closed:
                                self.logger.info(
                                    "The session seems to be already closed. Reconnecting..."
                                )
                                should_reconnect = True
                            # Declare the connection stale when no pong has
                            # been observed for four ping intervals.
                            if self.last_ping_pong_time is not None:
                                disconnected_seconds = int(
                                    time.time() - self.last_ping_pong_time
                                )
                                if disconnected_seconds >= (self.ping_interval * 4):
                                    self.logger.info(
                                        "The connection seems to be stale. Reconnecting..."
                                        f" reason: disconnected for {disconnected_seconds}+ seconds)"
                                    )
                                    self.stale = True
                                    self.last_ping_pong_time = None
                                    should_reconnect = True
                            if should_reconnect is True or not await self.is_connected():
                                await self.connect_to_new_endpoint()
                except Exception as e:
                    # Keep the monitor loop alive on any error; the next
                    # tick retries.
                    self.logger.error(
                        "Failed to check the current session or reconnect to the server "
                        f"(error: {type(e).__name__}, message: {e})"
                    )
        except asyncio.CancelledError:
            if self.trace_enabled:
                self.logger.debug(
                    "The running monitor_current_session task is now cancelled"
                )
            raise
async def receive_messages(self) -> None:
try:
consecutive_error_count = 0
while not self.closed:
try:
message: WSMessage = await self.current_session.receive()
if self.trace_enabled and self.logger.level <= logging.DEBUG:
type = WSMsgType(message.type)
message_type = type.name if type is not None else message.type
message_data = message.data
if isinstance(message_data, bytes):
message_data = message_data.decode("utf-8")
if len(message_data) > 0:
self.logger.debug(
f"Received message (type: {message_type}, data: {message_data}, extra: {message.extra})"
)
if message is not None:
if message.type == WSMsgType.TEXT:
message_data = message.data
await self.enqueue_message(message_data)
for listener in self.on_message_listeners:
await listener(message)
elif message.type == WSMsgType.CLOSE:
if self.auto_reconnect_enabled:
self.logger.info(
"Received CLOSE event. Reconnecting..."
)
await self.connect_to_new_endpoint()
for listener in self.on_close_listeners:
await listener(message)
elif message.type == WSMsgType.ERROR:
for listener in self.on_error_listeners:
await listener(message)
elif message.type == WSMsgType.CLOSED:
await asyncio.sleep(self.ping_interval)
continue
elif message.type == WSMsgType.PING:
await self.current_session.pong(message.data)
continue
elif message.type == WSMsgType.PONG:
if message.data is not None:
str_message_data = message.data.decode("utf-8")
elements = str_message_data.split(":")
if (
len(elements) == 2
and elements[0] == "sdk-ping-pong"
):
try:
self.last_ping_pong_time = float(elements[1])
except Exception as e:
self.logger.warning(
f"Failed to parse the last_ping_pong_time value from {str_message_data}"
f" - error : {e}"
)
continue
consecutive_error_count = 0
except Exception as e:
consecutive_error_count += 1
self.logger.error(
f"Failed to receive or enqueue a message: {type(e).__name__}, {e}"
)
if isinstance(e, ClientConnectionError):
await asyncio.sleep(self.ping_interval)
else:
await asyncio.sleep(consecutive_error_count)
except asyncio.CancelledError:
if self.trace_enabled:
self.logger.debug("The running receive_messages task is now cancelled")
raise
async def is_connected(self) -> bool:
return (
not self.closed
and not self.stale
and self.current_session is not None
and not self.current_session.closed
)
    async def connect(self):
        """Open a new WebSocket session and (re)start the background tasks.

        Reuses the cached self.wss_uri when present, otherwise issues a new
        one via issue_new_wss_url().  The previous session (if any) is closed
        only after the new one is established and its tasks are running, so
        the handover is clean.
        """
        old_session = None if self.current_session is None else self.current_session
        if self.wss_uri is None:
            self.wss_uri = await self.issue_new_wss_url()
        # autoping=False: application-level pings are sent by
        # monitor_current_session() instead of aiohttp's built-in ping.
        self.current_session = await self.aiohttp_client_session.ws_connect(
            self.wss_uri,
            autoping=False,
            heartbeat=self.ping_interval,
            proxy=self.proxy,
        )
        self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
        self.stale = False
        self.logger.info("A new session has been established")
        # Replace (not duplicate) the monitor and receiver tasks so exactly
        # one of each runs against the new session.
        if self.current_session_monitor is not None:
            self.current_session_monitor.cancel()
        self.current_session_monitor = asyncio.ensure_future(
            self.monitor_current_session()
        )
        if self.message_receiver is not None:
            self.message_receiver.cancel()
        self.message_receiver = asyncio.ensure_future(self.receive_messages())
        if old_session is not None:
            await old_session.close()
            self.logger.info("The old session has been abandoned")
async def disconnect(self):
if self.current_session is not None:
await self.current_session.close()
self.logger.info("The session has been abandoned")
async def send_message(self, message: str):
if self.logger.level <= logging.DEBUG:
self.logger.debug(f"Sending a message: {message}")
try:
await self.current_session.send_str(message)
except ConnectionError as e:
if self.logger.level <= logging.DEBUG:
self.logger.debug(
f"Failed to send a message (error: {e}, message: {message})"
" as the underlying connection was replaced. Retrying the same request only one time..."
)
try:
await self.connect_operation_lock.acquire()
if await self.is_connected():
await self.current_session.send_str(message)
else:
self.logger.warning(
"The current session is no longer active. Failed to send a message"
)
raise e
finally:
if self.connect_operation_lock.locked() is True:
self.connect_operation_lock.release()
async def close(self):
self.closed = True
self.auto_reconnect_enabled = False
await self.disconnect()
if self.message_processor is not None:
self.message_processor.cancel()
if self.current_session_monitor is not None:
self.current_session_monitor.cancel()
if self.message_receiver is not None:
self.message_receiver.cancel()
if self.aiohttp_client_session is not None:
await self.aiohttp_client_session.close()
| true | true |
f72547daa09175d778fa9ae8f7c4bef863150db5 | 840 | py | Python | Setup/PreRelease/setup.py | tushariyer/sit-rep | c7ff39182e78b6630922765289f6343a7f14e18a | [
"MIT"
] | null | null | null | Setup/PreRelease/setup.py | tushariyer/sit-rep | c7ff39182e78b6630922765289f6343a7f14e18a | [
"MIT"
] | null | null | null | Setup/PreRelease/setup.py | tushariyer/sit-rep | c7ff39182e78b6630922765289f6343a7f14e18a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from distutils.core import setup
long_desc = 'Licensed under the generic MIT License.\"sit-rep\" can either be downloaded from the ' \
'Releases page on GitHub and manually added to PATH or installed via \"pip\".'
version = ''
with open("Setup/version.txt", "r", encoding="utf-8") as fh:
version = fh.read()
fh.close()
setup(name='sit-rep-prerelease',
version=version,
py_modules=['sit-rep-prerelease'],
description='Sit Rep [Pre-Release]| The System Situation Report',
long_description=long_desc,
long_description_content_type='text/markdown',
author='Tushar Iyer',
author_email='',
url='https://github.com/tushariyer/sit-rep',
project_urls={
"Bug Tracker": "https://github.com/tushariyer/sit-rep/issues",
}
) | 33.6 | 101 | 0.65 |
from distutils.core import setup
long_desc = 'Licensed under the generic MIT License.\"sit-rep\" can either be downloaded from the ' \
'Releases page on GitHub and manually added to PATH or installed via \"pip\".'
version = ''
with open("Setup/version.txt", "r", encoding="utf-8") as fh:
version = fh.read()
fh.close()
setup(name='sit-rep-prerelease',
version=version,
py_modules=['sit-rep-prerelease'],
description='Sit Rep [Pre-Release]| The System Situation Report',
long_description=long_desc,
long_description_content_type='text/markdown',
author='Tushar Iyer',
author_email='',
url='https://github.com/tushariyer/sit-rep',
project_urls={
"Bug Tracker": "https://github.com/tushariyer/sit-rep/issues",
}
) | true | true |
f72547e17a96865f9a04a22b21403532cfa71d79 | 3,673 | py | Python | src/tools/vis_tracking_kittimots.py | gafaua/PolyTrack | 5a4b409732b9396be8271f5cba4ad426808d5af5 | [
"MIT"
] | 10 | 2021-11-07T04:25:08.000Z | 2022-03-25T03:33:21.000Z | src/tools/vis_tracking_kittimots.py | gafaua/PolyTrack | 5a4b409732b9396be8271f5cba4ad426808d5af5 | [
"MIT"
] | null | null | null | src/tools/vis_tracking_kittimots.py | gafaua/PolyTrack | 5a4b409732b9396be8271f5cba4ad426808d5af5 | [
"MIT"
] | 6 | 2021-11-03T21:27:06.000Z | 2022-03-27T17:27:40.000Z | import numpy as np
import cv2
import os
import glob
import sys
from collections import defaultdict
from pathlib import Path
import pycocotools.mask as rletools
from PIL import Image, ImageDraw
import matplotlib.pyplot as plt
# Root of the KITTI MOTS dataset checkout, relative to this script.
DATA_PATH = '../../data/KITTIMOTS/'
# Per-sequence training images live under train/<seq>/<frame:06d>.png.
IMG_PATH = DATA_PATH + 'train/'
# Toggle for writing an output video (only referenced by disabled code).
SAVE_VIDEO = False
# Flag for "annotations are ground truth"; not used in the visible code.
IS_GT = True
# Object categories of interest; GT lines with other class ids are skipped.
cats = ['Car', 'Pedestrian']
# Category name -> zero-based id (Car: 0, Pedestrian: 1).
cat_ids = {cat: i for i, cat in enumerate(cats)}
# Drawing colors indexed by category id (OpenCV uses BGR channel order).
COLORS = [(255, 0, 255), (122, 122, 255), (255, 0, 0)]
def draw_bbox(img, bboxes, c=(255, 0, 255)):
    """Draw one rectangle plus a track-id label per bbox onto `img` in place.

    Each bbox is [x1, y1, x2, y2, track_id, class_id, ...]; the color is
    taken from COLORS indexed by class_id, so the `c` argument is currently
    unused (kept for call-site compatibility).
    """
    for x1, y1, x2, y2, track_id, class_id in (b[:6] for b in bboxes):
        color = COLORS[int(class_id)]
        cv2.rectangle(img, (int(x1), int(y1)), (int(x2), int(y2)),
                      color, 2, lineType=cv2.LINE_AA)
        # Put the track id at the box center.
        cx = (x1 + x2) / 2
        cy = (y1 + y2) / 2
        cv2.putText(img, f'{int(track_id)}', (int(cx), int(cy)),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                    color, thickness=1, lineType=cv2.LINE_AA)
if __name__ == '__main__':
    # Only sequence 0001 is visualized; extend this list (or use
    # os.listdir(IMG_PATH)) to process more sequences.
    seqs = ['0001']
    for seq in sorted(seqs):
        print('seq', seq)
        if '.DS_Store' in seq:
            continue
        gt_file = DATA_PATH + 'instances_txt/' + seq + '.txt'
        # Each ground-truth line: frame object_id class_id height width rle
        with open(gt_file, 'r') as f:
            lines = [l.split() for l in f.readlines()]
        # Group RLE-encoded instance masks by frame index.
        # FIX: the old frame_count bookkeeping re-initialized a frame's list
        # every time the frame id changed, which silently dropped earlier
        # masks if frames were not contiguous in the file; defaultdict
        # accumulates correctly regardless of line order.
        im_to_inst = defaultdict(list)
        for l in lines:
            frame, oid, cid, h, w, rle = l
            # Keep only Car (1) and Pedestrian (2); skip every other class id.
            if int(cid) - 1 not in cat_ids.values():
                continue
            im_to_inst[int(frame)].append(rle)
        for i in im_to_inst:
            img = Image.open(os.path.join(IMG_PATH, '{}/{:06d}.png'.format(seq, i))).convert('RGBA')
            # h/w come from the last parsed GT line; assumes all frames of a
            # sequence share one resolution — TODO confirm for the dataset.
            size = [int(h), int(w)]
            # FIX: dtype=bool instead of the np.float alias removed in
            # NumPy >= 1.24; the array only ever holds a logical mask.
            merged = np.zeros(size, dtype=bool)
            print(f'Frame {i}: {len(im_to_inst[i])} masks')
            for mask in im_to_inst[i]:
                m = {'size': size, 'counts': mask.encode(encoding='UTF-8')}
                binary_mask = rletools.decode(m)
                merged = np.logical_or(merged, binary_mask)
            # 128/255 alpha -> the overlay is painted semi-transparently.
            merged_mask = Image.fromarray(np.uint8(merged * 128), mode='L')
            color = Image.new('RGBA', (size[1], size[0]), (228, 150, 150, 255))
            image = Image.composite(color, img, merged_mask)
            image.save('../../data/KITTIMOTS/examples/{:06d}.png'.format(i))
| 30.865546 | 94 | 0.572829 | import numpy as np
import cv2
import os
import glob
import sys
from collections import defaultdict
from pathlib import Path
import pycocotools.mask as rletools
from PIL import Image, ImageDraw
import matplotlib.pyplot as plt
DATA_PATH = '../../data/KITTIMOTS/'
IMG_PATH = DATA_PATH + 'train/'
SAVE_VIDEO = False
IS_GT = True
cats = ['Car', 'Pedestrian']
cat_ids = {cat: i for i, cat in enumerate(cats)}
COLORS = [(255, 0, 255), (122, 122, 255), (255, 0, 0)]
def draw_bbox(img, bboxes, c=(255, 0, 255)):
for bbox in bboxes:
color = COLORS[int(bbox[5])]
cv2.rectangle(img, (int(bbox[0]), int(bbox[1])),
(int(bbox[2]), int(bbox[3])),
color, 2, lineType=cv2.LINE_AA)
ct = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2]
txt = '{}'.format(int(bbox[4]))
cv2.putText(img, txt, (int(ct[0]), int(ct[1])),
cv2.FONT_HERSHEY_SIMPLEX, 0.5,
color, thickness=1, lineType=cv2.LINE_AA)
if __name__ == '__main__':
seqs = ['0001']
for seq in sorted(seqs):
print('seq', seq)
if '.DS_Store' in seq:
continue
gt_file = DATA_PATH + 'instances_txt/' + seq + '.txt'
with open(gt_file, 'r') as f:
lines = f.readlines()
lines = [l.split() for l in lines]
frame_count = -1
im_to_inst = {}
for l in lines:
frame, oid, cid, h, w, rle = l
if int(cid) - 1 not in cat_ids.values():
continue
frame = int(frame)
if frame_count != frame:
frame_count = frame
im_to_inst[frame] = []
im_to_inst[frame].append(rle)
for i in im_to_inst:
img = Image.open(os.path.join(IMG_PATH, '{}/{:06d}.png'.format(seq, i))).convert('RGBA')
size = [int(h), int(w)]
merged = np.zeros(size, dtype=np.float)
print(f'Frame {i}: {len(im_to_inst[i])} masks')
for mask in im_to_inst[i]:
m = {'size': size, 'counts': mask.encode(encoding='UTF-8')}
binary_mask = rletools.decode(m)
merged = np.logical_or(merged, binary_mask)
merged_mask = Image.fromarray(np.uint8(merged * 128), mode='L')
color = Image.new('RGBA', (size[1], size[0]), (228, 150, 150, 255))
image = Image.composite(color, img, merged_mask)
image.save('../../data/KITTIMOTS/examples/{:06d}.png'.format(i))
| true | true |
f7254891c728997635a95c7943f2f2e7d783a797 | 14,518 | py | Python | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/syncrep/test_basic.py | rodel-talampas/gpdb | 9c955e350334abbd922102f289f782697eb52069 | [
"PostgreSQL",
"Apache-2.0"
] | 9 | 2018-04-20T03:31:01.000Z | 2020-05-13T14:10:53.000Z | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/syncrep/test_basic.py | rodel-talampas/gpdb | 9c955e350334abbd922102f289f782697eb52069 | [
"PostgreSQL",
"Apache-2.0"
] | 36 | 2017-09-21T09:12:27.000Z | 2020-06-17T16:40:48.000Z | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/syncrep/test_basic.py | rodel-talampas/gpdb | 9c955e350334abbd922102f289f782697eb52069 | [
"PostgreSQL",
"Apache-2.0"
] | 32 | 2017-08-31T12:50:52.000Z | 2022-03-01T07:34:53.000Z | #!/usr/bin/env python
"""
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from tinctest import logger
from mpp.lib.PSQL import PSQL
from mpp.models import MPPTestCase
from gppylib.db import dbconn
from mpp.gpdb.tests.storage.walrepl.run import StandbyRunMixin
from mpp.gpdb.tests.storage import walrepl
from mpp.gpdb.tests.storage.walrepl.lib.walcomm import *
from mpp.gpdb.tests.storage.walrepl.lib import PgControlData
from gppylib.commands.base import Command
import os
import re
import select
import signal
import subprocess
import time
import sys
class syncrep(StandbyRunMixin, MPPTestCase):
    """Synchronous-replication (walrep) regression tests.

    Exercises backend behavior while the standby's WAL receiver / the
    master's WAL sender are in various states: receiver suspended before
    sending its ack, sender in catchup out of range, and sender in catchup
    within the range controlled by the repl_catchup_within_range GUC.
    """

    def generate_trigger_file(self, content):
        """Write `content` into the 'wal_rcv_test' trigger file in the
        standby datadir; the WAL receiver reads it when signalled with
        SIGUSR2 (see the callers below)."""
        filename = 'wal_rcv_test'
        self.assertTrue(content is not None)
        filepath = os.path.join(self.standby.datadir, filename)
        with open(filepath, 'wb') as f:
            f.write(content)

    def wait_stdout(self, proc, timeout):
        """Return True if `proc`'s stdout becomes readable within `timeout`
        seconds, i.e. the subprocess produced output and was not blocked."""
        rlist = [proc.stdout.fileno()]
        (rout, _, _) = select.select(rlist, [], [], timeout)
        return len(rout) > 0

    def set_guc(self, guc_name, guc_value):
        """Set a GUC cluster-wide via gpconfig and reload with gpstop -u."""
        logger.info('Configuring ' + guc_name +' ...')
        cmd = Command("gpconfig " + guc_name,
                      "gpconfig -c " + guc_name + " -v " + guc_value)
        cmd.run()
        self.assertEqual(cmd.get_results().rc, 0, str(cmd))

        logger.info('gpstop -u to reload config files...')
        cmd = Command("gpstop -u",
                      "gpstop -u")
        cmd.run()
        self.assertEqual(cmd.get_results().rc, 0, str(cmd))

    def test_syncrep(self):
        """A commit must block while the WAL receiver is suspended before
        sending its ack, and complete once the receiver resumes."""
        # 1. Initiate the Standby
        # 2. Once the WAL receiver starts, signal it to suspend post xlog flush
        #    but before sending the ack.
        # 3. Now execute a transaction and commit it. The backend is expected
        #    to be blocked.
        # 4. Resume the WALReceiver and see the transaction passed and its
        #    results are visible.

        # cleanup
        PSQL.run_sql_command('DROP table if exists foo')

        # 1. create standby and start
        res = self.standby.create()
        self.assertEqual(res, 0)
        res = self.standby.start()
        self.assertTrue(res.wasSuccessful())

        # wait for the walreceiver to start
        num_walsender = self.wait_for_walsender()
        self.assertEqual(num_walsender, 1)

        # 2. Once the WAL receiver starts, signal it to suspend post xlog flush
        # but before sending the ack.
        proc = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)
        stdout = proc.communicate()[0]
        search = "wal receiver process"
        for line in stdout.split('\n'):
            if (line.find(search) > 0):
                split_line = re.split(r'\s+', line.strip())
                break
        self.assertTrue(len(split_line) > 0)

        wal_rcv_pid = int(split_line[1])
        logger.info('Suspending WAL Receiver(' + str(wal_rcv_pid) +')...')
        self.generate_trigger_file('wait_before_send_ack')
        os.kill(wal_rcv_pid, signal.SIGUSR2)

        # 3. Now execute a transaction and commit it. The backend is expected
        # to be blocked.
        logger.info('Create table foo...')

        # we use subprocess since we expect it'll be blocked.
        proc = subprocess.Popen(['psql', '-c', 'create table foo (a int)'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        readable = self.wait_stdout(proc, 5.0)
        self.assertFalse(readable, 'psql did not block')

        # 4. Resume the WALReceiver and see the transaction passed and its
        # results are visible.
        logger.info('Resume the WAL Receiver...')
        self.generate_trigger_file('resume')
        os.kill(wal_rcv_pid, signal.SIGUSR2)
        readable = self.wait_stdout(proc, 5.0)
        self.assertTrue(readable, 'psql still blocks')
        proc.communicate()

        logger.info('No blocked backend found!')
        logger.info('Verifying if table exists ? ...')
        PSQL(sql_cmd='select * from foo').run(validateAfter=True)
        logger.info('Pass')

    def test_unblock_while_catchup_out_of_range(self):
        """
        This test verifies that a backend does NOT get blocked while the
        WAL sender is in catchup mode but out of the catchup range.
        """
        with WalClient("replication=true") as client:
            (sysid, tli, xpos) = client.identify_system()

            sql_startup = "SELECT count(*) FROM pg_stat_replication where state = 'startup'"
            with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                curs = dbconn.execSQL(conn, sql_startup)
                results = curs.fetchall()
                self.assertEqual(int(results[0][0]), 1, "No WAL sender in startup phase found")
                logger.info('WAL sender is alive and now is in startup phase...')

            # Generate enough xlog in WAL sender startup phase. None of the sql statements
            # should get blocked. If blocked we have some issue.
            # Checkpointing causes full page writes on updates/inserts. Hence helps
            # xlog generation.
            i = 0
            logger.info('Running a bunch of SQLs to generate enough xlog to maintain catchup phase...')
            while (i < 10):
                PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int); CHECKPOINT;')
                i = i + 1
            logger.info('Pass - Database does not block if WAL sender is alive and in startup phase')

            logger.info('Creating some xlog seg files to simulate catchup out-of-range..')
            i = 0
            while(i < 3):
                PSQL.run_sql_command('select pg_switch_xlog();select pg_switch_xlog();checkpoint;')
                i = i + 1

            xpos_ptr = XLogRecPtr.from_string(xpos)
            client.start_replication(xpos_ptr)

            while True:
                msg = client.receive(1000)

                if isinstance(msg, WalMessageData):
                    header = msg.header

                    # walsender must be still in catchup phase as a lot xlog needs to be sent
                    sql_catchup = "SELECT count(*) FROM pg_stat_replication where state = 'catchup'"
                    sql_table_present = "SELECT count(*) from pg_class where relname = 'foo'"
                    sql_bkd_count = ("SELECT count(*) from pg_stat_activity where waiting ='t' and waiting_reason = 'replication'")
                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1, "No Catchup WAL sender found")
                        logger.info('WAL sender is alive and now is in catchup phase...')

                    logger.info('In catchup phase, run some sql...')
                    PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int);'
                                         ,dbname='postgres')

                    # FIX: `i` still held its value (3) from the xlog loop
                    # above, so this verification loop used to run only twice
                    # instead of the intended five times.
                    i = 0
                    while (i < 5):
                        with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                            # verify if the previous backend is blocked
                            curs = dbconn.execSQL(conn, sql_bkd_count)
                            results = curs.fetchall()
                            if (int(results[0][0]) > 0):
                                self.assertTrue(0, "Previous backend was blocked ...")
                        i = i + 1
                    logger.info('Create table is NOT blocked...')

                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        # verify if WAL sender is still in catchup phase
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1,
                                         "WAL sender catchup phase over before verification")
                        logger.info('WAL sender is alive and still in catchup phase...')

                    with dbconn.connect(dbconn.DbURL(dbname='postgres'), utility=True) as conn:
                        # verify the table created during catchup is visible
                        curs = dbconn.execSQL(conn, sql_table_present)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1, "Table foo not found")

                    # sync replication needs a reply otherwise backend blocks
                    client.reply(header.walEnd, header.walEnd, header.walEnd)

                    # success, should get some 'w' message
                    logger.info ("Pass - Database does not block if WAL sender is alive and "
                                 "the catchup is out-of-range")
                    break
                elif isinstance(msg, WalMessageNoData):
                    # could be timeout
                    client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
                else:
                    # FIX: StandardError exists only in Python 2 (NameError on
                    # Python 3); raise the built-in Exception instead.
                    raise Exception(msg.errmsg)

    def test_block_while_catchup_within_range(self):
        """
        This test verifies that a backend DOES get blocked while the
        WAL sender is in catchup mode within the configured range.
        """
        with WalClient("replication=true") as client:
            (sysid, tli, xpos) = client.identify_system()

            # Set the guc to > 1 so that we can verify the test
            # using less amount of xlog
            self.set_guc('repl_catchup_within_range', '3')

            # Generate enough xlog in WAL sender startup phase. None of the sql statements
            # should get blocked. If blocked we have some issue.
            # Checkpointing causes full page writes on updates/inserts. Hence helps
            # xlog generation.
            i = 0
            logger.info('Running a bunch of SQLs to generate enough xlog to maintain catchup phase...')
            while (i < 10):
                PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int); CHECKPOINT;')
                i = i + 1

            xpos_ptr = XLogRecPtr.from_string(xpos)
            client.start_replication(xpos_ptr)

            while True:
                msg = client.receive(1000)

                if isinstance(msg, WalMessageData):
                    header = msg.header

                    # walsender must be still in catchup phase as a lot xlog needs to be sent
                    sql_catchup = "SELECT count(*) FROM pg_stat_replication where state = 'catchup'"
                    sql_table_present = "SELECT count(*) from pg_class where relname = 'foo'"
                    sql_bkd_count = ("SELECT count(*) from pg_stat_activity where waiting ='t' and waiting_reason = 'replication'")
                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1, "No Catchup WAL sender found")
                        logger.info('WAL sender is alive and now is in catchup phase...')

                    logger.info('In catchup phase, create table...')
                    subprocess.Popen(['psql', '-c',
                                      'DROP TABLE IF EXISTS raghav; create table raghav (a int);'],
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)

                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        # verify if WAL sender is still in catchup phase
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1,
                                         "WAL sender catchup phase over before verification")
                        logger.info('WAL sender is alive, still in catchup phase ..')

                    # FIX: `i` was left at 10 by the xlog-generation loop
                    # above, so this loop never executed and the
                    # blocked-backend verification was silently skipped.
                    i = 0
                    while (i < 5):
                        with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                            # verify if the previous backend is blocked
                            curs = dbconn.execSQL(conn, sql_bkd_count)
                            results = curs.fetchall()
                            if (int(results[0][0]) == 1):
                                break
                            if (i == 4):
                                self.assertTrue(0, "Previous backend not blocked ...")
                        i = i + 1
                    logger.info('But, create table is blocked...')

                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        # verify if WAL sender is still in catchup phase
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1,
                                         "WAL sender catchup phase over before verification")
                        logger.info('WAL sender is alive, in catchup phase and backend is blocked...')

                    # sync replication needs a reply otherwise backend blocks
                    client.reply(header.walEnd, header.walEnd, header.walEnd)

                    # success, should get some 'w' message
                    logger.info ("Pass - Backends block if WAL sender is alive and the catchup is within-range")
                    break
                elif isinstance(msg, WalMessageNoData):
                    # could be timeout
                    client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
                else:
                    # FIX: StandardError exists only in Python 2 (NameError on
                    # Python 3); raise the built-in Exception instead.
                    raise Exception(msg.errmsg)

        logger.info ("Pass")
        # Restore the GUC to its default value.
        self.set_guc('repl_catchup_within_range', '1')
| 44.533742 | 131 | 0.564403 |
from tinctest import logger
from mpp.lib.PSQL import PSQL
from mpp.models import MPPTestCase
from gppylib.db import dbconn
from mpp.gpdb.tests.storage.walrepl.run import StandbyRunMixin
from mpp.gpdb.tests.storage import walrepl
from mpp.gpdb.tests.storage.walrepl.lib.walcomm import *
from mpp.gpdb.tests.storage.walrepl.lib import PgControlData
from gppylib.commands.base import Command
import os
import re
import select
import signal
import subprocess
import time
import sys
class syncrep(StandbyRunMixin, MPPTestCase):
def generate_trigger_file(self, content):
filename = 'wal_rcv_test'
self.assertTrue(content is not None)
filepath = os.path.join(self.standby.datadir, filename)
with open(filepath, 'wb') as f:
f.write(content)
def wait_stdout(self, proc, timeout):
rlist = [proc.stdout.fileno()]
(rout, _, _) = select.select(rlist, [], [], timeout)
return len(rout) > 0
def set_guc(self, guc_name, guc_value):
logger.info('Configuring ' + guc_name +' ...')
cmd = Command("gpconfig " + guc_name,
"gpconfig -c " + guc_name + " -v " + guc_value)
cmd.run()
self.assertEqual(cmd.get_results().rc, 0, str(cmd))
logger.info('gpstop -u to reload config files...')
cmd = Command("gpstop -u",
"gpstop -u")
cmd.run()
self.assertEqual(cmd.get_results().rc, 0, str(cmd))
def test_syncrep(self):
PSQL.run_sql_command('DROP table if exists foo')
res = self.standby.create()
self.assertEqual(res, 0)
res = self.standby.start()
self.assertTrue(res.wasSuccessful())
num_walsender = self.wait_for_walsender()
self.assertEqual(num_walsender, 1)
proc = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
search = "wal receiver process"
for line in stdout.split('\n'):
if (line.find(search) > 0):
split_line = re.split(r'\s+', line.strip())
break
self.assertTrue(len(split_line) > 0)
wal_rcv_pid = int(split_line[1])
logger.info('Suspending WAL Receiver(' + str(wal_rcv_pid) +')...')
self.generate_trigger_file('wait_before_send_ack')
os.kill(wal_rcv_pid, signal.SIGUSR2)
logger.info('Create table foo...')
proc = subprocess.Popen(['psql', '-c', 'create table foo (a int)'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
readable = self.wait_stdout(proc, 5.0)
self.assertFalse(readable, 'psql did not block')
# 4. Resume the WALReceiver and see the transaction passed and its
# results are visible.
logger.info('Resume the WAL Receiver...')
self.generate_trigger_file('resume')
os.kill(wal_rcv_pid, signal.SIGUSR2)
readable = self.wait_stdout(proc, 5.0)
self.assertTrue(readable, 'psql still blocks')
proc.communicate()
logger.info('No blocked backend found!')
logger.info('Verifying if table exists ? ...')
PSQL(sql_cmd='select * from foo').run(validateAfter=True)
logger.info('Pass')
def test_unblock_while_catchup_out_of_range(self):
with WalClient("replication=true") as client:
(sysid, tli, xpos) = client.identify_system()
sql_startup = "SELECT count(*) FROM pg_stat_replication where state = 'startup'"
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
curs = dbconn.execSQL(conn, sql_startup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1, "No WAL sender in startup phase found")
logger.info('WAL sender is alive and now is in startup phase...')
# Generate enough xlog in WAL sender startup phase. None of the sql statements
# should get blocked. If blocked we have some issue.
# Checkpointing causes full page writes on updates/inserts. Hence helps
# xlog generation.
i = 0
logger.info('Running a bunch of SQLs to generate enough xlog to maintain catchup phase...')
while (i < 10):
PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int); CHECKPOINT;')
i = i + 1
logger.info('Pass - Database does not block if WAL sender is alive and in startup phase')
logger.info('Creating some xlog seg files to simulate catchup out-of-range..')
i = 0
while(i < 3):
PSQL.run_sql_command('select pg_switch_xlog();select pg_switch_xlog();checkpoint;')
i = i + 1
xpos_ptr = XLogRecPtr.from_string(xpos)
client.start_replication(xpos_ptr)
while True:
msg = client.receive(1000)
if isinstance(msg, WalMessageData):
header = msg.header
# walsender must be still in catchup phase as a lot xlog needs to be sent
sql_catchup = "SELECT count(*) FROM pg_stat_replication where state = 'catchup'"
sql_table_present = "SELECT count(*) from pg_class where relname = 'foo'"
sql_bkd_count = ("SELECT count(*) from pg_stat_activity where waiting ='t' and waiting_reason = 'replication'")
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1, "No Catchup WAL sender found")
logger.info('WAL sender is alive and now is in catchup phase...')
logger.info('In catchup phase, run some sql...')
PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int);'
,dbname='postgres')
while (i < 5):
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if the previous backend is blocked
curs = dbconn.execSQL(conn, sql_bkd_count)
results = curs.fetchall()
if (int(results[0][0]) > 0):
self.assertTrue(0, "Previous backend was blocked ...")
i = i + 1
logger.info('Create table is NOT blocked...')
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if WAL sender is still in catchup phase
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1,
"WAL sender catchup phase over before verification")
logger.info('WAL sender is alive and still in catchup phase...')
with dbconn.connect(dbconn.DbURL(dbname='postgres'), utility=True) as conn:
# verify if WAL sender is still in catchup phase
curs = dbconn.execSQL(conn, sql_table_present)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1, "Table foo not found")
# sync replication needs a reply otherwise backend blocks
client.reply(header.walEnd, header.walEnd, header.walEnd)
# success, should get some 'w' message
logger.info ("Pass - Database does not block if WAL sender is alive and "
"the catchup is out-of-range")
break
elif isinstance(msg, WalMessageNoData):
# could be timeout
client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
else:
raise StandardError(msg.errmsg)
def test_block_while_catchup_within_range(self):
with WalClient("replication=true") as client:
(sysid, tli, xpos) = client.identify_system()
# Set the guc to > 1 so that we can verify the test
# using less amount of xlog
self.set_guc('repl_catchup_within_range', '3')
# Generate enough xlog in WAL sender startup phase. None of the sql statements
# should get blocked. If blocked we have some issue.
# Checkpointing causes full page writes on updates/inserts. Hence helps
# xlog generation.
i = 0
logger.info('Running a bunch of SQLs to generate enough xlog to maintain catchup phase...')
while (i < 10):
PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int); CHECKPOINT;')
i = i + 1
xpos_ptr = XLogRecPtr.from_string(xpos)
client.start_replication(xpos_ptr)
while True:
msg = client.receive(1000)
if isinstance(msg, WalMessageData):
header = msg.header
# walsender must be still in catchup phase as a lot xlog needs to be sent
sql_catchup = "SELECT count(*) FROM pg_stat_replication where state = 'catchup'"
sql_table_present = "SELECT count(*) from pg_class where relname = 'foo'"
sql_bkd_count = ("SELECT count(*) from pg_stat_activity where waiting ='t' and waiting_reason = 'replication'")
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1, "No Catchup WAL sender found")
logger.info('WAL sender is alive and now is in catchup phase...')
logger.info('In catchup phase, create table...')
subprocess.Popen(['psql', '-c',
'DROP TABLE IF EXISTS raghav; create table raghav (a int);'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if WAL sender is still in catchup phase
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1,
"WAL sender catchup phase over before verification")
logger.info('WAL sender is alive, still in catchup phase ..')
while (i < 5):
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if the previous backend is blocked
curs = dbconn.execSQL(conn, sql_bkd_count)
results = curs.fetchall()
if (int(results[0][0]) == 1):
break;
if (i == 4):
self.assertTrue(0, "Previous backend not blocked ...")
i = i + 1
logger.info('But, create table is blocked...')
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if WAL sender is still in catchup phase
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1,
"WAL sender catchup phase over before verification")
logger.info('WAL sender is alive, in catchup phase and backend is blocked...')
# sync replication needs a reply otherwise backend blocks
client.reply(header.walEnd, header.walEnd, header.walEnd)
# success, should get some 'w' message
logger.info ("Pass - Backends block if WAL sender is alive and the catchup is within-range")
break
elif isinstance(msg, WalMessageNoData):
# could be timeout
client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
else:
raise StandardError(msg.errmsg)
logger.info ("Pass")
self.set_guc('repl_catchup_within_range', '1')
| true | true |
f7254a9efae6931fc674550384bd79222176447d | 3,222 | py | Python | commands/FBAutoLayoutCommands.py | zddd/chisel | 7782bdde3062e15ccbdc5f617aa3a8f096b6751b | [
"MIT"
] | 1 | 2020-03-04T20:24:33.000Z | 2020-03-04T20:24:33.000Z | commands/FBAutoLayoutCommands.py | zddd/chisel | 7782bdde3062e15ccbdc5f617aa3a8f096b6751b | [
"MIT"
] | null | null | null | commands/FBAutoLayoutCommands.py | zddd/chisel | 7782bdde3062e15ccbdc5f617aa3a8f096b6751b | [
"MIT"
] | null | null | null | #!/usr/bin/python
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import lldb
import fblldbbase as fb
import fblldbviewhelpers as viewHelpers
def lldbcommands():
return [
FBPrintAutolayoutTrace(),
FBAutolayoutBorderAmbiguous(),
FBAutolayoutUnborderAmbiguous(),
]
class FBPrintAutolayoutTrace(fb.FBCommand):
def name(self):
return 'paltrace'
def description(self):
return "Print the Auto Layout trace for the given view. Defaults to the key window."
def args(self):
return [ fb.FBCommandArgument(arg='view', type='UIView *', help='The view to print the Auto Layout trace for.', default='(id)[[UIApplication sharedApplication] keyWindow]') ]
def run(self, arguments, options):
view = fb.evaluateInputExpression(arguments[0])
opt = fb.evaluateBooleanExpression('[UIView instancesRespondToSelector:@selector(_autolayoutTraceRecursively:)]')
traceCall = '_autolayoutTraceRecursively:1' if opt else '_autolayoutTrace'
print(fb.describeObject('[{} {}]'.format(view, traceCall)))
def setBorderOnAmbiguousViewRecursive(view, width, color):
if not fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[UIView class]]' % view):
return
isAmbiguous = fb.evaluateBooleanExpression('(BOOL)[%s hasAmbiguousLayout]' % view)
if isAmbiguous:
layer = viewHelpers.convertToLayer(view)
fb.evaluateEffect('[%s setBorderWidth:(CGFloat)%s]' % (layer, width))
fb.evaluateEffect('[%s setBorderColor:(CGColorRef)[(id)[UIColor %sColor] CGColor]]' % (layer, color))
subviews = fb.evaluateExpression('(id)[%s subviews]' % view)
subviewsCount = int(fb.evaluateExpression('(int)[(id)%s count]' % subviews))
if subviewsCount > 0:
for i in range(0, subviewsCount):
subview = fb.evaluateExpression('(id)[%s objectAtIndex:%i]' % (subviews, i))
setBorderOnAmbiguousViewRecursive(subview, width, color)
class FBAutolayoutBorderAmbiguous(fb.FBCommand):
def name(self):
return 'alamborder'
def description(self):
return "Put a border around views with an ambiguous layout"
def options(self):
return [
fb.FBCommandArgument(short='-c', long='--color', arg='color', type='string', default='red', help='A color name such as \'red\', \'green\', \'magenta\', etc.'),
fb.FBCommandArgument(short='-w', long='--width', arg='width', type='CGFloat', default=2.0, help='Desired width of border.')
]
def run(self, arguments, options):
keyWindow = fb.evaluateExpression('(id)[[UIApplication sharedApplication] keyWindow]')
setBorderOnAmbiguousViewRecursive(keyWindow, options.width, options.color)
lldb.debugger.HandleCommand('caflush')
class FBAutolayoutUnborderAmbiguous(fb.FBCommand):
def name(self):
return 'alamunborder'
def description(self):
return "Removes the border around views with an ambiguous layout"
def run(self, arguments, options):
keyWindow = fb.evaluateExpression('(id)[[UIApplication sharedApplication] keyWindow]')
setBorderOnAmbiguousViewRecursive(keyWindow, 0, "red")
lldb.debugger.HandleCommand('caflush')
| 37.905882 | 178 | 0.724705 |
import lldb
import fblldbbase as fb
import fblldbviewhelpers as viewHelpers
def lldbcommands():
return [
FBPrintAutolayoutTrace(),
FBAutolayoutBorderAmbiguous(),
FBAutolayoutUnborderAmbiguous(),
]
class FBPrintAutolayoutTrace(fb.FBCommand):
def name(self):
return 'paltrace'
def description(self):
return "Print the Auto Layout trace for the given view. Defaults to the key window."
def args(self):
return [ fb.FBCommandArgument(arg='view', type='UIView *', help='The view to print the Auto Layout trace for.', default='(id)[[UIApplication sharedApplication] keyWindow]') ]
def run(self, arguments, options):
view = fb.evaluateInputExpression(arguments[0])
opt = fb.evaluateBooleanExpression('[UIView instancesRespondToSelector:@selector(_autolayoutTraceRecursively:)]')
traceCall = '_autolayoutTraceRecursively:1' if opt else '_autolayoutTrace'
print(fb.describeObject('[{} {}]'.format(view, traceCall)))
def setBorderOnAmbiguousViewRecursive(view, width, color):
if not fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[UIView class]]' % view):
return
isAmbiguous = fb.evaluateBooleanExpression('(BOOL)[%s hasAmbiguousLayout]' % view)
if isAmbiguous:
layer = viewHelpers.convertToLayer(view)
fb.evaluateEffect('[%s setBorderWidth:(CGFloat)%s]' % (layer, width))
fb.evaluateEffect('[%s setBorderColor:(CGColorRef)[(id)[UIColor %sColor] CGColor]]' % (layer, color))
subviews = fb.evaluateExpression('(id)[%s subviews]' % view)
subviewsCount = int(fb.evaluateExpression('(int)[(id)%s count]' % subviews))
if subviewsCount > 0:
for i in range(0, subviewsCount):
subview = fb.evaluateExpression('(id)[%s objectAtIndex:%i]' % (subviews, i))
setBorderOnAmbiguousViewRecursive(subview, width, color)
class FBAutolayoutBorderAmbiguous(fb.FBCommand):
def name(self):
return 'alamborder'
def description(self):
return "Put a border around views with an ambiguous layout"
def options(self):
return [
fb.FBCommandArgument(short='-c', long='--color', arg='color', type='string', default='red', help='A color name such as \'red\', \'green\', \'magenta\', etc.'),
fb.FBCommandArgument(short='-w', long='--width', arg='width', type='CGFloat', default=2.0, help='Desired width of border.')
]
def run(self, arguments, options):
keyWindow = fb.evaluateExpression('(id)[[UIApplication sharedApplication] keyWindow]')
setBorderOnAmbiguousViewRecursive(keyWindow, options.width, options.color)
lldb.debugger.HandleCommand('caflush')
class FBAutolayoutUnborderAmbiguous(fb.FBCommand):
def name(self):
return 'alamunborder'
def description(self):
return "Removes the border around views with an ambiguous layout"
def run(self, arguments, options):
keyWindow = fb.evaluateExpression('(id)[[UIApplication sharedApplication] keyWindow]')
setBorderOnAmbiguousViewRecursive(keyWindow, 0, "red")
lldb.debugger.HandleCommand('caflush')
| true | true |
f7254bd8d5c655e025a0e95e3c6aac92193bfd17 | 658 | py | Python | gans/datasets/abstract_dataset.py | tlatkowski/gans-2.0 | 974efc5bbcea39c0a7dec9405ba4514ada6dc39c | [
"MIT"
] | 78 | 2019-09-25T15:09:18.000Z | 2022-02-09T09:56:15.000Z | gans/datasets/abstract_dataset.py | tlatkowski/gans-2.0 | 974efc5bbcea39c0a7dec9405ba4514ada6dc39c | [
"MIT"
] | 23 | 2019-10-09T21:24:39.000Z | 2022-03-12T00:00:53.000Z | gans/datasets/abstract_dataset.py | tlatkowski/gans-2.0 | 974efc5bbcea39c0a7dec9405ba4514ada6dc39c | [
"MIT"
] | 18 | 2020-01-24T13:13:57.000Z | 2022-02-15T18:58:12.000Z | import abc
from abc import abstractmethod
class Dataset(abc.ABC):
def __init__(
self,
input_params,
with_labels=False,
):
self.batch_size = input_params.batch_size
self.buffer_size = input_params.buffer_size
if with_labels:
self.train_dataset = self.load_data_with_labels()
else:
self.train_dataset = self.load_data()
@abstractmethod
def load_data(self):
raise NotImplementedError
@abstractmethod
def load_data_with_labels(self):
raise NotImplementedError
def __iter__(self):
return iter(self.train_dataset)
| 22.689655 | 61 | 0.641337 | import abc
from abc import abstractmethod
class Dataset(abc.ABC):
def __init__(
self,
input_params,
with_labels=False,
):
self.batch_size = input_params.batch_size
self.buffer_size = input_params.buffer_size
if with_labels:
self.train_dataset = self.load_data_with_labels()
else:
self.train_dataset = self.load_data()
@abstractmethod
def load_data(self):
raise NotImplementedError
@abstractmethod
def load_data_with_labels(self):
raise NotImplementedError
def __iter__(self):
return iter(self.train_dataset)
| true | true |
f7254bdd4bb068fb20e4ad809d0645054278cee9 | 5,620 | py | Python | SimpleLoggingServerToCsvFile.py | II43/SimpleLoggingServerToCsvFile | d3d50778041a5995e58b6a8f623519e3cb41a5ce | [
"MIT"
] | null | null | null | SimpleLoggingServerToCsvFile.py | II43/SimpleLoggingServerToCsvFile | d3d50778041a5995e58b6a8f623519e3cb41a5ce | [
"MIT"
] | null | null | null | SimpleLoggingServerToCsvFile.py | II43/SimpleLoggingServerToCsvFile | d3d50778041a5995e58b6a8f623519e3cb41a5ce | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
Simple HTTP server in Python for logging events to CSV file
Motivation: Use this CSV file later for data agregation and plotting
Inspired by: Very simple HTTP server in Python for logging requests
https://gist.github.com/mdonkers/63e115cc0c79b4f6b8b3a6b797e485c7
Usage::
./SimpleLoggingServerToCsvFile.py [<port>]
"""
#----------------------------------------------------------------------#
# Import #
#----------------------------------------------------------------------#
from http.server import BaseHTTPRequestHandler, HTTPServer
import logging
from datetime import datetime
import csv
from os import curdir, sep, path
from shutil import copyfile
#----------------------------------------------------------------------#
# Configuration #
#----------------------------------------------------------------------#
# Log file
LOG_FILE = r'events.log'
# Master key
MASTER_KEY = "jQw5xZVq9Kp4fm7hiZko"
# All the allowed keys
KEYS = ["q67idhrJ56oQj7IElukH",
MASTER_KEY]
#----------------------------------------------------------------------#
# Classes #
#----------------------------------------------------------------------#
class S(BaseHTTPRequestHandler):
def prepare_for_html_response(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
# datetime object containing current date and time
now = datetime.now()
print("now =", now)
# dd/mm/YY H:M:S
time_stamp = now.strftime("%d/%m/%Y %H:%M:%S")
print("date and time =", time_stamp)
logging.info("GET request,\nPath: %s\nHeaders:\n%s\n", str(self.path), str(self.headers))
self.prepare_for_html_response()
#self.wfile.write("<html><head><title>Title goes here.</title></head>")
#self.wfile.write("<body><p>This is a test.</p>")
#self.wfile.write("<p>You accessed path: %s</p>" % self.path)
#self.wfile.write("</body></html>")
# self.wfile.write("GET request for {}".format(self.path).encode('utf-8'))
# Return HTML,CSV or LOG file if requested
if self.path.endswith(".html") or self.path.endswith(".csv") or self.path.endswith(".log") or \
self.path.endswith(".js") or self.path.endswith(".css"):
f_path = curdir + sep + self.path
if not path.exists(f_path):
# Requested file doesn't exists
self.wfile.write("Request file does not exist!".encode('utf-8'))
else:
#Open the static HTML file requested and send it
f = open(f_path,'rb')
self.wfile.write(f.read())
f.close()
# Nothing more to do
return;
# Otherwise try to log the event for given key
received_key = str(self.path)[1:]
isKeyValid = False
for key in KEYS:
if key == received_key:
self.wfile.write("Valid key! Logging event to a output file!".encode('utf-8'))
isKeyValid = True
# If master key is received, logger file is replaced with new one
if received_key == MASTER_KEY:
method_to_log = 'w'
# Back the logger file
copyfile(LOG_FILE, LOG_FILE + ".backup")
else:
method_to_log = 'a'
# Logging an event to CSV
with open(LOG_FILE, method_to_log, newline='\n') as f:
writer = csv.writer(f)
if method_to_log == 'w':
writer.writerow(["Timestamp", "Key"])
writer.writerow([time_stamp, received_key])
if not isKeyValid:
# No valid key had been received
self.wfile.write("Unknown key! Nothing to do!".encode('utf-8'))
def do_POST(self):
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
logging.info("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s\n",
str(self.path), str(self.headers), post_data.decode('utf-8'))
self.prepare_for_html_response()
self.wfile.write("POST request for {}".format(self.path).encode('utf-8'))
#----------------------------------------------------------------------#
# Functions #
#----------------------------------------------------------------------#
def run(server_class=HTTPServer, handler_class=S, port=8080):
logging.basicConfig(level=logging.INFO)
server_address = ('', port)
httpd = server_class(server_address, handler_class)
logging.info('Starting httpd...\n')
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
logging.info('Stopping httpd...\n')
#----------------------------------------------------------------------#
# Main #
#----------------------------------------------------------------------#
if __name__ == '__main__':
from sys import argv
if len(argv) == 2:
run(port=int(argv[1]))
else:
run() | 37.972973 | 103 | 0.477224 |
from http.server import BaseHTTPRequestHandler, HTTPServer
import logging
from datetime import datetime
import csv
from os import curdir, sep, path
from shutil import copyfile
LOG_FILE = r'events.log'
MASTER_KEY = "jQw5xZVq9Kp4fm7hiZko"
KEYS = ["q67idhrJ56oQj7IElukH",
MASTER_KEY]
class S(BaseHTTPRequestHandler):
def prepare_for_html_response(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
now = datetime.now()
print("now =", now)
time_stamp = now.strftime("%d/%m/%Y %H:%M:%S")
print("date and time =", time_stamp)
logging.info("GET request,\nPath: %s\nHeaders:\n%s\n", str(self.path), str(self.headers))
self.prepare_for_html_response()
if self.path.endswith(".html") or self.path.endswith(".csv") or self.path.endswith(".log") or \
self.path.endswith(".js") or self.path.endswith(".css"):
f_path = curdir + sep + self.path
if not path.exists(f_path):
self.wfile.write("Request file does not exist!".encode('utf-8'))
else:
#Open the static HTML file requested and send it
f = open(f_path,'rb')
self.wfile.write(f.read())
f.close()
# Nothing more to do
return;
# Otherwise try to log the event for given key
received_key = str(self.path)[1:]
isKeyValid = False
for key in KEYS:
if key == received_key:
self.wfile.write("Valid key! Logging event to a output file!".encode('utf-8'))
isKeyValid = True
# If master key is received, logger file is replaced with new one
if received_key == MASTER_KEY:
method_to_log = 'w'
# Back the logger file
copyfile(LOG_FILE, LOG_FILE + ".backup")
else:
method_to_log = 'a'
# Logging an event to CSV
with open(LOG_FILE, method_to_log, newline='\n') as f:
writer = csv.writer(f)
if method_to_log == 'w':
writer.writerow(["Timestamp", "Key"])
writer.writerow([time_stamp, received_key])
if not isKeyValid:
# No valid key had been received
self.wfile.write("Unknown key! Nothing to do!".encode('utf-8'))
def do_POST(self):
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
logging.info("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s\n",
str(self.path), str(self.headers), post_data.decode('utf-8'))
self.prepare_for_html_response()
self.wfile.write("POST request for {}".format(self.path).encode('utf-8'))
#----------------------------------------------------------------------#
# Functions #
#----------------------------------------------------------------------#
def run(server_class=HTTPServer, handler_class=S, port=8080):
logging.basicConfig(level=logging.INFO)
server_address = ('', port)
httpd = server_class(server_address, handler_class)
logging.info('Starting httpd...\n')
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
logging.info('Stopping httpd...\n')
#----------------------------------------------------------------------#
# Main #
#----------------------------------------------------------------------#
if __name__ == '__main__':
from sys import argv
if len(argv) == 2:
run(port=int(argv[1]))
else:
run() | true | true |
f7254c126abf533a0ae20a41208a5dc83bf968ca | 712 | py | Python | src/schctest/pypacket_dissector/decoder.py | saguilarDevel/open_schc | ac7f2a84b6120964c8fdaabf9f5c8ca8ae39c289 | [
"MIT"
] | 21 | 2018-11-05T06:48:32.000Z | 2022-02-28T14:38:09.000Z | src/schctest/pypacket_dissector/decoder.py | saguilarDevel/open_schc | ac7f2a84b6120964c8fdaabf9f5c8ca8ae39c289 | [
"MIT"
] | 34 | 2019-01-28T01:32:41.000Z | 2021-05-06T09:40:14.000Z | src/schctest/pypacket_dissector/decoder.py | saguilarDevel/open_schc | ac7f2a84b6120964c8fdaabf9f5c8ca8ae39c289 | [
"MIT"
] | 28 | 2018-10-31T22:21:26.000Z | 2022-03-17T09:44:40.000Z | try:
from _json_keys import *
from _util import *
from defs_L3 import dissectors_L3
except:
from ._json_keys import *
from ._util import *
from .defs_L3 import dissectors_L3
def decoder(x):
'''
return (dissectors_L3)
or
return { JK_EMSG:(error-message) }
'''
this = None
# only show ipv6 packets
if len(x) < 1:
return { JK_EMSG:"invalid packet length" }
proto = (x[0]&0xf0)>>4
if proto in dissectors_L3:
if this != None:
this[JK_PAYLOAD] = dissectors_L3[proto](x)
return this
else:
return dissectors_L3[proto](x)
else:
return { JK_EMSG:"unsupported. L3 proto=%d" % proto }
| 22.967742 | 61 | 0.588483 | try:
from _json_keys import *
from _util import *
from defs_L3 import dissectors_L3
except:
from ._json_keys import *
from ._util import *
from .defs_L3 import dissectors_L3
def decoder(x):
this = None
if len(x) < 1:
return { JK_EMSG:"invalid packet length" }
proto = (x[0]&0xf0)>>4
if proto in dissectors_L3:
if this != None:
this[JK_PAYLOAD] = dissectors_L3[proto](x)
return this
else:
return dissectors_L3[proto](x)
else:
return { JK_EMSG:"unsupported. L3 proto=%d" % proto }
| true | true |
f7254c2e00bea89d99a5f93b0d09b97a572ca11a | 1,521 | py | Python | tests/test_stock.py | condereis/mean-variance-portfolio | 526b1e86d1e92f08ceca9a7c204b043089272744 | [
"MIT"
] | 7 | 2018-08-22T19:16:33.000Z | 2021-08-14T03:50:08.000Z | tests/test_stock.py | condereis/mean-variance-portfolio | 526b1e86d1e92f08ceca9a7c204b043089272744 | [
"MIT"
] | 280 | 2018-07-12T20:20:20.000Z | 2022-03-27T20:01:20.000Z | tests/test_stock.py | condereis/mean-variance-portfolio | 526b1e86d1e92f08ceca9a7c204b043089272744 | [
"MIT"
] | 2 | 2020-04-02T02:30:42.000Z | 2021-07-22T21:13:04.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `mvport` package."""
import unittest
import numpy as np
from mvport.stock import Stock
class TestStock(unittest.TestCase):
"""Tests for `mvport` package."""
def setUp(self):
"""SetUp."""
self.ticker = 'AAPL'
self.returns = [-2, -1, 0, 1, 2]
self.stock = Stock(self.ticker, self.returns)
def test_get_ticker(self):
"""Test get_ticker."""
self.assertEqual(self.stock.get_ticker(), self.ticker)
def test_set_ticker(self):
"""Test set_ticker."""
self.stock.set_ticker('new_ticker')
self.assertEqual(self.stock.get_ticker(), 'new_ticker')
def test_get_returns(self):
"""Test get_returns."""
np.testing.assert_array_equal(self.stock.get_returns(), np.array(self.returns))
def test_set_returns(self):
"""Test set_ticker."""
self.stock.set_returns([-1, 0, 1])
np.testing.assert_array_equal(self.stock.get_returns(), np.array([-1, 0, 1]))
def test_get_mean(self):
"""Test get_mean."""
self.assertEqual(self.stock.get_mean(), 0)
self.stock.set_returns([0, 1, 2])
self.assertEqual(self.stock.get_mean(), 1)
def test_get_variance(self):
"""Test get_variance."""
self.assertEqual(self.stock.get_variance(), 2)
self.stock.set_returns([-3,-1,0,1,3])
self.assertEqual(self.stock.get_variance(), 4)
if __name__ == '__main__':
sys.exit(unittest.main())
| 27.654545 | 87 | 0.618014 |
import unittest
import numpy as np
from mvport.stock import Stock
class TestStock(unittest.TestCase):
def setUp(self):
self.ticker = 'AAPL'
self.returns = [-2, -1, 0, 1, 2]
self.stock = Stock(self.ticker, self.returns)
def test_get_ticker(self):
self.assertEqual(self.stock.get_ticker(), self.ticker)
def test_set_ticker(self):
self.stock.set_ticker('new_ticker')
self.assertEqual(self.stock.get_ticker(), 'new_ticker')
def test_get_returns(self):
np.testing.assert_array_equal(self.stock.get_returns(), np.array(self.returns))
def test_set_returns(self):
self.stock.set_returns([-1, 0, 1])
np.testing.assert_array_equal(self.stock.get_returns(), np.array([-1, 0, 1]))
def test_get_mean(self):
self.assertEqual(self.stock.get_mean(), 0)
self.stock.set_returns([0, 1, 2])
self.assertEqual(self.stock.get_mean(), 1)
def test_get_variance(self):
self.assertEqual(self.stock.get_variance(), 2)
self.stock.set_returns([-3,-1,0,1,3])
self.assertEqual(self.stock.get_variance(), 4)
if __name__ == '__main__':
sys.exit(unittest.main())
| true | true |
f7254caf0aa3637ad03dd57110d9475938728d0b | 301 | py | Python | compiler_gym/util/flags/output_dir.py | mostafaelhoushi/CompilerGym | cf11c58333d263b3ebc5ece2110a429e9af499c1 | [
"MIT"
] | 562 | 2020-12-21T14:10:20.000Z | 2022-03-31T21:23:55.000Z | compiler_gym/util/flags/output_dir.py | mostafaelhoushi/CompilerGym | cf11c58333d263b3ebc5ece2110a429e9af499c1 | [
"MIT"
] | 433 | 2020-12-22T03:40:41.000Z | 2022-03-31T18:16:17.000Z | compiler_gym/util/flags/output_dir.py | mostafaelhoushi/CompilerGym | cf11c58333d263b3ebc5ece2110a429e9af499c1 | [
"MIT"
] | 88 | 2020-12-22T08:22:00.000Z | 2022-03-20T19:00:40.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from absl import flags
flags.DEFINE_string(
"output_dir",
None,
"The directory to read and write files to.",
)
| 25.083333 | 65 | 0.724252 |
from absl import flags
flags.DEFINE_string(
"output_dir",
None,
"The directory to read and write files to.",
)
| true | true |
f7254ce72ce8155285a2f4a9a1febb88f4b64006 | 3,161 | py | Python | leasing/models/basis_of_rent.py | tuomas777/mvj | e9a12e42c399b9fb77fd8fad85fc8f0f6d4ce405 | [
"MIT"
] | null | null | null | leasing/models/basis_of_rent.py | tuomas777/mvj | e9a12e42c399b9fb77fd8fad85fc8f0f6d4ce405 | [
"MIT"
] | null | null | null | leasing/models/basis_of_rent.py | tuomas777/mvj | e9a12e42c399b9fb77fd8fad85fc8f0f6d4ce405 | [
"MIT"
] | null | null | null | from auditlog.registry import auditlog
from django.db import models
from django.utils.translation import ugettext_lazy as _
from enumfields import EnumField
from leasing.enums import PeriodType
from .mixins import NameModel, TimeStampedSafeDeleteModel
class BasisOfRentPlotType(NameModel):
"""
In Finnish: Tonttityyppi
"""
class BasisOfRent(TimeStampedSafeDeleteModel):
"""
In Finnish: Vuokrausperuste
"""
# In Finnish: Tonttityyppi
plot_type = models.ForeignKey(BasisOfRentPlotType, verbose_name=_("Plot type"), on_delete=models.PROTECT)
# In Finnish: Alkupäivämäärä
start_date = models.DateField(verbose_name=_("Start date"), null=True, blank=True)
# In Finnish: Loppupäivämäärä
end_date = models.DateField(verbose_name=_("End date"), null=True, blank=True)
# In Finnish: Asemakaava
detailed_plan_identifier = models.CharField(verbose_name=_("Detailed plan identifier"), null=True, blank=True,
max_length=255)
# In Finnish: Hallintamuoto
management = models.ForeignKey('leasing.Management', verbose_name=_("Form of management"), null=True, blank=True,
on_delete=models.PROTECT)
# In Finnish: Rahoitusmuoto
financing = models.ForeignKey('leasing.Financing', verbose_name=_("Form of financing"), null=True, blank=True,
on_delete=models.PROTECT)
# In Finnish: Vuokraoikeus päättyy
lease_rights_end_date = models.DateField(verbose_name=_("Lease rights end date"), null=True, blank=True)
# In Finnish: Indeksi
index = models.PositiveIntegerField(verbose_name=_("Index"))
# In Finnish: Kommentti
note = models.TextField(verbose_name=_("Note"), null=True, blank=True)
class BasisOfRentRate(TimeStampedSafeDeleteModel):
"""
In Finnish: Hinta
"""
basis_of_rent = models.ForeignKey(BasisOfRent, verbose_name=_("Basis of rent"), related_name='rent_rates',
on_delete=models.CASCADE)
# In Finnish: Pääkäyttötarkoitus
intended_use = models.ForeignKey('leasing.RentIntendedUse', verbose_name=_("Intended use"), null=True, blank=True,
on_delete=models.PROTECT)
# In Finnish: Euroa
amount = models.DecimalField(verbose_name=_("Amount"), decimal_places=2, max_digits=12)
# In Finnish: Yksikkö
period = EnumField(PeriodType, verbose_name=_("Period"), max_length=20)
class BasisOfRentPropertyIdentifier(models.Model):
"""
In Finnish: Kiinteistötunnus
"""
basis_of_rent = models.ForeignKey(BasisOfRent, verbose_name=_("Basis of rent"), related_name='property_identifiers',
on_delete=models.CASCADE)
identifier = models.CharField(verbose_name=_("Identifier"), max_length=255)
class BasisOfRentDecision(models.Model):
"""
In Finnish: Päätös
"""
basis_of_rent = models.ForeignKey(BasisOfRent, related_name='decisions', on_delete=models.CASCADE)
identifier = models.CharField(verbose_name=_("Identifier"), max_length=255)
auditlog.register(BasisOfRent)
| 35.920455 | 120 | 0.691237 | from auditlog.registry import auditlog
from django.db import models
from django.utils.translation import ugettext_lazy as _
from enumfields import EnumField
from leasing.enums import PeriodType
from .mixins import NameModel, TimeStampedSafeDeleteModel
class BasisOfRentPlotType(NameModel):
class BasisOfRent(TimeStampedSafeDeleteModel):
plot_type = models.ForeignKey(BasisOfRentPlotType, verbose_name=_("Plot type"), on_delete=models.PROTECT)
start_date = models.DateField(verbose_name=_("Start date"), null=True, blank=True)
end_date = models.DateField(verbose_name=_("End date"), null=True, blank=True)
detailed_plan_identifier = models.CharField(verbose_name=_("Detailed plan identifier"), null=True, blank=True,
max_length=255)
management = models.ForeignKey('leasing.Management', verbose_name=_("Form of management"), null=True, blank=True,
on_delete=models.PROTECT)
financing = models.ForeignKey('leasing.Financing', verbose_name=_("Form of financing"), null=True, blank=True,
on_delete=models.PROTECT)
lease_rights_end_date = models.DateField(verbose_name=_("Lease rights end date"), null=True, blank=True)
index = models.PositiveIntegerField(verbose_name=_("Index"))
note = models.TextField(verbose_name=_("Note"), null=True, blank=True)
class BasisOfRentRate(TimeStampedSafeDeleteModel):
basis_of_rent = models.ForeignKey(BasisOfRent, verbose_name=_("Basis of rent"), related_name='rent_rates',
on_delete=models.CASCADE)
intended_use = models.ForeignKey('leasing.RentIntendedUse', verbose_name=_("Intended use"), null=True, blank=True,
on_delete=models.PROTECT)
amount = models.DecimalField(verbose_name=_("Amount"), decimal_places=2, max_digits=12)
period = EnumField(PeriodType, verbose_name=_("Period"), max_length=20)
class BasisOfRentPropertyIdentifier(models.Model):
basis_of_rent = models.ForeignKey(BasisOfRent, verbose_name=_("Basis of rent"), related_name='property_identifiers',
on_delete=models.CASCADE)
identifier = models.CharField(verbose_name=_("Identifier"), max_length=255)
class BasisOfRentDecision(models.Model):
basis_of_rent = models.ForeignKey(BasisOfRent, related_name='decisions', on_delete=models.CASCADE)
identifier = models.CharField(verbose_name=_("Identifier"), max_length=255)
auditlog.register(BasisOfRent)
| true | true |
f7254d485119f2dd92ad0be5fa608833d0405c1d | 38 | py | Python | irclogs/__init__.py | dokipen/trac-irclogs-plugin | 811aa16fdaf7f6de9bfa6200073f5b33da09fc1a | [
"BSD-3-Clause"
] | null | null | null | irclogs/__init__.py | dokipen/trac-irclogs-plugin | 811aa16fdaf7f6de9bfa6200073f5b33da09fc1a | [
"BSD-3-Clause"
] | 1 | 2015-02-26T23:17:12.000Z | 2015-03-02T15:03:45.000Z | irclogs/__init__.py | dokipen/trac-irclogs-plugin | 811aa16fdaf7f6de9bfa6200073f5b33da09fc1a | [
"BSD-3-Clause"
] | null | null | null | from console import update_irc_search
| 19 | 37 | 0.894737 | from console import update_irc_search
| true | true |
f7254e36b7c014cfeea985736099019756c9cb78 | 1,026 | py | Python | program/experiments/localization/calibrate.py | JankaSvK/thesis | c440ab8242b058f580fdf9d5a1d00708a1696561 | [
"MIT"
] | 1 | 2018-11-29T14:13:47.000Z | 2018-11-29T14:13:47.000Z | program/experiments/localization/calibrate.py | JankaSvK/thesis | c440ab8242b058f580fdf9d5a1d00708a1696561 | [
"MIT"
] | 3 | 2018-04-24T18:30:00.000Z | 2018-05-11T23:25:07.000Z | program/experiments/localization/calibrate.py | JankaSvK/thesis | c440ab8242b058f580fdf9d5a1d00708a1696561 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import os
from sys import argv
if os.name == 'nt':
python = "python"
else:
python = "python3"
try:
experiment_id = int(argv[1])
except Exception:
experiment_id = 63
def command(video1, video2, chessboard, interpreter=None):
    """Build the shell command line that runs the calibration via Main.py.

    Parameters
    ----------
    video1, video2 : str
        Paths to the two calibration videos.
    chessboard : str
        Chessboard specification string passed to Main.py (e.g. "7,8,22").
    interpreter : str, optional
        Python interpreter to invoke; defaults to the module-level,
        platform-dependent ``python`` value (backward compatible).

    Returns
    -------
    str
        The complete command line.
    """
    if interpreter is None:
        interpreter = python  # module-level platform default
    return "{} ../../Main.py --video1={} --video2={} --chessboard={}".format(
        interpreter, video1, video2, chessboard)
calib_videos = "../videos/calibration/"
# Chessboard spec per known experiment id; the matching video pair always
# lives in "<calib_videos><experiment_id>/{1,2}.avi".
known_experiments = {
    38: "7,8,22",
    16: "7,8,22",
    43: "6,9,26",
    63: "7,8,22",
}
if experiment_id not in known_experiments:
    print("Not recognized set of videos")
    exit(0)
chessboard = known_experiments[experiment_id]
video1 = "{}{}/1.avi".format(calib_videos, experiment_id)
video2 = "{}{}/2.avi".format(calib_videos, experiment_id)
os.system(command(video1, video2, chessboard))
| 24.428571 | 112 | 0.640351 |
import os
from sys import argv
if os.name == 'nt':
python = "python"
else:
python = "python3"
try:
experiment_id = int(argv[1])
except Exception:
experiment_id = 63
def command(video1, video2, chessboard):
return "{} ../../Main.py --video1={} --video2={} --chessboard={}".format(python, video1, video2, chessboard)
calib_videos = "../videos/calibration/"
if experiment_id == 38:
chessboard = "7,8,22"
video1 = calib_videos + "38/1.avi"
video2 = calib_videos + "38/2.avi"
elif experiment_id == 16:
chessboard = "7,8,22"
video1 = calib_videos + "16/1.avi"
video2 = calib_videos + "16/2.avi"
elif experiment_id == 43:
chessboard = "6,9,26"
video1 = calib_videos + "43/1.avi"
video2 = calib_videos + "43/2.avi"
elif experiment_id == 63:
chessboard = "7,8,22"
video1 = calib_videos + "63/1.avi"
video2 = calib_videos + "63/2.avi"
else:
print("Not recognized set of videos")
exit(0)
os.system(command(video1, video2, chessboard))
| true | true |
f7254e6e57d13e131c3fc738bd9c4a2d139d00b0 | 5,481 | py | Python | src/polytopes/run_polychora_examples.py | mohi7solanki/pywonderland | 2b9d61a8414d4cfa92d34325e5e2b9b5d501abca | [
"MIT"
] | null | null | null | src/polytopes/run_polychora_examples.py | mohi7solanki/pywonderland | 2b9d61a8414d4cfa92d34325e5e2b9b5d501abca | [
"MIT"
] | null | null | null | src/polytopes/run_polychora_examples.py | mohi7solanki/pywonderland | 2b9d61a8414d4cfa92d34325e5e2b9b5d501abca | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Render curved 4d polychoron examples
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This script draws uniform polychoron whose vertices lie
on the unit sphere S^3 by using stereographic projection
to map them into 3d space.
:copyright (c) 2018 by Zhao Liang.
"""
import subprocess
from fractions import Fraction
import numpy as np
from models import Polychora
import helpers
POV_EXE = "povray" # POV-Ray exe binary
SCENE_FILE = "polychora_curved.pov" # the main scene file
IMAGE_SIZE = 600 # image size in pixels
IMAGE_QUALITY_LEVEL = 11 # between 0-11
SUPER_SAMPLING_LEVEL = 7 # between 1-9
ANTIALIASING_LEVEL = 0.001 # lower for better quality
POV_COMMAND = " cd povray && " + \
" {} +I{}".format(POV_EXE, SCENE_FILE) + \
" +W{} +H{}".format(IMAGE_SIZE, IMAGE_SIZE) + \
" +Q{}".format(IMAGE_QUALITY_LEVEL) + \
" +A{}".format(ANTIALIASING_LEVEL) + \
" +R{}".format(SUPER_SAMPLING_LEVEL) + \
" +O../{}"
POV_TEMPLATE = """
#declare vertex_size = {};
#declare edge_size = {};
#declare camera_location = {};
#declare object_rotation = {};
#declare extent = {};
#declare vertices = array[{}] {{{}}};
#declare size_func = {};
#declare face_max= {};
#declare face_min = {};
#declare face_index = {};
// this macro is used for adjusting the size of edges
// according to their positions in the space.
#macro get_size(q)
#local len = vlength(q);
#if (size_func = 0)
#local len = (1.0 + len * len) / 4;
#else #if (size_func = 1)
#local len = 2.0 * log(2.0 + len * len);
#else
#local len = 2.0 * log(1.13 + len * len);
#end
#end
len
#end
#macro choose_face(i, face_size)
#local chosen = false;
#for (ind, 0, dimension_size(face_index, 1) - 1)
#if (i = face_index[ind])
#if (face_size > face_min & face_size < face_max)
#local chosen = true;
#end
#end
#end
chosen
#end
{}
{}
{}
"""
VERT_MACRO = "Vert(vertices, {})"
EDGE_MACRO = "Edge(vertices, {}, {}, {})"
def write_to_pov(P,
                 camera=(0, 0, 180),
                 rotation=(0, 0, 0),
                 vertex_size=0.04,
                 edge_size=0.02,
                 size_func=0,
                 face_index=(0,),
                 face_max=3,
                 face_min=0.5):
    """Write the data of a polytope `P` to the POV-Ray include file.

    The file "./povray/polychora-data.inc" is (over)written with the
    scene parameters and the polytope's vertex/edge/face macros, filling
    the slots of the module-level ``POV_TEMPLATE``.

    :param camera: camera location.
    :param rotation: rotation angles (in degrees) of the polytope.
    :param vertex_size: controls size of the vertices.
    :param edge_size: controls size of the edges.
    :param size_func: which formula adjusts edge size with distance;
        must be 0, 1 or 2 (see the `get_size` macro in the template).
    :param face_index: which face types are rendered; list of integers.
    :param face_max: faces larger than this value are not rendered.
    :param face_min: faces smaller than this value are not rendered.
    """
    with open("./povray/polychora-data.inc", "w") as f:
        # Largest 3d-projected vertex norm: used by the scene to scale the view.
        extent = max(np.linalg.norm(helpers.proj3d(v)) for v in P.vertex_coords)
        # One macro invocation per vertex / per edge / per face.
        vert_macros = "\n".join(VERT_MACRO.format(k) for k in range(P.num_vertices))
        edge_macros = "\n".join(EDGE_MACRO.format(i, e[0], e[1])
                                for i, elist in enumerate(P.edge_indices)
                                for e in elist)
        face_macros = "\n".join(helpers.export_face(i, face)
                                for i, flist in enumerate(P.face_coords)
                                for face in flist)
        # Argument order must match the {} slots in POV_TEMPLATE.
        f.write(POV_TEMPLATE.format(
            vertex_size,
            edge_size,
            helpers.pov_vector(camera),
            helpers.pov_vector(rotation),
            extent,
            P.num_vertices,
            helpers.pov_vector_list(P.vertex_coords),
            size_func,
            face_max,
            face_min,
            helpers.pov_array(face_index),
            vert_macros,
            edge_macros,
            face_macros))
def draw(coxeter_diagram,
         trunc_type,
         description="polychora",
         extra_relations=(),
         **kwargs):
    """Build a polychoron, export its data and render it with POV-Ray.

    :param coxeter_diagram: Coxeter diagram describing the symmetry group.
    :param trunc_type: truncation coefficients of the vertices.
    :param description: name used for the output image file and log line.
    :param extra_relations: extra group relations passed to `Polychora`.
    :param kwargs: forwarded to `write_to_pov` (camera, sizes, faces, ...).
    :raises IOError: when the POV-Ray subprocess exits with a non-zero code.
    """
    P = Polychora(coxeter_diagram, trunc_type, extra_relations)
    P.build_geometry()
    write_to_pov(P, **kwargs)
    print("rendering {} with {} vertices, {} edges, {} faces".format(
        description,
        P.num_vertices,
        P.num_edges,
        P.num_faces))
    # Run POV-Ray via the shell; capture stderr so failures can be reported.
    process = subprocess.Popen(
        POV_COMMAND.format(description),
        shell=True,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE)
    _, err = process.communicate()
    if process.returncode:
        print(type(err), err)
        raise IOError("POVRay error: " + err.decode("ascii"))
def main():
    """Render the 600-cell example.

    Further examples, kept for reference (move a call out of this
    docstring to render it):

    draw((3, 2, 2, 3, 2, 3), (1, 0, 0, 0), "5-cell", camera=(0, 0, 200),
         vertex_size=0.08, edge_size=0.04, rotation=(-30, 60, 0), size_func=1)
    draw((5, 2, 2, 3, 2, 3), (1, 0, 0, 1), "runcinated-120-cell", camera=(0, 0, 105),
         vertex_size=0.028, edge_size=0.014, face_min=20)
    """
    draw((3, 2, 2, 3, 2, 5), (1, 0, 0, 0), "600-cell", camera=(0, 0, 200),
         vertex_size=0.12, edge_size=0.04, size_func=2, face_max=4.0, face_min=3.0)
if __name__ == "__main__":
main()
| 29.95082 | 85 | 0.561759 |
import subprocess
from fractions import Fraction
import numpy as np
from models import Polychora
import helpers
POV_EXE = "povray"
SCENE_FILE = "polychora_curved.pov"
IMAGE_SIZE = 600
IMAGE_QUALITY_LEVEL = 11
SUPER_SAMPLING_LEVEL = 7
ANTIALIASING_LEVEL = 0.001
POV_COMMAND = " cd povray && " + \
" {} +I{}".format(POV_EXE, SCENE_FILE) + \
" +W{} +H{}".format(IMAGE_SIZE, IMAGE_SIZE) + \
" +Q{}".format(IMAGE_QUALITY_LEVEL) + \
" +A{}".format(ANTIALIASING_LEVEL) + \
" +R{}".format(SUPER_SAMPLING_LEVEL) + \
" +O../{}"
POV_TEMPLATE = """
#declare vertex_size = {};
#declare edge_size = {};
#declare camera_location = {};
#declare object_rotation = {};
#declare extent = {};
#declare vertices = array[{}] {{{}}};
#declare size_func = {};
#declare face_max= {};
#declare face_min = {};
#declare face_index = {};
// this macro is used for adjusting the size of edges
// according to their positions in the space.
#macro get_size(q)
#local len = vlength(q);
#if (size_func = 0)
#local len = (1.0 + len * len) / 4;
#else #if (size_func = 1)
#local len = 2.0 * log(2.0 + len * len);
#else
#local len = 2.0 * log(1.13 + len * len);
#end
#end
len
#end
#macro choose_face(i, face_size)
#local chosen = false;
#for (ind, 0, dimension_size(face_index, 1) - 1)
#if (i = face_index[ind])
#if (face_size > face_min & face_size < face_max)
#local chosen = true;
#end
#end
#end
chosen
#end
{}
{}
{}
"""
VERT_MACRO = "Vert(vertices, {})"
EDGE_MACRO = "Edge(vertices, {}, {}, {})"
def write_to_pov(P,
camera=(0, 0, 180),
rotation=(0, 0, 0),
vertex_size=0.04,
edge_size=0.02,
size_func=0,
face_index=(0,),
face_max=3,
face_min=0.5):
with open("./povray/polychora-data.inc", "w") as f:
extent = max(np.linalg.norm(helpers.proj3d(v)) for v in P.vertex_coords)
vert_macros = "\n".join(VERT_MACRO.format(k) for k in range(P.num_vertices))
edge_macros = "\n".join(EDGE_MACRO.format(i, e[0], e[1])
for i, elist in enumerate(P.edge_indices)
for e in elist)
face_macros = "\n".join(helpers.export_face(i, face)
for i, flist in enumerate(P.face_coords)
for face in flist)
f.write(POV_TEMPLATE.format(
vertex_size,
edge_size,
helpers.pov_vector(camera),
helpers.pov_vector(rotation),
extent,
P.num_vertices,
helpers.pov_vector_list(P.vertex_coords),
size_func,
face_max,
face_min,
helpers.pov_array(face_index),
vert_macros,
edge_macros,
face_macros))
def draw(coxeter_diagram,
trunc_type,
description="polychora",
extra_relations=(),
**kwargs):
P = Polychora(coxeter_diagram, trunc_type, extra_relations)
P.build_geometry()
write_to_pov(P, **kwargs)
print("rendering {} with {} vertices, {} edges, {} faces".format(
description,
P.num_vertices,
P.num_edges,
P.num_faces))
process = subprocess.Popen(
POV_COMMAND.format(description),
shell=True,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
_, err = process.communicate()
if process.returncode:
print(type(err), err)
raise IOError("POVRay error: " + err.decode("ascii"))
def main():
draw((3, 2, 2, 3, 2, 5), (1, 0, 0, 0), "600-cell", camera=(0, 0, 200),
vertex_size=0.12, edge_size=0.04, size_func=2, face_max=4.0, face_min=3.0)
if __name__ == "__main__":
main()
| true | true |
f7254fe63f765868c8428af33d90ac77ae356bbd | 7,055 | py | Python | deltametrics/sample_data/sample_data.py | amoodie/DeltaMetrics | 9b823bea36851adcebc446c8941f8783325b1a4f | [
"MIT"
] | null | null | null | deltametrics/sample_data/sample_data.py | amoodie/DeltaMetrics | 9b823bea36851adcebc446c8941f8783325b1a4f | [
"MIT"
] | null | null | null | deltametrics/sample_data/sample_data.py | amoodie/DeltaMetrics | 9b823bea36851adcebc446c8941f8783325b1a4f | [
"MIT"
] | null | null | null | import sys
import os
import pkg_resources
import warnings
import numpy as np
import netCDF4
import pooch
from .. import cube
from .. import utils
# deltametrics version
__version__ = utils._get_version()
# ensure DeprecationWarning is shown
warnings.simplefilter("default")
# Configure the pooch data registry: sample files are downloaded on demand
# from the GitHub repository and cached locally; the cache directory can be
# overridden with the DELTAMETRICS_DATA_DIR environment variable.
REGISTRY = pooch.create(
    path=pooch.os_cache("deltametrics"),
    base_url='https://github.com/DeltaRCM/DeltaMetrics/raw/develop/deltametrics/sample_data/files/',
    env="DELTAMETRICS_DATA_DIR",
)
# Load the file registry (names and hashes) shipped with the package.
with pkg_resources.resource_stream("deltametrics.sample_data", "registry.txt") as registry_file:
    REGISTRY.load_registry(registry_file)
def _get_golf_path():
    """Download/locate the golf archive and return the path of the first
    netCDF (.nc) file extracted from it."""
    extracted = REGISTRY.fetch('golf.zip', processor=pooch.Unzip())
    nc_files = [name for name in extracted
                if os.path.splitext(name)[1] == '.nc']
    return nc_files[0]
def golf():
    """Golf Delta dataset.

    A synthetic delta generated with the pyDeltaRCM numerical model
    (v2.1.0; run on 10/14/2021 at the University of Texas at Austin --
    see the log file shipped with the data for the complete system and
    model configuration). The model run was created specifically to
    provide sample data.

    Data available at Zenodo, https://doi.org/10.5281/zenodo.4456143.

    Version history:
      * v1.1: 10.5281/zenodo.5570962
      * v1.0: 10.5281/zenodo.4456144

    Returns a :obj:`~deltametrics.cube.DataCube`; note that the ``x``
    and ``y`` coordinate names from the file are swapped when the cube
    is built.
    """
    return cube.DataCube(_get_golf_path(), coordinates={'x': 'y', 'y': 'x'})
def tdb12():
    """Placeholder loader for the tdb12 sample dataset.

    Raises
    ------
    NotImplementedError
        Always: this dataset is not available yet.
    """
    raise NotImplementedError('The tdb12 sample dataset is not available yet.')
def _get_aeolian_path():
    """Fetch (download and cache if needed) the aeolian netCDF file and
    return its local path."""
    aeolian_path = REGISTRY.fetch('swanson_aeolian_expt1.nc')
    return aeolian_path
def aeolian():
    """An aeolian dune field dataset.

    Synthetic dune-field topography produced with the model of Swanson
    et al. (2017), "A Surface Model for Aeolian Dune Topography",
    Math Geosci 49, 635-655, https://doi.org/10.1007/s11004-016-9654-x.
    The data were subsetted to the first 500 saved timesteps and
    formatted into a netCDF file.

    Dataset reference: https://doi.org/10.6084/m9.figshare.17118827.v1

    Details:
      * default simulation parameters were used;
      * the ordering of "easting" and "northing" in the netCDF file is
        opposite from the paper -- the source region is along the second
        axis (``dim1[source_region] == 0``), so displays differ from the
        original figures but the data are the same;
      * the simulation used the model code supplied as a supplement to
        the paper and was executed on 12/02/2021 with Matlab R2021a on
        Ubuntu 20.04.

    Returns a :obj:`~deltametrics.cube.DataCube` wrapping the file.
    """
    return cube.DataCube(_get_aeolian_path())
def _get_rcm8_path():
    """Fetch (download and cache if needed) the rcm8 netCDF file and
    return its local path."""
    rcm8_path = REGISTRY.fetch('pyDeltaRCM_Output_8.nc')
    return rcm8_path
def rcm8():
    """Rcm8 Delta dataset (legacy).

    A synthetic delta generated with an unknown, older version of the
    pyDeltaRCM numerical model. Because the exact model version is
    unknown and many coupling features have since been added to
    pyDeltaRCM and DeltaMetrics, this dataset is slated for deprecation
    in favor of the :obj:`golf` dataset.

    .. important::
        If you are learning to use DeltaMetrics or developing new codes
        or documentation, please use the :obj:`golf` delta dataset.

    .. warning:: This cube may be removed in future releases.

    Returns a :obj:`~deltametrics.cube.DataCube` wrapping the file.
    """
    return cube.DataCube(_get_rcm8_path())
def _get_landsat_path():
    """Fetch (download and cache if needed) the Landsat HDF5 file and
    return its local path."""
    landsat_path = REGISTRY.fetch('LandsatEx.hdf5')
    return landsat_path
def landsat():
    """Landsat image dataset.

    Satellite images from the Landsat 5 satellite collected over the
    Krishna River delta, India. Includes annual-composite scenes from
    four years (`[1995, 2000, 2005, 2010]`) and four bands
    (`['Red', 'Green', 'Blue', 'NIR']`).

    Returns a :obj:`~deltametrics.cube.DataCube` wrapping the HDF5 file.
    """
    return cube.DataCube(_get_landsat_path())
| 32.813953 | 125 | 0.632034 | import sys
import os
import pkg_resources
import warnings
import numpy as np
import netCDF4
import pooch
from .. import cube
from .. import utils
__version__ = utils._get_version()
warnings.simplefilter("default")
REGISTRY = pooch.create(
path=pooch.os_cache("deltametrics"),
base_url='https://github.com/DeltaRCM/DeltaMetrics/raw/develop/deltametrics/sample_data/files/',
env="DELTAMETRICS_DATA_DIR",
)
with pkg_resources.resource_stream("deltametrics.sample_data", "registry.txt") as registry_file:
REGISTRY.load_registry(registry_file)
def _get_golf_path():
unpack = pooch.Unzip()
fnames = REGISTRY.fetch('golf.zip', processor=unpack)
nc_bool = [os.path.splitext(fname)[1] == '.nc' for fname in fnames]
nc_idx = [i for i, b in enumerate(nc_bool) if b]
golf_path = fnames[nc_idx[0]]
return golf_path
def golf():
golf_path = _get_golf_path()
return cube.DataCube(golf_path, coordinates={'x': 'y', 'y': 'x'})
def tdb12():
raise NotImplementedError
def _get_aeolian_path():
aeolian_path = REGISTRY.fetch('swanson_aeolian_expt1.nc')
return aeolian_path
def aeolian():
aeolian_path = _get_aeolian_path()
return cube.DataCube(aeolian_path)
def _get_rcm8_path():
rcm8_path = REGISTRY.fetch('pyDeltaRCM_Output_8.nc')
return rcm8_path
def rcm8():
rcm8_path = _get_rcm8_path()
return cube.DataCube(rcm8_path)
def _get_landsat_path():
landsat_path = REGISTRY.fetch('LandsatEx.hdf5')
return landsat_path
def landsat():
landsat_path = _get_landsat_path()
return cube.DataCube(landsat_path)
| true | true |
f72550174de22fb7301842e0d293f4de18995253 | 3,836 | py | Python | cirq-core/cirq/ops/two_qubit_diagonal_gate_test.py | LLcat1217/Cirq | b88069f7b01457e592ad69d6b413642ef11a56b8 | [
"Apache-2.0"
] | null | null | null | cirq-core/cirq/ops/two_qubit_diagonal_gate_test.py | LLcat1217/Cirq | b88069f7b01457e592ad69d6b413642ef11a56b8 | [
"Apache-2.0"
] | null | null | null | cirq-core/cirq/ops/two_qubit_diagonal_gate_test.py | LLcat1217/Cirq | b88069f7b01457e592ad69d6b413642ef11a56b8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import sympy
import cirq
@pytest.mark.parametrize(
    'gate',
    (
        (
            cirq.TwoQubitDiagonalGate([2, 3, 5, 7]),
            cirq.TwoQubitDiagonalGate([0, 0, 0, 0]),
            cirq.TwoQubitDiagonalGate([2, 3, 5, sympy.Symbol('a')]),
            cirq.TwoQubitDiagonalGate([0.34, 0.12, 0, 0.96]),
        )
    ),
)
def test_consistent_protocols(gate):
    """Each sample gate (integer, zero, symbolic and float angles) must
    satisfy cirq's protocol-consistency checks."""
    cirq.testing.assert_implements_consistent_protocols(gate)
def test_parameterized_decompose():
    """Decomposing a symbolic diagonal gate must reproduce its unitary
    for every resolved parameter assignment."""
    angles = sympy.symbols('x0, x1, x2, x3')
    parameterized_op = cirq.TwoQubitDiagonalGate(angles).on(*cirq.LineQubit.range(2))
    decomposed_circuit = cirq.Circuit(cirq.decompose(parameterized_op))
    # Sweep all four symbols over a small grid (6 points each) and compare
    # the resolved unitary of the op against its decomposition.
    for resolver in (
        cirq.Linspace('x0', -2, 2, 6)
        * cirq.Linspace('x1', -2, 2, 6)
        * cirq.Linspace('x2', -2, 2, 6)
        * cirq.Linspace('x3', -2, 2, 6)
    ):
        np.testing.assert_allclose(
            cirq.unitary(cirq.resolve_parameters(parameterized_op, resolver)),
            cirq.unitary(cirq.resolve_parameters(decomposed_circuit, resolver)),
        )
def test_unitary():
    """The gate's unitary is diag(exp(1j * angle)) for the given angles."""
    diagonal_angles = [2, 3, 5, 7]
    assert cirq.has_unitary(cirq.TwoQubitDiagonalGate(diagonal_angles))
    np.testing.assert_allclose(
        cirq.unitary(cirq.TwoQubitDiagonalGate(diagonal_angles)),
        np.diag([np.exp(1j * angle) for angle in diagonal_angles]),
        atol=1e-8,
    )
def test_diagram():
    """Circuit diagrams render the gate in both unicode and ASCII modes."""
    a, b = cirq.LineQubit.range(2)
    diagonal_circuit = cirq.Circuit(cirq.TwoQubitDiagonalGate([2, 3, 5, 7])(a, b))
    cirq.testing.assert_has_diagram(
        diagonal_circuit,
        """
0: ───diag(2, 3, 5, 7)───
      │
1: ───#2─────────────────
""",
    )
    cirq.testing.assert_has_diagram(
        diagonal_circuit,
        """
0: ---diag(2, 3, 5, 7)---
      |
1: ---#2-----------------
""",
        use_unicode_characters=False,
    )
def test_diagonal_exponent():
    """Raising the gate to a power scales each diagonal angle."""
    diagonal_angles = [2, 3, 5, 7]
    diagonal_gate = cirq.TwoQubitDiagonalGate(diagonal_angles)
    sqrt_diagonal_gate = diagonal_gate**0.5
    expected_angles = [prime / 2 for prime in diagonal_angles]
    assert cirq.approx_eq(sqrt_diagonal_gate, cirq.TwoQubitDiagonalGate(expected_angles))
    # A non-numeric exponent is unsupported: cirq.pow returns the default.
    assert cirq.pow(cirq.TwoQubitDiagonalGate(diagonal_angles), "test", None) is None
def test_protocols_mul_not_implemented():
    """pow() raises TypeError when an angle is not a number (None here)."""
    diagonal_angles = [2, 3, None, 7]
    diagonal_gate = cirq.TwoQubitDiagonalGate(diagonal_angles)
    with pytest.raises(TypeError):
        cirq.protocols.pow(diagonal_gate, 3)
@pytest.mark.parametrize('resolve_fn', [cirq.resolve_parameters, cirq.resolve_parameters_once])
def test_resolve(resolve_fn):
    """Symbols can be resolved one at a time; the gate stays parameterized
    until no symbols remain."""
    diagonal_angles = [2, 3, 5, 7]
    diagonal_gate = cirq.TwoQubitDiagonalGate(
        diagonal_angles[:2] + [sympy.Symbol('a'), sympy.Symbol('b')]
    )
    assert cirq.is_parameterized(diagonal_gate)
    diagonal_gate = resolve_fn(diagonal_gate, {'a': 5})
    assert diagonal_gate == cirq.TwoQubitDiagonalGate(diagonal_angles[:3] + [sympy.Symbol('b')])
    assert cirq.is_parameterized(diagonal_gate)
    diagonal_gate = resolve_fn(diagonal_gate, {'b': 7})
    assert diagonal_gate == cirq.TwoQubitDiagonalGate(diagonal_angles)
    assert not cirq.is_parameterized(diagonal_gate)
| 31.966667 | 96 | 0.676747 |
import numpy as np
import pytest
import sympy
import cirq
@pytest.mark.parametrize(
'gate',
(
(
cirq.TwoQubitDiagonalGate([2, 3, 5, 7]),
cirq.TwoQubitDiagonalGate([0, 0, 0, 0]),
cirq.TwoQubitDiagonalGate([2, 3, 5, sympy.Symbol('a')]),
cirq.TwoQubitDiagonalGate([0.34, 0.12, 0, 0.96]),
)
),
)
def test_consistent_protocols(gate):
cirq.testing.assert_implements_consistent_protocols(gate)
def test_parameterized_decompose():
angles = sympy.symbols('x0, x1, x2, x3')
parameterized_op = cirq.TwoQubitDiagonalGate(angles).on(*cirq.LineQubit.range(2))
decomposed_circuit = cirq.Circuit(cirq.decompose(parameterized_op))
for resolver in (
cirq.Linspace('x0', -2, 2, 6)
* cirq.Linspace('x1', -2, 2, 6)
* cirq.Linspace('x2', -2, 2, 6)
* cirq.Linspace('x3', -2, 2, 6)
):
np.testing.assert_allclose(
cirq.unitary(cirq.resolve_parameters(parameterized_op, resolver)),
cirq.unitary(cirq.resolve_parameters(decomposed_circuit, resolver)),
)
def test_unitary():
diagonal_angles = [2, 3, 5, 7]
assert cirq.has_unitary(cirq.TwoQubitDiagonalGate(diagonal_angles))
np.testing.assert_allclose(
cirq.unitary(cirq.TwoQubitDiagonalGate(diagonal_angles)),
np.diag([np.exp(1j * angle) for angle in diagonal_angles]),
atol=1e-8,
)
def test_diagram():
a, b = cirq.LineQubit.range(2)
diagonal_circuit = cirq.Circuit(cirq.TwoQubitDiagonalGate([2, 3, 5, 7])(a, b))
cirq.testing.assert_has_diagram(
diagonal_circuit,
"""
0: ───diag(2, 3, 5, 7)───
│
1: ───#2─────────────────
""",
)
cirq.testing.assert_has_diagram(
diagonal_circuit,
"""
0: ---diag(2, 3, 5, 7)---
|
1: ---#2-----------------
""",
use_unicode_characters=False,
)
def test_diagonal_exponent():
diagonal_angles = [2, 3, 5, 7]
diagonal_gate = cirq.TwoQubitDiagonalGate(diagonal_angles)
sqrt_diagonal_gate = diagonal_gate**0.5
expected_angles = [prime / 2 for prime in diagonal_angles]
assert cirq.approx_eq(sqrt_diagonal_gate, cirq.TwoQubitDiagonalGate(expected_angles))
assert cirq.pow(cirq.TwoQubitDiagonalGate(diagonal_angles), "test", None) is None
def test_protocols_mul_not_implemented():
diagonal_angles = [2, 3, None, 7]
diagonal_gate = cirq.TwoQubitDiagonalGate(diagonal_angles)
with pytest.raises(TypeError):
cirq.protocols.pow(diagonal_gate, 3)
@pytest.mark.parametrize('resolve_fn', [cirq.resolve_parameters, cirq.resolve_parameters_once])
def test_resolve(resolve_fn):
diagonal_angles = [2, 3, 5, 7]
diagonal_gate = cirq.TwoQubitDiagonalGate(
diagonal_angles[:2] + [sympy.Symbol('a'), sympy.Symbol('b')]
)
assert cirq.is_parameterized(diagonal_gate)
diagonal_gate = resolve_fn(diagonal_gate, {'a': 5})
assert diagonal_gate == cirq.TwoQubitDiagonalGate(diagonal_angles[:3] + [sympy.Symbol('b')])
assert cirq.is_parameterized(diagonal_gate)
diagonal_gate = resolve_fn(diagonal_gate, {'b': 7})
assert diagonal_gate == cirq.TwoQubitDiagonalGate(diagonal_angles)
assert not cirq.is_parameterized(diagonal_gate)
| true | true |
f725507deeef1871cc78aa29f14a4bf893392a8e | 3,010 | py | Python | mt4forexparser/UniLogger.py | Tim55667757/MT4ForexParser | 4aceab05f150cfccc0ad4622c612476e279b68f0 | [
"MIT"
] | 11 | 2020-07-23T22:34:07.000Z | 2022-03-03T04:42:22.000Z | mt4forexparser/UniLogger.py | Tim55667757/MT4ForexParser | 4aceab05f150cfccc0ad4622c612476e279b68f0 | [
"MIT"
] | null | null | null | mt4forexparser/UniLogger.py | Tim55667757/MT4ForexParser | 4aceab05f150cfccc0ad4622c612476e279b68f0 | [
"MIT"
] | 4 | 2021-03-02T08:04:15.000Z | 2021-08-24T13:46:35.000Z | # -*- coding: utf-8 -*-
#
# Author: Timur Gilmullin
# This module initialize standard python logging system.
import sys
import logging.handlers
# Initialize the main parent logger shared across the package.
UniLogger = logging.getLogger("UniLogger")
# Log line layout: file name, line number, level, timestamp, message.
formatString = "%(filename)-20sL:%(lineno)-5d%(levelname)-8s[%(asctime)s] %(message)s"
formatter = logging.Formatter(formatString)
# Redirect stderr into stdout so all console output shares one stream.
# NOTE(review): this is a process-wide side effect of merely importing
# this module -- confirm that callers expect it.
sys.stderr = sys.stdout
def SetLevel(vLevel='ERROR'):
    """Set the verbosity level of UniLogger.

    Accepts a string: a digit '1'..'5' or a (case-insensitive) level
    name DEBUG/INFO/WARNING/ERROR/CRITICAL. Any other value -- including
    non-string input -- leaves the logger at NOTSET.
    """
    levelByName = {
        '5': logging.CRITICAL, 'CRITICAL': logging.CRITICAL,
        '4': logging.ERROR, 'ERROR': logging.ERROR,
        '3': logging.WARNING, 'WARNING': logging.WARNING,
        '2': logging.INFO, 'INFO': logging.INFO,
        '1': logging.DEBUG, 'DEBUG': logging.DEBUG,
    }
    UniLogger.level = logging.NOTSET
    if isinstance(vLevel, str):
        UniLogger.level = levelByName.get(vLevel.upper(), logging.NOTSET)
class LevelFilter(logging.Filter):
    """A logging filter that only passes records whose numeric level is
    at or above a fixed threshold."""

    def __init__(self, level):
        """Remember the minimum level records must reach to pass."""
        super().__init__()
        self.level = level

    def filter(self, record):
        """Return True when *record* meets the configured threshold."""
        return record.levelno >= self.level
def EnableLogger(logFile, parentHandler=UniLogger, useFormat=formatter):
    """
    Adding new file logger with rotation.

    :param logFile: path of the log file to create/append to.
    :param parentHandler: logger that receives the new handler.
    :param useFormat: formatter for the file output; falls back to the
        module-level ``formatter`` when falsy.
    :return: the created RotatingFileHandler (pass it to DisableLogger).
    """
    maxSizeBytes = 50 * 1024 * 1024  # rotate at 50 MiB, keeping 4 backup files
    logHandler = logging.handlers.RotatingFileHandler(logFile, encoding="UTF-8", maxBytes=maxSizeBytes, backupCount=4)
    logHandler.level = logging.DEBUG  # file logging records everything from DEBUG up
    logHandler.addFilter(LevelFilter(logging.DEBUG))
    if useFormat:
        logHandler.setFormatter(useFormat)
    else:
        logHandler.setFormatter(formatter)
    parentHandler.addHandler(logHandler)
    return logHandler
def DisableLogger(handler, parentHandler=UniLogger):
    """Flush, close and detach a file handler created by EnableLogger.

    A falsy *handler* (e.g. None) is silently ignored.
    """
    if not handler:
        return
    handler.flush()
    handler.close()
    if handler in parentHandler.handlers:
        parentHandler.removeHandler(handler)
# --- Main init: configure default console logging at import time.
SetLevel('DEBUG')  # the parent logger itself passes everything through
streamHandler = logging.StreamHandler()  # console handler (stderr, redirected to stdout above)
streamHandler.setFormatter(formatter)  # use the shared line format
streamHandler.level = logging.INFO  # console shows INFO and above
UniLogger.addHandler(streamHandler)  # attach the console handler to the parent logger
# File logging is opt-in, e.g.:
# fileLogHandler = EnableLogger(logFile='log.txt', parentHandler=UniLogger, useFormat=formatter)
sepWide = '-' * 120  # long-long log separator
sepLong = '-' * 80  # long log separator
sepShort = '-' * 40  # short log separator
sepLine = '=--=' * 20  # log part separator
| 29.223301 | 119 | 0.68206 |
import sys
import logging.handlers
UniLogger = logging.getLogger("UniLogger")
formatString = "%(filename)-20sL:%(lineno)-5d%(levelname)-8s[%(asctime)s] %(message)s"
formatter = logging.Formatter(formatString)
sys.stderr = sys.stdout
def SetLevel(vLevel='ERROR'):
UniLogger.level = logging.NOTSET
if isinstance(vLevel, str):
if vLevel == '5' or vLevel.upper() == 'CRITICAL':
UniLogger.level = logging.CRITICAL
elif vLevel == '4' or vLevel.upper() == 'ERROR':
UniLogger.level = logging.ERROR
elif vLevel == '3' or vLevel.upper() == 'WARNING':
UniLogger.level = logging.WARNING
elif vLevel == '2' or vLevel.upper() == 'INFO':
UniLogger.level = logging.INFO
elif vLevel == '1' or vLevel.upper() == 'DEBUG':
UniLogger.level = logging.DEBUG
class LevelFilter(logging.Filter):
def __init__(self, level):
super().__init__()
self.level = level
def filter(self, record):
return record.levelno >= self.level
def EnableLogger(logFile, parentHandler=UniLogger, useFormat=formatter):
maxSizeBytes = 50 * 1024 * 1024
logHandler = logging.handlers.RotatingFileHandler(logFile, encoding="UTF-8", maxBytes=maxSizeBytes, backupCount=4)
logHandler.level = logging.DEBUG
logHandler.addFilter(LevelFilter(logging.DEBUG))
if useFormat:
logHandler.setFormatter(useFormat)
else:
logHandler.setFormatter(formatter)
parentHandler.addHandler(logHandler)
return logHandler
def DisableLogger(handler, parentHandler=UniLogger):
if handler:
handler.flush()
handler.close()
if handler in parentHandler.handlers:
parentHandler.removeHandler(handler)
SetLevel('DEBUG')
streamHandler = logging.StreamHandler()
streamHandler.setFormatter(formatter)
streamHandler.level = logging.INFO
UniLogger.addHandler(streamHandler)
sepLong = '-' * 80
sepShort = '-' * 40
sepLine = '=--=' * 20
| true | true |
f72551baa697a8dbad37b7a7ff3d920bbdc9c06b | 271 | py | Python | Functional Programming/Sum_of_numbers_Recursion.py | youngtech515/PythonScripts | c890c84113ba4e05aea15d1347886dbfb52d3bf9 | [
"MIT"
] | null | null | null | Functional Programming/Sum_of_numbers_Recursion.py | youngtech515/PythonScripts | c890c84113ba4e05aea15d1347886dbfb52d3bf9 | [
"MIT"
] | null | null | null | Functional Programming/Sum_of_numbers_Recursion.py | youngtech515/PythonScripts | c890c84113ba4e05aea15d1347886dbfb52d3bf9 | [
"MIT"
] | null | null | null | print("To print the sum of numbers using recursion")
def calculatatesum(num):
if(num):
a=num+calculatatesum(num-1)
return a
else:
return 0
n=int(input("Enter the Number value:"))
print("The Sum of numbers is,",calculatatesum(n))
| 27.1 | 53 | 0.630996 | print("To print the sum of numbers using recursion")
def calculatatesum(num):
if(num):
a=num+calculatatesum(num-1)
return a
else:
return 0
n=int(input("Enter the Number value:"))
print("The Sum of numbers is,",calculatatesum(n))
| true | true |
f725525a43e73f257b95eb328de9c2b2b9780b5e | 494 | py | Python | optoanalysis/optoanalysis/__init__.py | markusrademacher/DataHandling | 240c7c8378541cc2624fec049a185646f3016233 | [
"MIT"
] | 2 | 2017-07-12T11:18:51.000Z | 2018-08-26T10:31:00.000Z | optoanalysis/optoanalysis/__init__.py | markusrademacher/DataHandling | 240c7c8378541cc2624fec049a185646f3016233 | [
"MIT"
] | 7 | 2017-04-24T18:42:23.000Z | 2017-06-20T13:00:09.000Z | optoanalysis/optoanalysis/__init__.py | AshleySetter/optoanalysis | 2b24a4176508d5e0e5e8644bb617a34f73b041f7 | [
"MIT"
] | 3 | 2017-04-09T19:15:06.000Z | 2017-04-28T09:31:32.000Z | """
optoanalysis
============
Package of functions for the Matter-Wave Interferometry
group for handling experimental data.
"""
# init file
import os
# Read the package version string from the bundled VERSION file. A context
# manager guarantees the file handle is closed (the original left it open).
_mypackage_root_dir = os.path.dirname(__file__)
with open(os.path.join(_mypackage_root_dir, 'VERSION')) as _version_file:
    __version__ = _version_file.read().strip()
# the following line imports all the functions from optoanalysis.py
from .optoanalysis import *
import optoanalysis.thermo
import optoanalysis.LeCroy
import optoanalysis.Saleae
| 19.76 | 67 | 0.777328 |
import os
_mypackage_root_dir = os.path.dirname(__file__)
_version_file = open(os.path.join(_mypackage_root_dir, 'VERSION'))
__version__ = _version_file.read().strip()
from .optoanalysis import *
import optoanalysis.thermo
import optoanalysis.LeCroy
import optoanalysis.Saleae
| true | true |
f72552862a7eea93f641832290eb243900232978 | 1,055 | py | Python | web_audio/helpers.py | SaxAlien/crap-code | bff99bc4501d1c7bc8c169c1b66a2d6bd7ad3494 | [
"Apache-2.0"
] | null | null | null | web_audio/helpers.py | SaxAlien/crap-code | bff99bc4501d1c7bc8c169c1b66a2d6bd7ad3494 | [
"Apache-2.0"
] | null | null | null | web_audio/helpers.py | SaxAlien/crap-code | bff99bc4501d1c7bc8c169c1b66a2d6bd7ad3494 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Crap class
but make code more compact. lmao
WARNING! WARNING!
HIGH CONCENTRATION OF SHIT!
and in future here will be adding more and more methods and classes
but i'm not shure
"""
import os
def success(message):
return '<div class="alert alert-success alert-dismissable">' \
'<button type="button" class="close" data-dismiss="alert">×</button>' \
'{}</div>'.format(message)
def warning(message):
return '<div class="alert alert-danger alert-dismissable">' \
'<button type="button" class="close" data-dismiss="alert">×</button>' \
'{}</div>'.format(message)
def playlist(path):
"""
Especially here ._.
:param path:
:return:
"""
listdir = os.listdir(path)
raw_html = ''
for i in listdir:
raw_html += '<option>{}</option>'.format(unicode.encode(unicode(str(i), 'utf-8'), 'utf8'))
return raw_html # fix utf-8 encode and some useful stuff such as <option> format
| 25.731707 | 98 | 0.611374 |
import os
def success(message):
return '<div class="alert alert-success alert-dismissable">' \
'<button type="button" class="close" data-dismiss="alert">×</button>' \
'{}</div>'.format(message)
def warning(message):
return '<div class="alert alert-danger alert-dismissable">' \
'<button type="button" class="close" data-dismiss="alert">×</button>' \
'{}</div>'.format(message)
def playlist(path):
listdir = os.listdir(path)
raw_html = ''
for i in listdir:
raw_html += '<option>{}</option>'.format(unicode.encode(unicode(str(i), 'utf-8'), 'utf8'))
return raw_html
| true | true |
f725541a544124a0f547895b485bcf1f5d21572b | 1,467 | py | Python | problems/pctsp/salesman/pctsp/application.py | AYaddaden/attention-learn-to-route | 74c2d6533d9e0faae80fa85f6bee2df2142708ba | [
"MIT"
] | 540 | 2019-02-07T13:52:30.000Z | 2022-03-31T12:51:46.000Z | problems/pctsp/salesman/pctsp/application.py | AYaddaden/attention-learn-to-route | 74c2d6533d9e0faae80fa85f6bee2df2142708ba | [
"MIT"
] | 40 | 2019-02-06T17:57:11.000Z | 2022-03-18T12:18:48.000Z | problems/pctsp/salesman/pctsp/application.py | AYaddaden/attention-learn-to-route | 74c2d6533d9e0faae80fa85f6bee2df2142708ba | [
"MIT"
] | 227 | 2019-02-15T09:25:02.000Z | 2022-03-27T10:42:21.000Z | # module application.py
#
# Copyright (c) 2015 Rafael Reis
#
"""
application module - Main module that solves the Prize Collecting Travelling Salesman Problem
"""
from pctsp.model.pctsp import *
from pctsp.model import solution
from pctsp.algo.genius import genius
from pctsp.algo import ilocal_search as ils
from pkg_resources import resource_filename
import random
INPUT_INSTANCE_FILE = resource_filename('pctsp', 'data/problem_20_100_100_1000.pctsp')
def solve_instance(filename, min_prize, runs=10, seed=1234):
random.seed(seed)
pctsp = Pctsp()
pctsp.load(filename, min_prize)
s = solution.random(pctsp, size=int(len(pctsp.prize) * 0.7))
s = ils.ilocal_search(s, n_runs=runs)
return (s.route[1:], s.quality)
def main():
"""Main function, that solves the PCTSP.
"""
pctsp = Pctsp()
pctsp.load(INPUT_INSTANCE_FILE, 386)
#pctsp.prize = np.array([0, 4, 8, 3])
#pctsp.penal = np.array([1000, 7, 11, 17])
#pctsp.cost = np.array([[0, 1, 1, 1], [1, 0, 1, 1], [1, 1, 0, 1], [1, 1, 1, 0]])
# print(pctsp.type)
size = int(len(pctsp.prize)*0.7)
s = solution.random(pctsp, size=size)
print(s.route)
print(s.size)
print(s.quality)
print(s.is_valid())
print("\n")
# s = genius(pctsp)
# print(s.route)
# print(s.quality)
s = ils.ilocal_search(s)
print(s.route)
print(s.size)
print(s.quality)
print(s.is_valid())
if __name__ == '__main__':
main()
| 24.04918 | 93 | 0.652352 |
from pctsp.model.pctsp import *
from pctsp.model import solution
from pctsp.algo.genius import genius
from pctsp.algo import ilocal_search as ils
from pkg_resources import resource_filename
import random
INPUT_INSTANCE_FILE = resource_filename('pctsp', 'data/problem_20_100_100_1000.pctsp')
def solve_instance(filename, min_prize, runs=10, seed=1234):
random.seed(seed)
pctsp = Pctsp()
pctsp.load(filename, min_prize)
s = solution.random(pctsp, size=int(len(pctsp.prize) * 0.7))
s = ils.ilocal_search(s, n_runs=runs)
return (s.route[1:], s.quality)
def main():
pctsp = Pctsp()
pctsp.load(INPUT_INSTANCE_FILE, 386)
size = int(len(pctsp.prize)*0.7)
s = solution.random(pctsp, size=size)
print(s.route)
print(s.size)
print(s.quality)
print(s.is_valid())
print("\n")
s = ils.ilocal_search(s)
print(s.route)
print(s.size)
print(s.quality)
print(s.is_valid())
if __name__ == '__main__':
main()
| true | true |
f72555b7e4d80fd6f6a428c1e413cc4fa2ac3266 | 5,723 | py | Python | azure-batch/azure/batch/models/cloud_job_schedule.py | jmalobicky/azure-sdk-for-python | 61234a3d83f8fb481d1dd2386e54e888864878fd | [
"MIT"
] | 1 | 2018-07-23T08:59:24.000Z | 2018-07-23T08:59:24.000Z | azure-batch/azure/batch/models/cloud_job_schedule.py | jmalobicky/azure-sdk-for-python | 61234a3d83f8fb481d1dd2386e54e888864878fd | [
"MIT"
] | null | null | null | azure-batch/azure/batch/models/cloud_job_schedule.py | jmalobicky/azure-sdk-for-python | 61234a3d83f8fb481d1dd2386e54e888864878fd | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CloudJobSchedule(Model):
"""A job schedule that allows recurring jobs by specifying when to run jobs
and a specification used to create each job.
:param id: A string that uniquely identifies the schedule within the
account.
:type id: str
:param display_name: The display name for the schedule.
:type display_name: str
:param url: The URL of the job schedule.
:type url: str
:param e_tag: The ETag of the job schedule. This is an opaque string. You
can use it to detect whether the job schedule has changed between
requests. In particular, you can be pass the ETag with an Update Job
Schedule request to specify that your changes should take effect only if
nobody else has modified the schedule in the meantime.
:type e_tag: str
:param last_modified: The last modified time of the job schedule. This is
the last time at which the schedule level data, such as the job
specification or recurrence information, changed. It does not factor in
job-level changes such as new jobs being created or jobs changing state.
:type last_modified: datetime
:param creation_time: The creation time of the job schedule.
:type creation_time: datetime
:param state: The current state of the job schedule. Possible values
include: 'active', 'completed', 'disabled', 'terminating', 'deleting'
:type state: str or ~azure.batch.models.JobScheduleState
:param state_transition_time: The time at which the job schedule entered
the current state.
:type state_transition_time: datetime
:param previous_state: The previous state of the job schedule. This
property is not present if the job schedule is in its initial active
state. Possible values include: 'active', 'completed', 'disabled',
'terminating', 'deleting'
:type previous_state: str or ~azure.batch.models.JobScheduleState
:param previous_state_transition_time: The time at which the job schedule
entered its previous state. This property is not present if the job
schedule is in its initial active state.
:type previous_state_transition_time: datetime
:param schedule: The schedule according to which jobs will be created.
:type schedule: ~azure.batch.models.Schedule
:param job_specification: The details of the jobs to be created on this
schedule.
:type job_specification: ~azure.batch.models.JobSpecification
:param execution_info: Information about jobs that have been and will be
run under this schedule.
:type execution_info: ~azure.batch.models.JobScheduleExecutionInformation
:param metadata: A list of name-value pairs associated with the schedule
as metadata. The Batch service does not assign any meaning to metadata; it
is solely for the use of user code.
:type metadata: list[~azure.batch.models.MetadataItem]
:param stats: The lifetime resource usage statistics for the job schedule.
The statistics may not be immediately available. The Batch service
performs periodic roll-up of statistics. The typical delay is about 30
minutes.
:type stats: ~azure.batch.models.JobScheduleStatistics
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'JobScheduleState'},
'state_transition_time': {'key': 'stateTransitionTime', 'type': 'iso-8601'},
'previous_state': {'key': 'previousState', 'type': 'JobScheduleState'},
'previous_state_transition_time': {'key': 'previousStateTransitionTime', 'type': 'iso-8601'},
'schedule': {'key': 'schedule', 'type': 'Schedule'},
'job_specification': {'key': 'jobSpecification', 'type': 'JobSpecification'},
'execution_info': {'key': 'executionInfo', 'type': 'JobScheduleExecutionInformation'},
'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
'stats': {'key': 'stats', 'type': 'JobScheduleStatistics'},
}
def __init__(self, id=None, display_name=None, url=None, e_tag=None, last_modified=None, creation_time=None, state=None, state_transition_time=None, previous_state=None, previous_state_transition_time=None, schedule=None, job_specification=None, execution_info=None, metadata=None, stats=None):
super(CloudJobSchedule, self).__init__()
self.id = id
self.display_name = display_name
self.url = url
self.e_tag = e_tag
self.last_modified = last_modified
self.creation_time = creation_time
self.state = state
self.state_transition_time = state_transition_time
self.previous_state = previous_state
self.previous_state_transition_time = previous_state_transition_time
self.schedule = schedule
self.job_specification = job_specification
self.execution_info = execution_info
self.metadata = metadata
self.stats = stats
| 52.990741 | 298 | 0.687227 |
from msrest.serialization import Model
class CloudJobSchedule(Model):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'JobScheduleState'},
'state_transition_time': {'key': 'stateTransitionTime', 'type': 'iso-8601'},
'previous_state': {'key': 'previousState', 'type': 'JobScheduleState'},
'previous_state_transition_time': {'key': 'previousStateTransitionTime', 'type': 'iso-8601'},
'schedule': {'key': 'schedule', 'type': 'Schedule'},
'job_specification': {'key': 'jobSpecification', 'type': 'JobSpecification'},
'execution_info': {'key': 'executionInfo', 'type': 'JobScheduleExecutionInformation'},
'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
'stats': {'key': 'stats', 'type': 'JobScheduleStatistics'},
}
def __init__(self, id=None, display_name=None, url=None, e_tag=None, last_modified=None, creation_time=None, state=None, state_transition_time=None, previous_state=None, previous_state_transition_time=None, schedule=None, job_specification=None, execution_info=None, metadata=None, stats=None):
super(CloudJobSchedule, self).__init__()
self.id = id
self.display_name = display_name
self.url = url
self.e_tag = e_tag
self.last_modified = last_modified
self.creation_time = creation_time
self.state = state
self.state_transition_time = state_transition_time
self.previous_state = previous_state
self.previous_state_transition_time = previous_state_transition_time
self.schedule = schedule
self.job_specification = job_specification
self.execution_info = execution_info
self.metadata = metadata
self.stats = stats
| true | true |
f725565a4176de2b5beb230d5de6c67dc05f4158 | 4,574 | py | Python | troposphere/iotwireless.py | vasinov/troposphere | db117248dfb0fc500ae9d10db34c42608240bb8d | [
"BSD-2-Clause"
] | null | null | null | troposphere/iotwireless.py | vasinov/troposphere | db117248dfb0fc500ae9d10db34c42608240bb8d | [
"BSD-2-Clause"
] | null | null | null | troposphere/iotwireless.py | vasinov/troposphere | db117248dfb0fc500ae9d10db34c42608240bb8d | [
"BSD-2-Clause"
] | null | null | null | # Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.0.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import boolean, integer
class Destination(AWSObject):
resource_type = "AWS::IoTWireless::Destination"
props = {
"Description": (str, False),
"Expression": (str, True),
"ExpressionType": (str, True),
"Name": (str, True),
"RoleArn": (str, True),
"Tags": (Tags, False),
}
class LoRaWANDeviceProfile(AWSProperty):
props = {
"ClassBTimeout": (integer, False),
"ClassCTimeout": (integer, False),
"MacVersion": (str, False),
"MaxDutyCycle": (integer, False),
"MaxEirp": (integer, False),
"PingSlotDr": (integer, False),
"PingSlotFreq": (integer, False),
"PingSlotPeriod": (integer, False),
"RegParamsRevision": (str, False),
"RfRegion": (str, False),
"Supports32BitFCnt": (boolean, False),
"SupportsClassB": (boolean, False),
"SupportsClassC": (boolean, False),
"SupportsJoin": (boolean, False),
}
class DeviceProfile(AWSObject):
resource_type = "AWS::IoTWireless::DeviceProfile"
props = {
"LoRaWAN": (LoRaWANDeviceProfile, False),
"Name": (str, False),
"Tags": (Tags, False),
}
class LoRaWANServiceProfile(AWSProperty):
props = {
"AddGwMetadata": (boolean, False),
"ChannelMask": (str, False),
"DevStatusReqFreq": (integer, False),
"DlBucketSize": (integer, False),
"DlRate": (integer, False),
"DlRatePolicy": (str, False),
"DrMax": (integer, False),
"DrMin": (integer, False),
"HrAllowed": (boolean, False),
"MinGwDiversity": (integer, False),
"NwkGeoLoc": (boolean, False),
"PrAllowed": (boolean, False),
"RaAllowed": (boolean, False),
"ReportDevStatusBattery": (boolean, False),
"ReportDevStatusMargin": (boolean, False),
"TargetPer": (integer, False),
"UlBucketSize": (integer, False),
"UlRate": (integer, False),
"UlRatePolicy": (str, False),
}
class ServiceProfile(AWSObject):
resource_type = "AWS::IoTWireless::ServiceProfile"
props = {
"LoRaWAN": (LoRaWANServiceProfile, False),
"Name": (str, False),
"Tags": (Tags, False),
}
class SessionKeysAbpV10x(AWSProperty):
props = {
"AppSKey": (str, True),
"NwkSKey": (str, True),
}
class AbpV10x(AWSProperty):
props = {
"DevAddr": (str, True),
"SessionKeys": (SessionKeysAbpV10x, True),
}
class SessionKeysAbpV11(AWSProperty):
props = {
"AppSKey": (str, True),
"FNwkSIntKey": (str, True),
"NwkSEncKey": (str, True),
"SNwkSIntKey": (str, True),
}
class AbpV11(AWSProperty):
props = {
"DevAddr": (str, True),
"SessionKeys": (SessionKeysAbpV11, True),
}
class OtaaV10x(AWSProperty):
props = {
"AppEui": (str, True),
"AppKey": (str, True),
}
class OtaaV11(AWSProperty):
props = {
"AppKey": (str, True),
"JoinEui": (str, True),
"NwkKey": (str, True),
}
class LoRaWANDevice(AWSProperty):
props = {
"AbpV10x": (AbpV10x, False),
"AbpV11": (AbpV11, False),
"DevEui": (str, False),
"DeviceProfileId": (str, False),
"OtaaV10x": (OtaaV10x, False),
"OtaaV11": (OtaaV11, False),
"ServiceProfileId": (str, False),
}
class WirelessDevice(AWSObject):
resource_type = "AWS::IoTWireless::WirelessDevice"
props = {
"Description": (str, False),
"DestinationName": (str, True),
"LastUplinkReceivedAt": (str, False),
"LoRaWAN": (LoRaWANDevice, False),
"Name": (str, False),
"Tags": (Tags, False),
"ThingArn": (str, False),
"Type": (str, True),
}
class LoRaWANGateway(AWSProperty):
props = {
"GatewayEui": (str, True),
"RfRegion": (str, True),
}
class WirelessGateway(AWSObject):
resource_type = "AWS::IoTWireless::WirelessGateway"
props = {
"Description": (str, False),
"LastUplinkReceivedAt": (str, False),
"LoRaWAN": (LoRaWANGateway, True),
"Name": (str, False),
"Tags": (Tags, False),
"ThingArn": (str, False),
}
| 25.131868 | 55 | 0.566463 |
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import boolean, integer
class Destination(AWSObject):
resource_type = "AWS::IoTWireless::Destination"
props = {
"Description": (str, False),
"Expression": (str, True),
"ExpressionType": (str, True),
"Name": (str, True),
"RoleArn": (str, True),
"Tags": (Tags, False),
}
class LoRaWANDeviceProfile(AWSProperty):
props = {
"ClassBTimeout": (integer, False),
"ClassCTimeout": (integer, False),
"MacVersion": (str, False),
"MaxDutyCycle": (integer, False),
"MaxEirp": (integer, False),
"PingSlotDr": (integer, False),
"PingSlotFreq": (integer, False),
"PingSlotPeriod": (integer, False),
"RegParamsRevision": (str, False),
"RfRegion": (str, False),
"Supports32BitFCnt": (boolean, False),
"SupportsClassB": (boolean, False),
"SupportsClassC": (boolean, False),
"SupportsJoin": (boolean, False),
}
class DeviceProfile(AWSObject):
resource_type = "AWS::IoTWireless::DeviceProfile"
props = {
"LoRaWAN": (LoRaWANDeviceProfile, False),
"Name": (str, False),
"Tags": (Tags, False),
}
class LoRaWANServiceProfile(AWSProperty):
props = {
"AddGwMetadata": (boolean, False),
"ChannelMask": (str, False),
"DevStatusReqFreq": (integer, False),
"DlBucketSize": (integer, False),
"DlRate": (integer, False),
"DlRatePolicy": (str, False),
"DrMax": (integer, False),
"DrMin": (integer, False),
"HrAllowed": (boolean, False),
"MinGwDiversity": (integer, False),
"NwkGeoLoc": (boolean, False),
"PrAllowed": (boolean, False),
"RaAllowed": (boolean, False),
"ReportDevStatusBattery": (boolean, False),
"ReportDevStatusMargin": (boolean, False),
"TargetPer": (integer, False),
"UlBucketSize": (integer, False),
"UlRate": (integer, False),
"UlRatePolicy": (str, False),
}
class ServiceProfile(AWSObject):
resource_type = "AWS::IoTWireless::ServiceProfile"
props = {
"LoRaWAN": (LoRaWANServiceProfile, False),
"Name": (str, False),
"Tags": (Tags, False),
}
class SessionKeysAbpV10x(AWSProperty):
props = {
"AppSKey": (str, True),
"NwkSKey": (str, True),
}
class AbpV10x(AWSProperty):
props = {
"DevAddr": (str, True),
"SessionKeys": (SessionKeysAbpV10x, True),
}
class SessionKeysAbpV11(AWSProperty):
props = {
"AppSKey": (str, True),
"FNwkSIntKey": (str, True),
"NwkSEncKey": (str, True),
"SNwkSIntKey": (str, True),
}
class AbpV11(AWSProperty):
props = {
"DevAddr": (str, True),
"SessionKeys": (SessionKeysAbpV11, True),
}
class OtaaV10x(AWSProperty):
props = {
"AppEui": (str, True),
"AppKey": (str, True),
}
class OtaaV11(AWSProperty):
props = {
"AppKey": (str, True),
"JoinEui": (str, True),
"NwkKey": (str, True),
}
class LoRaWANDevice(AWSProperty):
props = {
"AbpV10x": (AbpV10x, False),
"AbpV11": (AbpV11, False),
"DevEui": (str, False),
"DeviceProfileId": (str, False),
"OtaaV10x": (OtaaV10x, False),
"OtaaV11": (OtaaV11, False),
"ServiceProfileId": (str, False),
}
class WirelessDevice(AWSObject):
resource_type = "AWS::IoTWireless::WirelessDevice"
props = {
"Description": (str, False),
"DestinationName": (str, True),
"LastUplinkReceivedAt": (str, False),
"LoRaWAN": (LoRaWANDevice, False),
"Name": (str, False),
"Tags": (Tags, False),
"ThingArn": (str, False),
"Type": (str, True),
}
class LoRaWANGateway(AWSProperty):
props = {
"GatewayEui": (str, True),
"RfRegion": (str, True),
}
class WirelessGateway(AWSObject):
resource_type = "AWS::IoTWireless::WirelessGateway"
props = {
"Description": (str, False),
"LastUplinkReceivedAt": (str, False),
"LoRaWAN": (LoRaWANGateway, True),
"Name": (str, False),
"Tags": (Tags, False),
"ThingArn": (str, False),
}
| true | true |
f7255693150e9fa60c4098be227a094b573e4ddb | 2,130 | py | Python | tests/test_http.py | flome/uproot | eb2ae1ffe6fb2c2ce8cb7cbdc0919d5b51c0ff0f | [
"BSD-3-Clause"
] | null | null | null | tests/test_http.py | flome/uproot | eb2ae1ffe6fb2c2ce8cb7cbdc0919d5b51c0ff0f | [
"BSD-3-Clause"
] | null | null | null | tests/test_http.py | flome/uproot | eb2ae1ffe6fb2c2ce8cb7cbdc0919d5b51c0ff0f | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE
import pytest
import mock
HTTPError = pytest.importorskip('requests.exceptions').HTTPError
import uproot
FILE = "foriter"
LOCAL = "tests/samples/{FILE}.root".format(FILE=FILE)
URL = "http://scikit-hep.org/uproot/examples/{FILE}.root".format(FILE=FILE)
URL_AUTH = "http://scikit-hep.org/uproot/authentication/{FILE}.root".format(FILE=FILE)
AUTH = ("scikit-hep", "uproot")
def mock_get_local_instead_of_http(url="", headers={}, auth=None, **kwargs):
class MockResponse:
def __init__(self, status_code):
self.status_code = status_code
if self.status_code == 200:
with open(LOCAL, "rb") as f:
self.content = f.read()
self.headers = {"Content-Range": str(len(self.content))}
def raise_for_status(self):
if self.status_code == 401: # Authentication Error
raise HTTPError
elif self.status_code == 200: # Ok
pass
if url == URL:
return MockResponse(200)
elif url == URL_AUTH and auth == None:
return MockResponse(401)
elif url == URL_AUTH and auth == AUTH:
return MockResponse(200)
elif url == URL_AUTH:
return MockResponse(401)
@mock.patch("requests.get", mock_get_local_instead_of_http)
class Test(object):
def test_no_auth_needed_no_auth(self):
f = uproot.open(URL)
assert type(f) == uproot.rootio.ROOTDirectory
def test_no_auth_needed_with_auth(self):
f = uproot.open(URL, httpsource={"auth": AUTH})
assert type(f) == uproot.rootio.ROOTDirectory
def test_auth_needed_no_auth(self):
with pytest.raises(HTTPError):
f = uproot.open(URL_AUTH)
def test_auth_needed_correct_auth(self):
f = uproot.open(URL_AUTH, httpsource={"auth": AUTH})
assert type(f) == uproot.rootio.ROOTDirectory
def test_auth_needed_wrong_auth(self):
with pytest.raises(HTTPError):
f = uproot.open(URL_AUTH, httpsource={"auth": ("", "")})
| 34.354839 | 86 | 0.643662 |
import pytest
import mock
HTTPError = pytest.importorskip('requests.exceptions').HTTPError
import uproot
FILE = "foriter"
LOCAL = "tests/samples/{FILE}.root".format(FILE=FILE)
URL = "http://scikit-hep.org/uproot/examples/{FILE}.root".format(FILE=FILE)
URL_AUTH = "http://scikit-hep.org/uproot/authentication/{FILE}.root".format(FILE=FILE)
AUTH = ("scikit-hep", "uproot")
def mock_get_local_instead_of_http(url="", headers={}, auth=None, **kwargs):
class MockResponse:
def __init__(self, status_code):
self.status_code = status_code
if self.status_code == 200:
with open(LOCAL, "rb") as f:
self.content = f.read()
self.headers = {"Content-Range": str(len(self.content))}
def raise_for_status(self):
if self.status_code == 401:
raise HTTPError
elif self.status_code == 200:
pass
if url == URL:
return MockResponse(200)
elif url == URL_AUTH and auth == None:
return MockResponse(401)
elif url == URL_AUTH and auth == AUTH:
return MockResponse(200)
elif url == URL_AUTH:
return MockResponse(401)
@mock.patch("requests.get", mock_get_local_instead_of_http)
class Test(object):
def test_no_auth_needed_no_auth(self):
f = uproot.open(URL)
assert type(f) == uproot.rootio.ROOTDirectory
def test_no_auth_needed_with_auth(self):
f = uproot.open(URL, httpsource={"auth": AUTH})
assert type(f) == uproot.rootio.ROOTDirectory
def test_auth_needed_no_auth(self):
with pytest.raises(HTTPError):
f = uproot.open(URL_AUTH)
def test_auth_needed_correct_auth(self):
f = uproot.open(URL_AUTH, httpsource={"auth": AUTH})
assert type(f) == uproot.rootio.ROOTDirectory
def test_auth_needed_wrong_auth(self):
with pytest.raises(HTTPError):
f = uproot.open(URL_AUTH, httpsource={"auth": ("", "")})
| true | true |
f72557569101118345344ad6a9f06be139d1e4a8 | 424 | py | Python | core/migrations/0036_alter_event_submission_type.py | saggins/lynbrook-app-backend | d5bad6e0742853bb39c5a15d3b7332b7114b671d | [
"MIT"
] | null | null | null | core/migrations/0036_alter_event_submission_type.py | saggins/lynbrook-app-backend | d5bad6e0742853bb39c5a15d3b7332b7114b671d | [
"MIT"
] | 1 | 2022-02-17T07:01:51.000Z | 2022-02-17T07:01:51.000Z | core/migrations/0036_alter_event_submission_type.py | saggins/lynbrook-app-backend | d5bad6e0742853bb39c5a15d3b7332b7114b671d | [
"MIT"
] | 1 | 2022-02-17T05:16:58.000Z | 2022-02-17T05:16:58.000Z | # Generated by Django 3.2.5 on 2021-08-27 09:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0035_alter_event_code'),
]
operations = [
migrations.AlterField(
model_name='event',
name='submission_type',
field=models.IntegerField(choices=[(1, 'Code'), (2, 'File')], default=1),
),
]
| 22.315789 | 85 | 0.589623 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0035_alter_event_code'),
]
operations = [
migrations.AlterField(
model_name='event',
name='submission_type',
field=models.IntegerField(choices=[(1, 'Code'), (2, 'File')], default=1),
),
]
| true | true |
f7255769c302384481f7ccaa713331893763eddb | 2,983 | py | Python | core/brain/dialog.py | vsilent/smarty-bot | 963cba05433be14494ba339343c9903ccab3c37d | [
"MIT"
] | 1 | 2016-10-08T09:01:05.000Z | 2016-10-08T09:01:05.000Z | core/brain/dialog.py | vsilent/smarty-bot | 963cba05433be14494ba339343c9903ccab3c37d | [
"MIT"
] | 1 | 2019-09-24T09:56:52.000Z | 2019-09-24T09:56:52.000Z | core/brain/dialog.py | vsilent/smarty-bot | 963cba05433be14494ba339343c9903ccab3c37d | [
"MIT"
] | null | null | null | def start_dialog(text):
if text is None:
text = recognize_by_google()
if text is None:
return
logging.debug( "You said: " + text )
c = Confirm(text)
state = c.get_state( sentence=text )
logging.debug(type(state))
logging.debug(state)
if(( state == 0) or (state is None)):
sentence = 'DID YOU SAY' + text.upper() + '?'
say(sentence)
logging.debug('start confirm')
listen()
os.system('mv ' + settings.app_dirs['tmp_input_audio_dir'] + 'speech.flac' + ' ' + settings.app_dirs['tmp_input_audio_dir'] + 'last-speech.flac')
#confirmation = recognize_by_google()
confirmation = ask_julius()
if (confirmation is not None) and ('yes' in confirmation.strip().lower()):
s = 'You said %s' % confirmation
_utils = Utils()
path = _utils.get_full_path_to_module_by_request(text)
#copy default reaction files
if not os.path.isfile( path + '/reaction.py' ):
_utils.copy_default_reaction_files( path + '/' )
logging.debug(s)
say('OKAY, NICE!')
c.confirm(1)
logging.debug('Searching for media in internet...')
say('NOTHING FOUND. IM TRYING TO FIND INFORMATION IN THE INTERNET!')
link_to_audio = search_www(text)
logging.debug(text)
downloaded = _utils.download_audio_resource(link_to_audio, text)
if downloaded:
play(text)
else:
say("SORRY !, COULD NOT FIND, MEDIA, FILE, AT WIKI WEBSITE")
suggest_info(text)
elif confirmation is not None and 'no' in confirmation.strip().lower():
say('SORRY, PLEASE, COME CLOSER, AND, REPEAT YOUR QUESTION')
else:
say('PLEASE ASK AGAIN')
os.system('rm -f ' + settings.app_dirs['tmp_input_audio_dir'] + '*.wav ')
#start dialog from begining
#listen()
#start_dialog()
else:
#already know the answer :) play it
play(text)
finish_dialog()
def search_www( text_to_search ):
"""docstring for search_www"""
#small hack for searching exactly wiki or dictionary files
json_results = search( text_to_search)
# now grep the results and find wiki info
if not json_results:
say('OOPS, COULD NOT CONNECT GOOGLE')
return False
_wiki = Wiki()
wiki_page_link = _wiki.find_resourse_link(json_results)
if wiki_page_link:
link_to_audio = _wiki.find_audio_resourse(wiki_page_link)
info = { 'audio_external': link_to_audio
,'wiki_external' : wiki_page_link
,'audio_local' : ''
}
#logging.debug('save json %s' % info)
_utils = Utils()
_utils.save_file_json_info(text_to_search, info)
if link_to_audio:
return link_to_audio
return False
| 35.511905 | 153 | 0.584982 | def start_dialog(text):
if text is None:
text = recognize_by_google()
if text is None:
return
logging.debug( "You said: " + text )
c = Confirm(text)
state = c.get_state( sentence=text )
logging.debug(type(state))
logging.debug(state)
if(( state == 0) or (state is None)):
sentence = 'DID YOU SAY' + text.upper() + '?'
say(sentence)
logging.debug('start confirm')
listen()
os.system('mv ' + settings.app_dirs['tmp_input_audio_dir'] + 'speech.flac' + ' ' + settings.app_dirs['tmp_input_audio_dir'] + 'last-speech.flac')
confirmation = ask_julius()
if (confirmation is not None) and ('yes' in confirmation.strip().lower()):
s = 'You said %s' % confirmation
_utils = Utils()
path = _utils.get_full_path_to_module_by_request(text)
if not os.path.isfile( path + '/reaction.py' ):
_utils.copy_default_reaction_files( path + '/' )
logging.debug(s)
say('OKAY, NICE!')
c.confirm(1)
logging.debug('Searching for media in internet...')
say('NOTHING FOUND. IM TRYING TO FIND INFORMATION IN THE INTERNET!')
link_to_audio = search_www(text)
logging.debug(text)
downloaded = _utils.download_audio_resource(link_to_audio, text)
if downloaded:
play(text)
else:
say("SORRY !, COULD NOT FIND, MEDIA, FILE, AT WIKI WEBSITE")
suggest_info(text)
elif confirmation is not None and 'no' in confirmation.strip().lower():
say('SORRY, PLEASE, COME CLOSER, AND, REPEAT YOUR QUESTION')
else:
say('PLEASE ASK AGAIN')
os.system('rm -f ' + settings.app_dirs['tmp_input_audio_dir'] + '*.wav ')
else:
play(text)
finish_dialog()
def search_www( text_to_search ):
json_results = search( text_to_search)
if not json_results:
say('OOPS, COULD NOT CONNECT GOOGLE')
return False
_wiki = Wiki()
wiki_page_link = _wiki.find_resourse_link(json_results)
if wiki_page_link:
link_to_audio = _wiki.find_audio_resourse(wiki_page_link)
info = { 'audio_external': link_to_audio
,'wiki_external' : wiki_page_link
,'audio_local' : ''
}
_utils = Utils()
_utils.save_file_json_info(text_to_search, info)
if link_to_audio:
return link_to_audio
return False
| true | true |
f72558e0ae0df4436c838a4cc5685909f6d3e117 | 711 | py | Python | distribute_repo.py | ewhitesides/pulp_operations | b6a3541559e48c717926b245bbbf2dd87638e093 | [
"MIT"
] | null | null | null | distribute_repo.py | ewhitesides/pulp_operations | b6a3541559e48c717926b245bbbf2dd87638e093 | [
"MIT"
] | 1 | 2021-06-17T04:35:05.000Z | 2021-06-17T04:35:05.000Z | distribute_repo.py | ewhitesides/pulp_operations | b6a3541559e48c717926b245bbbf2dd87638e093 | [
"MIT"
] | null | null | null | """
script to distribute repos from repo_data.py
"""
import urllib3
import pulp_operations
from repo_data import repo_data
#disable ssl warnings for now
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
#release latest version of the repo to distribution 'latest'
for os in repo_data:
for repo in repo_data[os]:
repo_name = f"{os}-{repo}"
dist_name = f"{repo_name}-latest"
pulp_operations.release(repo_name, 0, dist_name)
#output distribution url info (optional)
for os in repo_data:
for repo in repo_data[os]:
repo_name = f"{os}-{repo}"
dist_name = f"{repo_name}-latest"
pulp_operations.distribution.get_distribution_url(dist_name)
| 28.44 | 68 | 0.729958 |
import urllib3
import pulp_operations
from repo_data import repo_data
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
for os in repo_data:
for repo in repo_data[os]:
repo_name = f"{os}-{repo}"
dist_name = f"{repo_name}-latest"
pulp_operations.release(repo_name, 0, dist_name)
for os in repo_data:
for repo in repo_data[os]:
repo_name = f"{os}-{repo}"
dist_name = f"{repo_name}-latest"
pulp_operations.distribution.get_distribution_url(dist_name)
| true | true |
f72558ec957eca8761389e372a0381b0b817ae58 | 21,798 | py | Python | Old/hoop_detection_angle.py | multirotorsociety/SAFMC-19-D2-Autonomous-Drone | fd9f0fae5d7cbf618b327224e06a7f459612b4ca | [
"MIT"
] | 6 | 2019-04-01T02:38:40.000Z | 2021-06-05T18:23:06.000Z | Old/hoop_detection_angle.py | multirotorsociety/SAFMC-19-D2-Autonomous-Drone | fd9f0fae5d7cbf618b327224e06a7f459612b4ca | [
"MIT"
] | null | null | null | Old/hoop_detection_angle.py | multirotorsociety/SAFMC-19-D2-Autonomous-Drone | fd9f0fae5d7cbf618b327224e06a7f459612b4ca | [
"MIT"
] | 1 | 2019-09-01T08:58:28.000Z | 2019-09-01T08:58:28.000Z | from __future__ import print_function
import time
import math
import thread
# Dk imports
from pymavlink import mavutil
from dronekit import connect, VehicleMode, LocationGlobal, LocationGlobalRelative
# Mux and TOF imports
import I2CMultiplexer
import VL53L1X
# CV imports
import cv2
import numpy as np
from picamera.array import PiRGBArray
from picamera import PiCamera
from fractions import Fraction
from PIL import Image
import random
from sympy import Point, Polygon, pi
#cap = cv2.VideoCapture(0)
camera = PiCamera()
camera.resolution = (426, 240)
camera.framerate = 24
camera.exposure_mode = 'auto'
camera.exposure_compensation = -3
camera.drc_strength = 'off'
camera.still_stats = False
camera.awb_mode = 'off'
camera.awb_gains = (Fraction(167, 103), Fraction(27,16))
rawCapture = PiRGBArray(camera, size=(426, 240))
out = cv2.VideoWriter(str(time.time()) + ".avi",cv2.VideoWriter_fourcc('M','J','P','G'), 10, (426, 240))
# allow the camera to warmup
time.sleep(0.1)
# Connect to Vehicle
connection_string = '/dev/ttyUSB0'
sitl = None
# Start SITL if no connection string specified
if not connection_string:
import dronekit_sitl
sitl = dronekit_sitl.start_default()
connection_string = sitl.connection_string()
# Connect to the Vehicle
print('Connecting to vehicle on: %s' % connection_string)
vehicle = connect(connection_string, wait_ready=True, baud=57600)
# Global variables for distance:
distance_in_mm_N = 0 # North Sensor
distance_in_mm_S = 0 # South Sensor
distance_in_mm_E = 0 # East Sensor
distance_in_mm_W = 0 # West Sensor
distance_in_mm_45 = 0 # 45 degree south east sensor
dX = 0
dY = 0
#Create an I2C Multiplexer object, the address of I2C Multiplexer is 0X70
I2CMulti = I2CMultiplexer.I2CMultiplexer(0x70)
# Init TOF obj
tof = VL53L1X.VL53L1X()
# STarts the TOFs on their respective ports
try:
# for i in [0,2,4,6]:
for i in [0,1,2,7,3]:
I2CMulti.selectPort(i)
tof = VL53L1X.VL53L1X(i2c_bus=1, i2c_address=0x29)
tof.open() # Initialise the i2c bus and configure the sensor
tof.start_ranging(3) # Start ranging, 1 = Short Range, 2 = Medium Range, 3 = Long Range
except:
print("port init failed")
def detect_circle():
    """
    Continuously grab camera frames, detect the red hoop, and publish the
    pixel offset of its centre from the frame centre in the globals dX / dY.

    Detection: threshold "redness" (R minus the mean of B and G, scaled by
    255), sample one thresholded pixel from each third of the hit list, and
    take the circumcentre of that triangle (sympy Polygon) as the hoop
    centre.  dX/dY are set to None when no centre can be drawn this frame.
    Annotated frames are appended to the global VideoWriter `out`.

    Runs forever; intended to be started in a background thread.
    """
    global dX
    global dY
    for img in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
        for i in range(5): # Clears the 5 frame buffer
            frame = img.array
        height, width = frame.shape[:2]
        centre = (int(width/2), int(height/2))
        # Split channels as floats so the subtraction below can go negative.
        b_channel = np.array(frame[:,:,0]).astype('float')
        g_channel = np.array(frame[:,:,1]).astype('float')
        r_channel = np.array(frame[:,:,2]).astype('float')
        bgr_channel = np.add((np.add(b_channel, g_channel)), r_channel)
        # "Redness" score: red minus the mean of the other two channels.
        img_rec_red2 = np.subtract(r_channel,((b_channel + g_channel)/ 2))
        #img_rec_red2 = np.divide(r_channel, 255)
        img_rec_red2 = np.divide(img_rec_red2,255)
        #img_rec_red2 = np.square(img_rec_red2)
        img_rec_red2[img_rec_red2 < 0.3] = 0
        #dX, dY = 0,0
        trials = 1
        # NOTE(review): both try blocks below use bare except, which hides
        # real errors (e.g. sympy degenerate-triangle failures) — narrow them.
        try:
            # Get the array of (row, col) indices of detected pixels.
            thresholded_array = np.argwhere(img_rec_red2 >= 0.3)
            thresholded_list = thresholded_array.tolist()
            #print(thresholded_list)
            if len(thresholded_list) > trials*3:
                # sets the number of trials before averaging to get the centre
                total_centres_X = 0
                total_centres_Y = 0
                hoop_centre = (0,0)
                arr_len_3rd = int(len(thresholded_list) / 3)
                for i in range(trials):
                    # One random pixel from each third of the (row-ordered)
                    # hit list, so the three points span the hoop.
                    r1 = random.randrange(0, int(arr_len_3rd/2))
                    #r2 = random.randrange(0, arr_len_3rd)
                    # rerolls if the same number was rolled
                    #while r2 == r1:
                    r2 = random.randrange(arr_len_3rd, 2*arr_len_3rd)
                    r3 = random.randrange(int(2.5*arr_len_3rd), len(thresholded_list))
                    #while r3 == r1 or r3 == r2:
                        #r3 = random.randrange(0, len(thresholded_list))
                    print(thresholded_list[r1],thresholded_list[r2],thresholded_list[r3])
                    current_centre = Polygon(thresholded_list[r1],thresholded_list[r2],thresholded_list[r3]).circumcenter
                    #print(current_centre)
                    # Points are (row, col), so swap: .y is image x, .x is image y.
                    total_centres_X += int(current_centre.y)
                    total_centres_Y += int(current_centre.x)
                    cv2.circle(frame, (thresholded_list[r1][1], thresholded_list[r1][0]), 5, (0, 0, 255), -1)
                    cv2.circle(frame, (thresholded_list[r2][1], thresholded_list[r2][0]), 5, (0, 0, 255), -1)
                    cv2.circle(frame, (thresholded_list[r3][1], thresholded_list[r3][0]), 5, (0, 0, 255), -1)
                cX = int(total_centres_X / trials)
                cY = int(total_centres_Y / trials)
                #print(cX,cY)
        except:
            print("no hoop detected")
        # Put text and highlight the centre.
        # NOTE(review): cX/cY may be unassigned this frame (or stale from a
        # previous frame) when detection fails; on the very first frame the
        # bare except below masks the resulting NameError — confirm intended.
        try:
            cv2.circle(frame, (cX, cY), 5, (255, 255, 255), -1)
            cv2.line(frame, centre, (cX, cY), (255,0,0), 2)
            #cv2.putText(frame, "centroid", (cX - 25, cY - 25),cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
            dX = cX - centre[0]
            dY = centre[1] - cY
            cv2.putText(frame, ("(" + str(dX) + ", " + str(dY) + " )"), (centre[0] - 20, centre[1] - 20),
            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
            #print('Velocities: ' + str(dX) + "," + str(dY))
        except:
            #print("No centre detected")
            #dX = 0
            #dY = 0
            dX = None
            dY = None
        out.write(frame)
        k = cv2.waitKey(1)
        rawCapture.truncate(0)
# Arm and rakeoff to specific altitude
def arm_and_takeoff(aTargetAltitude):
    """
    Arms vehicle and fly to aTargetAltitude (metres).

    Blocks until the flight controller reports armed, then until the
    rangefinder altitude reaches 95% of aTargetAltitude.
    """
    print("Basic pre-arm checks")
    #Don't try to arm until autopilot is ready
    # while not vehicle.is_armable:
    #     print(" Waiting for vehicle to initialise...")
    #     time.sleep(1)
    print("Arming motors")
    # Copter should arm in GUIDED mode
    vehicle.mode = VehicleMode("GUIDED")
    vehicle.armed = True
    # while not vehicle.armed == True:
    #     print("Not Armed")
    #     time.sleep(0.4)
    # while not vehicle.armed == True:
    #     vehicle.armed = True
    #     print("Not Armed 2")
    #     time.sleep(0.4)
    #Confirm vehicle armed before attempting to take off
    while not vehicle.armed:
        print(" Waiting for arming...")
        time.sleep(1)
    print("Taking off!")
    vehicle.simple_takeoff(aTargetAltitude) # Take off to target altitude
    # Wait until the vehicle reaches a safe height before processing the goto
    # (otherwise the command after Vehicle.simple_takeoff will execute
    # immediately).
    while True:
        print(" Altitude: ", vehicle.rangefinder.distance)
        current_alt = vehicle.rangefinder.distance
        # Treat readings above 20 m as a rangefinder glitch and ignore them.
        if current_alt > 20:
            current_alt = 0
        print(" Arm state: ", vehicle.armed)
        # Break and return from function just below target altitude.
        if current_alt >= aTargetAltitude * 0.95:
            print("Reached target altitude")
            break
        time.sleep(1)
def goto_position_target_local_ned(north, east, down):
    """
    Send SET_POSITION_TARGET_LOCAL_NED command to request the vehicle fly to a
    specified location in the North, East, Down frame.

    Args are metres in the local NED frame (positive `down` is toward the
    ground).  Fire-and-forget: the message is sent once and the function
    returns immediately without waiting for arrival.
    """
    msg = vehicle.message_factory.set_position_target_local_ned_encode(
        0,       # time_boot_ms (not used)
        0, 0,    # target system, target component
        mavutil.mavlink.MAV_FRAME_LOCAL_NED, # frame
        0b0000111111111000, # type_mask (only positions enabled)
        north, east, down,
        0, 0, 0, # x, y, z velocity in m/s (not used)
        0, 0, 0, # x, y, z acceleration (not supported yet, ignored in GCS_Mavlink)
        0, 0)    # yaw, yaw_rate (not supported yet, ignored in GCS_Mavlink)
    # send command to vehicle
    vehicle.send_mavlink(msg)
def get_distance_metres(aLocation1, aLocation2):
    """
    Approximate ground distance in metres between two location objects.

    Only reads the .lat / .lon attributes of its arguments.  This is a
    flat-earth approximation: inaccurate over large distances and close to
    the poles.  It comes from the ArduPilot test code:
    https://github.com/diydrones/ardupilot/blob/master/Tools/autotest/common.py
    """
    delta_lat = aLocation2.lat - aLocation1.lat
    delta_lon = aLocation2.lon - aLocation1.lon
    # ~1.113195e5 metres per degree at the equator.
    return math.sqrt(delta_lat ** 2 + delta_lon ** 2) * 1.113195e5
def get_location_metres(original_location, dNorth, dEast):
    """
    Return a location offset from *original_location* by dNorth/dEast metres.

    The result keeps the altitude and the concrete type (LocationGlobal or
    LocationGlobalRelative) of the input.  Useful for specifying targets
    relative to the current vehicle position.  The flat-earth approximation
    is accurate over small distances (10 m within 1 km) except near the
    poles.  For more information see:
    http://gis.stackexchange.com/questions/2951/algorithm-for-offsetting-a-latitude-longitude-by-some-amount-of-meters

    Raises:
        Exception: if *original_location* is neither LocationGlobal nor
            LocationGlobalRelative.
    """
    earth_radius = 6378137.0  # radius of "spherical" earth, metres
    # Offsets in radians; longitude is scaled by the latitude circle radius.
    lat_offset = dNorth/earth_radius
    lon_offset = dEast/(earth_radius*math.cos(math.pi*original_location.lat/180))
    # Back to decimal degrees.
    new_lat = original_location.lat + (lat_offset * 180/math.pi)
    new_lon = original_location.lon + (lon_offset * 180/math.pi)
    loc_type = type(original_location)
    if loc_type is LocationGlobal:
        return LocationGlobal(new_lat, new_lon, original_location.alt)
    if loc_type is LocationGlobalRelative:
        return LocationGlobalRelative(new_lat, new_lon, original_location.alt)
    raise Exception("Invalid Location object passed")
def goto(dNorth, dEast, gotoFunction=vehicle.simple_goto):
    """
    Moves the vehicle to a position dNorth metres North and dEast metres East
    of the current position.

    `gotoFunction` takes a single `dronekit.lib.LocationGlobal` target, so the
    method can be called with different position-setting commands; by default
    it uses dronekit.lib.Vehicle.simple_goto().

    Blocks, polling every 2 s, until the remaining distance drops below
    0.11 m or the vehicle leaves GUIDED mode.
    """
    currentLocation = vehicle.location.global_relative_frame
    targetLocation = get_location_metres(currentLocation, dNorth, dEast)
    targetDistance = get_distance_metres(currentLocation, targetLocation)
    gotoFunction(targetLocation)
    #print "DEBUG: targetLocation: %s" % targetLocation
    #print "DEBUG: targetLocation: %s" % targetDistance
    print("Initiating GOTO")
    while vehicle.mode.name=="GUIDED": #Stop action if we are no longer in guided mode.
        #print "DEBUG: mode: %s" % vehicle.mode.name
        remainingDistance=get_distance_metres(vehicle.location.global_relative_frame, targetLocation)
        print("Distance to target: " + str(remainingDistance))
        if remainingDistance < 0.11: #Just below target, in case of undershoot.
            print("Reached target")
            break;
        time.sleep(2)
# Sends a velocity to the drone at a rate of 2 Hx
def send_global_velocity(velocity_x, velocity_y, velocity_z, duration):
    """
    Move vehicle in direction based on specified velocity vectors.

    Velocities are m/s in the NED frame.  The same setpoint message is
    re-sent `duration` times at 0.5 s intervals (so `duration` counts
    half-seconds, not seconds).
    """
    msg = vehicle.message_factory.set_position_target_global_int_encode(
        0,       # time_boot_ms (not used)
        0, 0,    # target system, target component
        mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT, # frame
        0b0000111111000111, # type_mask (only speeds enabled)
        0, # lat_int - X Position in WGS84 frame in 1e7 * meters
        0, # lon_int - Y Position in WGS84 frame in 1e7 * meters
        0, # alt - Altitude in meters in AMSL altitude(not WGS84 if absolute or relative)
        # altitude above terrain if GLOBAL_TERRAIN_ALT_INT
        velocity_x, # X velocity in NED frame in m/s
        velocity_y, # Y velocity in NED frame in m/s
        velocity_z, # Z velocity in NED frame in m/s
        0, 0, 0, # afx, afy, afz acceleration (not supported yet, ignored in GCS_Mavlink)
        0, 0)    # yaw, yaw_rate (not supported yet, ignored in GCS_Mavlink)
    # Re-send the setpoint every 0.5 s (a ~2 Hz cycle, not 1 Hz as the
    # original comment claimed).
    for x in range(0,duration):
        vehicle.send_mavlink(msg)
        time.sleep(0.5)
# Sets the Yaw - vehicle will yaw according to the yaw slew rate set in params
# give the vehicle more time (give a 0 velocity vector for x amount of seconds - enough for
# the drone to complete the yaw)
def condition_yaw(heading, relative=False):
    """
    Send MAV_CMD_CONDITION_YAW message to point vehicle at a specified heading
    (in degrees).

    This method sets an absolute heading by default; set `relative=True` to
    yaw relative to the current yaw heading.  Fire-and-forget: returns as
    soon as the message is sent, before the yaw completes.

    By default the yaw of the vehicle will follow the direction of travel.
    After setting the yaw using this function there is no way to return to
    the default "follow direction of travel" behaviour
    (https://github.com/diydrones/ardupilot/issues/2427).  For more
    information see:
    http://copter.ardupilot.com/wiki/common-mavlink-mission-command-messages-mav_cmd/#mav_cmd_condition_yaw
    """
    if relative:
        is_relative = 1 #yaw relative to direction of travel
    else:
        is_relative = 0 #yaw is an absolute angle
    # create the CONDITION_YAW command using command_long_encode()
    msg = vehicle.message_factory.command_long_encode(
        0, 0,    # target system, target component
        mavutil.mavlink.MAV_CMD_CONDITION_YAW, #command
        0, #confirmation
        heading,    # param 1, yaw in degrees
        0,          # param 2, yaw speed deg/s
        1,          # param 3, direction -1 ccw, 1 cw
        is_relative, # param 4, relative offset 1, absolute angle 0
        0, 0, 0)    # param 5 ~ 7 not used
    # send command to vehicle
    vehicle.send_mavlink(msg)
# The following 2 methods allow for the drone attitude to be directly controlled
# the movement is not OF corrected - avoid usage where possible
def set_attitude(roll_angle = 0.0, pitch_angle = 0.0, yaw_rate = 0.0, thrust = 0.5, duration = 0):
    """
    Send a SET_ATTITUDE_TARGET setpoint (roll/pitch in degrees, yaw rate in
    deg/s) and keep re-sending it for `duration` seconds.

    Note that from AC3.3 the message should be re-sent every second (after
    about 3 seconds with no message the velocity will drop back to zero).
    In AC3.2.1 and earlier the specified velocity persists until cancelled.
    The code below works on either version (sending the message multiple
    times does not cause problems).

    The roll and pitch rate cannot be controlled with rate in radians in
    AC3.4.4 or earlier, so a quaternion is used to control pitch and roll
    for those vehicles.
    """
    # Thrust > 0.5: Ascend
    # Thrust == 0.5: Hold the altitude
    # Thrust < 0.5: Descend
    msg = vehicle.message_factory.set_attitude_target_encode(
        0, # time_boot_ms
        1, # Target system
        1, # Target component
        0b00000000, # Type mask: bit 1 is LSB
        to_quaternion(roll_angle, pitch_angle), # Quaternion
        0, # Body roll rate in radian
        0, # Body pitch rate in radian
        math.radians(yaw_rate), # Body yaw rate in radian
        thrust  # Thrust
    )
    vehicle.send_mavlink(msg)
    start = time.time()
    # Busy re-send loop: with duration == 0 (the default) the message is
    # sent exactly once.
    while time.time() - start < duration:
        vehicle.send_mavlink(msg)
        #time.sleep(0.1)
def to_quaternion(roll = 0.0, pitch = 0.0, yaw = 0.0):
    """
    Convert an Euler rotation given in degrees to a quaternion.

    Args:
        roll: roll angle in degrees.
        pitch: pitch angle in degrees.
        yaw: yaw angle in degrees.

    Returns:
        The quaternion as a list [w, x, y, z], as expected by the MAVLink
        SET_ATTITUDE_TARGET message built in set_attitude().
    """
    # Half-angle cosines/sines of each axis (inputs are degrees).
    t0 = math.cos(math.radians(yaw * 0.5))
    t1 = math.sin(math.radians(yaw * 0.5))
    t2 = math.cos(math.radians(roll * 0.5))
    t3 = math.sin(math.radians(roll * 0.5))
    t4 = math.cos(math.radians(pitch * 0.5))
    t5 = math.sin(math.radians(pitch * 0.5))
    w = t0 * t2 * t4 + t1 * t3 * t5
    x = t0 * t3 * t4 - t1 * t2 * t5
    y = t0 * t2 * t5 + t1 * t3 * t4
    z = t1 * t2 * t4 - t0 * t3 * t5
    # BUG FIX: the original computed the components but never returned them,
    # so the caller received None instead of a quaternion.
    return [w, x, y, z]
# Gets the readings from the TOF sensors and updates the distance vars
def get_I2C_readings():
    """
    Poll all five VL53L1X time-of-flight sensors through the I2C multiplexer
    forever, refreshing the distance_in_mm_* globals every ~50 ms.

    Intended to run in a background thread.  Note the mux port -> direction
    mapping below (0=N, 3=S, 7=E, 2=W, 1=45 deg).
    """
    global distance_in_mm_N
    global distance_in_mm_S
    global distance_in_mm_E
    global distance_in_mm_W
    global distance_in_mm_45
    while(True):
        I2CMulti.selectPort(0)
        distance_in_mm_N = tof.get_distance() # Grab the range in mm
        I2CMulti.selectPort(3)
        distance_in_mm_S = tof.get_distance() # Grab the range in mm
        I2CMulti.selectPort(7)
        distance_in_mm_E = tof.get_distance() # Grab the range in mm
        I2CMulti.selectPort(2)
        distance_in_mm_W = tof.get_distance() # Grab the range in mm
        I2CMulti.selectPort(1)
        distance_in_mm_45 = tof.get_distance() # Grab the range in mm
        #print("Sensor N distance: " + str(distance_in_mm_N) + " \nSensor S distance: " + str(distance_in_mm_S) + "\nSensor E distance: " + str(distance_in_mm_E) + "\nSensor W distance: " + str(distance_in_mm_W))
        time.sleep(0.05)
def calculate_velocity(ground_heading, angle):
    """
    Convert a heading into NED x/y velocity components at fixed 0.2 m/s.

    Args:
        ground_heading: reference heading in degrees (vehicle heading at
            startup).
        angle: additional angle in degrees relative to ground_heading.

    Returns:
        (vel_x, vel_y): velocity components in m/s, magnitude 1/5 = 0.2.
    """
    # BUG FIX: the original called math.radian (AttributeError) instead of
    # math.radians.
    rads = math.radians(angle)
    rads += math.radians(ground_heading)
    # Keep the combined angle inside one full turn either way.
    if rads > math.radians(360):
        rads -= math.radians(360)
    elif rads < -math.radians(360):
        rads += math.radians(360)
    # BUG FIX: the original referenced the undefined name heading_rad
    # (NameError at runtime); the wrapped angle computed above is intended.
    vel_x = (np.cos(rads) / 5)
    vel_y = (np.sin(rads) / 5)
    return vel_x, vel_y
# Starts TOF readings before takeoff
#thread.start_new_thread(get_I2C_readings, ())
# Start the hoop-detection CV loop in a background thread; it continuously
# updates the globals dX/dY with the hoop-centre pixel offset.
thread.start_new_thread(detect_circle, ())
# Vehicle heading on the ground (assumed to be the forward heading).
ground_heading = vehicle.heading
# Takeoff to 1.5m
arm_and_takeoff(1.5)
# Corridor Variables
INCREMENT_DISTANCE = 0.1
CORRIDOR_WIDTH_HALVED = 1300 # in mm
THRESHOLD_DISTANCE = 100
lower_bound = CORRIDOR_WIDTH_HALVED - THRESHOLD_DISTANCE
upper_bound = CORRIDOR_WIDTH_HALVED + THRESHOLD_DISTANCE
#print(str(right_X) + str(right_Y))
# Pixel-offset -> velocity scaling factors for the alignment controller.
VEL_SCALE_Y = 0.005 # velocity scaling factor from openCV
VEL_SCALE_X = 0.001
px_threshold = 10 # pixel error below which no correction is applied
print(dX, dY)
# Hoop alignment state: one flag per image axis.
x_aligned = False
y_aligned = False
### SINGLE AXIS ALIGNMENT CODE
# while True:
# if dX < -px_threshold or dX > px_threshold:
# # remember, negative means up
# up_vel = -dX*VEL_SCALE
# if up_vel > 0.05:
# up_vel = 0.05
# elif up_vel < 0.05:
# up_vel = -0.05
# send_global_velocity(0,0,(up_vel), 2)
# send_global_velocity(0,0,0,1) # reset the global vels
# else:
# break
# print("x aligned")
# while True:
# if dY < -px_threshold or dY > px_threshold:
# right_vel_X = -right_X*dY*VEL_SCALE
# right_vel_Y = -right_Y*dY*VEL_SCALE
# if right_vel_X > 0.05:
# right_vel_X = 0.05
# elif right_vel_X < -0.05:
# right_vel_X = -0.05
# if right_vel_Y > 0.05:
# right_vel_Y = 0.05
# elif right_vel_Y < -0.05:
# right_vel_Y = -0.05
# send_global_velocity(right_vel_X,right_vel_Y,0,2)
# send_global_velocity(0,0,0,1) # reset the global vels
# else :
# break
### DOUBLE AXIS ALIGNMENT
# Alignment loop state: up_vel is the vertical correction, the right_vel_*
# pair is the lateral correction in NED x/y.
up_vel, right_vel_X, right_vel_Y = 0,0,0
forward_scale = 0.1
# An axis is only declared aligned after its pixel error has stayed inside
# the threshold for stab_threshold consecutive passes.
stab_seconds_X = 0
stab_seconds_Y = 0
stab_threshold = 1
while (not x_aligned) or (not y_aligned):
    # NOTE(review): dX/dY are written by the detect_circle thread with no
    # locking; also prefer "dX is None" over "dX == None".
    if dX == None:
        print("hoop not detected")
        break
    # Pixel distance from frame centre to hoop centre.
    line_d = (dX**2 + dY**2)**0.5
    if line_d == 0:
        # NOTE(review): fwd_X/fwd_Y below are undefined (case mismatch with
        # fwd_x/fwd_y) -> NameError if this branch runs; and execution falls
        # through to the division below, which raises ZeroDivisionError when
        # line_d == 0.  Needs fixing before flight.
        fwd_x, fwd_y = calculate_velocity(ground_heading, 0)
        send_global_velocity(fwd_X,fwd_Y,0,2)
        send_global_velocity(0,0,0,1)
    total_scale = forward_scale/line_d
    print(dX, dY)
    # Vertical (image X) axis: climb/descend proportional to pixel error,
    # clamped to +/-0.1 m/s.
    if dX < -px_threshold or dX > px_threshold:
        x_aligned = False
        up_vel = round((-dX*VEL_SCALE_X), 3)
        if up_vel > 0.1:
            up_vel = 0.1
        elif up_vel < -0.1:
            up_vel = -0.1
        stab_seconds_X = 0
    else:
        if stab_seconds_X == stab_threshold:
            x_aligned = True
        else:
            x_aligned = False
            stab_seconds_X += 1
        up_vel = 0
    # Lateral (image Y) axis.
    if dY < -px_threshold or dY > px_threshold:
        y_aligned = False
        # NOTE(review): np.arctan2 takes two arguments (y, x); this single-
        # argument call raises TypeError.  np.arctan was probably intended.
        angle = math.degrees(np.arctan2(total_scale / line_d))
        right_vel_X, right_vel_Y = calculate_velocity(ground_heading, angle)
        stab_seconds_Y = 0
    else:
        if stab_seconds_Y == stab_threshold:
            y_aligned = True
        else:
            y_aligned = False
            stab_seconds_Y += 1
        right_vel_X = 0
        right_vel_Y = 0
    print("alignment x: " + str(x_aligned))
    print("alignment y: " + str(y_aligned))
    print("velocity: " + str(right_vel_X) + " : " + str(right_vel_Y) + " : " + str(up_vel))
    # Apply the correction (2 half-second sends), then zero the setpoint.
    send_global_velocity(right_vel_X,right_vel_Y,up_vel,2)
    send_global_velocity(0,0,0,1) # reset the global vels
print("Fully Aligned")
send_global_velocity(0,0,0,10) # reset the global vels
# condition_yaw(90, True)
# condition_yaw(-90, True)
# Land at the current position, then release every hardware handle.
print("Landing")
vehicle.mode = VehicleMode("LAND")
# Close vehicle object before exiting script
print("Close vehicle object")
vehicle.close()
# Shut down simulator if it was started.
if sitl:
    sitl.stop()
# Writing 0 to the mux control register presumably deselects all channels —
# confirm against the multiplexer datasheet ("how it closes?" below).
I2CMulti.i2c.write_byte(0x70,0) # how it closes?
tof.stop_ranging() # Stop ranging
out.release()
| 34.6 | 206 | 0.65006 | from __future__ import print_function
import time
import math
import thread
from pymavlink import mavutil
from dronekit import connect, VehicleMode, LocationGlobal, LocationGlobalRelative
import I2CMultiplexer
import VL53L1X
import cv2
import numpy as np
from picamera.array import PiRGBArray
from picamera import PiCamera
from fractions import Fraction
from PIL import Image
import random
from sympy import Point, Polygon, pi
camera = PiCamera()
camera.resolution = (426, 240)
camera.framerate = 24
camera.exposure_mode = 'auto'
camera.exposure_compensation = -3
camera.drc_strength = 'off'
camera.still_stats = False
camera.awb_mode = 'off'
camera.awb_gains = (Fraction(167, 103), Fraction(27,16))
rawCapture = PiRGBArray(camera, size=(426, 240))
out = cv2.VideoWriter(str(time.time()) + ".avi",cv2.VideoWriter_fourcc('M','J','P','G'), 10, (426, 240))
time.sleep(0.1)
connection_string = '/dev/ttyUSB0'
sitl = None
if not connection_string:
import dronekit_sitl
sitl = dronekit_sitl.start_default()
connection_string = sitl.connection_string()
print('Connecting to vehicle on: %s' % connection_string)
vehicle = connect(connection_string, wait_ready=True, baud=57600)
distance_in_mm_N = 0
distance_in_mm_S = 0
distance_in_mm_E = 0
distance_in_mm_W = 0
distance_in_mm_45 = 0
dX = 0
dY = 0
I2CMulti = I2CMultiplexer.I2CMultiplexer(0x70)
tof = VL53L1X.VL53L1X()
try:
for i in [0,1,2,7,3]:
I2CMulti.selectPort(i)
tof = VL53L1X.VL53L1X(i2c_bus=1, i2c_address=0x29)
tof.open()
tof.start_ranging(3)
except:
print("port init failed")
def detect_circle():
global dX
global dY
for img in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
for i in range(5):
frame = img.array
height, width = frame.shape[:2]
centre = (int(width/2), int(height/2))
b_channel = np.array(frame[:,:,0]).astype('float')
g_channel = np.array(frame[:,:,1]).astype('float')
r_channel = np.array(frame[:,:,2]).astype('float')
bgr_channel = np.add((np.add(b_channel, g_channel)), r_channel)
img_rec_red2 = np.subtract(r_channel,((b_channel + g_channel)/ 2))
img_rec_red2 = np.divide(img_rec_red2,255)
img_rec_red2[img_rec_red2 < 0.3] = 0
trials = 1
try:
thresholded_array = np.argwhere(img_rec_red2 >= 0.3)
thresholded_list = thresholded_array.tolist()
if len(thresholded_list) > trials*3:
total_centres_X = 0
total_centres_Y = 0
hoop_centre = (0,0)
arr_len_3rd = int(len(thresholded_list) / 3)
for i in range(trials):
r1 = random.randrange(0, int(arr_len_3rd/2))
r2 = random.randrange(arr_len_3rd, 2*arr_len_3rd)
r3 = random.randrange(int(2.5*arr_len_3rd), len(thresholded_list))
print(thresholded_list[r1],thresholded_list[r2],thresholded_list[r3])
current_centre = Polygon(thresholded_list[r1],thresholded_list[r2],thresholded_list[r3]).circumcenter
total_centres_X += int(current_centre.y)
total_centres_Y += int(current_centre.x)
cv2.circle(frame, (thresholded_list[r1][1], thresholded_list[r1][0]), 5, (0, 0, 255), -1)
cv2.circle(frame, (thresholded_list[r2][1], thresholded_list[r2][0]), 5, (0, 0, 255), -1)
cv2.circle(frame, (thresholded_list[r3][1], thresholded_list[r3][0]), 5, (0, 0, 255), -1)
cX = int(total_centres_X / trials)
cY = int(total_centres_Y / trials)
except:
print("no hoop detected")
try:
cv2.circle(frame, (cX, cY), 5, (255, 255, 255), -1)
cv2.line(frame, centre, (cX, cY), (255,0,0), 2)
dX = cX - centre[0]
dY = centre[1] - cY
cv2.putText(frame, ("(" + str(dX) + ", " + str(dY) + " )"), (centre[0] - 20, centre[1] - 20),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
except:
dX = None
dY = None
out.write(frame)
k = cv2.waitKey(1)
rawCapture.truncate(0)
def arm_and_takeoff(aTargetAltitude):
print("Basic pre-arm checks")
# while not vehicle.is_armable:
# print(" Waiting for vehicle to initialise...")
# time.sleep(1)
print("Arming motors")
# Copter should arm in GUIDED mode
vehicle.mode = VehicleMode("GUIDED")
vehicle.armed = True
# while not vehicle.armed == True:
# print("Not Armed")
# time.sleep(0.4)
# while not vehicle.armed == True:
# vehicle.armed = True
# print("Not Armed 2")
# time.sleep(0.4)
#Confirm vehicle armed before attempting to take off
while not vehicle.armed:
print(" Waiting for arming...")
time.sleep(1)
print("Taking off!")
vehicle.simple_takeoff(aTargetAltitude) # Take off to target altitude
# Wait until the vehicle reaches a safe height before processing the goto
# (otherwise the command after Vehicle.simple_takeoff will execute
# immediately).
while True:
print(" Altitude: ", vehicle.rangefinder.distance)
current_alt = vehicle.rangefinder.distance
if current_alt > 20:
current_alt = 0
print(" Arm state: ", vehicle.armed)
# Break and return from function just below target altitude.
if current_alt >= aTargetAltitude * 0.95:
print("Reached target altitude")
break
time.sleep(1)
def goto_position_target_local_ned(north, east, down):
msg = vehicle.message_factory.set_position_target_local_ned_encode(
0, # time_boot_ms (not used)
0, 0, # target system, target component
mavutil.mavlink.MAV_FRAME_LOCAL_NED, # frame
0b0000111111111000, # type_mask (only positions enabled)
north, east, down,
0, 0, 0, # x, y, z velocity in m/s (not used)
0, 0, 0, # x, y, z acceleration (not supported yet, ignored in GCS_Mavlink)
0, 0) # yaw, yaw_rate (not supported yet, ignored in GCS_Mavlink)
# send command to vehicle
vehicle.send_mavlink(msg)
def get_distance_metres(aLocation1, aLocation2):
dlat = aLocation2.lat - aLocation1.lat
dlong = aLocation2.lon - aLocation1.lon
return math.sqrt((dlat*dlat) + (dlong*dlong)) * 1.113195e5
def get_location_metres(original_location, dNorth, dEast):
earth_radius = 6378137.0 #Radius of "spherical" earth
#Coordinate offsets in radians
dLat = dNorth/earth_radius
dLon = dEast/(earth_radius*math.cos(math.pi*original_location.lat/180))
#New position in decimal degrees
newlat = original_location.lat + (dLat * 180/math.pi)
newlon = original_location.lon + (dLon * 180/math.pi)
if type(original_location) is LocationGlobal:
targetlocation=LocationGlobal(newlat, newlon,original_location.alt)
elif type(original_location) is LocationGlobalRelative:
targetlocation=LocationGlobalRelative(newlat, newlon,original_location.alt)
else:
raise Exception("Invalid Location object passed")
return targetlocation
def goto(dNorth, dEast, gotoFunction=vehicle.simple_goto):
currentLocation = vehicle.location.global_relative_frame
targetLocation = get_location_metres(currentLocation, dNorth, dEast)
targetDistance = get_distance_metres(currentLocation, targetLocation)
gotoFunction(targetLocation)
#print "DEBUG: targetLocation: %s" % targetLocation
#print "DEBUG: targetLocation: %s" % targetDistance
print("Initiating GOTO")
while vehicle.mode.name=="GUIDED": #Stop action if we are no longer in guided mode.
#print "DEBUG: mode: %s" % vehicle.mode.name
remainingDistance=get_distance_metres(vehicle.location.global_relative_frame, targetLocation)
print("Distance to target: " + str(remainingDistance))
if remainingDistance < 0.11: #Just below target, in case of undershoot.
print("Reached target")
break;
time.sleep(2)
# Sends a velocity to the drone at a rate of 2 Hx
def send_global_velocity(velocity_x, velocity_y, velocity_z, duration):
msg = vehicle.message_factory.set_position_target_global_int_encode(
0, # time_boot_ms (not used)
0, 0, # target system, target component
mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT, # frame
0b0000111111000111, # type_mask (only speeds enabled)
0, # lat_int - X Position in WGS84 frame in 1e7 * meters
0, # lon_int - Y Position in WGS84 frame in 1e7 * meters
0, # alt - Altitude in meters in AMSL altitude(not WGS84 if absolute or relative)
# altitude above terrain if GLOBAL_TERRAIN_ALT_INT
velocity_x, # X velocity in NED frame in m/s
velocity_y, # Y velocity in NED frame in m/s
velocity_z, # Z velocity in NED frame in m/s
0, 0, 0, # afx, afy, afz acceleration (not supported yet, ignored in GCS_Mavlink)
0, 0) # yaw, yaw_rate (not supported yet, ignored in GCS_Mavlink)
# send command to vehicle on 1 Hz cycle
for x in range(0,duration):
vehicle.send_mavlink(msg)
time.sleep(0.5)
# Sets the Yaw - vehicle will yaw according to the yaw slew rate set in params
# give the vehicle more time (give a 0 velocity vector for x amount of seconds - enough for
# the drone to complete the yaw)
def condition_yaw(heading, relative=False):
if relative:
is_relative = 1 #yaw relative to direction of travel
else:
is_relative = 0 #yaw is an absolute angle
# create the CONDITION_YAW command using command_long_encode()
msg = vehicle.message_factory.command_long_encode(
0, 0, # target system, target component
mavutil.mavlink.MAV_CMD_CONDITION_YAW, #command
0, #confirmation
heading, # param 1, yaw in degrees
0, # param 2, yaw speed deg/s
1, # param 3, direction -1 ccw, 1 cw
is_relative, # param 4, relative offset 1, absolute angle 0
0, 0, 0) # param 5 ~ 7 not used
# send command to vehicle
vehicle.send_mavlink(msg)
# The following 2 methods allow for the drone attitude to be directly controlled
# the movement is not OF corrected - avoid usage where possible
def set_attitude(roll_angle = 0.0, pitch_angle = 0.0, yaw_rate = 0.0, thrust = 0.5, duration = 0):
# Thrust > 0.5: Ascend
# Thrust == 0.5: Hold the altitude
# Thrust < 0.5: Descend
msg = vehicle.message_factory.set_attitude_target_encode(
0, # time_boot_ms
1, # Target system
1, # Target component
0b00000000, # Type mask: bit 1 is LSB
to_quaternion(roll_angle, pitch_angle), # Quaternion
0, # Body roll rate in radian
0, # Body pitch rate in radian
math.radians(yaw_rate), # Body yaw rate in radian
thrust # Thrust
)
vehicle.send_mavlink(msg)
start = time.time()
while time.time() - start < duration:
vehicle.send_mavlink(msg)
#time.sleep(0.1)
def to_quaternion(roll = 0.0, pitch = 0.0, yaw = 0.0):
t0 = math.cos(math.radians(yaw * 0.5))
t1 = math.sin(math.radians(yaw * 0.5))
t2 = math.cos(math.radians(roll * 0.5))
t3 = math.sin(math.radians(roll * 0.5))
t4 = math.cos(math.radians(pitch * 0.5))
t5 = math.sin(math.radians(pitch * 0.5))
w = t0 * t2 * t4 + t1 * t3 * t5
x = t0 * t3 * t4 - t1 * t2 * t5
y = t0 * t2 * t5 + t1 * t3 * t4
z = t1 * t2 * t4 - t0 * t3 * t5
# Gets the readings from the TOF sensors and updates the distance vars
def get_I2C_readings():
global distance_in_mm_N
global distance_in_mm_S
global distance_in_mm_E
global distance_in_mm_W
global distance_in_mm_45
while(True):
I2CMulti.selectPort(0)
distance_in_mm_N = tof.get_distance() # Grab the range in mm
I2CMulti.selectPort(3)
distance_in_mm_S = tof.get_distance() # Grab the range in mm
I2CMulti.selectPort(7)
distance_in_mm_E = tof.get_distance() # Grab the range in mm
I2CMulti.selectPort(2)
distance_in_mm_W = tof.get_distance() # Grab the range in mm
I2CMulti.selectPort(1)
distance_in_mm_45 = tof.get_distance() # Grab the range in mm
#print("Sensor N distance: " + str(distance_in_mm_N) + " \nSensor S distance: " + str(distance_in_mm_S) + "\nSensor E distance: " + str(distance_in_mm_E) + "\nSensor W distance: " + str(distance_in_mm_W))
time.sleep(0.05)
def calculate_velocity(ground_heading, angle):
rads = math.radian(angle)
rads += math.radians(ground_heading)
if rads > math.radians(360):
rads -= math.radians(360)
elif rads < -math.radians(360):
rads += math.radians(360)
vel_x = (np.cos(heading_rad) / 5)
vel_y = (np.sin(heading_rad) / 5)
return vel_x, vel_y
# Starts TOF readings before takeoff
#thread.start_new_thread(get_I2C_readings, ())
# Starts CV code
thread.start_new_thread(detect_circle, ())
# Gets vehcle heading on thr ground (this is assumed to be the forward heading)
ground_heading = vehicle.heading
# Takeoff to 1.5m
arm_and_takeoff(1.5)
# Corridor Variables
INCREMENT_DISTANCE = 0.1
CORRIDOR_WIDTH_HALVED = 1300 # in mm
THRESHOLD_DISTANCE = 100
lower_bound = CORRIDOR_WIDTH_HALVED - THRESHOLD_DISTANCE
upper_bound = CORRIDOR_WIDTH_HALVED + THRESHOLD_DISTANCE
#print(str(right_X) + str(right_Y))
VEL_SCALE_Y = 0.005 # velocity scaling factor from openCV
VEL_SCALE_X = 0.001
px_threshold = 10 # sets the threshold before any velocity is taken
print(dX, dY)
# Hoop alignment code
x_aligned = False
y_aligned = False
### SINGLE AXIS ALIGNMENT CODE
# while True:
# if dX < -px_threshold or dX > px_threshold:
# # remember, negative means up
# up_vel = -dX*VEL_SCALE
# if up_vel > 0.05:
# up_vel = 0.05
# elif up_vel < 0.05:
# up_vel = -0.05
# send_global_velocity(0,0,(up_vel), 2)
# send_global_velocity(0,0,0,1) # reset the global vels
# else:
# break
# print("x aligned")
# while True:
# if dY < -px_threshold or dY > px_threshold:
# right_vel_X = -right_X*dY*VEL_SCALE
# right_vel_Y = -right_Y*dY*VEL_SCALE
# if right_vel_X > 0.05:
# right_vel_X = 0.05
# elif right_vel_X < -0.05:
# right_vel_X = -0.05
# if right_vel_Y > 0.05:
# right_vel_Y = 0.05
# elif right_vel_Y < -0.05:
# right_vel_Y = -0.05
# send_global_velocity(right_vel_X,right_vel_Y,0,2)
# send_global_velocity(0,0,0,1) # reset the global vels
# else :
# break
### DOUBLE AXIS ALIGNMENT
up_vel, right_vel_X, right_vel_Y = 0,0,0
forward_scale = 0.1
stab_seconds_X = 0
stab_seconds_Y = 0
stab_threshold = 1
while (not x_aligned) or (not y_aligned):
if dX == None:
print("hoop not detected")
break
line_d = (dX**2 + dY**2)**0.5
if line_d == 0:
fwd_x, fwd_y = calculate_velocity(ground_heading, 0)
send_global_velocity(fwd_X,fwd_Y,0,2)
send_global_velocity(0,0,0,1)
total_scale = forward_scale/line_d
print(dX, dY)
if dX < -px_threshold or dX > px_threshold:
x_aligned = False
up_vel = round((-dX*VEL_SCALE_X), 3)
if up_vel > 0.1:
up_vel = 0.1
elif up_vel < -0.1:
up_vel = -0.1
stab_seconds_X = 0
else:
if stab_seconds_X == stab_threshold:
x_aligned = True
else:
x_aligned = False
stab_seconds_X += 1
up_vel = 0
if dY < -px_threshold or dY > px_threshold:
y_aligned = False
angle = math.degrees(np.arctan2(total_scale / line_d))
right_vel_X, right_vel_Y = calculate_velocity(ground_heading, angle)
stab_seconds_Y = 0
else:
if stab_seconds_Y == stab_threshold:
y_aligned = True
else:
y_aligned = False
stab_seconds_Y += 1
right_vel_X = 0
right_vel_Y = 0
print("alignment x: " + str(x_aligned))
print("alignment y: " + str(y_aligned))
print("velocity: " + str(right_vel_X) + " : " + str(right_vel_Y) + " : " + str(up_vel))
send_global_velocity(right_vel_X,right_vel_Y,up_vel,2)
send_global_velocity(0,0,0,1) # reset the global vels
print("Fully Aligned")
send_global_velocity(0,0,0,10) # reset the global vels
# condition_yaw(90, True)
# condition_yaw(-90, True)
print("Landing")
vehicle.mode = VehicleMode("LAND")
# Close vehicle object before exiting script
print("Close vehicle object")
vehicle.close()
# Shut down simulator if it was started.
if sitl:
sitl.stop()
I2CMulti.i2c.write_byte(0x70,0) # how it closes?
tof.stop_ranging() # Stop ranging
out.release()
| true | true |
f7255a8df81f50ed5c1b02a6f21936c3d0283313 | 449 | py | Python | msg/urls_api.py | paul-wolf/django-stack | 7b45b3087659e15d936182b15ba6b07c14549584 | [
"MIT"
] | null | null | null | msg/urls_api.py | paul-wolf/django-stack | 7b45b3087659e15d936182b15ba6b07c14549584 | [
"MIT"
] | null | null | null | msg/urls_api.py | paul-wolf/django-stack | 7b45b3087659e15d936182b15ba6b07c14549584 | [
"MIT"
] | null | null | null | from django.conf.urls import url, include, patterns
from rest_framework import routers
from . import views
# this gets our Foo model routed
router = routers.DefaultRouter()
router.register(r'foo', views.FooViewSet)
urlpatterns = patterns(
'',
url(r'^', include(router.urls)), # Foo REST urls
url(r'^api-auth/', include('rest_framework.urls',
namespace='rest_framework')), # browsable api login urls
)
| 24.944444 | 87 | 0.674833 | from django.conf.urls import url, include, patterns
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(r'foo', views.FooViewSet)
urlpatterns = patterns(
'',
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls',
namespace='rest_framework')),
)
| true | true |
f7255b2ec1f0592cd0f12cd48061fd80b46db0c4 | 1,544 | py | Python | src/socialhand/forms.py | GonzaloAvilez/site | 10866c826fa0df89f7f25e7392fd0fc8e395f54d | [
"MIT"
] | null | null | null | src/socialhand/forms.py | GonzaloAvilez/site | 10866c826fa0df89f7f25e7392fd0fc8e395f54d | [
"MIT"
] | 12 | 2019-10-02T17:18:09.000Z | 2022-03-11T23:54:53.000Z | src/socialhand/forms.py | GonzaloAvilez/site | 10866c826fa0df89f7f25e7392fd0fc8e395f54d | [
"MIT"
] | null | null | null | from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit, HTML, Button, Row, Field, Fieldset
from crispy_forms.bootstrap import InlineField
class ContactForm(forms.Form):
subject = forms.CharField (required = False,)
contact_name = forms.CharField(required=True,
label='Your Name')
contact_email = forms.EmailField(required=True,
label='Your Email')
content = forms.CharField (required=True,
widget=forms.Textarea,
label='Your Message')
def __init__(self, *args,**kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper ()
# self.helper.form_class = 'form-inline'
# self.helper.laber_class = 'col-lg-2'
# self.helper.field_class = 'col-lg-8'
self.helper.layout=Layout(
# Div(
# InlineField('contact_name'),
# InlineField('contact_email'),
# InlineField('subject'),
# InlineField('content'),
# ),
Div(
InlineField('subject',css_class='form-control input-contact input-lg'),
InlineField('contact_email',css_class='form-control input-contact input-lg'),
InlineField('contact_name',css_class='form-control input-contact input-lg'),
css_class="col-lg-6 form-group lead",
),
Div(
InlineField('content', css_class='input-contact'),
css_class="col-lg-6 form-group",
),
Submit('submit', 'Send Message',css_class="btn btn-contact btn-default"),
)
| 33.565217 | 90 | 0.654793 | from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit, HTML, Button, Row, Field, Fieldset
from crispy_forms.bootstrap import InlineField
class ContactForm(forms.Form):
subject = forms.CharField (required = False,)
contact_name = forms.CharField(required=True,
label='Your Name')
contact_email = forms.EmailField(required=True,
label='Your Email')
content = forms.CharField (required=True,
widget=forms.Textarea,
label='Your Message')
def __init__(self, *args,**kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper ()
self.helper.layout=Layout(
Div(
InlineField('subject',css_class='form-control input-contact input-lg'),
InlineField('contact_email',css_class='form-control input-contact input-lg'),
InlineField('contact_name',css_class='form-control input-contact input-lg'),
css_class="col-lg-6 form-group lead",
),
Div(
InlineField('content', css_class='input-contact'),
css_class="col-lg-6 form-group",
),
Submit('submit', 'Send Message',css_class="btn btn-contact btn-default"),
)
| true | true |
f7255b8e53122ef5294b9fbb88a6f9fa7e69aa5f | 12,799 | py | Python | python_packages_static/flopy/mf6/modflow/mfgwtsrc.py | usgs/neversink_workflow | acd61435b8553e38d4a903c8cd7a3afc612446f9 | [
"CC0-1.0"
] | null | null | null | python_packages_static/flopy/mf6/modflow/mfgwtsrc.py | usgs/neversink_workflow | acd61435b8553e38d4a903c8cd7a3afc612446f9 | [
"CC0-1.0"
] | null | null | null | python_packages_static/flopy/mf6/modflow/mfgwtsrc.py | usgs/neversink_workflow | acd61435b8553e38d4a903c8cd7a3afc612446f9 | [
"CC0-1.0"
] | null | null | null | # DO NOT MODIFY THIS FILE DIRECTLY. THIS FILE MUST BE CREATED BY
# mf6/utils/createpackages.py
# FILE created on February 18, 2021 16:23:05 UTC
from .. import mfpackage
from ..data.mfdatautil import ListTemplateGenerator
class ModflowGwtsrc(mfpackage.MFPackage):
"""
ModflowGwtsrc defines a src package within a gwt6 model.
Parameters
----------
model : MFModel
Model that this package is a part of. Package is automatically
added to model when it is initialized.
loading_package : bool
Do not set this parameter. It is intended for debugging and internal
processing purposes only.
auxiliary : [string]
* auxiliary (string) defines an array of one or more auxiliary variable
names. There is no limit on the number of auxiliary variables that
can be provided on this line; however, lists of information provided
in subsequent blocks must have a column of data for each auxiliary
variable name defined here. The number of auxiliary variables
detected on this line determines the value for naux. Comments cannot
be provided anywhere on this line as they will be interpreted as
auxiliary variable names. Auxiliary variables may not be used by the
package, but they will be available for use by other parts of the
program. The program will terminate with an error if auxiliary
variables are specified on more than one line in the options block.
auxmultname : string
* auxmultname (string) name of auxiliary variable to be used as
multiplier of mass loading rate.
boundnames : boolean
* boundnames (boolean) keyword to indicate that boundary names may be
provided with the list of mass source cells.
print_input : boolean
* print_input (boolean) keyword to indicate that the list of mass
source information will be written to the listing file immediately
after it is read.
print_flows : boolean
* print_flows (boolean) keyword to indicate that the list of mass
source flow rates will be printed to the listing file for every
stress period time step in which "BUDGET PRINT" is specified in
Output Control. If there is no Output Control option and
"PRINT_FLOWS" is specified, then flow rates are printed for the last
time step of each stress period.
save_flows : boolean
* save_flows (boolean) keyword to indicate that mass source flow terms
will be written to the file specified with "BUDGET FILEOUT" in Output
Control.
timeseries : {varname:data} or timeseries data
* Contains data for the ts package. Data can be stored in a dictionary
containing data for the ts package with variable names as keys and
package data as values. Data just for the timeseries variable is also
acceptable. See ts package documentation for more information.
observations : {varname:data} or continuous data
* Contains data for the obs package. Data can be stored in a dictionary
containing data for the obs package with variable names as keys and
package data as values. Data just for the observations variable is
also acceptable. See obs package documentation for more information.
maxbound : integer
* maxbound (integer) integer value specifying the maximum number of
sources cells that will be specified for use during any stress
period.
stress_period_data : [cellid, smassrate, aux, boundname]
* cellid ((integer, ...)) is the cell identifier, and depends on the
type of grid that is used for the simulation. For a structured grid
that uses the DIS input file, CELLID is the layer, row, and column.
For a grid that uses the DISV input file, CELLID is the layer and
CELL2D number. If the model uses the unstructured discretization
(DISU) input file, CELLID is the node number for the cell. This
argument is an index variable, which means that it should be treated
as zero-based when working with FloPy and Python. Flopy will
automatically subtract one when loading index variables and add one
when writing index variables.
* smassrate (double) is the mass source loading rate. A positive value
indicates addition of solute mass and a negative value indicates
removal of solute mass. If the Options block includes a
TIMESERIESFILE entry (see the "Time-Variable Input" section), values
can be obtained from a time series by entering the time-series name
in place of a numeric value.
* aux (double) represents the values of the auxiliary variables for
each mass source. The values of auxiliary variables must be present
for each mass source. The values must be specified in the order of
the auxiliary variables specified in the OPTIONS block. If the
package supports time series and the Options block includes a
TIMESERIESFILE entry (see the "Time-Variable Input" section), values
can be obtained from a time series by entering the time-series name
in place of a numeric value.
* boundname (string) name of the mass source cell. BOUNDNAME is an
ASCII character variable that can contain as many as 40 characters.
If BOUNDNAME contains spaces in it, then the entire name must be
enclosed within single quotes.
filename : String
File name for this package.
pname : String
Package name for this package.
parent_file : MFPackage
Parent package file that references this package. Only needed for
utility packages (mfutl*). For example, mfutllaktab package must have
a mfgwflak package parent_file.
"""
auxiliary = ListTemplateGenerator(("gwt6", "src", "options", "auxiliary"))
ts_filerecord = ListTemplateGenerator(
("gwt6", "src", "options", "ts_filerecord")
)
obs_filerecord = ListTemplateGenerator(
("gwt6", "src", "options", "obs_filerecord")
)
stress_period_data = ListTemplateGenerator(
("gwt6", "src", "period", "stress_period_data")
)
package_abbr = "gwtsrc"
_package_type = "src"
dfn_file_name = "gwt-src.dfn"
dfn = [
[
"block options",
"name auxiliary",
"type string",
"shape (naux)",
"reader urword",
"optional true",
],
[
"block options",
"name auxmultname",
"type string",
"shape",
"reader urword",
"optional true",
],
[
"block options",
"name boundnames",
"type keyword",
"shape",
"reader urword",
"optional true",
],
[
"block options",
"name print_input",
"type keyword",
"reader urword",
"optional true",
],
[
"block options",
"name print_flows",
"type keyword",
"reader urword",
"optional true",
],
[
"block options",
"name save_flows",
"type keyword",
"reader urword",
"optional true",
],
[
"block options",
"name ts_filerecord",
"type record ts6 filein ts6_filename",
"shape",
"reader urword",
"tagged true",
"optional true",
"construct_package ts",
"construct_data timeseries",
"parameter_name timeseries",
],
[
"block options",
"name ts6",
"type keyword",
"shape",
"in_record true",
"reader urword",
"tagged true",
"optional false",
],
[
"block options",
"name filein",
"type keyword",
"shape",
"in_record true",
"reader urword",
"tagged true",
"optional false",
],
[
"block options",
"name ts6_filename",
"type string",
"preserve_case true",
"in_record true",
"reader urword",
"optional false",
"tagged false",
],
[
"block options",
"name obs_filerecord",
"type record obs6 filein obs6_filename",
"shape",
"reader urword",
"tagged true",
"optional true",
"construct_package obs",
"construct_data continuous",
"parameter_name observations",
],
[
"block options",
"name obs6",
"type keyword",
"shape",
"in_record true",
"reader urword",
"tagged true",
"optional false",
],
[
"block options",
"name obs6_filename",
"type string",
"preserve_case true",
"in_record true",
"tagged false",
"reader urword",
"optional false",
],
[
"block dimensions",
"name maxbound",
"type integer",
"reader urword",
"optional false",
],
[
"block period",
"name iper",
"type integer",
"block_variable True",
"in_record true",
"tagged false",
"shape",
"valid",
"reader urword",
"optional false",
],
[
"block period",
"name stress_period_data",
"type recarray cellid smassrate aux boundname",
"shape (maxbound)",
"reader urword",
],
[
"block period",
"name cellid",
"type integer",
"shape (ncelldim)",
"tagged false",
"in_record true",
"reader urword",
],
[
"block period",
"name smassrate",
"type double precision",
"shape",
"tagged false",
"in_record true",
"reader urword",
"time_series true",
],
[
"block period",
"name aux",
"type double precision",
"in_record true",
"tagged false",
"shape (naux)",
"reader urword",
"optional true",
"time_series true",
],
[
"block period",
"name boundname",
"type string",
"shape",
"tagged false",
"in_record true",
"reader urword",
"optional true",
],
]
def __init__(
self,
model,
loading_package=False,
auxiliary=None,
auxmultname=None,
boundnames=None,
print_input=None,
print_flows=None,
save_flows=None,
timeseries=None,
observations=None,
maxbound=None,
stress_period_data=None,
filename=None,
pname=None,
parent_file=None,
):
super(ModflowGwtsrc, self).__init__(
model, "src", filename, pname, loading_package, parent_file
)
# set up variables
self.auxiliary = self.build_mfdata("auxiliary", auxiliary)
self.auxmultname = self.build_mfdata("auxmultname", auxmultname)
self.boundnames = self.build_mfdata("boundnames", boundnames)
self.print_input = self.build_mfdata("print_input", print_input)
self.print_flows = self.build_mfdata("print_flows", print_flows)
self.save_flows = self.build_mfdata("save_flows", save_flows)
self._ts_filerecord = self.build_mfdata("ts_filerecord", None)
self._ts_package = self.build_child_package(
"ts", timeseries, "timeseries", self._ts_filerecord
)
self._obs_filerecord = self.build_mfdata("obs_filerecord", None)
self._obs_package = self.build_child_package(
"obs", observations, "continuous", self._obs_filerecord
)
self.maxbound = self.build_mfdata("maxbound", maxbound)
self.stress_period_data = self.build_mfdata(
"stress_period_data", stress_period_data
)
self._init_complete = True
| 36.464387 | 79 | 0.57067 |
from .. import mfpackage
from ..data.mfdatautil import ListTemplateGenerator
class ModflowGwtsrc(mfpackage.MFPackage):
auxiliary = ListTemplateGenerator(("gwt6", "src", "options", "auxiliary"))
ts_filerecord = ListTemplateGenerator(
("gwt6", "src", "options", "ts_filerecord")
)
obs_filerecord = ListTemplateGenerator(
("gwt6", "src", "options", "obs_filerecord")
)
stress_period_data = ListTemplateGenerator(
("gwt6", "src", "period", "stress_period_data")
)
package_abbr = "gwtsrc"
_package_type = "src"
dfn_file_name = "gwt-src.dfn"
dfn = [
[
"block options",
"name auxiliary",
"type string",
"shape (naux)",
"reader urword",
"optional true",
],
[
"block options",
"name auxmultname",
"type string",
"shape",
"reader urword",
"optional true",
],
[
"block options",
"name boundnames",
"type keyword",
"shape",
"reader urword",
"optional true",
],
[
"block options",
"name print_input",
"type keyword",
"reader urword",
"optional true",
],
[
"block options",
"name print_flows",
"type keyword",
"reader urword",
"optional true",
],
[
"block options",
"name save_flows",
"type keyword",
"reader urword",
"optional true",
],
[
"block options",
"name ts_filerecord",
"type record ts6 filein ts6_filename",
"shape",
"reader urword",
"tagged true",
"optional true",
"construct_package ts",
"construct_data timeseries",
"parameter_name timeseries",
],
[
"block options",
"name ts6",
"type keyword",
"shape",
"in_record true",
"reader urword",
"tagged true",
"optional false",
],
[
"block options",
"name filein",
"type keyword",
"shape",
"in_record true",
"reader urword",
"tagged true",
"optional false",
],
[
"block options",
"name ts6_filename",
"type string",
"preserve_case true",
"in_record true",
"reader urword",
"optional false",
"tagged false",
],
[
"block options",
"name obs_filerecord",
"type record obs6 filein obs6_filename",
"shape",
"reader urword",
"tagged true",
"optional true",
"construct_package obs",
"construct_data continuous",
"parameter_name observations",
],
[
"block options",
"name obs6",
"type keyword",
"shape",
"in_record true",
"reader urword",
"tagged true",
"optional false",
],
[
"block options",
"name obs6_filename",
"type string",
"preserve_case true",
"in_record true",
"tagged false",
"reader urword",
"optional false",
],
[
"block dimensions",
"name maxbound",
"type integer",
"reader urword",
"optional false",
],
[
"block period",
"name iper",
"type integer",
"block_variable True",
"in_record true",
"tagged false",
"shape",
"valid",
"reader urword",
"optional false",
],
[
"block period",
"name stress_period_data",
"type recarray cellid smassrate aux boundname",
"shape (maxbound)",
"reader urword",
],
[
"block period",
"name cellid",
"type integer",
"shape (ncelldim)",
"tagged false",
"in_record true",
"reader urword",
],
[
"block period",
"name smassrate",
"type double precision",
"shape",
"tagged false",
"in_record true",
"reader urword",
"time_series true",
],
[
"block period",
"name aux",
"type double precision",
"in_record true",
"tagged false",
"shape (naux)",
"reader urword",
"optional true",
"time_series true",
],
[
"block period",
"name boundname",
"type string",
"shape",
"tagged false",
"in_record true",
"reader urword",
"optional true",
],
]
def __init__(
self,
model,
loading_package=False,
auxiliary=None,
auxmultname=None,
boundnames=None,
print_input=None,
print_flows=None,
save_flows=None,
timeseries=None,
observations=None,
maxbound=None,
stress_period_data=None,
filename=None,
pname=None,
parent_file=None,
):
super(ModflowGwtsrc, self).__init__(
model, "src", filename, pname, loading_package, parent_file
)
self.auxiliary = self.build_mfdata("auxiliary", auxiliary)
self.auxmultname = self.build_mfdata("auxmultname", auxmultname)
self.boundnames = self.build_mfdata("boundnames", boundnames)
self.print_input = self.build_mfdata("print_input", print_input)
self.print_flows = self.build_mfdata("print_flows", print_flows)
self.save_flows = self.build_mfdata("save_flows", save_flows)
self._ts_filerecord = self.build_mfdata("ts_filerecord", None)
self._ts_package = self.build_child_package(
"ts", timeseries, "timeseries", self._ts_filerecord
)
self._obs_filerecord = self.build_mfdata("obs_filerecord", None)
self._obs_package = self.build_child_package(
"obs", observations, "continuous", self._obs_filerecord
)
self.maxbound = self.build_mfdata("maxbound", maxbound)
self.stress_period_data = self.build_mfdata(
"stress_period_data", stress_period_data
)
self._init_complete = True
| true | true |
f7255db587da3c1bac8640b99f5f51ab4e48fd2b | 1,492 | py | Python | hmm/scripts/easy_casino_learn.py | ondrejba/hmm | 1e9fe47a6057d93e7c77614016a89d5d46959e97 | [
"MIT"
] | null | null | null | hmm/scripts/easy_casino_learn.py | ondrejba/hmm | 1e9fe47a6057d93e7c77614016a89d5d46959e97 | [
"MIT"
] | null | null | null | hmm/scripts/easy_casino_learn.py | ondrejba/hmm | 1e9fe47a6057d93e7c77614016a89d5d46959e97 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
from ..easy_casino import Casino
from ..hmm_multinoulli import HMMMultinoulli
hmm = HMMMultinoulli(Casino.A, Casino.PX, Casino.INIT)
# generate sequence
seq_length = 300
batch_size = 500
xs_batch = []
zs_batch = []
for j in range(batch_size):
casino = Casino()
xs = [casino.observe()]
zs = [casino.z]
for i in range(seq_length - 1):
casino.transition()
xs.append(casino.observe())
zs.append(casino.z)
xs_batch.append(xs)
zs_batch.append(zs)
xs_batch = np.array(xs_batch)
zs_batch = np.array(zs_batch)
num_hidden_states = len(np.unique(zs_batch))
# learn
hmm.initialize_em(2, 6)
for i in range(200):
# learn
print("step", i)
print(hmm.A)
print(hmm.init)
print(hmm.PX)
print()
ll = hmm.learn_em(xs_batch)
print("log likelihood:", ll)
print()
# calculate probabilities
alphas, log_evidence, betas, gammas, etas = hmm.forward_backward(xs_batch[0])
# plot alphas and gammas
plot_zs = np.array(zs_batch[0])
plot_alphas = alphas[:, 1]
plot_gammas = gammas[:, 1]
plot_xs = np.linspace(1, len(plot_zs), num=len(plot_zs))
plt.figure(figsize=(12, 9))
plt.subplot(2, 1, 1)
plt.title("filtering")
plt.plot(plot_xs, plot_zs, label="z")
plt.plot(plot_xs, plot_alphas, label="P(z) = 1")
plt.legend()
plt.subplot(2, 1, 2)
plt.title("smoothing")
plt.plot(plot_xs, plot_zs, label="z")
plt.plot(plot_xs, plot_gammas, label="P(z) = 1")
plt.legend()
plt.show()
| 20.438356 | 77 | 0.678954 | import numpy as np
import matplotlib.pyplot as plt
from ..easy_casino import Casino
from ..hmm_multinoulli import HMMMultinoulli
hmm = HMMMultinoulli(Casino.A, Casino.PX, Casino.INIT)
seq_length = 300
batch_size = 500
xs_batch = []
zs_batch = []
for j in range(batch_size):
casino = Casino()
xs = [casino.observe()]
zs = [casino.z]
for i in range(seq_length - 1):
casino.transition()
xs.append(casino.observe())
zs.append(casino.z)
xs_batch.append(xs)
zs_batch.append(zs)
xs_batch = np.array(xs_batch)
zs_batch = np.array(zs_batch)
num_hidden_states = len(np.unique(zs_batch))
hmm.initialize_em(2, 6)
for i in range(200):
print("step", i)
print(hmm.A)
print(hmm.init)
print(hmm.PX)
print()
ll = hmm.learn_em(xs_batch)
print("log likelihood:", ll)
print()
alphas, log_evidence, betas, gammas, etas = hmm.forward_backward(xs_batch[0])
plot_zs = np.array(zs_batch[0])
plot_alphas = alphas[:, 1]
plot_gammas = gammas[:, 1]
plot_xs = np.linspace(1, len(plot_zs), num=len(plot_zs))
plt.figure(figsize=(12, 9))
plt.subplot(2, 1, 1)
plt.title("filtering")
plt.plot(plot_xs, plot_zs, label="z")
plt.plot(plot_xs, plot_alphas, label="P(z) = 1")
plt.legend()
plt.subplot(2, 1, 2)
plt.title("smoothing")
plt.plot(plot_xs, plot_zs, label="z")
plt.plot(plot_xs, plot_gammas, label="P(z) = 1")
plt.legend()
plt.show()
| true | true |
f7255dc9372185d8116fca049ef881d946cb5401 | 5,629 | py | Python | test/functional/mempool_persist.py | HunterCanimun/surgeofficial-surge-coin | 663dc25517e9045a65a9b1e0993bbaa06d564284 | [
"MIT"
] | null | null | null | test/functional/mempool_persist.py | HunterCanimun/surgeofficial-surge-coin | 663dc25517e9045a65a9b1e0993bbaa06d564284 | [
"MIT"
] | null | null | null | test/functional/mempool_persist.py | HunterCanimun/surgeofficial-surge-coin | 663dc25517e9045a65a9b1e0993bbaa06d564284 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or https://www.opensource.org/licenses/mit-license.php.
"""Test mempool persistence.
By default, bitcoind will dump mempool on shutdown and
then reload it on startup. This can be overridden with
the -persistmempool=false command line option.
Test is as follows:
- start node0, node1 and node2. node1 has -persistmempool=false
- create 5 transactions on node2 to its own address. Note that these
are not sent to node0 or node1 addresses because we don't want
them to be saved in the wallet.
- check that node0 and node1 have 5 transactions in their mempools
- shutdown all nodes.
- startup node0. Verify that it still has 5 transactions
in its mempool. Shutdown node0. This tests that by default the
mempool is persistent.
- startup node1. Verify that its mempool is empty. Shutdown node1.
This tests that with -persistmempool=false, the mempool is not
dumped to disk when the node is shut down.
- Restart node0 with -persistmempool=false. Verify that its mempool is
empty. Shutdown node0. This tests that with -persistmempool=false,
the mempool is not loaded from disk on start up.
- Restart node0 with -persistmempool=true. Verify that it has 5
transactions in its mempool. This tests that -persistmempool=false
does not overwrite a previously valid mempool stored on disk.
"""
from decimal import Decimal
import os
from test_framework.test_framework import SurgeTestFramework
from test_framework.util import (
assert_equal,
wait_until,
)
class MempoolPersistTest(SurgeTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [[], ["-persistmempool=0"], []]
def run_test(self):
chain_height = self.nodes[0].getblockcount()
assert_equal(chain_height, 200)
self.log.debug("Mine a single block to get out of IBD")
self.nodes[0].generate(1)
self.sync_all()
self.log.debug("Send 5 transactions from node2 (to its own address)")
for i in range(5):
self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
node2_balance = self.nodes[2].getbalance()
self.sync_all()
self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Stop-start node0 and node1. Verify that node0 has the transactions in its mempool and node1 does not.")
self.stop_nodes()
# Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
# Also don't store the mempool, to keep the datadir clean
self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
assert self.nodes[0].getmempoolinfo()["loaded"] # start_node is blocking on the mempool being loaded
assert self.nodes[2].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[2].getrawmempool()), 5)
# The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
assert_equal(len(self.nodes[1].getrawmempool()), 0)
# Verify accounting of mempool transactions after restart is correct
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
self.start_node(0, extra_args=["-persistmempool=0"])
assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 5)
# Following code is ahead of our current repository state. Future back port.
'''
mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat')
mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat')
self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
os.remove(mempooldat0)
self.nodes[0].savemempool()
assert os.path.isfile(mempooldat0)
self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
os.rename(mempooldat0, mempooldat1)
self.stop_nodes()
self.start_node(1, extra_args=[])
assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
# to test the exception we are creating a tmp folder called mempool.dat.new
# which is an implementation detail that could change and break this test
mempooldotnew1 = mempooldat1 + '.new'
os.mkdir(mempooldotnew1)
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
os.rmdir(mempooldotnew1)
'''
if __name__ == '__main__':
MempoolPersistTest().main()
| 45.764228 | 127 | 0.693907 |
from decimal import Decimal
import os
from test_framework.test_framework import SurgeTestFramework
from test_framework.util import (
assert_equal,
wait_until,
)
class MempoolPersistTest(SurgeTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [[], ["-persistmempool=0"], []]
def run_test(self):
chain_height = self.nodes[0].getblockcount()
assert_equal(chain_height, 200)
self.log.debug("Mine a single block to get out of IBD")
self.nodes[0].generate(1)
self.sync_all()
self.log.debug("Send 5 transactions from node2 (to its own address)")
for i in range(5):
self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
node2_balance = self.nodes[2].getbalance()
self.sync_all()
self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Stop-start node0 and node1. Verify that node0 has the transactions in its mempool and node1 does not.")
self.stop_nodes()
# Also don't store the mempool, to keep the datadir clean
self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
assert self.nodes[0].getmempoolinfo()["loaded"]
assert self.nodes[2].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[2].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
# Verify accounting of mempool transactions after restart is correct
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
self.start_node(0, extra_args=["-persistmempool=0"])
assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 5)
if __name__ == '__main__':
MempoolPersistTest().main()
| true | true |
f7256127fb8bcd69a2e2866490517a48fb8fa051 | 2,814 | py | Python | examples/00-basic-examples/Create_Netlist.py | sparfenyuk/PyAEDT | efe8d219be974fa8a164d84ca9bc5c0e1b32256c | [
"MIT"
] | null | null | null | examples/00-basic-examples/Create_Netlist.py | sparfenyuk/PyAEDT | efe8d219be974fa8a164d84ca9bc5c0e1b32256c | [
"MIT"
] | null | null | null | examples/00-basic-examples/Create_Netlist.py | sparfenyuk/PyAEDT | efe8d219be974fa8a164d84ca9bc5c0e1b32256c | [
"MIT"
] | null | null | null | """
Netlist Example Analysis
--------------------------------------------
# This Example shows how to Import Netlist in AEDT Nexxim
Netlists supported are HSPICE and, partially, Mentor
"""
import sys
import os
#########################################################
# Import Packages
# Setup The local path to the Path Containing AEDTLIb
from pyaedt import examples
netlist = examples.download_netlist()
from pyaedt import generate_unique_name
if os.name == "posix":
tmpfold = os.environ["TMPDIR"]
else:
tmpfold = os.environ["TEMP"]
temp_folder = os.path.join(tmpfold, generate_unique_name("Example"))
if not os.path.exists(temp_folder): os.makedirs(temp_folder)
myfile = os.path.join(netlist)
print(temp_folder)
#########################################################
# Import of Main Classes needed: Desktop and Circuit
from pyaedt import Circuit
from pyaedt import Desktop
###############################################################################
# Launch Desktop and Circuit
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This examples will use AEDT 2021.1 in Graphical mode
# This examples will use SI units.
desktopVersion = "2021.1"
###############################################################################
# NonGraphical
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Change Boolean to False to open AEDT in graphical mode
NonGraphical = False
NewThread = True
###############################################################################
# Launch AEDT and Circuit Design
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Desktop Class initialize Aedt and start it on specified version and specified graphical mode. NewThread Boolean variables defines if
# a user wants to create a new instance of AEDT or try to connect to existing instance of it
desktop = Desktop(desktopVersion, NonGraphical, NewThread)
aedtapp = Circuit()
#########################################################
# Save Project to temp folder. Can be changed
aedtapp.save_project(os.path.join(temp_folder, "my_firt_netlist.aedt"))
#########################################################
# Define a design variable
# using $ prefix user will create a project variable
aedtapp["Voltage"]="5"
#########################################################
# Launch command to create Schematic
# This method will read the netlist and parse it. All components will be parsed but only speficied
# categories will be mapped. In particular : R, L, C, Q, U, J, V, I components will be mapped
aedtapp.create_schematic_from_netlist(myfile)
#########################################################
# Close Project....or continue adding functionalities
if os.name != "posix":
aedtapp.close_project()
desktop.force_close_desktop()
#########################################################
| 25.125 | 134 | 0.542644 |
import sys
import os
| true | true |
f725615748650d8ad2045fa8fddd4a9cd2da6c74 | 2,799 | py | Python | torch/fx/experimental/unification/utils.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 60,067 | 2017-01-18T17:21:31.000Z | 2022-03-31T21:37:45.000Z | torch/fx/experimental/unification/utils.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 66,955 | 2017-01-18T17:21:38.000Z | 2022-03-31T23:56:11.000Z | torch/fx/experimental/unification/utils.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 19,210 | 2017-01-18T17:45:04.000Z | 2022-03-31T23:51:56.000Z | def hashable(x):
try:
hash(x)
return True
except TypeError:
return False
def transitive_get(key, d):
    """Follow chained lookups in ``d`` starting from ``key``.

    The lookup is repeated until the current value is either unhashable
    or absent from ``d``; that value is returned.  Note: a cycle in ``d``
    (e.g. ``{1: 2, 2: 1}``) never terminates.

    >>> d = {1: 2, 2: 3, 3: 4}
    >>> transitive_get(1, d)
    4
    """
    current = key
    while True:
        # Unhashable values can never be dict keys, so stop here.
        try:
            hash(current)
        except TypeError:
            return current
        if current not in d:
            return current
        current = d[current]
def raises(err, lamda):
    """Return True if calling ``lamda`` raises ``err``, False if it returns.

    Exceptions other than ``err`` propagate to the caller.
    (The parameter is spelled ``lamda`` upstream; kept for compatibility.)
    """
    try:
        lamda()
    except err:
        return True
    return False
# Taken from theano/theano/gof/sched.py
# Avoids licensing issues because this was written by Matthew Rocklin
def _toposort(edges):
""" Topological sort algorithm by Kahn [1] - O(nodes + vertices)
inputs:
edges - a dict of the form {a: {b, c}} where b and c depend on a
outputs:
L - an ordered list of nodes that satisfy the dependencies of edges
>>> _toposort({1: (2, 3), 2: (3, )})
[1, 2, 3]
Closely follows the wikipedia page [2]
[1] Kahn, Arthur B. (1962), "Topological sorting of large networks",
Communications of the ACM
[2] http://en.wikipedia.org/wiki/Toposort#Algorithms
"""
incoming_edges = reverse_dict(edges)
incoming_edges = dict((k, set(val)) for k, val in incoming_edges.items())
S = set((v for v in edges if v not in incoming_edges))
L = []
while S:
n = S.pop()
L.append(n)
for m in edges.get(n, ()):
assert n in incoming_edges[m]
incoming_edges[m].remove(n)
if not incoming_edges[m]:
S.add(m)
if any(incoming_edges.get(v, None) for v in edges):
raise ValueError("Input has cycles")
return L
def reverse_dict(d):
    """Reverses direction of dependence dict

    >>> d = {'a': (1, 2), 'b': (2, 3), 'c':()}
    >>> reverse_dict(d)  # doctest: +SKIP
    {1: ('a',), 2: ('a', 'b'), 3: ('b',)}

    :note: the order of the tuples in the output follows the iteration
        order of the input dict, so it should be treated as unspecified.
    """
    reversed_deps = {}  # type: ignore[var-annotated]
    for source, targets in d.items():
        for target in targets:
            reversed_deps[target] = reversed_deps.get(target, ()) + (source,)
    return reversed_deps
def xfail(func):
    """Assert that calling ``func`` raises; complain if it succeeds.

    BUG FIX: previously the ``raise Exception("XFailed test passed")`` was
    inside the ``try`` block, so it was immediately swallowed by the
    ``except Exception: pass`` handler and xfail could never report an
    unexpectedly-passing test.  The raise is now outside the handler.
    """
    try:
        func()
    except Exception:
        # Expected failure: the function raised, so the xfail holds.
        return
    raise Exception("XFailed test passed")  # pragma:nocover
def freeze(d):
    """Freeze container to hashable form

    Recursively converts dicts and sets to frozensets and lists/tuples
    to tuples; any other value is returned unchanged.

    >>> freeze(1)
    1
    >>> freeze([1, 2])
    (1, 2)
    >>> freeze({1: 2})  # doctest: +SKIP
    frozenset([(1, 2)])
    """
    if isinstance(d, dict):
        return frozenset(freeze(item) for item in d.items())
    if isinstance(d, set):
        return frozenset(freeze(item) for item in d)
    if isinstance(d, (tuple, list)):
        return tuple(freeze(item) for item in d)
    return d
| 26.913462 | 77 | 0.568775 | def hashable(x):
try:
hash(x)
return True
except TypeError:
return False
def transitive_get(key, d):
while hashable(key) and key in d:
key = d[key]
return key
def raises(err, lamda):
try:
lamda()
return False
except err:
return True
def _toposort(edges):
incoming_edges = reverse_dict(edges)
incoming_edges = dict((k, set(val)) for k, val in incoming_edges.items())
S = set((v for v in edges if v not in incoming_edges))
L = []
while S:
n = S.pop()
L.append(n)
for m in edges.get(n, ()):
assert n in incoming_edges[m]
incoming_edges[m].remove(n)
if not incoming_edges[m]:
S.add(m)
if any(incoming_edges.get(v, None) for v in edges):
raise ValueError("Input has cycles")
return L
def reverse_dict(d):
result = {}
for key in d:
for val in d[key]:
result[val] = result.get(val, tuple()) + (key, )
return result
def xfail(func):
try:
func()
raise Exception("XFailed test passed")
except Exception:
pass
def freeze(d):
if isinstance(d, dict):
return frozenset(map(freeze, d.items()))
if isinstance(d, set):
return frozenset(map(freeze, d))
if isinstance(d, (tuple, list)):
return tuple(map(freeze, d))
return d
| true | true |
f725615e2782bb4b02f5432ef2200adf2c96f1c1 | 1,971 | py | Python | dpaycli/instance.py | dpays/dpay-cli | dfa80898e1faea2cee92ebec6fe04873381bd40f | [
"MIT"
] | null | null | null | dpaycli/instance.py | dpays/dpay-cli | dfa80898e1faea2cee92ebec6fe04873381bd40f | [
"MIT"
] | null | null | null | dpaycli/instance.py | dpays/dpay-cli | dfa80898e1faea2cee92ebec6fe04873381bd40f | [
"MIT"
] | null | null | null | # This Python file uses the following encoding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import object
import dpaycli as stm
class SharedInstance(object):
    """Singleton holder for the shared DPay instance and its configuration."""
    # Lazily created dpaycli.DPay instance shared across the library
    # (built on first call to shared_dpay_instance()).
    instance = None
    # Keyword arguments passed to stm.DPay() when the shared instance
    # is created; populated via set_shared_config().
    config = {}
def shared_dpay_instance():
    """ Initialize ``SharedInstance.instance`` on first use and return it.

        The purpose of this method is to offer a single default
        dpay instance that can be reused by multiple classes.

        .. code-block:: python

            from dpaycli.account import Account
            from dpaycli.instance import shared_dpay_instance

            account = Account("test")
            # is equivalent with
            account = Account("test", dpay_instance=shared_dpay_instance())

    """
    if not SharedInstance.instance:
        # First use: drop any cached blockchain objects, then build the
        # instance from the (optionally pre-set) shared config.
        clear_cache()
        SharedInstance.instance = stm.DPay(**SharedInstance.config)
    return SharedInstance.instance
def set_shared_dpay_instance(dpay_instance):
    """ Override the default dpay instance for all users of
        ``SharedInstance.instance``.

        :param dpaycli.dpay.DPay dpay_instance: DPay instance
    """
    # Cached blockchain objects may belong to the old instance, so flush
    # them before swapping.
    clear_cache()
    SharedInstance.instance = dpay_instance
def clear_cache():
    """ Clear the BlockchainObject caches.

        Called whenever the shared instance is (re)created or replaced so
        stale objects do not leak between instances.
    """
    # Imported locally to avoid a circular import at module load time.
    from .blockchainobject import BlockchainObject
    BlockchainObject.clear_cache()
def set_shared_config(config):
    """ Set a config that will be used when calling
        ``shared_dpay_instance``; allows defining the configuration
        without actually creating an instance.

        :param dict config: keyword arguments merged into
            ``SharedInstance.config`` and later passed to ``stm.DPay``.
    """
    # NOTE(review): a non-dict raises AssertionError here; TypeError would
    # be more conventional, but callers may rely on the current type.
    if not isinstance(config, dict):
        raise AssertionError()
    SharedInstance.config.update(config)
    # if one is already set, delete it so the new config takes effect on
    # the next shared_dpay_instance() call
    if SharedInstance.instance:
        clear_cache()
        SharedInstance.instance = None
| 29.863636 | 80 | 0.702689 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import object
import dpaycli as stm
class SharedInstance(object):
instance = None
config = {}
def shared_dpay_instance():
if not SharedInstance.instance:
clear_cache()
SharedInstance.instance = stm.DPay(**SharedInstance.config)
return SharedInstance.instance
def set_shared_dpay_instance(dpay_instance):
clear_cache()
SharedInstance.instance = dpay_instance
def clear_cache():
from .blockchainobject import BlockchainObject
BlockchainObject.clear_cache()
def set_shared_config(config):
if not isinstance(config, dict):
raise AssertionError()
SharedInstance.config.update(config)
if SharedInstance.instance:
clear_cache()
SharedInstance.instance = None
| true | true |
f72562f7f2fd98b8968e23cb2d33be250c5fd8dd | 8,348 | py | Python | script/hassfest/requirements.py | basicpail/core | 5cc54618c5af3f75c08314bf2375cc7ac40d2b7e | [
"Apache-2.0"
] | 1 | 2020-10-01T13:36:50.000Z | 2020-10-01T13:36:50.000Z | script/hassfest/requirements.py | basicpail/core | 5cc54618c5af3f75c08314bf2375cc7ac40d2b7e | [
"Apache-2.0"
] | 69 | 2020-08-04T09:03:43.000Z | 2022-03-31T06:13:01.000Z | script/hassfest/requirements.py | basicpail/core | 5cc54618c5af3f75c08314bf2375cc7ac40d2b7e | [
"Apache-2.0"
] | 1 | 2020-10-26T10:44:32.000Z | 2020-10-26T10:44:32.000Z | """Validate requirements."""
from __future__ import annotations
from collections import deque
import json
import operator
import os
import re
import subprocess
import sys
from awesomeversion import AwesomeVersion, AwesomeVersionStrategy
from stdlib_list import stdlib_list
from tqdm import tqdm
from homeassistant.const import REQUIRED_PYTHON_VER
import homeassistant.util.package as pkg_util
from script.gen_requirements_all import COMMENT_REQUIREMENTS
from .model import Config, Integration
IGNORE_PACKAGES = {
commented.lower().replace("_", "-") for commented in COMMENT_REQUIREMENTS
}
PACKAGE_REGEX = re.compile(r"^(?:--.+\s)?([-_\.\w\d]+).*==.+$")
PIP_REGEX = re.compile(r"^(--.+\s)?([-_\.\w\d]+.*(?:==|>=|<=|~=|!=|<|>|===)?.*$)")
SUPPORTED_PYTHON_TUPLES = [
REQUIRED_PYTHON_VER[:2],
tuple(map(operator.add, REQUIRED_PYTHON_VER, (0, 1, 0)))[:2],
]
SUPPORTED_PYTHON_VERSIONS = [
".".join(map(str, version_tuple)) for version_tuple in SUPPORTED_PYTHON_TUPLES
]
STD_LIBS = {version: set(stdlib_list(version)) for version in SUPPORTED_PYTHON_VERSIONS}
PIPDEPTREE_CACHE = None
IGNORE_VIOLATIONS = {
# Still has standard library requirements.
"acmeda",
"blink",
"ezviz",
"hdmi_cec",
"juicenet",
"lupusec",
"rainbird",
"slide",
"suez_water",
}
def normalize_package_name(requirement: str) -> str:
    """Return the normalized package name from a pinned requirement string.

    Returns "" when the requirement does not match the expected
    ``<name>==<version>`` shape.
    """
    match = PACKAGE_REGEX.search(requirement)
    if match is None:
        return ""
    # pipdeptree keys use lowercase names with dashes instead of underscores.
    return match.group(1).lower().replace("_", "-")
def validate(integrations: dict[str, Integration], config: Config):
    """Handle requirements for integrations.

    With ``config.requirements`` unset, only the requirement *format* is
    checked; otherwise requirements are installed and cross-checked.
    """
    # Check if we are doing format-only validation.
    if not config.requirements:
        for integration in integrations.values():
            validate_requirements_format(integration)
        return
    # Warm the pipdeptree cache once before iterating all integrations.
    ensure_cache()
    # check for incompatible requirements; hide the progress bar when
    # validating specific integrations or running in CI
    disable_tqdm = config.specific_integrations or os.environ.get("CI", False)
    for integration in tqdm(integrations.values(), disable=disable_tqdm):
        if not integration.manifest:
            continue
        validate_requirements(integration)
def validate_requirements_format(integration: Integration) -> bool:
    """Validate requirements format.

    Checks each requirement for embedded spaces, a ``==`` pin (mandatory
    for core integrations) and a parseable version.  Errors are recorded
    on the integration; returns True when no new errors were added.
    """
    start_errors = len(integration.errors)
    for req in integration.requirements:
        if " " in req:
            integration.add_error(
                "requirements",
                f'Requirement "{req}" contains a space',
            )
            continue
        pkg, sep, version = req.partition("==")
        if not sep and integration.core:
            integration.add_error(
                "requirements",
                f'Requirement {req} need to be pinned "<pkg name>==<version>".',
            )
            continue
        # NOTE(review): for non-core integrations without "==", version is ""
        # and falls through to the parse check below — confirm intended.
        if AwesomeVersion(version).strategy == AwesomeVersionStrategy.UNKNOWN:
            integration.add_error(
                "requirements",
                f"Unable to parse package version ({version}) for {pkg}.",
            )
            continue
    return len(integration.errors) == start_errors
def validate_requirements(integration: Integration):
    """Validate requirements.

    Normalizes the integration's requirements, installs them, resolves
    the full transitive set and flags packages that shadow the standard
    library of any supported Python version.
    """
    if not validate_requirements_format(integration):
        return
    # Some integrations have not been fixed yet so are allowed to have violations.
    if integration.domain in IGNORE_VIOLATIONS:
        return
    integration_requirements = set()
    integration_packages = set()
    for req in integration.requirements:
        package = normalize_package_name(req)
        if not package:
            integration.add_error(
                "requirements",
                f"Failed to normalize package name from requirement {req}",
            )
            return
        # Skip packages deliberately commented out of requirements_all.
        if package in IGNORE_PACKAGES:
            continue
        integration_requirements.add(req)
        integration_packages.add(package)
    if integration.disabled:
        return
    install_ok = install_requirements(integration, integration_requirements)
    if not install_ok:
        return
    all_integration_requirements = get_requirements(integration, integration_packages)
    if integration_requirements and not all_integration_requirements:
        integration.add_error(
            "requirements",
            f"Failed to resolve requirements {integration_requirements}",
        )
        return
    # Check for requirements incompatible with standard library.
    for version, std_libs in STD_LIBS.items():
        for req in all_integration_requirements:
            if req in std_libs:
                integration.add_error(
                    "requirements",
                    f"Package {req} is not compatible with Python {version} standard library",
                )
def ensure_cache():
    """Ensure we have a cache of pipdeptree (built at most once per run).

    The cache maps a package key to its pipdeptree metadata plus a set of
    direct dependency keys, e.g.::

        {
            "flake8-docstring": {
                "key": "flake8-docstrings",
                "package_name": "flake8-docstrings",
                "installed_version": "1.5.0"
                "dependencies": {"flake8"}
            }
        }
    """
    global PIPDEPTREE_CACHE
    # Already populated (install_requirements resets it to None after
    # installing something, forcing a rebuild here).
    if PIPDEPTREE_CACHE is not None:
        return
    cache = {}
    # Run pipdeptree once and parse its JSON output.
    for item in json.loads(
        subprocess.run(
            ["pipdeptree", "-w", "silence", "--json"],
            check=True,
            capture_output=True,
            text=True,
        ).stdout
    ):
        cache[item["package"]["key"]] = {
            **item["package"],
            "dependencies": {dep["key"] for dep in item["dependencies"]},
        }
    PIPDEPTREE_CACHE = cache
def get_requirements(integration: Integration, packages: set[str]) -> set[str]:
    """Return all (recursively) requirements for an integration.

    Performs a breadth-first walk of the pipdeptree dependency graph
    starting from ``packages``; every package reached is included.
    """
    ensure_cache()
    all_requirements = set()
    to_check = deque(packages)
    while to_check:
        package = to_check.popleft()
        # Already visited via another dependency path.
        if package in all_requirements:
            continue
        all_requirements.add(package)
        item = PIPDEPTREE_CACHE.get(package)
        if item is None:
            # Only warn if direct dependencies could not be resolved
            if package in packages:
                integration.add_error(
                    "requirements", f"Failed to resolve requirements for {package}"
                )
            continue
        to_check.extend(item["dependencies"])
    return all_requirements
def install_requirements(integration: Integration, requirements: set[str]) -> bool:
    """Install integration requirements.

    Skips requirements already installed at the pinned version; records
    parse/install failures on the integration.  Return True if successful.
    """
    global PIPDEPTREE_CACHE
    ensure_cache()
    for req in requirements:
        # PIP_REGEX splits optional pip flags (group 1) from the
        # requirement spec itself (group 2).
        match = PIP_REGEX.search(req)
        if not match:
            integration.add_error(
                "requirements",
                f"Failed to parse requirement {req} before installation",
            )
            continue
        install_args = match.group(1)
        requirement_arg = match.group(2)
        is_installed = False
        normalized = normalize_package_name(requirement_arg)
        # Fast path: check the pipdeptree cache for an exact pinned match.
        if normalized and "==" in requirement_arg:
            ver = requirement_arg.split("==")[-1]
            item = PIPDEPTREE_CACHE.get(normalized)
            is_installed = item and item["installed_version"] == ver
        # Slow path: fall back to pkg_util's installed check.
        if not is_installed:
            try:
                is_installed = pkg_util.is_installed(req)
            except ValueError:
                is_installed = False
        if is_installed:
            continue
        args = [sys.executable, "-m", "pip", "install", "--quiet"]
        if install_args:
            args.append(install_args)
        args.append(requirement_arg)
        try:
            result = subprocess.run(args, check=True, capture_output=True, text=True)
        except subprocess.SubprocessError:
            integration.add_error(
                "requirements",
                f"Requirement {req} failed to install",
            )
        else:
            # Clear the pipdeptree cache if something got installed
            if "Successfully installed" in result.stdout:
                PIPDEPTREE_CACHE = None
    if integration.errors:
        return False
    return True
| 28.298305 | 94 | 0.622185 | from __future__ import annotations
from collections import deque
import json
import operator
import os
import re
import subprocess
import sys
from awesomeversion import AwesomeVersion, AwesomeVersionStrategy
from stdlib_list import stdlib_list
from tqdm import tqdm
from homeassistant.const import REQUIRED_PYTHON_VER
import homeassistant.util.package as pkg_util
from script.gen_requirements_all import COMMENT_REQUIREMENTS
from .model import Config, Integration
IGNORE_PACKAGES = {
commented.lower().replace("_", "-") for commented in COMMENT_REQUIREMENTS
}
PACKAGE_REGEX = re.compile(r"^(?:--.+\s)?([-_\.\w\d]+).*==.+$")
PIP_REGEX = re.compile(r"^(--.+\s)?([-_\.\w\d]+.*(?:==|>=|<=|~=|!=|<|>|===)?.*$)")
SUPPORTED_PYTHON_TUPLES = [
REQUIRED_PYTHON_VER[:2],
tuple(map(operator.add, REQUIRED_PYTHON_VER, (0, 1, 0)))[:2],
]
SUPPORTED_PYTHON_VERSIONS = [
".".join(map(str, version_tuple)) for version_tuple in SUPPORTED_PYTHON_TUPLES
]
STD_LIBS = {version: set(stdlib_list(version)) for version in SUPPORTED_PYTHON_VERSIONS}
PIPDEPTREE_CACHE = None
IGNORE_VIOLATIONS = {
"acmeda",
"blink",
"ezviz",
"hdmi_cec",
"juicenet",
"lupusec",
"rainbird",
"slide",
"suez_water",
}
def normalize_package_name(requirement: str) -> str:
match = PACKAGE_REGEX.search(requirement)
if not match:
return ""
package = match.group(1).lower().replace("_", "-")
return package
def validate(integrations: dict[str, Integration], config: Config):
if not config.requirements:
for integration in integrations.values():
validate_requirements_format(integration)
return
ensure_cache()
disable_tqdm = config.specific_integrations or os.environ.get("CI", False)
for integration in tqdm(integrations.values(), disable=disable_tqdm):
if not integration.manifest:
continue
validate_requirements(integration)
def validate_requirements_format(integration: Integration) -> bool:
start_errors = len(integration.errors)
for req in integration.requirements:
if " " in req:
integration.add_error(
"requirements",
f'Requirement "{req}" contains a space',
)
continue
pkg, sep, version = req.partition("==")
if not sep and integration.core:
integration.add_error(
"requirements",
f'Requirement {req} need to be pinned "<pkg name>==<version>".',
)
continue
if AwesomeVersion(version).strategy == AwesomeVersionStrategy.UNKNOWN:
integration.add_error(
"requirements",
f"Unable to parse package version ({version}) for {pkg}.",
)
continue
return len(integration.errors) == start_errors
def validate_requirements(integration: Integration):
if not validate_requirements_format(integration):
return
if integration.domain in IGNORE_VIOLATIONS:
return
integration_requirements = set()
integration_packages = set()
for req in integration.requirements:
package = normalize_package_name(req)
if not package:
integration.add_error(
"requirements",
f"Failed to normalize package name from requirement {req}",
)
return
if package in IGNORE_PACKAGES:
continue
integration_requirements.add(req)
integration_packages.add(package)
if integration.disabled:
return
install_ok = install_requirements(integration, integration_requirements)
if not install_ok:
return
all_integration_requirements = get_requirements(integration, integration_packages)
if integration_requirements and not all_integration_requirements:
integration.add_error(
"requirements",
f"Failed to resolve requirements {integration_requirements}",
)
return
for version, std_libs in STD_LIBS.items():
for req in all_integration_requirements:
if req in std_libs:
integration.add_error(
"requirements",
f"Package {req} is not compatible with Python {version} standard library",
)
def ensure_cache():
global PIPDEPTREE_CACHE
if PIPDEPTREE_CACHE is not None:
return
cache = {}
for item in json.loads(
subprocess.run(
["pipdeptree", "-w", "silence", "--json"],
check=True,
capture_output=True,
text=True,
).stdout
):
cache[item["package"]["key"]] = {
**item["package"],
"dependencies": {dep["key"] for dep in item["dependencies"]},
}
PIPDEPTREE_CACHE = cache
def get_requirements(integration: Integration, packages: set[str]) -> set[str]:
ensure_cache()
all_requirements = set()
to_check = deque(packages)
while to_check:
package = to_check.popleft()
if package in all_requirements:
continue
all_requirements.add(package)
item = PIPDEPTREE_CACHE.get(package)
if item is None:
if package in packages:
integration.add_error(
"requirements", f"Failed to resolve requirements for {package}"
)
continue
to_check.extend(item["dependencies"])
return all_requirements
def install_requirements(integration: Integration, requirements: set[str]) -> bool:
global PIPDEPTREE_CACHE
ensure_cache()
for req in requirements:
match = PIP_REGEX.search(req)
if not match:
integration.add_error(
"requirements",
f"Failed to parse requirement {req} before installation",
)
continue
install_args = match.group(1)
requirement_arg = match.group(2)
is_installed = False
normalized = normalize_package_name(requirement_arg)
if normalized and "==" in requirement_arg:
ver = requirement_arg.split("==")[-1]
item = PIPDEPTREE_CACHE.get(normalized)
is_installed = item and item["installed_version"] == ver
if not is_installed:
try:
is_installed = pkg_util.is_installed(req)
except ValueError:
is_installed = False
if is_installed:
continue
args = [sys.executable, "-m", "pip", "install", "--quiet"]
if install_args:
args.append(install_args)
args.append(requirement_arg)
try:
result = subprocess.run(args, check=True, capture_output=True, text=True)
except subprocess.SubprocessError:
integration.add_error(
"requirements",
f"Requirement {req} failed to install",
)
else:
if "Successfully installed" in result.stdout:
PIPDEPTREE_CACHE = None
if integration.errors:
return False
return True
| true | true |
f72563ebf144b5c4ed5d91972f5d809a8a4c52e3 | 2,010 | py | Python | a02_zapatamezaj.py | 2019-fall-csc-226/a02-loopy-turtles-loopy-languages-zapatamezaj-a02 | 0a390e93a2f32ce7dbaaf963e660e1f98fd5741c | [
"MIT"
] | null | null | null | a02_zapatamezaj.py | 2019-fall-csc-226/a02-loopy-turtles-loopy-languages-zapatamezaj-a02 | 0a390e93a2f32ce7dbaaf963e660e1f98fd5741c | [
"MIT"
] | null | null | null | a02_zapatamezaj.py | 2019-fall-csc-226/a02-loopy-turtles-loopy-languages-zapatamezaj-a02 | 0a390e93a2f32ce7dbaaf963e660e1f98fd5741c | [
"MIT"
] | null | null | null | ######################################################################
# Author: Jose Zapata Meza
# Username: zapatamezaj
# Assignment: A02: Loopy Turtle, Loopy Languages
# Purpose: Practice using the turtle library and loops
######################################################################
# Acknowledgements:
# licensed under a Creative Commons
# Attribution-Noncommercial-Share Alike 3.0 United States License.
######################################################################
# Imports the turtle files
import turtle
# Creates a window with a grey background
wn = turtle.Screen()
wn.bgcolor("grey")
# Creates a turtle that is named Jose
jose = turtle.Turtle()
jose.setheading(0)
# Jose's color is blue
jose.color('blue')
# The origin of Jose is moved
jose.penup()
jose.back(200)
jose.pendown()
# A turtle to make a road is created and placed to where the road should be
road = turtle.Turtle()
road.penup()
road.backward(340)
road.right(90)
road.forward(150)
road.left(90)
road.pendown()
road.forward(700)
# A turtle to draw a sun is created
sun = turtle.Turtle()
sun.penup()
sun.color('yellow')
sun.forward(250)
sun.left(90)
sun.forward(220)
sun.pendown()
# The turtle fills its shape to make the sun bright
sun.begin_fill()
sun.circle(50)
sun.end_fill()
sun.hideturtle()
# A loop is created to draw a rectangle
for i in range(2):
jose.forward(400)
jose.right(90)
jose.forward(100)
jose.right(90)
# Jose moves to draw the rear tire of the car
jose.forward(50)
jose.penup()
jose.right(90)
jose.forward(100)
jose.pendown()
jose.circle(50)
# Jose moves to draw the front tire of the car
jose.left(90)
jose.forward(200)
jose.right(90)
jose.circle(50)
# Jose moves to make the top part of the car
jose.left(90)
jose.forward(100)
jose.left(90)
jose.penup()
jose.forward(100)
jose.pendown()
jose.forward(100)
jose.left(90)
jose.forward(300)
jose.left(90)
jose.forward(100)
jose.hideturtle()
# To end the program, one clicks on the screen
wn.exitonclick()
| 21.157895 | 75 | 0.653234 | true | true | |
f72563f0c25147eb28eb5f134aa0a5390efffad7 | 11,449 | py | Python | client_code/Slider/__init__.py | hugetim/anvil-extras | ca83f6ada5149514c2affbe1ab081a4ca677c7e0 | [
"MIT"
] | null | null | null | client_code/Slider/__init__.py | hugetim/anvil-extras | ca83f6ada5149514c2affbe1ab081a4ca677c7e0 | [
"MIT"
] | null | null | null | client_code/Slider/__init__.py | hugetim/anvil-extras | ca83f6ada5149514c2affbe1ab081a4ca677c7e0 | [
"MIT"
] | null | null | null | # SPDX-License-Identifier: MIT
#
# Copyright (c) 2021 The Anvil Extras project team members listed at
# https://github.com/anvilistas/anvil-extras/graphs/contributors
#
# This software is published at https://github.com/anvilistas/anvil-extras
import anvil.js
from anvil import HtmlPanel as _HtmlPanel
from ..utils._component_helpers import _get_color, _html_injector, _spacing_property
from ._anvil_designer import SliderTemplate
__version__ = "1.7.1"
noui_version = "15.4.0"
_html_injector.cdn(
f"https://cdn.jsdelivr.net/npm/nouislider@{noui_version}/dist/nouislider.min.css"
)
_html_injector.css(
"""
.anvil-slider-container {
padding: 10px 0;
}
.anvil-slider-container.has-pips {
padding-bottom: 40px;
}
.anvil-container-overflow, .anvil-panel-col {
overflow: visible;
}
.noUi-connect {
background: var(--primary);
}
.noUi-horizontal .noUi-handle {
width: 34px;
height: 34px;
right: -17px;
top: -10px;
border-radius: 50%;
}
.noUi-handle::before, .noUi-handle::after {
content: none
}
"""
)
_Slider = anvil.js.import_from(
f"https://cdn.skypack.dev/nouislider@{noui_version}"
).default
import json
def _as_list(x):
return x if isinstance(x, list) else [x]
def _from_list(x):
return x[0] if isinstance(x, list) else x
def _parse(s, force_list=False):
if not isinstance(s, str):
return s
s = s.lower().strip()
if not s:
return None if not force_list else []
if ("," in s or force_list) and s[0] != "[":
s = "[" + s + "]"
try:
return json.loads(s)
except Exception:
return [] if force_list else s
try:
    # added in python 3.9 not currently available in skulpt
    _removeprefix = str.removeprefix
    _removesuffix = str.removesuffix
except AttributeError:
    # Fallbacks matching the str.removeprefix/removesuffix semantics for
    # interpreters (e.g. Skulpt, Python < 3.9) that lack them.
    def _removeprefix(s, prefix):
        return s[len(prefix) :] if s.startswith(prefix) else s
    def _removesuffix(s, suffix):
        return s[: len(s) - len(suffix)] if s.endswith(suffix) else s
def _wrap_formatter(formatter):
fto = formatter["to"]
ffrom = formatter["from"]
def wrap_to(f: float, *args) -> str:
s = fto(f)
if not isinstance(s, str):
raise TypeError(
f"Custom formatter returned {type(s).__name__} (expected str)"
)
return s
def wrap_from(s: str, *args) -> float:
#### This function is called from javascript so accept *args
if not isinstance(s, str):
raise TypeError(
f"got an unexpected value when trying to assign a value to the slider, (got {s})"
)
try:
return ffrom(s)
except Exception as e:
try:
# we may have just been give a number so do the obvious thing
res = float(s)
return int(res) if res.is_integer() else res
except Exception:
raise RuntimeError(f"your custom formatter raised an exception: {e!r}")
return {"to": wrap_to, "from": wrap_from, "format_spec": formatter}
def _get_formatter(formatspec: str) -> dict:
    """Build a noUiSlider formatter from a format spec or template string.

    Expecting a format spec e.g. '.2f'
    Or a simple string '£{:.2f}'
    A dict with "to"/"from" callables is passed to _wrap_formatter instead.
    """
    if isinstance(formatspec, dict):
        return _wrap_formatter(formatspec)
    if not isinstance(formatspec, str):
        raise TypeError("expected property format to be of type str")
    # Split a template like '£{:.2f}p' into prefix '£', suffix 'p'.
    first = formatspec.find("{")
    last = formatspec.find("}")
    prefix = "" if first == -1 else formatspec[:first]
    suffix = "" if last == -1 else formatspec[last + 1 :]
    # NOTE(review): `type` shadows the builtin; it is the presentation-type
    # character of the spec (e.g. 'f' or '%'), taken from the end of the
    # bare spec or from just before the closing brace.
    type = formatspec[len(formatspec) - 1] if last == -1 else formatspec[last - 1]
    def to_format(f: float, *args) -> str:
        # Used in javascript world so expects extra args
        try:
            return format(f, formatspec) if first == -1 else formatspec.format(f)
        except Exception:
            return f  # better just to return what was passed to us
    # this will raise an error if we have an invalid spec
    format(1.1, formatspec) if first == -1 else formatspec.format(1.1)
    def from_format(s: str, *args) -> float:
        # Used in javascript world so expects extra args
        if not isinstance(s, str):
            raise TypeError(
                f"got an unexpected value when trying to assign a value to the slider, (got {s})"
            )
        # Strip the template prefix/suffix and digit separators.
        s = (
            _removesuffix(_removeprefix(s, prefix), suffix)
            .strip()
            .replace(",", "")
            .replace("_", "")
        )
        has_percent = type == "%" and s[len(s) - 1] == "%"
        if has_percent:
            s = s[: len(s) - 1]
        try:
            f = float(s)
        except Exception:
            # noUiSlider treats a False return as "could not parse".
            return False
        if has_percent:
            f = f / 100
        return int(f) if f.is_integer() else f
    # noUiSlider requires a format like {from: (float) => str, to: (str) => float}
    return {"from": from_format, "to": to_format, "format_spec": formatspec}
def _prop_getter(prop, fget=None):
return lambda self: self._props[prop] if fget is None else fget(self._props[prop])
def _slider_prop(prop, fset=None, fget=None):
    """Build a property that mirrors ``self._props[prop]`` onto the noUiSlider.

    ``fset``/``fget`` optionally transform the value on the way in/out.
    """
    def setter(self, value):
        value = value if fset is None else fset(value)
        self._props[prop] = value
        if prop == "format":
            # Changing the format also regenerates the pips so their
            # labels follow the new format.
            pips = self._make_pips()
            self._slider.updateOptions({prop: value, "pips": pips})
        else:
            self._slider.updateOptions({prop: value})
    return property(_prop_getter(prop, fget), setter)
def _min_max_prop(prop):
def getter(self):
return self._props["range"][prop]
def setter(self, value):
r = self._props["range"]
r[prop] = value
self._slider.updateOptions({"range": r})
return property(getter, setter)
def _pips_prop(prop):
    """Build a property for a pips-related setting.

    Any change rebuilds the pips config, toggles the container's
    "has-pips" CSS class and pushes the new pips to the widget.
    """
    def setter(self, value):
        self._props[prop] = value
        pips = self._make_pips()
        self._toggle_has_pips(pips)
        self._slider.updateOptions({"pips": pips})
    return property(_prop_getter(prop), setter)
# Designer-property defaults merged (and overridden) by the **properties
# passed to Slider.__init__.
# NOTE(review): the mutable default for "pips_values" ([]) is shared until
# a caller replaces it — confirm no instance mutates it in place.
_defaults = {
    "animate": True,
    "start": 20,
    "step": None,
    "tooltips": False,
    "connect": False,
    "behaviour": "tap",
    "format": None,
    "pips": None,
    "pips_mode": None,
    "pips_values": [],
    "pips_density": -1,
    "pips_stepped": True,
    "margin": None,
    "padding": None,
    "limit": None,
    "range": None,
    "min": 0,
    "max": 100,
    "visible": True,
    "enabled": True,
    "spacing_above": "small",
    "spacing_below": "small",
    "value": None,
    "values": None,
    "formatted_value": None,
    "formatted_values": None,
}
class Slider(SliderTemplate):
    """Anvil wrapper around the noUiSlider JS widget.

    Property values are stored in ``self._props`` and mirrored into the live
    noUiSlider instance via the ``_slider_prop``/``_pips_prop`` descriptors.
    """
    def __init__(self, **properties):
        # Any code you write here will run when the form opens.
        dom_node = self._dom_node = anvil.js.get_dom_node(self)
        dom_node.classList.add("anvil-slider-container")
        self._slider_node = dom_node.querySelector(".anvil-slider")
        # Detach the slider node, empty the container (stops any bundled
        # <script> tags from loading), then re-append only the slider node.
        while dom_node.firstElementChild:
            dom_node.removeChild(dom_node.firstElementChild)
        dom_node.append(self._slider_node)
        props = self._props = _defaults | properties
        # These props arrive from the designer as strings - coerce via JSON.
        for prop in (
            "start",
            "connect",
            "margin",
            "padding",
            "limit",
            "pips_values",
        ):
            props[prop] = _parse(props[prop], prop == "pips_values")
        # noUiSlider wants a range dict; fall back to the min/max properties.
        props["range"] = props["range"] or {"min": props["min"], "max": props["max"]}
        props["format"] = _get_formatter(props["format"] or ".2f")
        pips = self._make_pips()
        self._toggle_has_pips(pips)
        try:
            self._slider = _Slider.create(self._slider_node, props | {"pips": pips})
        except Exception as e:
            # Re-raise with the library name swapped so errors read as ours.
            raise RuntimeError(repr(e).replace("noUiSlider", "Slider"))
        ###### EVENTS ######
        # Forward noUiSlider events to Anvil events; h is the handle index.
        self._slider.on("slide", lambda a, h, *e: self.raise_event("slide", handle=h))
        self._slider.on("change", lambda a, h, *e: self.raise_event("change", handle=h))
        ###### PROPS TO INIT ######
        # always: applied unconditionally; if_true: only when explicitly set;
        # if_false: only when disabling/hiding (defaults are truthy).
        always = {p: props[p] for p in ("color", "spacing_above", "spacing_below")}
        if_true = {
            p: props[p]
            for p in ("formatted_value", "formatted_values", "value", "values")
            if props[p] is not None
        }
        if_false = {p: props[p] for p in ("enabled", "visible") if not props[p]}
        self.init_components(**always, **if_false, **if_true)
    ###### VALUE PROPERTIES ######
    # .get(True) returns raw numbers, .get() returns formatted strings;
    # noUiSlider returns a scalar for one handle, a list for several.
    def _value_setter(self, val):
        self._slider.set(val)
    def _value(self):
        return _from_list(self._slider.get(True))
    def _values(self):
        return _as_list(self._slider.get(True))
    def _formatted_value(self):
        return _from_list(self._slider.get())
    def _formatted_values(self):
        return _as_list(self._slider.get())
    value = property(_value, _value_setter)
    values = property(_values, _value_setter)
    formatted_value = property(_formatted_value, _value_setter)
    formatted_values = property(_formatted_values, _value_setter)
    ###### noUiSlider PROPS ######
    connect = _slider_prop("connect") # not dynamic
    behaviour = _slider_prop("behaviour") # not dynamic
    margin = _slider_prop("margin")
    padding = _slider_prop("padding")
    limit = _slider_prop("limit")
    step = _slider_prop("step")
    start = _slider_prop("start")
    range = _slider_prop("range")
    min = _min_max_prop("min")
    max = _min_max_prop("max")
    tooltips = _slider_prop("tooltips")
    animate = _slider_prop("animate")
    format = _slider_prop(
        "format", fset=lambda s: _get_formatter(s), fget=lambda d: d["format_spec"]
    )
    ###### PIPS PROPS ######
    pips = _pips_prop("pips")
    pips_mode = _pips_prop("pips_mode")
    pips_values = _pips_prop("pips_values")
    pips_density = _pips_prop("pips_density")
    pips_stepped = _pips_prop("pips_stepped")
    def _toggle_has_pips(self, pips):
        # has-pips adds bottom padding so the pip labels are not clipped.
        self._dom_node.classList.toggle("has-pips", bool(pips))
    def _make_pips(self):
        """Build the noUiSlider pips option from the pips_* properties."""
        props = self._props
        pips = props["pips"]
        if not pips:
            return None
        elif pips is True:
            return {
                "format": props["format"],
                "mode": props["pips_mode"],
                "values": props["pips_values"],
                "density": props["pips_density"],
                "stepped": props["pips_stepped"],
            }
        elif isinstance(pips, dict):
            return pips
        else:
            raise TypeError(f"pips should be a bool or a dict, got {type(pips)}")
    ###### VISUAL PROPS ######
    @property
    def enabled(self):
        # noUiSlider is disabled via a "disabled" attribute on its node.
        return not self._slider_node.getAttribute("disabled")
    @enabled.setter
    def enabled(self, value):
        if value:
            self._slider_node.removeAttribute("disabled")
        else:
            self._slider_node.setAttribute("disabled", True)
    @property
    def color(self):
        return self._color
    @color.setter
    def color(self, value):
        self._color = value
        # --primary is consumed by the injected CSS for .noUi-connect.
        self._dom_node.style.setProperty("--primary", _get_color(value))
    spacing_above = _spacing_property("above")
    spacing_below = _spacing_property("below")
    visible = _HtmlPanel.visible
    ###### METHODS ######
    def reset(self):
        """Reset all handles to their start values and fire a writeback."""
        self._slider.reset()
        self.raise_event("x-writeback")
| 29.507732 | 97 | 0.599354 |
import anvil.js
from anvil import HtmlPanel as _HtmlPanel
from ..utils._component_helpers import _get_color, _html_injector, _spacing_property
from ._anvil_designer import SliderTemplate
__version__ = "1.7.1"
noui_version = "15.4.0"
_html_injector.cdn(
f"https://cdn.jsdelivr.net/npm/nouislider@{noui_version}/dist/nouislider.min.css"
)
_html_injector.css(
"""
.anvil-slider-container {
padding: 10px 0;
}
.anvil-slider-container.has-pips {
padding-bottom: 40px;
}
.anvil-container-overflow, .anvil-panel-col {
overflow: visible;
}
.noUi-connect {
background: var(--primary);
}
.noUi-horizontal .noUi-handle {
width: 34px;
height: 34px;
right: -17px;
top: -10px;
border-radius: 50%;
}
.noUi-handle::before, .noUi-handle::after {
content: none
}
"""
)
_Slider = anvil.js.import_from(
f"https://cdn.skypack.dev/nouislider@{noui_version}"
).default
import json
def _as_list(x):
return x if isinstance(x, list) else [x]
def _from_list(x):
return x[0] if isinstance(x, list) else x
def _parse(s, force_list=False):
if not isinstance(s, str):
return s
s = s.lower().strip()
if not s:
return None if not force_list else []
if ("," in s or force_list) and s[0] != "[":
s = "[" + s + "]"
try:
return json.loads(s)
except Exception:
return [] if force_list else s
# str.removeprefix/removesuffix exist from Python 3.9; on older interpreters
# fall back to hand-rolled equivalents with the same semantics.
try:
    _removeprefix = str.removeprefix
    _removesuffix = str.removesuffix
except AttributeError:
    def _removeprefix(s, prefix):
        # drop prefix only when present; otherwise return s unchanged
        return s[len(prefix) :] if s.startswith(prefix) else s
    def _removesuffix(s, suffix):
        # drop suffix only when present; otherwise return s unchanged
        return s[: len(s) - len(suffix)] if s.endswith(suffix) else s
def _wrap_formatter(formatter):
fto = formatter["to"]
ffrom = formatter["from"]
def wrap_to(f: float, *args) -> str:
s = fto(f)
if not isinstance(s, str):
raise TypeError(
f"Custom formatter returned {type(s).__name__} (expected str)"
)
return s
def wrap_from(s: str, *args) -> float:
)
try:
return ffrom(s)
except Exception as e:
try:
res = float(s)
return int(res) if res.is_integer() else res
except Exception:
raise RuntimeError(f"your custom formatter raised an exception: {e!r}")
return {"to": wrap_to, "from": wrap_from, "format_spec": formatter}
def _get_formatter(formatspec: str) -> dict:
    """Turn a format spec string (or a {'to','from'} dict) into a noUiSlider formatter.

    Returns a dict with 'to' (float -> str), 'from' (str -> float, or False
    when the text cannot be parsed, which noUiSlider treats as "reject the
    input") and 'format_spec' (the original spec, used by the format getter).
    """
    if isinstance(formatspec, dict):
        # caller supplied custom to/from callables - just validate and wrap
        return _wrap_formatter(formatspec)
    if not isinstance(formatspec, str):
        raise TypeError("expected property format to be of type str")
    # The spec may embed literal text, e.g. "value: {:.2f} units" - split out
    # the prefix/suffix so 'from' can strip them before parsing numbers.
    first = formatspec.find("{")
    last = formatspec.find("}")
    prefix = "" if first == -1 else formatspec[:first]
    suffix = "" if last == -1 else formatspec[last + 1 :]
    # presentation type character, e.g. 'f' or '%' (NB: shadows builtin `type`)
    type = formatspec[len(formatspec) - 1] if last == -1 else formatspec[last - 1]
    def to_format(f: float, *args) -> str:
        try:
            return format(f, formatspec) if first == -1 else formatspec.format(f)
        except Exception:
            # NOTE(review): returns the raw float (not a str) on failure -
            # noUiSlider seems to tolerate this; confirm before changing.
            return f
    # fail fast on an invalid spec at construction time, not at first render
    format(1.1, formatspec) if first == -1 else formatspec.format(1.1)
    def from_format(s: str, *args) -> float:
        if not isinstance(s, str):
            raise TypeError(
                f"got an unexpected value when trying to assign a value to the slider, (got {s})"
            )
        # strip literal prefix/suffix and digit separators before parsing
        s = (
            _removesuffix(_removeprefix(s, prefix), suffix)
            .strip()
            .replace(",", "")
            .replace("_", "")
        )
        has_percent = type == "%" and s[len(s) - 1] == "%"
        if has_percent:
            s = s[: len(s) - 1]
        try:
            f = float(s)
        except Exception:
            # False signals "unparseable" to noUiSlider
            return False
        if has_percent:
            f = f / 100
        return int(f) if f.is_integer() else f
    return {"from": from_format, "to": to_format, "format_spec": formatspec}
def _prop_getter(prop, fget=None):
return lambda self: self._props[prop] if fget is None else fget(self._props[prop])
def _slider_prop(prop, fset=None, fget=None):
    """Property descriptor mirroring writes into self._props and the live slider.

    *fset*/*fget* optionally transform the value on the way in/out.
    """
    def setter(self, value):
        if fset is not None:
            value = fset(value)
        self._props[prop] = value
        options = {prop: value}
        if prop == "format":
            # the pips labels share the formatter, so rebuild them too
            options["pips"] = self._make_pips()
        self._slider.updateOptions(options)
    return property(_prop_getter(prop, fget), setter)
def _min_max_prop(prop):
def getter(self):
return self._props["range"][prop]
def setter(self, value):
r = self._props["range"]
r[prop] = value
self._slider.updateOptions({"range": r})
return property(getter, setter)
def _pips_prop(prop):
    """Property for a pips_* option; any write rebuilds the whole pips config."""
    def setter(self, value):
        self._props[prop] = value
        new_pips = self._make_pips()
        self._toggle_has_pips(new_pips)
        self._slider.updateOptions({"pips": new_pips})
    return property(_prop_getter(prop), setter)
_defaults = {
"animate": True,
"start": 20,
"step": None,
"tooltips": False,
"connect": False,
"behaviour": "tap",
"format": None,
"pips": None,
"pips_mode": None,
"pips_values": [],
"pips_density": -1,
"pips_stepped": True,
"margin": None,
"padding": None,
"limit": None,
"range": None,
"min": 0,
"max": 100,
"visible": True,
"enabled": True,
"spacing_above": "small",
"spacing_below": "small",
"value": None,
"values": None,
"formatted_value": None,
"formatted_values": None,
}
class Slider(SliderTemplate):
def __init__(self, **properties):
dom_node = self._dom_node = anvil.js.get_dom_node(self)
dom_node.classList.add("anvil-slider-container")
self._slider_node = dom_node.querySelector(".anvil-slider")
while dom_node.firstElementChild:
dom_node.removeChild(dom_node.firstElementChild)
dom_node.append(self._slider_node)
props = self._props = _defaults | properties
for prop in (
"start",
"connect",
"margin",
"padding",
"limit",
"pips_values",
):
props[prop] = _parse(props[prop], prop == "pips_values")
props["range"] = props["range"] or {"min": props["min"], "max": props["max"]}
props["format"] = _get_formatter(props["format"] or ".2f")
pips = self._make_pips()
self._toggle_has_pips(pips)
try:
self._slider = _Slider.create(self._slider_node, props | {"pips": pips})
except Exception as e:
raise RuntimeError(repr(e).replace("noUiSlider", "Slider"))
er.on("change", lambda a, h, *e: self.raise_event("change", handle=h))
for p in ("formatted_value", "formatted_values", "value", "values")
if props[p] is not None
}
if_false = {p: props[p] for p in ("enabled", "visible") if not props[p]}
self.init_components(**always, **if_false, **if_true)
elf):
return _as_list(self._slider.get(True))
def _formatted_value(self):
return _from_list(self._slider.get())
def _formatted_values(self):
return _as_list(self._slider.get())
value = property(_value, _value_setter)
values = property(_values, _value_setter)
formatted_value = property(_formatted_value, _value_setter)
formatted_values = property(_formatted_values, _value_setter)
")
limit = _slider_prop("limit")
step = _slider_prop("step")
start = _slider_prop("start")
range = _slider_prop("range")
min = _min_max_prop("min")
max = _min_max_prop("max")
tooltips = _slider_prop("tooltips")
animate = _slider_prop("animate")
format = _slider_prop(
"format", fset=lambda s: _get_formatter(s), fget=lambda d: d["format_spec"]
)
nsity = _pips_prop("pips_density")
pips_stepped = _pips_prop("pips_stepped")
def _toggle_has_pips(self, pips):
self._dom_node.classList.toggle("has-pips", bool(pips))
def _make_pips(self):
props = self._props
pips = props["pips"]
if not pips:
return None
elif pips is True:
return {
"format": props["format"],
"mode": props["pips_mode"],
"values": props["pips_values"],
"density": props["pips_density"],
"stepped": props["pips_stepped"],
}
elif isinstance(pips, dict):
return pips
else:
raise TypeError(f"pips should be a bool or a dict, got {type(pips)}")
(self, value):
if value:
self._slider_node.removeAttribute("disabled")
else:
self._slider_node.setAttribute("disabled", True)
@property
def color(self):
return self._color
@color.setter
def color(self, value):
self._color = value
self._dom_node.style.setProperty("--primary", _get_color(value))
spacing_above = _spacing_property("above")
spacing_below = _spacing_property("below")
visible = _HtmlPanel.visible
| true | true |
f72564206e9156bd00056bf1ef33a09cb7439d94 | 338 | py | Python | server/twitter/migrations/0004_auto_20200822_2202.py | vanviethieuanh/twitter-clone | bac1be8118514ec8ce169eb0c0f4d5658ab2013a | [
"MIT"
] | 1 | 2021-12-29T00:49:51.000Z | 2021-12-29T00:49:51.000Z | server/twitter/migrations/0004_auto_20200822_2202.py | vanviethieuanh/twitter-clone | bac1be8118514ec8ce169eb0c0f4d5658ab2013a | [
"MIT"
] | null | null | null | server/twitter/migrations/0004_auto_20200822_2202.py | vanviethieuanh/twitter-clone | bac1be8118514ec8ce169eb0c0f4d5658ab2013a | [
"MIT"
] | null | null | null | # Generated by Django 3.0.3 on 2020-08-22 15:02
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: removes the unique_together constraint from
    # the `follow` model by resetting it to the empty set.
    dependencies = [
        # must run after the previous twitter-app migration
        ('twitter', '0003_auto_20200822_2127'),
    ]
    operations = [
        migrations.AlterUniqueTogether(
            name='follow',
            unique_together=set(),
        ),
    ]
| 18.777778 | 47 | 0.60355 |
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('twitter', '0003_auto_20200822_2127'),
]
operations = [
migrations.AlterUniqueTogether(
name='follow',
unique_together=set(),
),
]
| true | true |
f72564424a1cdf41e8a72d242dbdc9892b53b527 | 3,217 | py | Python | model/snl_block.py | ustbjdl1021/improved_snl_unet | 7f7bf092153e1a535337b80bd1b673eff3ddec52 | [
"MIT"
] | null | null | null | model/snl_block.py | ustbjdl1021/improved_snl_unet | 7f7bf092153e1a535337b80bd1b673eff3ddec52 | [
"MIT"
] | 2 | 2022-03-30T13:05:27.000Z | 2022-03-31T13:43:22.000Z | model/snl_block.py | ustbjdl1021/improved_snl_unet | 7f7bf092153e1a535337b80bd1b673eff3ddec52 | [
"MIT"
] | 1 | 2022-03-31T13:33:30.000Z | 2022-03-31T13:33:30.000Z | import torch
import torch.nn as nn
class ImprovedSNL(nn.Module):
    """Improved Spectral Non-Local (SNL) block.

    Builds a symmetrically-normalized affinity matrix A = D^{-1/2} M D^{-1/2}
    from the input feature map, aggregates features along both rows and
    columns of A, and adds the result back to the input (residual). The whole
    aggregation is repeated ``stage_num`` times.

    :param in_channels: number of channels of the input feature map.
    :param transfer_channels: channel width of the embedding/transform convs.
    :param stage_num: how many times the non-local stage is applied.
    """
    def __init__(self, in_channels, transfer_channels, stage_num=2):
        super(ImprovedSNL, self).__init__()
        self.in_channels = in_channels
        self.transfer_channels = transfer_channels
        self.stage_num = stage_num
        # 1x1 convs embedding the input for the affinity computation ...
        self.transform_t = nn.Conv2d(in_channels, transfer_channels, kernel_size=1, stride=1, bias=False)
        self.transform_p = nn.Conv2d(in_channels, transfer_channels, kernel_size=1, stride=1, bias=False)
        # ... and for the row/column feature aggregation paths.
        self.row_transform = nn.Conv2d(in_channels, transfer_channels, kernel_size=1, stride=1, bias=False)
        self.column_transform = nn.Conv2d(in_channels, transfer_channels, kernel_size=1, stride=1, bias=False)
        # project the aggregated features back to in_channels
        self.w1 = nn.Conv2d(transfer_channels, in_channels, kernel_size=1, stride=1, bias=False)
        self.w2 = nn.Conv2d(transfer_channels, in_channels, kernel_size=1, stride=1, bias=False)
        self.bn = nn.BatchNorm2d(in_channels)
        self._init_params()

    def _init_params(self):
        """Kaiming init for convs, constant init for norms/linears."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)

    def getAtt(self, x):
        """Compute the normalized affinity matrix, shape (b, h*w, h*w)."""
        t = self.transform_t(x)
        p = self.transform_p(x)
        b, c, h, w = t.size()
        t = t.view(b, c, -1).permute(0, 2, 1)
        p = p.view(b, c, -1)
        # non-negative pairwise affinities between spatial positions
        m = torch.bmm(torch.relu(t), torch.relu(p))
        # Symmetrize: (M + M^T) / 2. BUGFIX: the original used the in-place
        # `m += m.permute(0, 2, 1)`, which adds a tensor to a *view of its own
        # storage* - an overlapping in-place op with undefined results in
        # PyTorch. Use an out-of-place add instead.
        m_hat = (m + m.permute(0, 2, 1)) / 2
        # D^{-1/2} normalization; rows with zero degree are left at zero
        degree = torch.sum(m_hat, dim=2)
        degree[degree != 0] = torch.sqrt(1.0 / degree[degree != 0])
        affinity_matrix = m_hat * degree.unsqueeze(1)
        affinity_matrix *= degree.unsqueeze(2)
        return affinity_matrix

    def stage(self, x):
        """One non-local aggregation stage with a residual connection."""
        affinity_matrix = self.getAtt(x)
        # column path: features @ A
        column_features = self.column_transform(x)
        b, c, h, w = column_features.size()
        column_features = column_features.view(b, c, -1)
        column_features = torch.bmm(column_features, affinity_matrix).contiguous().view(b,c,h,w)
        column_features = self.w1(column_features)
        # row path: A @ features
        row_features = self.row_transform(x)
        b, c, h, w = row_features.size()
        row_features = row_features.view(b, c, -1).permute(0, 2, 1)
        row_features = torch.bmm(affinity_matrix, row_features).permute(0, 2, 1).contiguous().view(b,c,h,w)
        row_features = self.w2(row_features)
        output = column_features + row_features
        output = self.bn(output)
        output = output + x
        return output

    def forward(self, x):
        """Apply the non-local stage ``stage_num`` times; shape is preserved."""
        for stage in range(self.stage_num):
            x = self.stage(x)
        return x
| 36.556818 | 111 | 0.587193 | import torch
import torch.nn as nn
class ImprovedSNL(nn.Module):
def __init__(self, in_channels, transfer_channels, stage_num=2):
super(ImprovedSNL, self).__init__()
self.in_channels = in_channels
self.transfer_channels = transfer_channels
self.stage_num = stage_num
self.transform_t = nn.Conv2d(in_channels, transfer_channels, kernel_size=1, stride=1, bias=False)
self.transform_p = nn.Conv2d(in_channels, transfer_channels, kernel_size=1, stride=1, bias=False)
self.row_transform = nn.Conv2d(in_channels, transfer_channels, kernel_size=1, stride=1, bias=False)
self.column_transform = nn.Conv2d(in_channels, transfer_channels, kernel_size=1, stride=1, bias=False)
self.w1 = nn.Conv2d(transfer_channels, in_channels, kernel_size=1, stride=1, bias=False)
self.w2 = nn.Conv2d(transfer_channels, in_channels, kernel_size=1, stride=1, bias=False)
self.bn = nn.BatchNorm2d(in_channels)
self._init_params()
def _init_params(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def getAtt(self, x):
t = self.transform_t(x)
p = self.transform_p(x)
b, c, h, w = t.size()
t = t.view(b, c, -1).permute(0, 2, 1)
p = p.view(b, c, -1)
m = torch.bmm(torch.relu(t), torch.relu(p))
m += m.permute(0, 2, 1)
m_hat = m / 2
degree = torch.sum(m_hat, dim=2)
degree[degree != 0] = torch.sqrt(1.0 / degree[degree != 0])
affinity_matrix = m_hat * degree.unsqueeze(1)
affinity_matrix *= degree.unsqueeze(2)
return affinity_matrix
def stage(self, x):
affinity_matrix = self.getAtt(x)
column_features = self.column_transform(x)
b, c, h, w = column_features.size()
column_features = column_features.view(b, c, -1)
column_features = torch.bmm(column_features, affinity_matrix).contiguous().view(b,c,h,w)
column_features = self.w1(column_features)
row_features = self.row_transform(x)
b, c, h, w = row_features.size()
row_features = row_features.view(b, c, -1).permute(0, 2, 1)
row_features = torch.bmm(affinity_matrix, row_features).permute(0, 2, 1).contiguous().view(b,c,h,w)
row_features = self.w2(row_features)
output = column_features + row_features
output = self.bn(output)
output = output + x
return output
def forward(self, x):
for stage in range(self.stage_num):
x = self.stage(x)
return x
| true | true |
f72565ec076f5d92978bba872dc9b48d63e0a69e | 10,084 | py | Python | regym/rl_algorithms/algorithms/PPO/rnd_loss.py | KnwSondess/Regym | 825c7dacf955a3e2f6c658c0ecb879a0ca036c1a | [
"MIT"
] | 2 | 2020-09-13T15:53:20.000Z | 2020-12-08T15:57:05.000Z | regym/rl_algorithms/algorithms/PPO/rnd_loss.py | KnwSondess/Regym | 825c7dacf955a3e2f6c658c0ecb879a0ca036c1a | [
"MIT"
] | null | null | null | regym/rl_algorithms/algorithms/PPO/rnd_loss.py | KnwSondess/Regym | 825c7dacf955a3e2f6c658c0ecb879a0ca036c1a | [
"MIT"
] | 1 | 2021-09-20T13:48:30.000Z | 2021-09-20T13:48:30.000Z | from typing import Dict, List
import torch
import torch.nn.functional as F
def compute_loss(states: torch.Tensor,
                 actions: torch.Tensor,
                 next_states: torch.Tensor,
                 log_probs_old: torch.Tensor,
                 ext_returns: torch.Tensor,
                 ext_advantages: torch.Tensor,
                 std_ext_advantages: torch.Tensor,
                 int_returns: torch.Tensor,
                 int_advantages: torch.Tensor,
                 std_int_advantages: torch.Tensor,
                 target_random_features: torch.Tensor,
                 states_mean: torch.Tensor,
                 states_std: torch.Tensor,
                 model: torch.nn.Module,
                 pred_intr_model: torch.nn.Module,
                 intrinsic_reward_ratio: float,
                 ratio_clip: float,
                 entropy_weight: float,
                 value_weight: float,
                 rnd_weight: float,
                 rnd_obs_clip: float,
                 summary_writer: object = None,
                 iteration_count: int = 0,
                 rnn_states: Dict[str, Dict[str, List[torch.Tensor]]] = None) -> torch.Tensor:
    '''
    Computes the PPO loss (eq. (9) of "Proximal Policy Optimization
    Algorithms", https://arxiv.org/abs/1707.06347) augmented with a Random
    Network Distillation (RND) predictor loss and an intrinsic value head.

    :param states: (batch_size x state_size) states visited by the agent.
    :param actions: (batch_size x action_size) actions taken at those states.
    :param next_states: (batch_size x state_size) successor states, used for RND.
    :param log_probs_old: (batch_size x 1) log-probabilities of :param actions:
                          under the behavior policy; used for the probability
                          ratio (eq. (6) of the original paper).
    :param ext_returns: (batch_size x 1) discounted extrinsic returns.
    :param ext_advantages: (batch_size x 1) extrinsic advantage estimates.
    :param std_ext_advantages: (batch_size x 1) standardized extrinsic advantages.
    :param int_returns: (batch_size x 1) discounted intrinsic returns.
    :param int_advantages: (batch_size x 1) intrinsic advantage estimates.
    :param std_int_advantages: (batch_size x 1) standardized intrinsic advantages.
    :param target_random_features: fixed random-network features for :param next_states:.
    :param states_mean: running mean of states (for RND observation normalization).
    :param states_std: running std of states (for RND observation normalization).
    :param model: actor-critic module; must return a dict with keys
                  'log_pi_a', 'ent', 'v' and 'int_v'.
    :param pred_intr_model: RND predictor network.
    :param intrinsic_reward_ratio: weight of intrinsic vs extrinsic advantages.
    :param ratio_clip: epsilon of the clipped surrogate (trust-region radius,
                       eq. (7) of the original paper).
    :param entropy_weight: coefficient of the entropy bonus.
    :param value_weight: coefficient of the value losses.
    :param rnd_weight: coefficient of the RND predictor loss.
    :param rnd_obs_clip: clip range for normalized RND observations
                         (disabled when <= 0.1).
    :param summary_writer: optional TensorBoard-like writer for diagnostics.
    :param iteration_count: global step used when logging.
    :param rnn_states: optional recurrent submodule states, mapping submodule
                       names to {'hidden': [...], 'cell': [...]} tensor lists.
    :return: scalar total loss tensor.
    '''
    # Combined advantages: the standardized version drives the policy
    # gradient; the raw one is only logged for diagnostics.
    advantages = ext_advantages + intrinsic_reward_ratio * int_advantages
    std_advantages = std_ext_advantages + intrinsic_reward_ratio * std_int_advantages

    prediction = model(states, actions, rnn_states=rnn_states)

    # PPO clipped surrogate objective L^{CLIP} plus entropy bonus L^{S}.
    ratio = torch.exp((prediction['log_pi_a'] - log_probs_old))
    obj = ratio * std_advantages
    obj_clipped = torch.clamp(ratio,
                              1.0 - ratio_clip,
                              1.0 + ratio_clip) * std_advantages
    policy_val = -torch.min(obj, obj_clipped).mean()
    entropy_val = prediction['ent'].mean()
    policy_loss = policy_val - entropy_weight * entropy_val

    # Random Network Distillation loss: the predictor chases the fixed random
    # target network on normalized (and optionally clipped) next observations.
    norm_next_states = (next_states - states_mean) / (states_std + 1e-8)
    if rnd_obs_clip > 1e-1:
        norm_next_states = torch.clamp(norm_next_states, -rnd_obs_clip, rnd_obs_clip)
    pred_random_features = pred_intr_model(norm_next_states)
    rnd_loss = torch.nn.functional.mse_loss(pred_random_features, target_random_features.detach())

    # Separate value heads for extrinsic and intrinsic returns.
    ext_v_loss = torch.nn.functional.mse_loss(input=prediction['v'], target=ext_returns)
    int_v_loss = torch.nn.functional.mse_loss(input=prediction['int_v'], target=int_returns)
    value_loss = ext_v_loss + int_v_loss

    total_loss = policy_loss + rnd_weight * rnd_loss + value_weight * value_loss

    if summary_writer is not None:
        summary_writer.add_scalar('Training/RatioMean', ratio.mean().cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/ExtAdvantageMean', ext_advantages.mean().cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/IntAdvantageMean', int_advantages.mean().cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/AdvantageMean', advantages.mean().cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/RNDLoss', rnd_loss.cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/ExtVLoss', ext_v_loss.cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/IntVLoss', int_v_loss.cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/MeanVValues', prediction['v'].cpu().mean().item(), iteration_count)
        summary_writer.add_scalar('Training/MeanReturns', ext_returns.cpu().mean().item(), iteration_count)
        summary_writer.add_scalar('Training/StdVValues', prediction['v'].cpu().std().item(), iteration_count)
        summary_writer.add_scalar('Training/StdReturns', ext_returns.cpu().std().item(), iteration_count)
        summary_writer.add_scalar('Training/MeanIntVValues', prediction['int_v'].cpu().mean().item(), iteration_count)
        summary_writer.add_scalar('Training/MeanIntReturns', int_returns.cpu().mean().item(), iteration_count)
        summary_writer.add_scalar('Training/StdIntVValues', prediction['int_v'].cpu().std().item(), iteration_count)
        summary_writer.add_scalar('Training/StdIntReturns', int_returns.cpu().std().item(), iteration_count)
        summary_writer.add_scalar('Training/ValueLoss', value_loss.cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/PolicyVal', policy_val.cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/EntropyVal', entropy_val.cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/PolicyLoss', policy_loss.cpu().item(), iteration_count)
        summary_writer.add_scalar('Training/TotalLoss', total_loss.cpu().item(), iteration_count)
    return total_loss
| 61.865031 | 139 | 0.673344 | from typing import Dict, List
import torch
import torch.nn.functional as F
def compute_loss(states: torch.Tensor,
actions: torch.Tensor,
next_states: torch.Tensor,
log_probs_old: torch.Tensor,
ext_returns: torch.Tensor,
ext_advantages: torch.Tensor,
std_ext_advantages: torch.Tensor,
int_returns: torch.Tensor,
int_advantages: torch.Tensor,
std_int_advantages: torch.Tensor,
target_random_features: torch.Tensor,
states_mean: torch.Tensor,
states_std: torch.Tensor,
model: torch.nn.Module,
pred_intr_model: torch.nn.Module,
intrinsic_reward_ratio: float,
ratio_clip: float,
entropy_weight: float,
value_weight: float,
rnd_weight: float,
rnd_obs_clip: float,
summary_writer: object = None,
iteration_count: int = 0,
rnn_states: Dict[str, Dict[str, List[torch.Tensor]]] = None) -> torch.Tensor:
advantages = ext_advantages + intrinsic_reward_ratio*int_advantages
std_advantages = std_ext_advantages + intrinsic_reward_ratio*std_int_advantages
prediction = model(states, actions, rnn_states=rnn_states)
ratio = torch.exp((prediction['log_pi_a'] - log_probs_old))
obj = ratio * std_advantages
obj_clipped = torch.clamp(ratio,
1.0 - ratio_clip,
1.0 + ratio_clip) * std_advantages
policy_val = -torch.min(obj, obj_clipped).mean()
entropy_val = prediction['ent'].mean()
policy_loss = policy_val - entropy_weight * entropy_val
states-states_mean) / (states_std+1e-8)
if rnd_obs_clip > 1e-1:
norm_next_states = torch.clamp( norm_next_states, -rnd_obs_clip, rnd_obs_clip)
pred_random_features = pred_intr_model(norm_next_states)
int_reward_loss = torch.nn.functional.mse_loss( pred_random_features, target_random_features.detach())
ext_v_loss = torch.nn.functional.mse_loss(input=prediction['v'], target=ext_returns)
int_v_loss = torch.nn.functional.mse_loss(input=prediction['int_v'], target=int_returns)
value_loss = (ext_v_loss + int_v_loss)
rnd_loss = int_reward_loss
total_loss = policy_loss + rnd_weight * rnd_loss + value_weight * value_loss
if summary_writer is not None:
summary_writer.add_scalar('Training/RatioMean', ratio.mean().cpu().item(), iteration_count)
summary_writer.add_scalar('Training/ExtAdvantageMean', ext_advantages.mean().cpu().item(), iteration_count)
summary_writer.add_scalar('Training/IntAdvantageMean', int_advantages.mean().cpu().item(), iteration_count)
summary_writer.add_scalar('Training/AdvantageMean', advantages.mean().cpu().item(), iteration_count)
summary_writer.add_scalar('Training/RNDLoss', int_reward_loss.cpu().item(), iteration_count)
summary_writer.add_scalar('Training/ExtVLoss', ext_v_loss.cpu().item(), iteration_count)
summary_writer.add_scalar('Training/IntVLoss', int_v_loss.cpu().item(), iteration_count)
summary_writer.add_scalar('Training/MeanVValues', prediction['v'].cpu().mean().item(), iteration_count)
summary_writer.add_scalar('Training/MeanReturns', ext_returns.cpu().mean().item(), iteration_count)
summary_writer.add_scalar('Training/StdVValues', prediction['v'].cpu().std().item(), iteration_count)
summary_writer.add_scalar('Training/StdReturns', ext_returns.cpu().std().item(), iteration_count)
summary_writer.add_scalar('Training/MeanIntVValues', prediction['int_v'].cpu().mean().item(), iteration_count)
summary_writer.add_scalar('Training/MeanIntReturns', int_returns.cpu().mean().item(), iteration_count)
summary_writer.add_scalar('Training/StdIntVValues', prediction['int_v'].cpu().std().item(), iteration_count)
summary_writer.add_scalar('Training/StdIntReturns', int_returns.cpu().std().item(), iteration_count)
summary_writer.add_scalar('Training/ValueLoss', value_loss.cpu().item(), iteration_count)
summary_writer.add_scalar('Training/PolicyVal', policy_val.cpu().item(), iteration_count)
summary_writer.add_scalar('Training/EntropyVal', entropy_val.cpu().item(), iteration_count)
summary_writer.add_scalar('Training/PolicyLoss', policy_loss.cpu().item(), iteration_count)
summary_writer.add_scalar('Training/TotalLoss', total_loss.cpu().item(), iteration_count)
return total_loss
| true | true |
f725663a96725554f28f77d984f7989ab3fbe8af | 4,939 | py | Python | huaweicloud-sdk-cce/huaweicloudsdkcce/v3/model/cluster_cert.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-cce/huaweicloudsdkcce/v3/model/cluster_cert.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-cce/huaweicloudsdkcce/v3/model/cluster_cert.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ClusterCert:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'server': 'str',
'certificate_authority_data': 'str',
'insecure_skip_tls_verify': 'bool'
}
attribute_map = {
'server': 'server',
'certificate_authority_data': 'certificate-authority-data',
'insecure_skip_tls_verify': 'insecure-skip-tls-verify'
}
    def __init__(self, server=None, certificate_authority_data=None, insecure_skip_tls_verify=None):
        """ClusterCert - a model defined in huaweicloud sdk

        :param server: Server address.
        :param certificate_authority_data: Certificate authority data (base64).
        :param insecure_skip_tls_verify: Skip server certificate verification;
            true when the cluster type is externalCluster.
        """
        self._server = None
        self._certificate_authority_data = None
        self._insecure_skip_tls_verify = None
        self.discriminator = None
        # Assign through the property setters, and only when the caller
        # actually supplied a value (the backing fields already default to None).
        if server is not None:
            self.server = server
        if certificate_authority_data is not None:
            self.certificate_authority_data = certificate_authority_data
        if insecure_skip_tls_verify is not None:
            self.insecure_skip_tls_verify = insecure_skip_tls_verify
    @property
    def server(self):
        """Gets the server of this ClusterCert.

        Server address.

        :return: The server of this ClusterCert.
        :rtype: str
        """
        return self._server

    @server.setter
    def server(self, server):
        """Sets the server of this ClusterCert.

        Server address.

        :param server: The server of this ClusterCert.
        :type: str
        """
        self._server = server
    @property
    def certificate_authority_data(self):
        """Gets the certificate_authority_data of this ClusterCert.

        Certificate authority data.

        :return: The certificate_authority_data of this ClusterCert.
        :rtype: str
        """
        return self._certificate_authority_data

    @certificate_authority_data.setter
    def certificate_authority_data(self, certificate_authority_data):
        """Sets the certificate_authority_data of this ClusterCert.

        Certificate authority data.

        :param certificate_authority_data: The certificate_authority_data of this ClusterCert.
        :type: str
        """
        self._certificate_authority_data = certificate_authority_data
@property
def insecure_skip_tls_verify(self):
"""Gets the insecure_skip_tls_verify of this ClusterCert.
不校验服务端证书,在 cluster 类型为 externalCluster 时,该值为 true。
:return: The insecure_skip_tls_verify of this ClusterCert.
:rtype: bool
"""
return self._insecure_skip_tls_verify
@insecure_skip_tls_verify.setter
def insecure_skip_tls_verify(self, insecure_skip_tls_verify):
"""Sets the insecure_skip_tls_verify of this ClusterCert.
不校验服务端证书,在 cluster 类型为 externalCluster 时,该值为 true。
:param insecure_skip_tls_verify: The insecure_skip_tls_verify of this ClusterCert.
:type: bool
"""
self._insecure_skip_tls_verify = insecure_skip_tls_verify
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ClusterCert):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 29.224852 | 100 | 0.606803 |
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ClusterCert:
    """Kubeconfig ``cluster`` entry describing how a client reaches a cluster.

    ``openapi_types`` declares each attribute's type and ``attribute_map``
    maps python attribute names to their kebab-case JSON wire keys.
    """

    # Attribute names whose values are masked as "****" in to_dict().
    sensitive_list = []

    openapi_types = {
        'server': 'str',
        'certificate_authority_data': 'str',
        'insecure_skip_tls_verify': 'bool'
    }

    attribute_map = {
        'server': 'server',
        'certificate_authority_data': 'certificate-authority-data',
        'insecure_skip_tls_verify': 'insecure-skip-tls-verify'
    }

    def __init__(self, server=None, certificate_authority_data=None, insecure_skip_tls_verify=None):
        """Initialize the model; ``None`` arguments leave the attribute unset."""
        self._server = None
        self._certificate_authority_data = None
        self._insecure_skip_tls_verify = None
        self.discriminator = None
        # Route provided values through the property setters, skipping Nones.
        provided = (('server', server),
                    ('certificate_authority_data', certificate_authority_data),
                    ('insecure_skip_tls_verify', insecure_skip_tls_verify))
        for attr_name, attr_value in provided:
            if attr_value is not None:
                setattr(self, attr_name, attr_value)

    @property
    def server(self):
        """str: API server address of this ClusterCert."""
        return self._server

    @server.setter
    def server(self, server):
        self._server = server

    @property
    def certificate_authority_data(self):
        """str: certificate authority data of this ClusterCert."""
        return self._certificate_authority_data

    @certificate_authority_data.setter
    def certificate_authority_data(self, certificate_authority_data):
        self._certificate_authority_data = certificate_authority_data

    @property
    def insecure_skip_tls_verify(self):
        """bool: whether server certificate verification is skipped."""
        return self._insecure_skip_tls_verify

    @insecure_skip_tls_verify.setter
    def insecure_skip_tls_verify(self, insecure_skip_tls_verify):
        self._insecure_skip_tls_verify = insecure_skip_tls_verify

    def to_dict(self):
        """Return the model's properties as a plain dict.

        Nested models are converted recursively; attributes listed in
        ``sensitive_list`` are masked as "****".
        """
        def convert(attr, value):
            if isinstance(value, list):
                return [v.to_dict() if hasattr(v, "to_dict") else v
                        for v in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                        for k, v in value.items()}
            return "****" if attr in self.sensitive_list else value

        return {attr: convert(attr, getattr(self, attr))
                for attr in self.openapi_types}

    def to_str(self):
        """Return the JSON string representation of the model."""
        import simplejson as json
        if six.PY2:
            # Python 2 only: force UTF-8 default encoding for serialization.
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """Used by `print`; delegates to the JSON form."""
        return self.to_str()

    def __eq__(self, other):
        """Equal iff *other* is a ClusterCert with identical attributes."""
        return isinstance(other, ClusterCert) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| true | true |
f7256728eb65c78928992820c0d53c79800f694d | 483 | py | Python | app/api/migrations/0002_auto_20210201_1602.py | ingjavierpinilla/magentrack-test | 4b5ee34aafbe85c4f536ceafd5efdc9271a26008 | [
"MIT"
] | null | null | null | app/api/migrations/0002_auto_20210201_1602.py | ingjavierpinilla/magentrack-test | 4b5ee34aafbe85c4f536ceafd5efdc9271a26008 | [
"MIT"
] | null | null | null | app/api/migrations/0002_auto_20210201_1602.py | ingjavierpinilla/magentrack-test | 4b5ee34aafbe85c4f536ceafd5efdc9271a26008 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.6 on 2021-02-01 16:02
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='dataset',
name='date',
field=models.DateTimeField(default=datetime.datetime(2021, 2, 1, 16, 2, 48, 685488, tzinfo=utc)),
),
]
| 23 | 109 | 0.625259 |
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Auto-generated Django migration: changes the default value of
    # ``Dataset.date`` to a fixed, timezone-aware timestamp.
    # NOTE(review): the hard-coded datetime suggests the model field should
    # probably use ``default=timezone.now`` instead — verify.

    dependencies = [
        ('api', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='dataset',
            name='date',
            field=models.DateTimeField(default=datetime.datetime(2021, 2, 1, 16, 2, 48, 685488, tzinfo=utc)),
        ),
    ]
| true | true |
f72568c0fee6e2b462e73799b21aa117bda8f7a5 | 1,562 | py | Python | sdk/python/pulumi_azure_native/management/v20200501/__init__.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/management/v20200501/__init__.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/management/v20200501/__init__.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .get_entity import *
from .get_hierarchy_setting import *
from .get_management_group import *
from .get_management_group_subscription import *
from .hierarchy_setting import *
from .management_group import *
from .management_group_subscription import *
from ._inputs import *
from . import outputs
def _register_module():
    """Register this package's resource classes with the Pulumi runtime.

    Pulumi uses the registered ``ResourceModule`` to map a fully-qualified
    type token back to its Python class when rehydrating resources from
    state (by URN).
    """
    import pulumi
    from ... import _utilities

    class Module(pulumi.runtime.ResourceModule):
        _version = _utilities.get_semver_version()

        def version(self):
            return Module._version

        def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:
            # Dispatch on the type token; each resource is reconstructed
            # from its URN only.
            if typ == "azure-native:management/v20200501:HierarchySetting":
                return HierarchySetting(name, pulumi.ResourceOptions(urn=urn))
            elif typ == "azure-native:management/v20200501:ManagementGroup":
                return ManagementGroup(name, pulumi.ResourceOptions(urn=urn))
            elif typ == "azure-native:management/v20200501:ManagementGroupSubscription":
                return ManagementGroupSubscription(name, pulumi.ResourceOptions(urn=urn))
            else:
                raise Exception(f"unknown resource type {typ}")

    _module_instance = Module()
    pulumi.runtime.register_resource_module("azure-native", "management/v20200501", _module_instance)


# Registration happens at import time so the runtime knows this module.
_register_module()
| 37.190476 | 101 | 0.707426 |
# Export this package's modules as members:
from .get_entity import *
from .get_hierarchy_setting import *
from .get_management_group import *
from .get_management_group_subscription import *
from .hierarchy_setting import *
from .management_group import *
from .management_group_subscription import *
from ._inputs import *
from . import outputs
def _register_module():
    """Register this module's resource classes with the Pulumi runtime so
    resources can be rehydrated from state by their type token."""
    import pulumi
    from ... import _utilities

    class Module(pulumi.runtime.ResourceModule):
        _version = _utilities.get_semver_version()

        def version(self):
            return Module._version

        def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:
            # Dispatch the fully-qualified type token to its resource class.
            token_to_class = {
                "azure-native:management/v20200501:HierarchySetting": HierarchySetting,
                "azure-native:management/v20200501:ManagementGroup": ManagementGroup,
                "azure-native:management/v20200501:ManagementGroupSubscription": ManagementGroupSubscription,
            }
            resource_class = token_to_class.get(typ)
            if resource_class is None:
                raise Exception(f"unknown resource type {typ}")
            return resource_class(name, pulumi.ResourceOptions(urn=urn))

    _module_instance = Module()
    pulumi.runtime.register_resource_module("azure-native", "management/v20200501", _module_instance)


_register_module()
| true | true |
f72569012f6a31830be2670b47bccbad2cee3f8e | 4,367 | py | Python | vimms_gym/viewer_helper.py | glasgowcompbio/vimms-gym | 95cb6fa84ee6e3a64618b7a2a54c3835ad0d7867 | [
"MIT"
] | null | null | null | vimms_gym/viewer_helper.py | glasgowcompbio/vimms-gym | 95cb6fa84ee6e3a64618b7a2a54c3835ad0d7867 | [
"MIT"
] | null | null | null | vimms_gym/viewer_helper.py | glasgowcompbio/vimms-gym | 95cb6fa84ee6e3a64618b7a2a54c3835ad0d7867 | [
"MIT"
] | null | null | null | import os
import sys
import numpy as np
import streamlit as st
from stable_baselines3 import PPO
from vimms.ChemicalSamplers import UniformRTAndIntensitySampler, GaussianChromatogramSampler, \
UniformMZFormulaSampler
from vimms.Common import POSITIVE
from vimms_gym.common import METHOD_PPO, METHOD_TOPN
sys.path.append('..')
from vimms_gym.env import DDAEnv
from vimms_gym.evaluation import Episode, pick_action
@st.experimental_memo
def preset_1():
    """Build the default simulation preset (memoized by Streamlit).

    Returns a nested dict with three sections:
      - 'chemical_creator': ranges and samplers used to generate chemicals
      - 'noise': spike-noise settings applied to the simulated spectra
      - 'env': DDAEnv acquisition parameters (tolerances, isolation window)
    """
    n_chemicals = (2000, 5000)
    mz_range = (100, 600)
    rt_range = (200, 1000)
    intensity_range = (1E4, 1E10)

    min_mz = mz_range[0]
    max_mz = mz_range[1]
    min_rt = rt_range[0]
    max_rt = rt_range[1]

    # Intensities are sampled uniformly in log space across 1E4..1E10.
    min_log_intensity = np.log(intensity_range[0])
    max_log_intensity = np.log(intensity_range[1])

    isolation_window = 0.7
    rt_tol = 120
    mz_tol = 10
    ionisation_mode = POSITIVE

    enable_spike_noise = True
    noise_density = 0.1
    noise_max_val = 1E3

    mz_sampler = UniformMZFormulaSampler(min_mz=min_mz, max_mz=max_mz)
    ri_sampler = UniformRTAndIntensitySampler(min_rt=min_rt, max_rt=max_rt,
                                              min_log_intensity=min_log_intensity,
                                              max_log_intensity=max_log_intensity)
    cr_sampler = GaussianChromatogramSampler()
    params = {
        'chemical_creator': {
            'mz_range': mz_range,
            'rt_range': rt_range,
            'intensity_range': intensity_range,
            'n_chemicals': n_chemicals,
            'mz_sampler': mz_sampler,
            'ri_sampler': ri_sampler,
            'cr_sampler': cr_sampler,
        },
        'noise': {
            'enable_spike_noise': enable_spike_noise,
            'noise_density': noise_density,
            'noise_max_val': noise_max_val,
            'mz_range': mz_range
        },
        'env': {
            'ionisation_mode': ionisation_mode,
            'rt_range': rt_range,
            'isolation_window': isolation_window,
            'mz_tol': mz_tol,
            'rt_tol': rt_tol,
        }
    }
    return params
@st.experimental_memo
def preset_2():
    """Placeholder for a second simulation preset; currently returns None."""
    return None
def load_model_and_params(method, params):
    """Prepare the controller settings for *method*.

    Returns a tuple ``(N, min_ms1_intensity, model, params)``: the trained
    PPO model when method is METHOD_PPO, otherwise Top-N settings; *params*
    is shallow-copied so the caller's dict is not mutated (note the nested
    'env' dict is still shared and is mutated in the Top-N branch).
    """
    params = dict(params)  # make a copy
    model = None
    N = None
    min_ms1_intensity = None
    if method == METHOD_PPO:
        # TODO: should be uploaded, rather than hardcoded?
        in_dir = os.path.abspath(os.path.join('..', 'notebooks', 'simulated_chems', 'results'))
        env_name = 'DDAEnv'
        model_name = 'PPO'
        fname = os.path.join(in_dir, '%s_%s.zip' % (env_name, model_name))
        # st.write('Loading model from: ', fname)
        model = load_ppo(fname)

    elif method == METHOD_TOPN:
        min_ms1_intensity = 5000
        N = 20  # from optimise_baselines.ipynb
        rt_tol = 30  # from optimise_baselines.ipynb
        params['env']['rt_tol'] = rt_tol

    return N, min_ms1_intensity, model, params
@st.experimental_singleton
def load_ppo(fname):
    """Load a trained Stable-Baselines3 PPO model from *fname*.

    Cached as a Streamlit singleton so the zip is only deserialized once
    per session.
    """
    model = PPO.load(fname)
    return model
def run_simulation(N, chems, max_peaks, method, min_ms1_intensity, model, params):
    """Run one DDAEnv episode to completion and return its Episode record.

    :param N: Top-N setting (used when method is Top-N)
    :param chems: chemicals injected into the environment on reset
    :param max_peaks: observation size limit passed to DDAEnv
    :param method: controller type (e.g. PPO or Top-N)
    :param min_ms1_intensity: MS1 intensity threshold for Top-N
    :param model: trained model, used when the method requires one
    :param params: environment parameter dict
    """
    env = DDAEnv(max_peaks, params)
    obs = env.reset(chems=chems)
    done = False
    episode = Episode(obs)
    with st.spinner('Wait for it...'):
        while not done:  # repeat until episode is done

            # select an action depending on the observation and method
            action, action_probs = pick_action(
                method, obs, model, env.features, N, min_ms1_intensity)

            # make one step through the simulation
            obs, reward, done, info = env.step(action)

            # FIXME: seems to slow the simulation a lot!
            # image = env.render(mode='rgb_array')

            # store new episodic information; terminal steps may yield no obs
            if obs is not None:
                episode.add_step_data(action, action_probs, obs, reward, info)

            # periodic progress report in the Streamlit UI
            if episode.num_steps % 500 == 0:
                st.write('Step\t', episode.num_steps, '\tTotal reward\t',
                         episode.get_total_rewards())

            # if episode is finished, break
            if done:
                msg = f'Episode stored into session: {episode.num_steps} timesteps ' \
                      f'with total reward {episode.get_total_rewards()}'
                st.success(msg)
                break
    return episode
| 30.971631 | 95 | 0.615068 | import os
import sys
import numpy as np
import streamlit as st
from stable_baselines3 import PPO
from vimms.ChemicalSamplers import UniformRTAndIntensitySampler, GaussianChromatogramSampler, \
UniformMZFormulaSampler
from vimms.Common import POSITIVE
from vimms_gym.common import METHOD_PPO, METHOD_TOPN
sys.path.append('..')
from vimms_gym.env import DDAEnv
from vimms_gym.evaluation import Episode, pick_action
@st.experimental_memo
def preset_1():
    """Return the default simulation preset as a nested parameter dict.

    The dict has three sections: 'chemical_creator' (ranges and samplers
    for generating chemicals), 'noise' (spike-noise settings) and 'env'
    (DDAEnv acquisition parameters).
    """
    mz_range = (100, 600)
    rt_range = (200, 1000)
    intensity_range = (1E4, 1E10)

    chemical_creator = {
        'mz_range': mz_range,
        'rt_range': rt_range,
        'intensity_range': intensity_range,
        'n_chemicals': (2000, 5000),
        'mz_sampler': UniformMZFormulaSampler(min_mz=mz_range[0],
                                              max_mz=mz_range[1]),
        # Intensities are drawn uniformly in log space across the range.
        'ri_sampler': UniformRTAndIntensitySampler(
            min_rt=rt_range[0], max_rt=rt_range[1],
            min_log_intensity=np.log(intensity_range[0]),
            max_log_intensity=np.log(intensity_range[1])),
        'cr_sampler': GaussianChromatogramSampler(),
    }
    noise = {
        'enable_spike_noise': True,
        'noise_density': 0.1,
        'noise_max_val': 1E3,
        'mz_range': mz_range
    }
    env = {
        'ionisation_mode': POSITIVE,
        'rt_range': rt_range,
        'isolation_window': 0.7,
        'mz_tol': 10,
        'rt_tol': 120,
    }
    return {'chemical_creator': chemical_creator, 'noise': noise, 'env': env}
@st.experimental_memo
def preset_2():
    """Placeholder for a second simulation preset; not implemented yet."""
    return None
def load_model_and_params(method, params):
    """Prepare controller settings for *method*.

    Returns ``(N, min_ms1_intensity, model, params)``: a trained PPO model
    for METHOD_PPO, Top-N settings for METHOD_TOPN. *params* is shallow
    copied so the caller's top-level dict is not mutated.
    """
    params = dict(params)
    model, N, min_ms1_intensity = None, None, None
    if method == METHOD_PPO:
        # Model checkpoint location is currently hard-coded relative to cwd.
        in_dir = os.path.abspath(
            os.path.join('..', 'notebooks', 'simulated_chems', 'results'))
        model = load_ppo(os.path.join(in_dir, '%s_%s.zip' % ('DDAEnv', 'PPO')))
    elif method == METHOD_TOPN:
        # Values taken from the baseline optimisation notebook.
        min_ms1_intensity = 5000
        N = 20
        params['env']['rt_tol'] = 30
    return N, min_ms1_intensity, model, params
@st.experimental_singleton
def load_ppo(fname):
    """Load (and cache, via Streamlit's singleton) a trained PPO model."""
    return PPO.load(fname)
def run_simulation(N, chems, max_peaks, method, min_ms1_intensity, model, params):
    """Run a single DDAEnv episode to completion and return its Episode.

    :param N: Top-N setting (used when method is Top-N)
    :param chems: chemicals injected into the environment on reset
    :param max_peaks: observation size limit passed to DDAEnv
    :param method: controller type (e.g. PPO or Top-N)
    :param min_ms1_intensity: MS1 intensity threshold for Top-N
    :param model: trained model, used when the method requires one
    :param params: environment parameter dict
    """
    env = DDAEnv(max_peaks, params)
    obs = env.reset(chems=chems)
    done = False
    episode = Episode(obs)
    with st.spinner('Wait for it...'):
        while not done:
            # Choose the next acquisition action from the current observation.
            action, action_probs = pick_action(
                method, obs, model, env.features, N, min_ms1_intensity)
            # Advance the simulation by one step.
            obs, reward, done, info = env.step(action)
            # Only record steps that produced an observation.
            if obs is not None:
                episode.add_step_data(action, action_probs, obs, reward, info)
            # Periodic progress report in the Streamlit UI.
            if episode.num_steps % 500 == 0:
                st.write('Step\t', episode.num_steps, '\tTotal reward\t',
                         episode.get_total_rewards())
            if done:
                msg = f'Episode stored into session: {episode.num_steps} timesteps ' \
                      f'with total reward {episode.get_total_rewards()}'
                st.success(msg)
                break
    return episode
| true | true |
f7256a59d601a2c803274ba2986fc1dd01ff4e55 | 5,081 | py | Python | samples/add_nic_to_vm.py | jm66/pyvmomi-community-samples | 5ca4a50b767500e07b9bce9fba70240bfa963a4e | [
"Apache-2.0"
] | 4 | 2019-05-27T23:36:34.000Z | 2020-11-12T17:08:04.000Z | samples/add_nic_to_vm.py | zhangjiahaol/pyvmomi-community-samples | 905ec34edfbd151531832e98b6a0748fa6ff5e0e | [
"Apache-2.0"
] | 12 | 2019-04-17T02:47:25.000Z | 2021-04-02T09:15:37.000Z | samples/add_nic_to_vm.py | zhangjiahaol/pyvmomi-community-samples | 905ec34edfbd151531832e98b6a0748fa6ff5e0e | [
"Apache-2.0"
] | 15 | 2018-04-26T05:18:12.000Z | 2021-11-06T04:44:58.000Z | #!/usr/bin/env python
"""
Written by nickcooper-zhangtonghao
Github: https://github.com/nickcooper-zhangtonghao
Email: nickcooper-zhangtonghao@opencloud.tech
Note: Example code For testing purposes only
This code has been released under the terms of the Apache-2.0 license
http://opensource.org/licenses/Apache-2.0
"""
from pyVmomi import vim
from pyVmomi import vmodl
from tools import tasks
from pyVim.connect import SmartConnect, SmartConnectNoSSL, Disconnect
import atexit
import argparse
import getpass
def get_args():
    """Parse the CLI options for connecting to vCenter and picking the VM.

    Prompts interactively for the password when -p/--password is omitted.
    The target VM may be identified either by --uuid or by -v/--vm-name.
    """
    parser = argparse.ArgumentParser(
        description='Arguments for talking to vCenter')

    parser.add_argument('-s', '--host',
                        required=True,
                        action='store',
                        help='vSpehre service to connect to')

    parser.add_argument('-o', '--port',
                        type=int,
                        default=443,
                        action='store',
                        help='Port to connect on')

    parser.add_argument('-u', '--user',
                        required=True,
                        action='store',
                        help='User name to use')

    parser.add_argument('-p', '--password',
                        required=False,
                        action='store',
                        help='Password to use')

    parser.add_argument('--no-ssl',
                        action='store_true',
                        help='Skip client SSL verification')

    parser.add_argument('-v', '--vm-name',
                        required=False,
                        action='store',
                        help='name of the vm')

    parser.add_argument('--uuid',
                        required=False,
                        action='store',
                        help='vmuuid of vm')

    parser.add_argument('--port-group',
                        required=True,
                        action='store',
                        help='port group to connect on')

    args = parser.parse_args()

    if not args.password:
        args.password = getpass.getpass(
            prompt='Enter password')

    return args
def get_obj(content, vimtype, name):
    """Return the first managed object of a type in *vimtype* named *name*.

    Searches recursively from the service's root folder via a container
    view; returns None when nothing matches.
    """
    obj = None
    container = content.viewManager.CreateContainerView(
        content.rootFolder, vimtype, True)
    for c in container.view:
        if c.name == name:
            obj = c
            break
    return obj
def add_nic(si, vm, network_name):
    """Attach a new e1000 NIC connected to *network_name* to *vm*.

    :param si: Service Instance
    :param vm: Virtual Machine Object
    :param network_name: Name of the Virtual Network (standard or opaque)
    """
    spec = vim.vm.ConfigSpec()
    nic_changes = []

    nic_spec = vim.vm.device.VirtualDeviceSpec()
    nic_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add

    nic_spec.device = vim.vm.device.VirtualE1000()

    nic_spec.device.deviceInfo = vim.Description()
    nic_spec.device.deviceInfo.summary = 'vCenter API test'

    content = si.RetrieveContent()
    network = get_obj(content, [vim.Network], network_name)
    if isinstance(network, vim.OpaqueNetwork):
        nic_spec.device.backing = \
            vim.vm.device.VirtualEthernetCard.OpaqueNetworkBackingInfo()
        nic_spec.device.backing.opaqueNetworkType = \
            network.summary.opaqueNetworkType
        nic_spec.device.backing.opaqueNetworkId = \
            network.summary.opaqueNetworkId
    else:
        nic_spec.device.backing = \
            vim.vm.device.VirtualEthernetCard.NetworkBackingInfo()
        nic_spec.device.backing.useAutoDetect = False
        # BUG FIX: deviceName expects the network *name* (a string); the
        # original code assigned the vim.Network managed object itself.
        # Set the backing's network reference explicitly as well.
        nic_spec.device.backing.network = network
        nic_spec.device.backing.deviceName = network_name

    nic_spec.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
    nic_spec.device.connectable.startConnected = True
    nic_spec.device.connectable.allowGuestControl = True
    nic_spec.device.connectable.connected = False
    nic_spec.device.connectable.status = 'untried'
    nic_spec.device.wakeOnLanEnabled = True
    nic_spec.device.addressType = 'assigned'

    nic_changes.append(nic_spec)
    spec.deviceChange = nic_changes
    task = vm.ReconfigVM_Task(spec=spec)
    # Wait for vCenter to finish the reconfigure before reporting success
    # (tools.tasks is imported at module level for exactly this purpose).
    tasks.wait_for_task(task, si)
    print("NIC CARD ADDED")
def main():
    """Entry point: connect to vCenter, locate the VM, and attach a NIC."""
    args = get_args()

    # connect this thing (optionally skipping SSL verification)
    serviceInstance = None
    if args.no_ssl:
        serviceInstance = SmartConnectNoSSL(
            host=args.host,
            user=args.user,
            pwd=args.password,
            port=args.port)
    else:
        serviceInstance = SmartConnect(
            host=args.host,
            user=args.user,
            pwd=args.password,
            port=args.port)
    # disconnect this thing on interpreter exit
    atexit.register(Disconnect, serviceInstance)

    # Locate the VM by UUID (preferred) or by name.
    vm = None
    if args.uuid:
        search_index = serviceInstance.content.searchIndex
        vm = search_index.FindByUuid(None, args.uuid, True)
    elif args.vm_name:
        content = serviceInstance.RetrieveContent()
        vm = get_obj(content, [vim.VirtualMachine], args.vm_name)

    if vm:
        add_nic(serviceInstance, vm, args.port_group)
    else:
        print("VM not found")


# start this thing
if __name__ == "__main__":
    main()
| 30.244048 | 75 | 0.605196 |
from pyVmomi import vim
from pyVmomi import vmodl
from tools import tasks
from pyVim.connect import SmartConnect, SmartConnectNoSSL, Disconnect
import atexit
import argparse
import getpass
def get_args():
    """Parse the CLI options for reaching vCenter and identifying the VM.

    Prompts for the password interactively when it is not supplied on the
    command line.
    """
    cli = argparse.ArgumentParser(
        description='Arguments for talking to vCenter')
    cli.add_argument('-s', '--host', required=True, action='store',
                     help='vSpehre service to connect to')
    cli.add_argument('-o', '--port', type=int, default=443, action='store',
                     help='Port to connect on')
    cli.add_argument('-u', '--user', required=True, action='store',
                     help='User name to use')
    cli.add_argument('-p', '--password', required=False, action='store',
                     help='Password to use')
    cli.add_argument('--no-ssl', action='store_true',
                     help='Skip client SSL verification')
    cli.add_argument('-v', '--vm-name', required=False, action='store',
                     help='name of the vm')
    cli.add_argument('--uuid', required=False, action='store',
                     help='vmuuid of vm')
    cli.add_argument('--port-group', required=True, action='store',
                     help='port group to connect on')
    parsed = cli.parse_args()
    # Avoid forcing credentials onto the command line / shell history.
    if not parsed.password:
        parsed.password = getpass.getpass(
            prompt='Enter password')
    return parsed
def get_obj(content, vimtype, name):
    """Return the first managed object of any type in *vimtype* whose name
    equals *name*, searching recursively from the root folder; None when
    nothing matches."""
    view = content.viewManager.CreateContainerView(
        content.rootFolder, vimtype, True)
    return next((managed for managed in view.view if managed.name == name),
                None)
def add_nic(si, vm, network_name):
    """Attach a new e1000 NIC wired to *network_name* onto *vm*.

    :param si: service instance (used to resolve the network and wait)
    :param vm: virtual machine managed object
    :param network_name: name of the target virtual network
    """
    spec = vim.vm.ConfigSpec()
    nic_changes = []
    nic_spec = vim.vm.device.VirtualDeviceSpec()
    nic_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
    nic_spec.device = vim.vm.device.VirtualE1000()
    nic_spec.device.deviceInfo = vim.Description()
    nic_spec.device.deviceInfo.summary = 'vCenter API test'
    content = si.RetrieveContent()
    network = get_obj(content, [vim.Network], network_name)
    if isinstance(network, vim.OpaqueNetwork):
        nic_spec.device.backing = \
            vim.vm.device.VirtualEthernetCard.OpaqueNetworkBackingInfo()
        nic_spec.device.backing.opaqueNetworkType = \
            network.summary.opaqueNetworkType
        nic_spec.device.backing.opaqueNetworkId = \
            network.summary.opaqueNetworkId
    else:
        nic_spec.device.backing = \
            vim.vm.device.VirtualEthernetCard.NetworkBackingInfo()
        nic_spec.device.backing.useAutoDetect = False
        # BUG FIX: deviceName takes the network *name* string; previously the
        # vim.Network object itself was assigned here. Also set the backing's
        # network reference.
        nic_spec.device.backing.network = network
        nic_spec.device.backing.deviceName = network_name
    nic_spec.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
    nic_spec.device.connectable.startConnected = True
    nic_spec.device.connectable.allowGuestControl = True
    nic_spec.device.connectable.connected = False
    nic_spec.device.connectable.status = 'untried'
    nic_spec.device.wakeOnLanEnabled = True
    nic_spec.device.addressType = 'assigned'
    nic_changes.append(nic_spec)
    spec.deviceChange = nic_changes
    task = vm.ReconfigVM_Task(spec=spec)
    # Block until the reconfigure task completes so the message is truthful.
    tasks.wait_for_task(task, si)
    print("NIC CARD ADDED")
def main():
    """Entry point: connect to vCenter, find the VM, and attach the NIC."""
    args = get_args()
    # Pick the connector based on whether SSL verification is skipped.
    connect = SmartConnectNoSSL if args.no_ssl else SmartConnect
    serviceInstance = connect(host=args.host,
                              user=args.user,
                              pwd=args.password,
                              port=args.port)
    # Ensure the session is closed when the interpreter exits.
    atexit.register(Disconnect, serviceInstance)
    # Locate the VM by UUID (preferred) or by name.
    vm = None
    if args.uuid:
        vm = serviceInstance.content.searchIndex.FindByUuid(
            None, args.uuid, True)
    elif args.vm_name:
        content = serviceInstance.RetrieveContent()
        vm = get_obj(content, [vim.VirtualMachine], args.vm_name)
    if vm:
        add_nic(serviceInstance, vm, args.port_group)
    else:
        print("VM not found")


if __name__ == "__main__":
    main()
| true | true |
f7256ae570b6c12768a5d1a994314b4d86c179d6 | 894 | py | Python | modules/signatures/windows/antiav_srp.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 24 | 2021-06-21T07:35:37.000Z | 2022-03-22T03:33:59.000Z | modules/signatures/windows/antiav_srp.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 3 | 2021-07-01T08:09:05.000Z | 2022-01-28T03:38:36.000Z | modules/signatures/windows/antiav_srp.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 6 | 2021-06-22T05:32:57.000Z | 2022-02-11T02:05:45.000Z | # Copyright (C) 2014 Optiv, Inc. (brad.spengler@optiv.com), Updated 2016 for cuckoo 2.0
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.cuckoo.common.abstracts import Signature
class AntiAVSRP(Signature):
    """Cuckoo signature: flags registry writes to Software Restriction
    Policy path rules (Safer\\CodeIdentifiers), which can be abused to
    prevent AV executables from running."""
    name = "antiav_srp"
    description = "Modifies Software Restriction Policies likely to cripple AV"
    severity = 3
    categories = ["anti-av"]
    authors = ["Optiv"]
    minimum = "2.0"
    ttp = ["T1089"]

    # Regex patterns of SRP path-rule registry keys to watch for writes.
    regkeys_re = [
        ".*\\\\Policies\\\\Microsoft\\\\Windows\\\\Safer\\\\\CodeIdentifiers\\\\0\\\\Paths\\\\.*",
    ]

    def on_complete(self):
        # Mark every written registry key matching a monitored pattern as an IOC.
        for indicator in self.regkeys_re:
            for regkey in self.check_key(pattern=indicator, regex=True, actions=["regkey_written"], all=True):
                self.mark_ioc("registry", regkey)

        return self.has_marks()
| 34.384615 | 110 | 0.651007 |
from lib.cuckoo.common.abstracts import Signature
class AntiAVSRP(Signature):
    """Detects registry writes to Software Restriction Policy path rules
    under Safer\\CodeIdentifiers, a technique used to cripple AV."""
    name = "antiav_srp"
    description = "Modifies Software Restriction Policies likely to cripple AV"
    severity = 3
    categories = ["anti-av"]
    authors = ["Optiv"]
    minimum = "2.0"
    ttp = ["T1089"]
    regkeys_re = [
        ".*\\\\Policies\\\\Microsoft\\\\Windows\\\\Safer\\\\\CodeIdentifiers\\\\0\\\\Paths\\\\.*",
    ]
    def on_complete(self):
        # Collect every written key matching a monitored pattern, then mark
        # each one as a registry IOC.
        hits = [key
                for pattern in self.regkeys_re
                for key in self.check_key(pattern=pattern, regex=True,
                                          actions=["regkey_written"],
                                          all=True)]
        for key in hits:
            self.mark_ioc("registry", key)
        return self.has_marks()
| true | true |
f7256b2ee02db620d84d3addaae0bc4e05297053 | 1,900 | py | Python | 06_Transformacoes_do_Conjunto_de_Dados/6.6_Projecao_Aleatoria/6.6.1._O_Lema_de_Johnson-Lindenstrauss.py | BrunoBertti/Scikit_Learning | 4b9e10ff7909f3728ac1e8bba19f5fd779340bc4 | [
"MIT"
] | null | null | null | 06_Transformacoes_do_Conjunto_de_Dados/6.6_Projecao_Aleatoria/6.6.1._O_Lema_de_Johnson-Lindenstrauss.py | BrunoBertti/Scikit_Learning | 4b9e10ff7909f3728ac1e8bba19f5fd779340bc4 | [
"MIT"
] | null | null | null | 06_Transformacoes_do_Conjunto_de_Dados/6.6_Projecao_Aleatoria/6.6.1._O_Lema_de_Johnson-Lindenstrauss.py | BrunoBertti/Scikit_Learning | 4b9e10ff7909f3728ac1e8bba19f5fd779340bc4 | [
"MIT"
] | null | null | null | ########## 6.6.1. O lema de Johnson-Lindenstrauss ##########
# O principal resultado teórico por trás da eficiência da projeção aleatória é o lema de Johnson-Lindenstrauss (citando a Wikipedia):
# Em matemática, o lema de Johnson-Lindenstrauss é um resultado sobre embeddings de baixa distorção de pontos de alta dimensão em espaço euclidiano de baixa dimensão. O lema afirma que um pequeno conjunto de pontos em um espaço de alta dimensão pode ser incorporado em um espaço de dimensão muito menor de tal forma que as distâncias entre os pontos sejam praticamente preservadas. O mapa usado para a incorporação é pelo menos Lipschitz, e pode até ser considerado uma projeção ortogonal.
# Conhecendo apenas o número de amostras, o johnson_lindenstrauss_min_dim estima conservadoramente o tamanho mínimo do subespaço aleatório para garantir uma distorção limitada introduzida pela projeção aleatória:
from sklearn.random_projection import johnson_lindenstrauss_min_dim
# NOTE(review): REPL-style examples — the return values below are discarded
# when run as a script; wrap them in print() to see the estimated dimensions.
johnson_lindenstrauss_min_dim(n_samples=1e6, eps=0.5)
# eps accepts a list: one minimum dimension is returned per distortion level.
johnson_lindenstrauss_min_dim(n_samples=1e6, eps=[0.5, 0.1, 0.01])
# n_samples likewise accepts a list of sample counts.
johnson_lindenstrauss_min_dim(n_samples=[1e4, 1e5, 1e6], eps=0.1)
# https://scikit-learn.org/stable/auto_examples/miscellaneous/plot_johnson_lindenstrauss_bound.html
## Exemplos:
## See The Johnson-Lindenstrauss bound for embedding with random projections for a theoretical explication on the Johnson-Lindenstrauss lemma and an empirical validation using sparse random matrices. (https://scikit-learn.org/stable/auto_examples/miscellaneous/plot_johnson_lindenstrauss_bound.html#sphx-glr-auto-examples-miscellaneous-plot-johnson-lindenstrauss-bound-py)
## Referências:
## Sanjoy Dasgupta and Anupam Gupta, 1999. An elementary proof of the Johnson-Lindenstrauss Lemma. ( http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.39.3334&rep=rep1&type=pdf) | 52.777778 | 493 | 0.792105 | true | true | |
f7256c0b18316e9401a8678074fb2dce8d2668b5 | 2,659 | py | Python | ddsp/training/preprocessing.py | jesseengel/ddsp | de195af0a21fba52e6b88c23886c244d8607e49c | [
"Apache-2.0"
] | 7 | 2020-01-18T13:12:52.000Z | 2021-06-24T20:32:19.000Z | ddsp/training/preprocessing.py | jesseengel/ddsp | de195af0a21fba52e6b88c23886c244d8607e49c | [
"Apache-2.0"
] | null | null | null | ddsp/training/preprocessing.py | jesseengel/ddsp | de195af0a21fba52e6b88c23886c244d8607e49c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The DDSP Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Library of preprocess functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import ddsp
import gin
import tensorflow.compat.v1 as tf
hz_to_midi = ddsp.core.hz_to_midi
F0_RANGE = ddsp.spectral_ops.F0_RANGE
LD_RANGE = ddsp.spectral_ops.LD_RANGE
# ---------------------- Preprocess Helpers ------------------------------------
def at_least_3d(x):
  """Adds a channel dimension.

  Rank-2 inputs gain a trailing axis (e.g. [batch, time] -> [batch, time, 1]);
  anything else is returned unchanged.
  """
  return x[:, :, tf.newaxis] if len(x.shape) == 2 else x
# ---------------------- Preprocess objects ------------------------------------
class Preprocessor(object):
  """Base class for chaining a series of preprocessing functions."""

  def __init__(self):
    pass

  def __call__(self, features, training=True):
    """Get outputs after preprocessing functions.

    The base implementation is the identity transform (shallow copy).

    Args:
      features: dict of feature key and tensors
      training: boolean for controlling training-specific preprocessing
        behavior

    Returns:
      Dictionary of transformed features
    """
    return copy.copy(features)
@gin.register
class DefaultPreprocessor(Preprocessor):
  """Default class that resamples features and adds `f0_hz` key."""

  def __init__(self, time_steps=1000):
    # time_steps: fixed output length all conditioning features are
    # resampled to.
    super(DefaultPreprocessor, self).__init__()
    self.time_steps = time_steps

  def __call__(self, features, training=True):
    super(DefaultPreprocessor, self).__call__(features, training)
    return self._default_processing(features)

  def _default_processing(self, features):
    """Always resample to `time_steps` and scale 'loudness_db' and 'f0_hz'."""
    for k in ['loudness_db', 'f0_hz']:
      features[k] = ddsp.core.resample(features[k], n_timesteps=self.time_steps)
      features[k] = at_least_3d(features[k])

    # For NN training, scale frequency and loudness to the range [0, 1].
    # Log-scale f0 features. Loudness from [-1, 0] to [1, 0].
    features['f0_scaled'] = hz_to_midi(features['f0_hz']) / F0_RANGE
    features['ld_scaled'] = (features['loudness_db'] / LD_RANGE) + 1.0
    return features
| 32.426829 | 80 | 0.699511 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import ddsp
import gin
import tensorflow.compat.v1 as tf
hz_to_midi = ddsp.core.hz_to_midi
F0_RANGE = ddsp.spectral_ops.F0_RANGE
LD_RANGE = ddsp.spectral_ops.LD_RANGE
def at_least_3d(x):
  """Append a trailing channel axis to rank-2 inputs; pass others through."""
  if len(x.shape) == 2:
    return x[:, :, tf.newaxis]
  return x
class Preprocessor(object):
  """Base preprocessor; the default transform is the identity (shallow copy)."""

  def __init__(self):
    pass

  def __call__(self, features, training=True):
    """Return a shallow copy of *features*; subclasses add real processing."""
    shallow = copy.copy(features)
    return shallow
@gin.register
class DefaultPreprocessor(Preprocessor):
  """Resamples 'loudness_db'/'f0_hz' to a fixed length and adds scaled keys."""

  def __init__(self, time_steps=1000):
    super(DefaultPreprocessor, self).__init__()
    self.time_steps = time_steps

  def __call__(self, features, training=True):
    super(DefaultPreprocessor, self).__call__(features, training)
    return self._default_processing(features)

  def _default_processing(self, features):
    """Resample conditioning to `time_steps` and add scaled features."""
    for key in ('loudness_db', 'f0_hz'):
      resampled = ddsp.core.resample(features[key], n_timesteps=self.time_steps)
      features[key] = at_least_3d(resampled)
    # f0 is log-scaled via MIDI space; loudness_db is mapped into [0, 1]
    # assuming values lie in [-LD_RANGE, 0].
    features['f0_scaled'] = hz_to_midi(features['f0_hz']) / F0_RANGE
    features['ld_scaled'] = (features['loudness_db'] / LD_RANGE) + 1.0
    return features
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.